diff --git a/internal/api/bom_handlers.go b/internal/api/bom_handlers.go index 1e763cc..a9fdae6 100644 --- a/internal/api/bom_handlers.go +++ b/internal/api/bom_handlers.go @@ -1,7 +1,10 @@ package api import ( + "encoding/csv" "encoding/json" + "fmt" + "io" "net/http" "strconv" "strings" @@ -14,16 +17,17 @@ import ( // BOMEntryResponse represents a BOM entry in API responses. type BOMEntryResponse struct { - ID string `json:"id"` - ChildPartNumber string `json:"child_part_number"` - ChildDescription string `json:"child_description"` - RelType string `json:"rel_type"` - Quantity *float64 `json:"quantity"` - Unit *string `json:"unit,omitempty"` - ReferenceDesignators []string `json:"reference_designators,omitempty"` - ChildRevision *int `json:"child_revision,omitempty"` - EffectiveRevision int `json:"effective_revision"` - Depth *int `json:"depth,omitempty"` + ID string `json:"id"` + ChildPartNumber string `json:"child_part_number"` + ChildDescription string `json:"child_description"` + RelType string `json:"rel_type"` + Quantity *float64 `json:"quantity"` + Unit *string `json:"unit,omitempty"` + ReferenceDesignators []string `json:"reference_designators,omitempty"` + ChildRevision *int `json:"child_revision,omitempty"` + EffectiveRevision int `json:"effective_revision"` + Depth *int `json:"depth,omitempty"` + Metadata map[string]any `json:"metadata,omitempty"` } // WhereUsedResponse represents a where-used entry in API responses. 
@@ -264,6 +268,7 @@ func (s *Server) HandleAddBOMEntry(w http.ResponseWriter, r *http.Request) { ReferenceDesignators: req.ReferenceDesignators, ChildRevision: req.ChildRevision, EffectiveRevision: child.CurrentRevision, + Metadata: req.Metadata, } if req.ChildRevision != nil { entry.EffectiveRevision = *req.ChildRevision @@ -419,6 +424,7 @@ func bomEntryToResponse(e *db.BOMEntry) BOMEntryResponse { ReferenceDesignators: refDes, ChildRevision: e.ChildRevision, EffectiveRevision: e.EffectiveRevision, + Metadata: e.Metadata, } } @@ -438,3 +444,380 @@ func whereUsedToResponse(e *db.BOMEntry) WhereUsedResponse { ReferenceDesignators: refDes, } } + +// BOM CSV headers matching the user-specified format. +var bomCSVHeaders = []string{ + "Item", "Level", "Source", "PN", "Seller Description", + "Unit Cost", "QTY", "Ext Cost", "Sourcing Link", +} + +// getMetaString extracts a string value from metadata. +func getMetaString(m map[string]any, key string) string { + if m == nil { + return "" + } + if v, ok := m[key]; ok { + if s, ok := v.(string); ok { + return s + } + } + return "" +} + +// getMetaFloat extracts a float64 value from metadata. +func getMetaFloat(m map[string]any, key string) (float64, bool) { + if m == nil { + return 0, false + } + v, ok := m[key] + if !ok { + return 0, false + } + switch n := v.(type) { + case float64: + return n, true + case json.Number: + f, err := n.Float64() + return f, err == nil + } + return 0, false +} + +// HandleExportBOMCSV exports the expanded BOM as a CSV file. 
+func (s *Server) HandleExportBOMCSV(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + partNumber := chi.URLParam(r, "partNumber") + + item, err := s.items.GetByPartNumber(ctx, partNumber) + if err != nil { + s.logger.Error().Err(err).Msg("failed to get item") + writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get item") + return + } + if item == nil { + writeError(w, http.StatusNotFound, "not_found", "Item not found") + return + } + + entries, err := s.relationships.GetExpandedBOM(ctx, item.ID, 10) + if err != nil { + s.logger.Error().Err(err).Msg("failed to get expanded BOM") + writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get BOM") + return + } + + w.Header().Set("Content-Type", "text/csv") + w.Header().Set("Content-Disposition", fmt.Sprintf(`attachment; filename="%s-bom.csv"`, partNumber)) + + writer := csv.NewWriter(w) + defer writer.Flush() + + // Write header + if err := writer.Write(bomCSVHeaders); err != nil { + s.logger.Error().Err(err).Msg("failed to write CSV header") + return + } + + // Write rows + for i, e := range entries { + unitCost, hasUnitCost := getMetaFloat(e.Metadata, "unit_cost") + qty := 0.0 + if e.Quantity != nil { + qty = *e.Quantity + } + + extCost := "" + if hasUnitCost && qty > 0 { + extCost = fmt.Sprintf("%.2f", unitCost*qty) + } + + unitCostStr := "" + if hasUnitCost { + unitCostStr = fmt.Sprintf("%.2f", unitCost) + } + + qtyStr := "" + if e.Quantity != nil { + qtyStr = strconv.FormatFloat(*e.Quantity, 'f', -1, 64) + } + + row := []string{ + strconv.Itoa(i + 1), // Item + strconv.Itoa(e.Depth), // Level + getMetaString(e.Metadata, "source"), // Source + e.ChildPartNumber, // PN + getMetaString(e.Metadata, "seller_description"), // Seller Description + unitCostStr, // Unit Cost + qtyStr, // QTY + extCost, // Ext Cost + getMetaString(e.Metadata, "sourcing_link"), // Sourcing Link + } + if err := writer.Write(row); err != nil { + s.logger.Error().Err(err).Msg("failed 
to write CSV row") + return + } + } +} + +// HandleImportBOMCSV imports BOM entries from a CSV file. +func (s *Server) HandleImportBOMCSV(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + partNumber := chi.URLParam(r, "partNumber") + + parent, err := s.items.GetByPartNumber(ctx, partNumber) + if err != nil { + s.logger.Error().Err(err).Msg("failed to get parent item") + writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get parent item") + return + } + if parent == nil { + writeError(w, http.StatusNotFound, "not_found", "Parent item not found") + return + } + + // Parse multipart form (32MB max) + if err := r.ParseMultipartForm(32 << 20); err != nil { + writeError(w, http.StatusBadRequest, "invalid_form", "Failed to parse multipart form") + return + } + + file, _, err := r.FormFile("file") + if err != nil { + writeError(w, http.StatusBadRequest, "missing_file", "CSV file is required") + return + } + defer file.Close() + + dryRun := r.FormValue("dry_run") == "true" + clearExisting := r.FormValue("clear_existing") == "true" + + // Read CSV + reader := csv.NewReader(file) + reader.TrimLeadingSpace = true + reader.FieldsPerRecord = -1 // tolerate ragged rows; without this, Read() rejects any row whose column count differs from the header and the per-column bounds checks below are unreachable + + headers, err := reader.Read() + if err != nil { + writeError(w, http.StatusBadRequest, "invalid_csv", "Failed to read CSV headers") + return + } + + // Build case-insensitive header index + headerIdx := make(map[string]int) + for i, h := range headers { + headerIdx[strings.ToLower(strings.TrimSpace(h))] = i + } + + // Require PN column + pnIdx, hasPn := headerIdx["pn"] + if !hasPn { + writeError(w, http.StatusBadRequest, "missing_column", "CSV must have a 'PN' column") + return + } + + // Clear existing BOM if requested (only on real import) + if clearExisting && !dryRun { + existing, err := s.relationships.GetBOM(ctx, parent.ID) + if err != nil { + s.logger.Error().Err(err).Msg("failed to get existing BOM for clearing") + writeError(w, http.StatusInternalServerError, "internal_error", "Failed to clear existing BOM") + return
+ } + for _, e := range existing { + if err := s.relationships.Delete(ctx, e.RelationshipID); err != nil { + s.logger.Error().Err(err).Str("id", e.RelationshipID).Msg("failed to delete BOM entry during clear") + } + } + } + + result := CSVImportResult{} + var createdItems []string + fileRow := 1 // the header occupies file row 1; tracked separately so error Row numbers stay aligned with the file even when blank rows are skipped + + for { + record, err := reader.Read() + if err == io.EOF { + break + } + fileRow++ + if err != nil { + result.TotalRows++ + result.ErrorCount++ + result.Errors = append(result.Errors, CSVImportErr{ + Row: fileRow, + Message: fmt.Sprintf("Failed to read row: %s", err.Error()), + }) + continue + } + + result.TotalRows++ + rowNum := fileRow + + // Get part number + if pnIdx >= len(record) { + result.ErrorCount++ + result.Errors = append(result.Errors, CSVImportErr{ + Row: rowNum, + Field: "PN", + Message: "Row has fewer columns than expected", + }) + continue + } + + childPN := strings.TrimSpace(record[pnIdx]) + if childPN == "" { + // Skip blank PN rows silently (excluded from TotalRows; fileRow still advances) + result.TotalRows-- + continue + } + + // Look up child item + child, err := s.items.GetByPartNumber(ctx, childPN) + if err != nil { + result.ErrorCount++ + result.Errors = append(result.Errors, CSVImportErr{ + Row: rowNum, + Field: "PN", + Message: fmt.Sprintf("Error looking up item: %s", err.Error()), + }) + continue + } + if child == nil { + result.ErrorCount++ + result.Errors = append(result.Errors, CSVImportErr{ + Row: rowNum, + Field: "PN", + Message: fmt.Sprintf("Item '%s' not found", childPN), + }) + continue + } + + // Parse quantity + var quantity *float64 + if idx, ok := headerIdx["qty"]; ok && idx < len(record) { + qtyStr := strings.TrimSpace(record[idx]) + if qtyStr != "" { + q, err := strconv.ParseFloat(qtyStr, 64) + if err != nil { + result.ErrorCount++ + result.Errors = append(result.Errors, CSVImportErr{ + Row: rowNum, + Field: "QTY", + Message: fmt.Sprintf("Invalid quantity '%s'", qtyStr), + }) + continue + } + quantity = &q + } + } + + // Build metadata from CSV
columns + metadata := make(map[string]any) + if idx, ok := headerIdx["source"]; ok && idx < len(record) { + if v := strings.TrimSpace(record[idx]); v != "" { + metadata["source"] = v + } + } + if idx, ok := headerIdx["seller description"]; ok && idx < len(record) { + if v := strings.TrimSpace(record[idx]); v != "" { + metadata["seller_description"] = v + } + } + if idx, ok := headerIdx["unit cost"]; ok && idx < len(record) { + if v := strings.TrimSpace(record[idx]); v != "" { + // Strip leading $ or currency symbols + v = strings.TrimLeft(v, "$£€ ") + if f, err := strconv.ParseFloat(v, 64); err == nil { + metadata["unit_cost"] = f + } + } + } + if idx, ok := headerIdx["sourcing link"]; ok && idx < len(record) { + if v := strings.TrimSpace(record[idx]); v != "" { + metadata["sourcing_link"] = v + } + } + + if len(metadata) == 0 { + metadata = nil + } + + // Cycle check + hasCycle, err := s.relationships.HasCycle(ctx, parent.ID, child.ID) + if err != nil { + result.ErrorCount++ + result.Errors = append(result.Errors, CSVImportErr{ + Row: rowNum, + Field: "PN", + Message: fmt.Sprintf("Error checking for cycles: %s", err.Error()), + }) + continue + } + if hasCycle { + result.ErrorCount++ + result.Errors = append(result.Errors, CSVImportErr{ + Row: rowNum, + Field: "PN", + Message: fmt.Sprintf("Adding '%s' would create a cycle", childPN), + }) + continue + } + + if dryRun { + result.SuccessCount++ + continue + } + + // Check if relationship already exists (upsert) + existing, err := s.relationships.GetByParentAndChild(ctx, parent.ID, child.ID) + if err != nil { + result.ErrorCount++ + result.Errors = append(result.Errors, CSVImportErr{ + Row: rowNum, + Message: fmt.Sprintf("Error checking existing relationship: %s", err.Error()), + }) + continue + } + + if existing != nil { + // Update existing + if err := s.relationships.Update(ctx, existing.ID, nil, quantity, nil, nil, nil, metadata); err != nil { + result.ErrorCount++ + result.Errors = append(result.Errors, 
CSVImportErr{ + Row: rowNum, + Message: fmt.Sprintf("Failed to update: %s", err.Error()), + }) + continue + } + } else { + // Create new + rel := &db.Relationship{ + ParentItemID: parent.ID, + ChildItemID: child.ID, + RelType: "component", + Quantity: quantity, + Metadata: metadata, + } + if err := s.relationships.Create(ctx, rel); err != nil { + result.ErrorCount++ + result.Errors = append(result.Errors, CSVImportErr{ + Row: rowNum, + Message: fmt.Sprintf("Failed to create: %s", err.Error()), + }) + continue + } + createdItems = append(createdItems, childPN) + } + + result.SuccessCount++ + } + + result.CreatedItems = createdItems + + s.logger.Info(). + Str("parent", partNumber). + Bool("dry_run", dryRun). + Int("total", result.TotalRows). + Int("success", result.SuccessCount). + Int("errors", result.ErrorCount). + Msg("BOM CSV import completed") + + writeJSON(w, http.StatusOK, result) +} diff --git a/internal/api/routes.go b/internal/api/routes.go index 63db163..7b482e5 100644 --- a/internal/api/routes.go +++ b/internal/api/routes.go @@ -105,6 +105,8 @@ func NewRouter(server *Server, logger zerolog.Logger) http.Handler { r.Post("/bom", server.HandleAddBOMEntry) r.Get("/bom/expanded", server.HandleGetExpandedBOM) r.Get("/bom/where-used", server.HandleGetWhereUsed) + r.Get("/bom/export.csv", server.HandleExportBOMCSV) + r.Post("/bom/import", server.HandleImportBOMCSV) r.Put("/bom/{childPartNumber}", server.HandleUpdateBOMEntry) r.Delete("/bom/{childPartNumber}", server.HandleDeleteBOMEntry) }) diff --git a/internal/api/templates/items.html b/internal/api/templates/items.html index 603eee2..218a1a3 100644 --- a/internal/api/templates/items.html +++ b/internal/api/templates/items.html @@ -345,6 +345,20 @@ > Revisions + + @@ -355,6 +369,12 @@ style="display: none" >
+ + @@ -855,6 +875,127 @@ min-height: 200px; } + /* BOM Tab */ + .bom-toolbar { + display: flex; + justify-content: space-between; + align-items: center; + margin-bottom: 1rem; + gap: 0.5rem; + flex-wrap: wrap; + } + .bom-toolbar-actions { + display: flex; + gap: 0.5rem; + } + .bom-toolbar .btn { + padding: 0.4rem 0.75rem; + font-size: 0.8rem; + } + .bom-table { + width: 100%; + border-collapse: collapse; + font-size: 0.85rem; + } + .bom-table th { + padding: 0.5rem 0.75rem; + background: var(--ctp-base); + color: var(--ctp-subtext1); + font-weight: 600; + font-size: 0.75rem; + text-transform: uppercase; + letter-spacing: 0.05em; + text-align: left; + border-bottom: 1px solid var(--ctp-surface1); + } + .bom-table td { + padding: 0.5rem 0.75rem; + border-bottom: 1px solid var(--ctp-surface1); + color: var(--ctp-text); + } + .bom-table tr:hover { + background: var(--ctp-surface1); + } + .bom-table tr:last-child td { + border-bottom: none; + } + .bom-table .pn-link { + cursor: pointer; + color: var(--ctp-peach); + font-family: "JetBrains Mono", "Fira Code", monospace; + font-weight: 500; + } + .bom-table .pn-link:hover { + text-decoration: underline; + } + .bom-cost { + font-family: "JetBrains Mono", "Fira Code", monospace; + text-align: right; + } + .bom-summary { + display: flex; + justify-content: flex-end; + padding: 0.75rem; + font-weight: 600; + border-top: 2px solid var(--ctp-surface2); + font-family: "JetBrains Mono", "Fira Code", monospace; + color: var(--ctp-green); + } + .bom-empty { + text-align: center; + padding: 2rem; + color: var(--ctp-subtext0); + } + .bom-add-form { + display: grid; + grid-template-columns: 1fr 1fr; + gap: 0.75rem; + padding: 1rem; + background: var(--ctp-base); + border-radius: 0.5rem; + margin-bottom: 1rem; + } + .bom-add-form .form-group { + margin-bottom: 0; + } + .bom-add-form .full-width { + grid-column: 1 / -1; + } + .bom-import-area { + border: 2px dashed var(--ctp-surface2); + border-radius: 0.5rem; + padding: 1.5rem; + 
text-align: center; + margin-bottom: 1rem; + transition: border-color 0.2s; + } + .bom-import-area:hover { + border-color: var(--ctp-mauve); + } + .bom-import-results { + margin-top: 1rem; + padding: 1rem; + border-radius: 0.5rem; + background: var(--ctp-base); + } + .bom-import-results.success { + border-left: 3px solid var(--ctp-green); + } + .bom-import-results.error { + border-left: 3px solid var(--ctp-red); + } + .bom-import-results.warning { + border-left: 3px solid var(--ctp-yellow); + } + .bom-actions { + display: flex; + gap: 0.25rem; + } + .bom-actions .btn { + padding: 0.2rem 0.5rem; + font-size: 0.75rem; + } + /* Properties Editor */ .properties-editor { padding: 0.5rem 0; @@ -1592,6 +1733,14 @@ content.style.display = "none"; }); document.getElementById(`tab-${tab}`).style.display = "block"; + + // Lazy-load BOM and Where Used data + if (tab === "bom" && currentItemPartNumber) { + loadBOMTab(currentItemPartNumber); + } + if (tab === "where-used" && currentItemPartNumber) { + loadWhereUsedTab(currentItemPartNumber); + } } // Format file size @@ -1618,6 +1767,8 @@ 'Error: ${error.message}
`; + } + } + + function renderBOMTab(partNumber, entries) { + const container = document.getElementById("tab-bom"); + const escapedPN = escapeAttr(partNumber); + + let totalExtCost = 0; + entries.forEach((e) => { + const unitCost = e.metadata?.unit_cost || 0; + const qty = e.quantity || 0; + totalExtCost += unitCost * qty; + }); + + let html = ` + + + `; + + if (entries.length === 0) { + html += + 'No BOM entries.
Add components or import a CSV.
| # | +PN | +Source | +Seller Description | +Unit Cost | +QTY | +Ext Cost | +Link | +Actions | +
|---|---|---|---|---|---|---|---|---|
| ${idx + 1} | +${escapeHtml(e.child_part_number)} | +${source} | +${sellerDesc} | +${unitCost ? "$" + unitCost.toFixed(2) : ""} | +${qty || ""} | +${extCost ? "$" + extCost.toFixed(2) : ""} | +${sourcingLink ? `Link` : ""} | +
+
+
+
+
+ |
+
Server returned non-JSON response (status ${response.status}).
${result.message || result.error}
${error.message}
This item is not used in any assemblies.
| Parent PN | +Description | +QTY | +Type | +
|---|---|---|---|
| ${escapeHtml(e.parent_part_number)} | +${escapeHtml(e.parent_description || "")} | +${e.quantity || ""} | +${escapeHtml(e.rel_type)} | +
Error: ${error.message}
`; + } + } + // Initialize loadSchema(); loadProjectCodes(); diff --git a/internal/db/relationships.go b/internal/db/relationships.go index dfcc0ed..5b70225 100644 --- a/internal/db/relationships.go +++ b/internal/db/relationships.go @@ -41,6 +41,7 @@ type BOMEntry struct { ReferenceDesignators []string ChildRevision *int EffectiveRevision int + Metadata map[string]any } // BOMTreeEntry extends BOMEntry with depth for multi-level BOM expansion. @@ -243,7 +244,8 @@ func (r *RelationshipRepository) GetBOM(ctx context.Context, parentItemID string rel.child_item_id, child.part_number, child.description, rel.rel_type, rel.quantity, rel.unit, rel.reference_designators, rel.child_revision, - COALESCE(rel.child_revision, child.current_revision) AS effective_revision + COALESCE(rel.child_revision, child.current_revision) AS effective_revision, + rel.metadata FROM relationships rel JOIN items parent ON parent.id = rel.parent_item_id JOIN items child ON child.id = rel.child_item_id @@ -267,7 +269,8 @@ func (r *RelationshipRepository) GetWhereUsed(ctx context.Context, childItemID s rel.child_item_id, child.part_number, child.description, rel.rel_type, rel.quantity, rel.unit, rel.reference_designators, rel.child_revision, - COALESCE(rel.child_revision, child.current_revision) AS effective_revision + COALESCE(rel.child_revision, child.current_revision) AS effective_revision, + rel.metadata FROM relationships rel JOIN items parent ON parent.id = rel.parent_item_id JOIN items child ON child.id = rel.child_item_id @@ -301,6 +304,7 @@ func (r *RelationshipRepository) GetExpandedBOM(ctx context.Context, parentItemI rel.rel_type, rel.quantity, rel.unit, rel.reference_designators, rel.child_revision, COALESCE(rel.child_revision, child.current_revision) AS effective_revision, + rel.metadata, 1 AS depth FROM relationships rel JOIN items parent ON parent.id = rel.parent_item_id @@ -319,6 +323,7 @@ func (r *RelationshipRepository) GetExpandedBOM(ctx context.Context, parentItemI 
rel.rel_type, rel.quantity, rel.unit, rel.reference_designators, rel.child_revision, COALESCE(rel.child_revision, child.current_revision), + rel.metadata, bt.depth + 1 FROM relationships rel JOIN items parent ON parent.id = rel.parent_item_id @@ -331,7 +336,7 @@ func (r *RelationshipRepository) GetExpandedBOM(ctx context.Context, parentItemI SELECT id, parent_item_id, parent_part_number, parent_description, child_item_id, child_part_number, child_description, rel_type, quantity, unit, reference_designators, - child_revision, effective_revision, depth + child_revision, effective_revision, metadata, depth FROM bom_tree ORDER BY depth, child_part_number `, parentItemID, maxDepth) @@ -344,12 +349,13 @@ func (r *RelationshipRepository) GetExpandedBOM(ctx context.Context, parentItemI for rows.Next() { e := &BOMTreeEntry{} var parentDesc, childDesc *string + var metadataJSON []byte err := rows.Scan( &e.RelationshipID, &e.ParentItemID, &e.ParentPartNumber, &parentDesc, &e.ChildItemID, &e.ChildPartNumber, &childDesc, &e.RelType, &e.Quantity, &e.Unit, &e.ReferenceDesignators, &e.ChildRevision, - &e.EffectiveRevision, &e.Depth, + &e.EffectiveRevision, &metadataJSON, &e.Depth, ) if err != nil { return nil, fmt.Errorf("scanning BOM tree entry: %w", err) @@ -360,6 +366,11 @@ func (r *RelationshipRepository) GetExpandedBOM(ctx context.Context, parentItemI if childDesc != nil { e.ChildDescription = *childDesc } + if metadataJSON != nil { + if err := json.Unmarshal(metadataJSON, &e.Metadata); err != nil { + return nil, fmt.Errorf("unmarshaling BOM entry metadata: %w", err) + } + } entries = append(entries, e) } @@ -407,12 +418,14 @@ func scanBOMEntries(rows pgx.Rows) ([]*BOMEntry, error) { for rows.Next() { e := &BOMEntry{} var parentDesc, childDesc *string + var metadataJSON []byte err := rows.Scan( &e.RelationshipID, &e.ParentItemID, &e.ParentPartNumber, &parentDesc, &e.ChildItemID, &e.ChildPartNumber, &childDesc, &e.RelType, &e.Quantity, &e.Unit, &e.ReferenceDesignators, 
&e.ChildRevision, &e.EffectiveRevision, + &metadataJSON, ) if err != nil { return nil, fmt.Errorf("scanning BOM entry: %w", err) @@ -423,6 +436,11 @@ func scanBOMEntries(rows pgx.Rows) ([]*BOMEntry, error) { if childDesc != nil { e.ChildDescription = *childDesc } + if metadataJSON != nil { + if err := json.Unmarshal(metadataJSON, &e.Metadata); err != nil { + return nil, fmt.Errorf("unmarshaling BOM entry metadata: %w", err) + } + } entries = append(entries, e) }