Add BOM handling and routes to API and web UI

This commit is contained in:
Forbes
2026-01-31 08:38:02 -06:00
parent a2a36141f0
commit 1518cbc299
4 changed files with 1055 additions and 14 deletions

View File

@@ -1,7 +1,10 @@
package api
import (
"encoding/csv"
"encoding/json"
"fmt"
"io"
"net/http"
"strconv"
"strings"
@@ -14,16 +17,17 @@ import (
// BOMEntryResponse represents a BOM entry in API responses.
type BOMEntryResponse struct {
ID string `json:"id"`
ChildPartNumber string `json:"child_part_number"`
ChildDescription string `json:"child_description"`
RelType string `json:"rel_type"`
Quantity *float64 `json:"quantity"`
Unit *string `json:"unit,omitempty"`
ReferenceDesignators []string `json:"reference_designators,omitempty"`
ChildRevision *int `json:"child_revision,omitempty"`
EffectiveRevision int `json:"effective_revision"`
Depth *int `json:"depth,omitempty"`
ID string `json:"id"`
ChildPartNumber string `json:"child_part_number"`
ChildDescription string `json:"child_description"`
RelType string `json:"rel_type"`
Quantity *float64 `json:"quantity"`
Unit *string `json:"unit,omitempty"`
ReferenceDesignators []string `json:"reference_designators,omitempty"`
ChildRevision *int `json:"child_revision,omitempty"`
EffectiveRevision int `json:"effective_revision"`
Depth *int `json:"depth,omitempty"`
Metadata map[string]any `json:"metadata,omitempty"`
}
// WhereUsedResponse represents a where-used entry in API responses.
@@ -264,6 +268,7 @@ func (s *Server) HandleAddBOMEntry(w http.ResponseWriter, r *http.Request) {
ReferenceDesignators: req.ReferenceDesignators,
ChildRevision: req.ChildRevision,
EffectiveRevision: child.CurrentRevision,
Metadata: req.Metadata,
}
if req.ChildRevision != nil {
entry.EffectiveRevision = *req.ChildRevision
@@ -419,6 +424,7 @@ func bomEntryToResponse(e *db.BOMEntry) BOMEntryResponse {
ReferenceDesignators: refDes,
ChildRevision: e.ChildRevision,
EffectiveRevision: e.EffectiveRevision,
Metadata: e.Metadata,
}
}
@@ -438,3 +444,380 @@ func whereUsedToResponse(e *db.BOMEntry) WhereUsedResponse {
ReferenceDesignators: refDes,
}
}
// bomCSVHeaders is the fixed column order used by both the BOM CSV
// export and import handlers, matching the user-specified format.
var bomCSVHeaders = []string{
	"Item",
	"Level",
	"Source",
	"PN",
	"Seller Description",
	"Unit Cost",
	"QTY",
	"Ext Cost",
	"Sourcing Link",
}
// getMetaString returns the string stored under key in m. It returns ""
// when m is nil, the key is absent, or the value is not a string.
// (Reading from a nil map is safe in Go and yields the zero value.)
func getMetaString(m map[string]any, key string) string {
	s, _ := m[key].(string)
	return s
}
// getMetaFloat returns the numeric value stored under key in m as a
// float64. The second result is false when m is nil, the key is absent,
// the value is neither float64 nor json.Number, or a json.Number fails
// to parse. A nil or missing entry falls through the type switch to the
// default case, so no explicit nil checks are needed.
func getMetaFloat(m map[string]any, key string) (float64, bool) {
	switch n := m[key].(type) {
	case float64:
		return n, true
	case json.Number:
		f, err := n.Float64()
		return f, err == nil
	default:
		return 0, false
	}
}
// HandleExportBOMCSV exports the expanded BOM as a CSV file.
//
// The {partNumber} route param identifies the root item; its BOM is
// expanded to a maximum depth of 10 levels. Responds 404 when the item
// does not exist and 500 on storage errors. On success the body is a
// CSV attachment named "<partNumber>-bom.csv" in the bomCSVHeaders
// column layout, with Unit Cost / Source / Seller Description /
// Sourcing Link pulled from each entry's metadata.
func (s *Server) HandleExportBOMCSV(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	partNumber := chi.URLParam(r, "partNumber")
	item, err := s.items.GetByPartNumber(ctx, partNumber)
	if err != nil {
		s.logger.Error().Err(err).Msg("failed to get item")
		writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get item")
		return
	}
	if item == nil {
		writeError(w, http.StatusNotFound, "not_found", "Item not found")
		return
	}
	entries, err := s.relationships.GetExpandedBOM(ctx, item.ID, 10)
	if err != nil {
		s.logger.Error().Err(err).Msg("failed to get expanded BOM")
		writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get BOM")
		return
	}
	// Strip double quotes from the part number so it cannot terminate
	// the quoted filename parameter in the Content-Disposition header.
	safeName := strings.ReplaceAll(partNumber, `"`, "")
	w.Header().Set("Content-Type", "text/csv")
	w.Header().Set("Content-Disposition", fmt.Sprintf(`attachment; filename="%s-bom.csv"`, safeName))
	writer := csv.NewWriter(w)
	// Write header
	if err := writer.Write(bomCSVHeaders); err != nil {
		s.logger.Error().Err(err).Msg("failed to write CSV header")
		return
	}
	// Write rows
	for i, e := range entries {
		unitCost, hasUnitCost := getMetaFloat(e.Metadata, "unit_cost")
		qty := 0.0
		if e.Quantity != nil {
			qty = *e.Quantity
		}
		// Ext Cost is only emitted when both a unit cost and a positive
		// quantity are present; otherwise the cell stays blank.
		extCost := ""
		if hasUnitCost && qty > 0 {
			extCost = fmt.Sprintf("%.2f", unitCost*qty)
		}
		unitCostStr := ""
		if hasUnitCost {
			unitCostStr = fmt.Sprintf("%.2f", unitCost)
		}
		qtyStr := ""
		if e.Quantity != nil {
			qtyStr = strconv.FormatFloat(*e.Quantity, 'f', -1, 64)
		}
		row := []string{
			strconv.Itoa(i + 1), // Item
			strconv.Itoa(e.Depth), // Level
			getMetaString(e.Metadata, "source"), // Source
			e.ChildPartNumber, // PN
			getMetaString(e.Metadata, "seller_description"), // Seller Description
			unitCostStr, // Unit Cost
			qtyStr, // QTY
			extCost, // Ext Cost
			getMetaString(e.Metadata, "sourcing_link"), // Sourcing Link
		}
		if err := writer.Write(row); err != nil {
			s.logger.Error().Err(err).Msg("failed to write CSV row")
			return
		}
	}
	// The original deferred Flush discarded buffered-write errors;
	// flush explicitly and check csv.Writer.Error so a failed flush
	// (e.g. client disconnect) is at least logged.
	writer.Flush()
	if err := writer.Error(); err != nil {
		s.logger.Error().Err(err).Msg("failed to flush CSV output")
	}
}
// HandleImportBOMCSV imports BOM entries from a CSV file.
//
// The part identified by the {partNumber} route param becomes the parent
// of every imported entry. The request is a multipart form containing:
//   - "file":           the CSV upload (required)
//   - "dry_run":        "true" to validate rows without persisting
//   - "clear_existing": "true" to delete the current BOM first
//     (only honored on a real import, never during a dry run)
//
// Column headers are matched case-insensitively; only "PN" is required.
// Optional columns QTY, Source, Seller Description, Unit Cost, and
// Sourcing Link populate the entry quantity and metadata. Rows whose PN
// already exists under the parent are updated (upsert); new PNs create a
// "component" relationship. The response is a CSVImportResult with row
// totals and per-row errors.
func (s *Server) HandleImportBOMCSV(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	partNumber := chi.URLParam(r, "partNumber")
	// Resolve the parent item that will own the imported entries.
	parent, err := s.items.GetByPartNumber(ctx, partNumber)
	if err != nil {
		s.logger.Error().Err(err).Msg("failed to get parent item")
		writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get parent item")
		return
	}
	if parent == nil {
		writeError(w, http.StatusNotFound, "not_found", "Parent item not found")
		return
	}
	// Parse multipart form (32MB max)
	if err := r.ParseMultipartForm(32 << 20); err != nil {
		writeError(w, http.StatusBadRequest, "invalid_form", "Failed to parse multipart form")
		return
	}
	file, _, err := r.FormFile("file")
	if err != nil {
		writeError(w, http.StatusBadRequest, "missing_file", "CSV file is required")
		return
	}
	defer file.Close()
	dryRun := r.FormValue("dry_run") == "true"
	clearExisting := r.FormValue("clear_existing") == "true"
	// Read CSV
	reader := csv.NewReader(file)
	reader.TrimLeadingSpace = true
	headers, err := reader.Read()
	if err != nil {
		writeError(w, http.StatusBadRequest, "invalid_csv", "Failed to read CSV headers")
		return
	}
	// Build case-insensitive header index
	headerIdx := make(map[string]int)
	for i, h := range headers {
		headerIdx[strings.ToLower(strings.TrimSpace(h))] = i
	}
	// Require PN column
	pnIdx, hasPn := headerIdx["pn"]
	if !hasPn {
		writeError(w, http.StatusBadRequest, "missing_column", "CSV must have a 'PN' column")
		return
	}
	// Clear existing BOM if requested (only on real import)
	if clearExisting && !dryRun {
		existing, err := s.relationships.GetBOM(ctx, parent.ID)
		if err != nil {
			s.logger.Error().Err(err).Msg("failed to get existing BOM for clearing")
			writeError(w, http.StatusInternalServerError, "internal_error", "Failed to clear existing BOM")
			return
		}
		// Individual delete failures are logged but do not abort the
		// import; new rows are still applied on top of whatever remains.
		for _, e := range existing {
			if err := s.relationships.Delete(ctx, e.RelationshipID); err != nil {
				s.logger.Error().Err(err).Str("id", e.RelationshipID).Msg("failed to delete BOM entry during clear")
			}
		}
	}
	result := CSVImportResult{}
	var createdItems []string
	// Process data rows one at a time; each row either increments
	// SuccessCount or appends a CSVImportErr and increments ErrorCount.
	for {
		record, err := reader.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			// Malformed row (e.g. mismatched field count): record the
			// error and keep going with the next row.
			result.TotalRows++
			result.ErrorCount++
			result.Errors = append(result.Errors, CSVImportErr{
				Row:     result.TotalRows + 1, // +1 for header
				Message: fmt.Sprintf("Failed to read row: %s", err.Error()),
			})
			continue
		}
		result.TotalRows++
		rowNum := result.TotalRows + 1 // +1 for header
		// Get part number
		// NOTE(review): with csv.Reader's default FieldsPerRecord, rows
		// with a different column count already fail in Read above, so
		// this guard appears defensive only — confirm before relying on it.
		if pnIdx >= len(record) {
			result.ErrorCount++
			result.Errors = append(result.Errors, CSVImportErr{
				Row:     rowNum,
				Field:   "PN",
				Message: "Row has fewer columns than expected",
			})
			continue
		}
		childPN := strings.TrimSpace(record[pnIdx])
		if childPN == "" {
			// Skip blank PN rows silently
			// NOTE(review): decrementing TotalRows makes later rowNum
			// values drift from actual CSV line numbers once a blank
			// row has been skipped — confirm this is intended.
			result.TotalRows--
			continue
		}
		// Look up child item
		child, err := s.items.GetByPartNumber(ctx, childPN)
		if err != nil {
			result.ErrorCount++
			result.Errors = append(result.Errors, CSVImportErr{
				Row:     rowNum,
				Field:   "PN",
				Message: fmt.Sprintf("Error looking up item: %s", err.Error()),
			})
			continue
		}
		if child == nil {
			// Unknown PNs are reported, not auto-created.
			result.ErrorCount++
			result.Errors = append(result.Errors, CSVImportErr{
				Row:     rowNum,
				Field:   "PN",
				Message: fmt.Sprintf("Item '%s' not found", childPN),
			})
			continue
		}
		// Parse quantity
		var quantity *float64
		if idx, ok := headerIdx["qty"]; ok && idx < len(record) {
			qtyStr := strings.TrimSpace(record[idx])
			if qtyStr != "" {
				q, err := strconv.ParseFloat(qtyStr, 64)
				if err != nil {
					result.ErrorCount++
					result.Errors = append(result.Errors, CSVImportErr{
						Row:     rowNum,
						Field:   "QTY",
						Message: fmt.Sprintf("Invalid quantity '%s'", qtyStr),
					})
					continue
				}
				quantity = &q
			}
		}
		// Build metadata from CSV columns
		metadata := make(map[string]any)
		if idx, ok := headerIdx["source"]; ok && idx < len(record) {
			if v := strings.TrimSpace(record[idx]); v != "" {
				metadata["source"] = v
			}
		}
		if idx, ok := headerIdx["seller description"]; ok && idx < len(record) {
			if v := strings.TrimSpace(record[idx]); v != "" {
				metadata["seller_description"] = v
			}
		}
		if idx, ok := headerIdx["unit cost"]; ok && idx < len(record) {
			if v := strings.TrimSpace(record[idx]); v != "" {
				// Strip leading $ or currency symbols
				v = strings.TrimLeft(v, "$£€ ")
				// Unparseable costs are dropped silently rather than
				// failing the row.
				if f, err := strconv.ParseFloat(v, 64); err == nil {
					metadata["unit_cost"] = f
				}
			}
		}
		if idx, ok := headerIdx["sourcing link"]; ok && idx < len(record) {
			if v := strings.TrimSpace(record[idx]); v != "" {
				metadata["sourcing_link"] = v
			}
		}
		// Normalize empty metadata to nil so nothing is persisted for it.
		if len(metadata) == 0 {
			metadata = nil
		}
		// Cycle check
		hasCycle, err := s.relationships.HasCycle(ctx, parent.ID, child.ID)
		if err != nil {
			result.ErrorCount++
			result.Errors = append(result.Errors, CSVImportErr{
				Row:     rowNum,
				Field:   "PN",
				Message: fmt.Sprintf("Error checking for cycles: %s", err.Error()),
			})
			continue
		}
		if hasCycle {
			result.ErrorCount++
			result.Errors = append(result.Errors, CSVImportErr{
				Row:     rowNum,
				Field:   "PN",
				Message: fmt.Sprintf("Adding '%s' would create a cycle", childPN),
			})
			continue
		}
		// Dry run: row validated successfully, but nothing is written.
		if dryRun {
			result.SuccessCount++
			continue
		}
		// Check if relationship already exists (upsert)
		existing, err := s.relationships.GetByParentAndChild(ctx, parent.ID, child.ID)
		if err != nil {
			result.ErrorCount++
			result.Errors = append(result.Errors, CSVImportErr{
				Row:     rowNum,
				Message: fmt.Sprintf("Error checking existing relationship: %s", err.Error()),
			})
			continue
		}
		if existing != nil {
			// Update existing
			if err := s.relationships.Update(ctx, existing.ID, nil, quantity, nil, nil, nil, metadata); err != nil {
				result.ErrorCount++
				result.Errors = append(result.Errors, CSVImportErr{
					Row:     rowNum,
					Message: fmt.Sprintf("Failed to update: %s", err.Error()),
				})
				continue
			}
		} else {
			// Create new
			rel := &db.Relationship{
				ParentItemID: parent.ID,
				ChildItemID:  child.ID,
				RelType:      "component",
				Quantity:     quantity,
				Metadata:     metadata,
			}
			if err := s.relationships.Create(ctx, rel); err != nil {
				result.ErrorCount++
				result.Errors = append(result.Errors, CSVImportErr{
					Row:     rowNum,
					Message: fmt.Sprintf("Failed to create: %s", err.Error()),
				})
				continue
			}
			// Only newly created (not updated) relationships are listed
			// in CreatedItems.
			createdItems = append(createdItems, childPN)
		}
		result.SuccessCount++
	}
	result.CreatedItems = createdItems
	s.logger.Info().
		Str("parent", partNumber).
		Bool("dry_run", dryRun).
		Int("total", result.TotalRows).
		Int("success", result.SuccessCount).
		Int("errors", result.ErrorCount).
		Msg("BOM CSV import completed")
	writeJSON(w, http.StatusOK, result)
}