10 Commits

Author SHA1 Message Date
Forbes
25c42bd70b feat(web): add BOM merge resolution UI with source badges and dropdown
- Add source badges (assembly=teal, manual=blue) to BOM display rows
- Add info banner when assembly-sourced entries exist
- Change source input from text field to select dropdown
- Add merge response types to types.ts

Closes #47
2026-02-08 19:56:33 -06:00
8d88f77ff6 Merge pull request 'feat: expose file attachment stats as item properties' (#54) from issue-37-file-stats into main
Reviewed-on: #54
2026-02-09 01:26:17 +00:00
Forbes
50985ed805 feat: expose file attachment stats as item properties (#37)
Add file_count and files_total_size to item API responses, computed
via batch query on item_files table (no migration needed).

- Add BatchGetFileStats() to audit_queries.go (follows BatchCheckBOM pattern)
- Add file stats to ItemResponse, HandleListItems, HandleGetItem, HandleGetItemByUUID
- Add 'Files' column to ItemTable (default visible in vertical mode)
- Add has_files computed field to audit completeness scoring (weight 1 for manufactured)
2026-02-08 19:25:46 -06:00
9be6f45f09 Merge pull request 'chore(docs): delete stale documentation files' (#52) from issue-31-delete-dead-docs into main
Reviewed-on: #52
2026-02-09 01:22:02 +00:00
ef05aec619 Merge branch 'main' into issue-31-delete-dead-docs 2026-02-09 01:21:52 +00:00
64075d88b5 Merge pull request 'feat(api): add POST /api/items/{partNumber}/bom/merge endpoint' (#51) from issue-45-bom-merge into main
Reviewed-on: #51
2026-02-09 01:21:44 +00:00
eac64f863b Merge branch 'main' into issue-45-bom-merge 2026-02-09 01:21:38 +00:00
aa414adc43 Merge pull request 'feat(db): add source column to relationships table' (#50) from issue-44-bom-source into main
Reviewed-on: #50
2026-02-09 01:21:30 +00:00
Forbes
08e84703d5 chore(docs): delete stale REPOSITORY_STATUS.md (#31)
Generated 2026-01-31, references HTML templates and 8 migrations
that are now outdated. Superseded by STATUS.md and SPECIFICATION.md.

API.md and silo-spec.md were already deleted in earlier commits.
2026-02-08 19:17:53 -06:00
Forbes
fbe4f3a36c feat(api): add POST /api/items/{partNumber}/bom/merge endpoint (#45)
Add BOM merge endpoint for syncing assembly-derived BOM entries from
FreeCAD's silo-mod plugin.

Merge rules:
- Added: entries in request but not in server BOM are auto-created
  with source='assembly'
- Quantity changed: existing entries with different quantity are
  auto-updated
- Unchanged: same part and quantity are skipped
- Unreferenced: assembly-sourced entries in server BOM but not in
  request are flagged as warnings (never auto-deleted)
- Manual entries are silently ignored in unreferenced detection

Also emits SSE 'bom.merged' event on successful merge (#46).
2026-02-08 19:15:27 -06:00
8 changed files with 448 additions and 7 deletions

View File

@@ -101,6 +101,8 @@ var manufacturedWeights = map[string]float64{
// Weight 1: engineering detail (category-specific default)
"sourcing_type": 1,
"lifecycle_status": 1,
// Weight 1: engineering detail
"has_files": 1,
// Weight 0.5: less relevant for in-house
"manufacturer": 0.5,
"supplier": 0.5,
@@ -207,6 +209,7 @@ func scoreItem(
categoryProps map[string]schema.PropertyDefinition,
hasBOM bool,
bomChildCount int,
hasFiles bool,
categoryName string,
projects []string,
includeFields bool,
@@ -276,6 +279,7 @@ func scoreItem(
// Score has_bom for manufactured/assembly items.
if sourcingType == "manufactured" || isAssembly {
processField("has_bom", "computed", "boolean", hasBOM)
processField("has_files", "computed", "boolean", hasFiles)
}
// Score property fields from schema.
@@ -412,6 +416,13 @@ func (s *Server) HandleAuditCompleteness(w http.ResponseWriter, r *http.Request)
return
}
fileStats, err := s.items.BatchGetFileStats(ctx, itemIDs)
if err != nil {
s.logger.Error().Err(err).Msg("failed to batch get file stats")
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to load file stats")
return
}
// Look up the schema for category resolution.
sch := s.schemas["kindred-rd"]
var catSegment *schema.Segment
@@ -440,9 +451,10 @@ func (s *Server) HandleAuditCompleteness(w http.ResponseWriter, r *http.Request)
bomCount := bomCounts[item.ID]
hasBOM := bomCount > 0
hasFiles := fileStats[item.ID].Count > 0
projects := projectCodes[item.ID]
result := scoreItem(item, categoryProps, hasBOM, bomCount, categoryName, projects, false)
result := scoreItem(item, categoryProps, hasBOM, bomCount, hasFiles, categoryName, projects, false)
allResults = append(allResults, *result)
}
@@ -544,6 +556,15 @@ func (s *Server) HandleAuditItemDetail(w http.ResponseWriter, r *http.Request) {
}
projects := projectCodes[item.ID]
// Get file stats.
fileStats, err := s.items.BatchGetFileStats(ctx, []string{item.ID})
if err != nil {
s.logger.Error().Err(err).Str("pn", partNumber).Msg("failed to get file stats for audit")
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to load file stats")
return
}
hasFiles := fileStats[item.ID].Count > 0
// Category resolution.
cat := extractCategory(item.PartNumber)
categoryName := cat
@@ -561,7 +582,7 @@ func (s *Server) HandleAuditItemDetail(w http.ResponseWriter, r *http.Request) {
categoryProps = sch.PropertySchemas.GetPropertiesForCategory(cat)
}
result := scoreItem(iwp, categoryProps, hasBOM, bomCount, categoryName, projects, true)
result := scoreItem(iwp, categoryProps, hasBOM, bomCount, hasFiles, categoryName, projects, true)
writeJSON(w, http.StatusOK, result)
}

View File

@@ -594,6 +594,56 @@ func (s *Server) HandleGetBOMCost(w http.ResponseWriter, r *http.Request) {
})
}
// BOM merge request/response types

// MergeBOMRequest represents a request to merge assembly BOM entries.
type MergeBOMRequest struct {
	// Source labels where the entries came from; the handler currently
	// creates new rows with source='assembly' regardless — TODO confirm
	// whether this field should be validated.
	Source  string          `json:"source"`
	Entries []MergeBOMEntry `json:"entries"`
}

// MergeBOMEntry represents a single entry in a merge request.
type MergeBOMEntry struct {
	ChildPartNumber string `json:"child_part_number"`
	// Quantity is nullable: nil means "no quantity specified".
	Quantity *float64 `json:"quantity"`
}

// MergeBOMResponse represents the result of a BOM merge.
type MergeBOMResponse struct {
	Status   string         `json:"status"`
	Diff     MergeBOMDiff   `json:"diff"`
	Warnings []MergeWarning `json:"warnings"`
	// ResolveURL points the client at the BOM page where warnings
	// (e.g. unreferenced entries) can be resolved manually.
	ResolveURL string `json:"resolve_url"`
}

// MergeBOMDiff categorizes changes from a merge operation.
type MergeBOMDiff struct {
	Added   []MergeDiffEntry `json:"added"`
	// Removed lists assembly-sourced entries no longer referenced;
	// they are reported, not deleted.
	Removed         []MergeDiffEntry `json:"removed"`
	QuantityChanged []MergeQtyChange `json:"quantity_changed"`
	Unchanged       []MergeDiffEntry `json:"unchanged"`
}

// MergeDiffEntry represents an added, removed, or unchanged BOM entry.
type MergeDiffEntry struct {
	PartNumber string   `json:"part_number"`
	Quantity   *float64 `json:"quantity"`
}

// MergeQtyChange represents a BOM entry whose quantity changed.
type MergeQtyChange struct {
	PartNumber  string   `json:"part_number"`
	OldQuantity *float64 `json:"old_quantity"`
	NewQuantity *float64 `json:"new_quantity"`
}

// MergeWarning represents a warning generated during merge.
// Type is one of: "error", "not_found", "cycle", "unreferenced".
type MergeWarning struct {
	Type       string `json:"type"`
	PartNumber string `json:"part_number"`
	Message    string `json:"message"`
}
// BOM CSV headers matching the user-specified format.
var bomCSVHeaders = []string{
"Item", "Level", "Source", "PN", "Seller Description",
@@ -976,3 +1026,197 @@ func (s *Server) HandleImportBOMCSV(w http.ResponseWriter, r *http.Request) {
writeJSON(w, http.StatusOK, result)
}
// HandleMergeBOM merges assembly-derived BOM entries into the server's BOM
// for the item identified by the {partNumber} URL parameter.
//
// Merge rules:
//   - entries in the request but not in the server BOM are created with
//     source='assembly'
//   - entries present in both with a different quantity are updated
//   - entries present in both with the same quantity are left untouched
//   - assembly-sourced server entries missing from the request are reported
//     in diff.removed plus an 'unreferenced' warning, but never deleted
//   - manually-sourced server entries missing from the request are ignored
//
// Per-entry failures (unknown part, cycle, DB error) become warnings rather
// than aborting the whole merge. On completion an SSE 'bom.merged' event is
// published and a MergeBOMResponse is written with status 200.
func (s *Server) HandleMergeBOM(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	partNumber := chi.URLParam(r, "partNumber")

	parent, err := s.items.GetByPartNumber(ctx, partNumber)
	if err != nil {
		s.logger.Error().Err(err).Msg("failed to get parent item")
		writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get parent item")
		return
	}
	if parent == nil {
		writeError(w, http.StatusNotFound, "not_found", "Parent item not found")
		return
	}

	var req MergeBOMRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		writeError(w, http.StatusBadRequest, "invalid_json", err.Error())
		return
	}
	if len(req.Entries) == 0 {
		writeError(w, http.StatusBadRequest, "invalid_request", "entries must not be empty")
		return
	}
	// NOTE(review): req.Source is decoded but never validated or used here;
	// new entries are always created with source='assembly'. Confirm whether
	// Source should be checked or applied.

	// Fetch existing BOM (includes Source field).
	existing, err := s.relationships.GetBOM(ctx, parent.ID)
	if err != nil {
		s.logger.Error().Err(err).Msg("failed to get existing BOM")
		writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get existing BOM")
		return
	}

	// Build a lookup by child part number; entries matched against the
	// request are deleted from this map, leaving only unreferenced ones.
	existingMap := make(map[string]*db.BOMEntry, len(existing))
	for _, e := range existing {
		existingMap[e.ChildPartNumber] = e
	}

	var username *string
	if user := auth.UserFromContext(ctx); user != nil {
		username = &user.Username
	}

	// All slices are initialized non-nil so they marshal as JSON [] rather
	// than null. (Fix: warnings was previously a nil slice and serialized
	// as "warnings": null when empty, inconsistent with the diff arrays.)
	diff := MergeBOMDiff{
		Added:           make([]MergeDiffEntry, 0),
		Removed:         make([]MergeDiffEntry, 0),
		QuantityChanged: make([]MergeQtyChange, 0),
		Unchanged:       make([]MergeDiffEntry, 0),
	}
	warnings := make([]MergeWarning, 0)

	// Process incoming entries.
	for _, entry := range req.Entries {
		if entry.ChildPartNumber == "" {
			continue
		}
		child, err := s.items.GetByPartNumber(ctx, entry.ChildPartNumber)
		if err != nil {
			s.logger.Error().Err(err).Str("child", entry.ChildPartNumber).Msg("failed to look up child")
			warnings = append(warnings, MergeWarning{
				Type:       "error",
				PartNumber: entry.ChildPartNumber,
				Message:    fmt.Sprintf("Error looking up item: %s", err.Error()),
			})
			continue
		}
		if child == nil {
			warnings = append(warnings, MergeWarning{
				Type:       "not_found",
				PartNumber: entry.ChildPartNumber,
				Message:    fmt.Sprintf("Item '%s' not found in database", entry.ChildPartNumber),
			})
			continue
		}
		if ex, ok := existingMap[entry.ChildPartNumber]; ok {
			// Entry already exists — check quantity.
			oldQty := ex.Quantity
			newQty := entry.Quantity
			if quantitiesEqual(oldQty, newQty) {
				diff.Unchanged = append(diff.Unchanged, MergeDiffEntry{
					PartNumber: entry.ChildPartNumber,
					Quantity:   newQty,
				})
			} else {
				// Update quantity in place; a failure is a warning, not fatal.
				if err := s.relationships.Update(ctx, ex.RelationshipID, nil, newQty, nil, nil, nil, nil, username); err != nil {
					s.logger.Error().Err(err).Str("child", entry.ChildPartNumber).Msg("failed to update quantity")
					warnings = append(warnings, MergeWarning{
						Type:       "error",
						PartNumber: entry.ChildPartNumber,
						Message:    fmt.Sprintf("Failed to update quantity: %s", err.Error()),
					})
				} else {
					diff.QuantityChanged = append(diff.QuantityChanged, MergeQtyChange{
						PartNumber:  entry.ChildPartNumber,
						OldQuantity: oldQty,
						NewQuantity: newQty,
					})
				}
			}
			// Matched: remove so it is not reported as unreferenced below.
			delete(existingMap, entry.ChildPartNumber)
		} else {
			// New entry — create with source='assembly'.
			rel := &db.Relationship{
				ParentItemID: parent.ID,
				ChildItemID:  child.ID,
				RelType:      "component",
				Quantity:     entry.Quantity,
				Source:       "assembly",
				CreatedBy:    username,
			}
			if err := s.relationships.Create(ctx, rel); err != nil {
				// Cycle detection is surfaced by the repository via the error
				// text — assumes the error message contains "cycle"; TODO
				// confirm a typed error would be safer.
				if strings.Contains(err.Error(), "cycle") {
					warnings = append(warnings, MergeWarning{
						Type:       "cycle",
						PartNumber: entry.ChildPartNumber,
						Message:    fmt.Sprintf("Adding '%s' would create a cycle", entry.ChildPartNumber),
					})
				} else {
					s.logger.Error().Err(err).Str("child", entry.ChildPartNumber).Msg("failed to create relationship")
					warnings = append(warnings, MergeWarning{
						Type:       "error",
						PartNumber: entry.ChildPartNumber,
						Message:    fmt.Sprintf("Failed to create: %s", err.Error()),
					})
				}
				continue
			}
			diff.Added = append(diff.Added, MergeDiffEntry{
				PartNumber: entry.ChildPartNumber,
				Quantity:   entry.Quantity,
			})
		}
	}

	// Remaining entries in existingMap are not in the merge request.
	// Only assembly-sourced ones are flagged; manual entries are left alone.
	for pn, e := range existingMap {
		if e.Source == "assembly" {
			diff.Removed = append(diff.Removed, MergeDiffEntry{
				PartNumber: pn,
				Quantity:   e.Quantity,
			})
			warnings = append(warnings, MergeWarning{
				Type:       "unreferenced",
				PartNumber: pn,
				Message:    "Present in server BOM but not in assembly",
			})
		}
	}

	resp := MergeBOMResponse{
		Status:     "merged",
		Diff:       diff,
		Warnings:   warnings,
		ResolveURL: fmt.Sprintf("/items/%s/bom", partNumber),
	}

	s.logger.Info().
		Str("parent", partNumber).
		Int("added", len(diff.Added)).
		Int("updated", len(diff.QuantityChanged)).
		Int("unchanged", len(diff.Unchanged)).
		Int("unreferenced", len(diff.Removed)).
		Int("warnings", len(warnings)).
		Msg("BOM merge completed")

	s.broker.Publish("bom.merged", mustMarshal(map[string]any{
		"part_number":      partNumber,
		"added":            len(diff.Added),
		"quantity_changed": len(diff.QuantityChanged),
		"unchanged":        len(diff.Unchanged),
		"unreferenced":     len(diff.Removed),
	}))

	writeJSON(w, http.StatusOK, resp)
}
// quantitiesEqual reports whether two nullable quantities are equal.
// Two nil pointers compare equal; nil never equals a non-nil value.
func quantitiesEqual(a, b *float64) bool {
	if a == nil || b == nil {
		// Equal only when both are nil.
		return a == b
	}
	return *a == *b
}

View File

@@ -260,6 +260,8 @@ type ItemResponse struct {
LongDescription *string `json:"long_description,omitempty"`
StandardCost *float64 `json:"standard_cost,omitempty"`
ThumbnailKey *string `json:"thumbnail_key,omitempty"`
FileCount int `json:"file_count"`
FilesTotalSize int64 `json:"files_total_size"`
Properties map[string]any `json:"properties,omitempty"`
}
@@ -304,9 +306,20 @@ func (s *Server) HandleListItems(w http.ResponseWriter, r *http.Request) {
return
}
// Batch-fetch file attachment stats
ids := make([]string, len(items))
for i, item := range items {
ids[i] = item.ID
}
fileStats, _ := s.items.BatchGetFileStats(ctx, ids)
response := make([]ItemResponse, len(items))
for i, item := range items {
response[i] = itemToResponse(item)
if fs, ok := fileStats[item.ID]; ok {
response[i].FileCount = fs.Count
response[i].FilesTotalSize = fs.TotalSize
}
}
writeJSON(w, http.StatusOK, response)
@@ -482,7 +495,15 @@ func (s *Server) HandleGetItemByUUID(w http.ResponseWriter, r *http.Request) {
return
}
writeJSON(w, http.StatusOK, itemToResponse(item))
response := itemToResponse(item)
if fileStats, err := s.items.BatchGetFileStats(ctx, []string{item.ID}); err == nil {
if fs, ok := fileStats[item.ID]; ok {
response.FileCount = fs.Count
response.FilesTotalSize = fs.TotalSize
}
}
writeJSON(w, http.StatusOK, response)
}
// HandleGetItem retrieves an item by part number.
@@ -504,6 +525,14 @@ func (s *Server) HandleGetItem(w http.ResponseWriter, r *http.Request) {
response := itemToResponse(item)
// File attachment stats
if fileStats, err := s.items.BatchGetFileStats(ctx, []string{item.ID}); err == nil {
if fs, ok := fileStats[item.ID]; ok {
response.FileCount = fs.Count
response.FilesTotalSize = fs.TotalSize
}
}
// Include properties from current revision if requested
if r.URL.Query().Get("include") == "properties" {
revisions, err := s.items.GetRevisions(ctx, item.ID)

View File

@@ -166,6 +166,7 @@ func NewRouter(server *Server, logger zerolog.Logger) http.Handler {
r.Put("/thumbnail", server.HandleSetItemThumbnail)
r.Post("/bom", server.HandleAddBOMEntry)
r.Post("/bom/import", server.HandleImportBOMCSV)
r.Post("/bom/merge", server.HandleMergeBOM)
r.Put("/bom/{childPartNumber}", server.HandleUpdateBOMEntry)
r.Delete("/bom/{childPartNumber}", server.HandleDeleteBOMEntry)
})

View File

@@ -134,6 +134,43 @@ func (r *ItemRepository) BatchCheckBOM(ctx context.Context, itemIDs []string) (m
return result, nil
}
// FileStats holds aggregated file attachment statistics for an item.
type FileStats struct {
	Count     int   // number of attached files
	TotalSize int64 // sum of file sizes in bytes
}

// BatchGetFileStats returns a map of item ID to file attachment statistics
// for the given item IDs, computed with a single grouped query over
// item_files. Items with no files are absent from the map, so callers can
// rely on the FileStats zero value. An empty input returns an empty map
// without touching the database.
func (r *ItemRepository) BatchGetFileStats(ctx context.Context, itemIDs []string) (map[string]FileStats, error) {
	if len(itemIDs) == 0 {
		return map[string]FileStats{}, nil
	}
	rows, err := r.db.pool.Query(ctx, `
		SELECT item_id, COUNT(*), COALESCE(SUM(size), 0)
		FROM item_files
		WHERE item_id = ANY($1)
		GROUP BY item_id
	`, itemIDs)
	if err != nil {
		return nil, fmt.Errorf("batch getting file stats: %w", err)
	}
	defer rows.Close()

	result := make(map[string]FileStats, len(itemIDs))
	for rows.Next() {
		var itemID string
		var fs FileStats
		if err := rows.Scan(&itemID, &fs.Count, &fs.TotalSize); err != nil {
			return nil, fmt.Errorf("scanning file stats: %w", err)
		}
		result[itemID] = fs
	}
	// Fix: rows.Next() returning false can mean a mid-stream failure (e.g. a
	// dropped connection), reported only via rows.Err(); without this check a
	// partial result would be returned as success.
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("iterating file stats: %w", err)
	}
	return result, nil
}
// BatchGetProjectCodes returns a map of item ID to project code list for
// the given item IDs.
func (r *ItemRepository) BatchGetProjectCodes(ctx context.Context, itemIDs []string) (map[string][]string, error) {

View File

@@ -19,6 +19,8 @@ export interface Item {
sourcing_link?: string;
long_description?: string;
standard_cost?: number;
file_count: number;
files_total_size: number;
properties?: Record<string, unknown>;
}
@@ -210,6 +212,38 @@ export interface UpdateBOMEntryRequest {
metadata?: Record<string, unknown>;
}
// BOM Merge

/** Server response to POST /api/items/{partNumber}/bom/merge. */
export interface MergeBOMResponse {
  /** Overall merge status (the server sends "merged"). */
  status: string;
  /** Categorized changes applied (or detected) by the merge. */
  diff: MergeBOMDiff;
  /** Per-entry problems: errors, not-found parts, cycles, unreferenced entries. */
  warnings: MergeWarning[];
  /** URL of the BOM page where warnings can be resolved manually. */
  resolve_url: string;
}

/** Categorized changes from a merge operation. */
export interface MergeBOMDiff {
  added: MergeDiffEntry[];
  /** Assembly-sourced entries no longer referenced — reported, never deleted server-side. */
  removed: MergeDiffEntry[];
  quantity_changed: MergeQtyChange[];
  unchanged: MergeDiffEntry[];
}

/** An added, removed, or unchanged BOM entry. */
export interface MergeDiffEntry {
  part_number: string;
  /** Quantity, or null when unspecified. */
  quantity: number | null;
}

/** A BOM entry whose quantity was updated by the merge. */
export interface MergeQtyChange {
  part_number: string;
  old_quantity: number | null;
  new_quantity: number | null;
}

/** A warning generated during merge (type: "error" | "not_found" | "cycle" | "unreferenced"). */
export interface MergeWarning {
  type: string;
  part_number: string;
  message: string;
}
// Schema properties
export interface PropertyDef {
type: string;

View File

@@ -46,6 +46,7 @@ export function BOMTab({ partNumber, isEditor }: BOMTabProps) {
const unitCost = (e: BOMEntry) => Number(meta(e).unit_cost) || 0;
const extCost = (e: BOMEntry) => unitCost(e) * (e.quantity ?? 0);
const totalCost = entries.reduce((sum, e) => sum + extCost(e), 0);
const assemblyCount = entries.filter((e) => e.source === "assembly").length;
const formToRequest = () => ({
child_part_number: form.child_part_number,
@@ -139,12 +140,15 @@ export function BOMTab({ partNumber, isEditor }: BOMTabProps) {
/>
</td>
<td style={tdStyle}>
<input
<select
value={form.source}
onChange={(e) => setForm({ ...form, source: e.target.value })}
placeholder="Source"
style={inputStyle}
/>
>
<option value=""></option>
<option value="manual">manual</option>
<option value="assembly">assembly</option>
</select>
</td>
<td style={tdStyle}>
<input
@@ -247,6 +251,24 @@ export function BOMTab({ partNumber, isEditor }: BOMTabProps) {
)}
</div>
{isEditor && assemblyCount > 0 && (
<div
style={{
padding: "0.35rem 0.6rem",
marginBottom: "0.5rem",
borderRadius: "0.3rem",
backgroundColor: "rgba(148,226,213,0.1)",
border: "1px solid rgba(148,226,213,0.3)",
fontSize: "0.75rem",
color: "var(--ctp-subtext1)",
}}
>
{assemblyCount} assembly-sourced{" "}
{assemblyCount === 1 ? "entry" : "entries"}. Entries removed from the
FreeCAD assembly will remain here until manually deleted.
</div>
)}
<div style={{ overflow: "auto" }}>
<table
style={{
@@ -289,7 +311,15 @@ export function BOMTab({ partNumber, isEditor }: BOMTabProps) {
>
{e.child_part_number}
</td>
<td style={tdStyle}>{e.source ?? ""}</td>
<td style={tdStyle}>
{e.source === "assembly" ? (
<span style={assemblyBadge}>assembly</span>
) : e.source === "manual" ? (
<span style={manualBadge}>manual</span>
) : (
"—"
)}
</td>
<td
style={{
...tdStyle,
@@ -420,6 +450,25 @@ const saveBtnStyle: React.CSSProperties = {
marginRight: "0.25rem",
};
// Shared pill styling for BOM source badges.
const sourceBadgeBase: React.CSSProperties = {
  padding: "0.1rem 0.4rem",
  borderRadius: "1rem",
  fontSize: "0.7rem",
  fontWeight: 500,
};

// Teal badge for entries synced from a FreeCAD assembly.
const assemblyBadge: React.CSSProperties = {
  ...sourceBadgeBase,
  backgroundColor: "rgba(148,226,213,0.2)",
  color: "var(--ctp-teal)",
};

// Blue badge for manually entered BOM entries.
const manualBadge: React.CSSProperties = {
  ...sourceBadgeBase,
  backgroundColor: "rgba(137,180,250,0.2)",
  color: "var(--ctp-blue)",
};
const cancelBtnStyle: React.CSSProperties = {
padding: "0.2rem 0.4rem",
fontSize: "0.75rem",

View File

@@ -12,6 +12,7 @@ export const ALL_COLUMNS: ColumnDef[] = [
{ key: "item_type", label: "Type" },
{ key: "description", label: "Description" },
{ key: "revision", label: "Rev" },
{ key: "files", label: "Files" },
{ key: "projects", label: "Projects" },
{ key: "created", label: "Created" },
{ key: "actions", label: "Actions" },
@@ -28,6 +29,7 @@ export const DEFAULT_COLUMNS_V = [
"item_type",
"description",
"revision",
"files",
"created",
"actions",
];
@@ -67,6 +69,12 @@ function copyPN(pn: string) {
void navigator.clipboard.writeText(pn);
}
/**
 * Formats a byte count as a short human-readable size using binary
 * (1024-based) units with one decimal place above bytes.
 *
 * Fix: the previous version topped out at MB, so sizes >= 1 GiB rendered
 * as e.g. "1024.0 MB"; a GB tier is added. Output for smaller inputs is
 * unchanged.
 */
function formatSize(bytes: number): string {
  const KB = 1024;
  const MB = KB * 1024;
  const GB = MB * 1024;
  if (bytes < KB) return `${bytes} B`;
  if (bytes < MB) return `${(bytes / KB).toFixed(1)} KB`;
  if (bytes < GB) return `${(bytes / MB).toFixed(1)} MB`;
  return `${(bytes / GB).toFixed(1)} GB`;
}
export function ItemTable({
items,
loading,
@@ -120,6 +128,10 @@ export function ItemTable({
av = a.current_revision;
bv = b.current_revision;
break;
case "files":
av = a.file_count;
bv = b.file_count;
break;
case "created":
av = a.created_at;
bv = b.created_at;
@@ -271,6 +283,20 @@ export function ItemTable({
Rev {item.current_revision}
</td>
);
case "files":
return (
<td
key={col.key}
style={{ ...tdStyle, textAlign: "center" }}
title={
item.file_count > 0
? `${item.file_count} file${item.file_count !== 1 ? "s" : ""}, ${formatSize(item.files_total_size)}`
: "No files"
}
>
{item.file_count > 0 ? item.file_count : "—"}
</td>
);
case "projects":
return (
<td key={col.key} style={tdStyle}>