Compare commits
12 Commits
issue-44-b
...
fix-sse-re
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f7aa673d2c | ||
| 2157b40d06 | |||
|
|
25c42bd70b | ||
| 8d88f77ff6 | |||
|
|
50985ed805 | ||
| 9be6f45f09 | |||
| ef05aec619 | |||
| 64075d88b5 | |||
| eac64f863b | |||
| aa414adc43 | |||
|
|
08e84703d5 | ||
|
|
fbe4f3a36c |
@@ -101,6 +101,8 @@ var manufacturedWeights = map[string]float64{
|
||||
// Weight 1: engineering detail (category-specific default)
|
||||
"sourcing_type": 1,
|
||||
"lifecycle_status": 1,
|
||||
// Weight 1: engineering detail
|
||||
"has_files": 1,
|
||||
// Weight 0.5: less relevant for in-house
|
||||
"manufacturer": 0.5,
|
||||
"supplier": 0.5,
|
||||
@@ -207,6 +209,7 @@ func scoreItem(
|
||||
categoryProps map[string]schema.PropertyDefinition,
|
||||
hasBOM bool,
|
||||
bomChildCount int,
|
||||
hasFiles bool,
|
||||
categoryName string,
|
||||
projects []string,
|
||||
includeFields bool,
|
||||
@@ -276,6 +279,7 @@ func scoreItem(
|
||||
// Score has_bom for manufactured/assembly items.
|
||||
if sourcingType == "manufactured" || isAssembly {
|
||||
processField("has_bom", "computed", "boolean", hasBOM)
|
||||
processField("has_files", "computed", "boolean", hasFiles)
|
||||
}
|
||||
|
||||
// Score property fields from schema.
|
||||
@@ -412,6 +416,13 @@ func (s *Server) HandleAuditCompleteness(w http.ResponseWriter, r *http.Request)
|
||||
return
|
||||
}
|
||||
|
||||
fileStats, err := s.items.BatchGetFileStats(ctx, itemIDs)
|
||||
if err != nil {
|
||||
s.logger.Error().Err(err).Msg("failed to batch get file stats")
|
||||
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to load file stats")
|
||||
return
|
||||
}
|
||||
|
||||
// Look up the schema for category resolution.
|
||||
sch := s.schemas["kindred-rd"]
|
||||
var catSegment *schema.Segment
|
||||
@@ -440,9 +451,10 @@ func (s *Server) HandleAuditCompleteness(w http.ResponseWriter, r *http.Request)
|
||||
|
||||
bomCount := bomCounts[item.ID]
|
||||
hasBOM := bomCount > 0
|
||||
hasFiles := fileStats[item.ID].Count > 0
|
||||
projects := projectCodes[item.ID]
|
||||
|
||||
result := scoreItem(item, categoryProps, hasBOM, bomCount, categoryName, projects, false)
|
||||
result := scoreItem(item, categoryProps, hasBOM, bomCount, hasFiles, categoryName, projects, false)
|
||||
allResults = append(allResults, *result)
|
||||
}
|
||||
|
||||
@@ -544,6 +556,15 @@ func (s *Server) HandleAuditItemDetail(w http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
projects := projectCodes[item.ID]
|
||||
|
||||
// Get file stats.
|
||||
fileStats, err := s.items.BatchGetFileStats(ctx, []string{item.ID})
|
||||
if err != nil {
|
||||
s.logger.Error().Err(err).Str("pn", partNumber).Msg("failed to get file stats for audit")
|
||||
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to load file stats")
|
||||
return
|
||||
}
|
||||
hasFiles := fileStats[item.ID].Count > 0
|
||||
|
||||
// Category resolution.
|
||||
cat := extractCategory(item.PartNumber)
|
||||
categoryName := cat
|
||||
@@ -561,7 +582,7 @@ func (s *Server) HandleAuditItemDetail(w http.ResponseWriter, r *http.Request) {
|
||||
categoryProps = sch.PropertySchemas.GetPropertiesForCategory(cat)
|
||||
}
|
||||
|
||||
result := scoreItem(iwp, categoryProps, hasBOM, bomCount, categoryName, projects, true)
|
||||
result := scoreItem(iwp, categoryProps, hasBOM, bomCount, hasFiles, categoryName, projects, true)
|
||||
|
||||
writeJSON(w, http.StatusOK, result)
|
||||
}
|
||||
|
||||
@@ -594,6 +594,56 @@ func (s *Server) HandleGetBOMCost(w http.ResponseWriter, r *http.Request) {
|
||||
})
|
||||
}
|
||||
|
||||
// BOM merge request/response types

// MergeBOMRequest is the payload for merging assembly-derived BOM entries
// into an item's server-side BOM.
type MergeBOMRequest struct {
	// Source identifies where the entries came from (e.g. a CAD assembly).
	Source string `json:"source"`
	// Entries is the full set of child entries the assembly currently contains.
	Entries []MergeBOMEntry `json:"entries"`
}

// MergeBOMEntry is one child line in a merge request.
type MergeBOMEntry struct {
	ChildPartNumber string `json:"child_part_number"`
	// Quantity is nullable: nil means "no quantity specified".
	Quantity *float64 `json:"quantity"`
}

// MergeBOMResponse reports the outcome of a BOM merge.
type MergeBOMResponse struct {
	Status     string         `json:"status"`
	Diff       MergeBOMDiff   `json:"diff"`
	Warnings   []MergeWarning `json:"warnings"`
	ResolveURL string         `json:"resolve_url"`
}

// MergeBOMDiff buckets every entry touched by a merge into one of four
// categories: created, server-only (removed), quantity-updated, or untouched.
type MergeBOMDiff struct {
	Added           []MergeDiffEntry `json:"added"`
	Removed         []MergeDiffEntry `json:"removed"`
	QuantityChanged []MergeQtyChange `json:"quantity_changed"`
	Unchanged       []MergeDiffEntry `json:"unchanged"`
}

// MergeDiffEntry describes an added, removed, or unchanged BOM entry.
type MergeDiffEntry struct {
	PartNumber string   `json:"part_number"`
	Quantity   *float64 `json:"quantity"`
}

// MergeQtyChange describes a BOM entry whose quantity was updated,
// carrying both the previous and the new (nullable) quantity.
type MergeQtyChange struct {
	PartNumber  string   `json:"part_number"`
	OldQuantity *float64 `json:"old_quantity"`
	NewQuantity *float64 `json:"new_quantity"`
}

// MergeWarning is a non-fatal problem surfaced during a merge
// (lookup failure, missing item, cycle, unreferenced entry, ...).
type MergeWarning struct {
	Type       string `json:"type"`
	PartNumber string `json:"part_number"`
	Message    string `json:"message"`
}
|
||||
|
||||
// BOM CSV headers matching the user-specified format.
|
||||
var bomCSVHeaders = []string{
|
||||
"Item", "Level", "Source", "PN", "Seller Description",
|
||||
@@ -976,3 +1026,197 @@ func (s *Server) HandleImportBOMCSV(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
writeJSON(w, http.StatusOK, result)
|
||||
}
|
||||
|
||||
// HandleMergeBOM merges assembly-derived BOM entries into the server's BOM.
|
||||
// Added entries are created, quantity changes are applied, and entries present
|
||||
// in the server but missing from the request are flagged as warnings (not deleted).
|
||||
func (s *Server) HandleMergeBOM(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
partNumber := chi.URLParam(r, "partNumber")
|
||||
|
||||
parent, err := s.items.GetByPartNumber(ctx, partNumber)
|
||||
if err != nil {
|
||||
s.logger.Error().Err(err).Msg("failed to get parent item")
|
||||
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get parent item")
|
||||
return
|
||||
}
|
||||
if parent == nil {
|
||||
writeError(w, http.StatusNotFound, "not_found", "Parent item not found")
|
||||
return
|
||||
}
|
||||
|
||||
var req MergeBOMRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid_json", err.Error())
|
||||
return
|
||||
}
|
||||
if len(req.Entries) == 0 {
|
||||
writeError(w, http.StatusBadRequest, "invalid_request", "entries must not be empty")
|
||||
return
|
||||
}
|
||||
|
||||
// Fetch existing BOM (includes Source field)
|
||||
existing, err := s.relationships.GetBOM(ctx, parent.ID)
|
||||
if err != nil {
|
||||
s.logger.Error().Err(err).Msg("failed to get existing BOM")
|
||||
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get existing BOM")
|
||||
return
|
||||
}
|
||||
|
||||
// Build lookup map by child part number
|
||||
existingMap := make(map[string]*db.BOMEntry, len(existing))
|
||||
for _, e := range existing {
|
||||
existingMap[e.ChildPartNumber] = e
|
||||
}
|
||||
|
||||
var username *string
|
||||
if user := auth.UserFromContext(ctx); user != nil {
|
||||
username = &user.Username
|
||||
}
|
||||
|
||||
diff := MergeBOMDiff{
|
||||
Added: make([]MergeDiffEntry, 0),
|
||||
Removed: make([]MergeDiffEntry, 0),
|
||||
QuantityChanged: make([]MergeQtyChange, 0),
|
||||
Unchanged: make([]MergeDiffEntry, 0),
|
||||
}
|
||||
var warnings []MergeWarning
|
||||
|
||||
// Process incoming entries
|
||||
for _, entry := range req.Entries {
|
||||
if entry.ChildPartNumber == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
child, err := s.items.GetByPartNumber(ctx, entry.ChildPartNumber)
|
||||
if err != nil {
|
||||
s.logger.Error().Err(err).Str("child", entry.ChildPartNumber).Msg("failed to look up child")
|
||||
warnings = append(warnings, MergeWarning{
|
||||
Type: "error",
|
||||
PartNumber: entry.ChildPartNumber,
|
||||
Message: fmt.Sprintf("Error looking up item: %s", err.Error()),
|
||||
})
|
||||
continue
|
||||
}
|
||||
if child == nil {
|
||||
warnings = append(warnings, MergeWarning{
|
||||
Type: "not_found",
|
||||
PartNumber: entry.ChildPartNumber,
|
||||
Message: fmt.Sprintf("Item '%s' not found in database", entry.ChildPartNumber),
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
if ex, ok := existingMap[entry.ChildPartNumber]; ok {
|
||||
// Entry already exists — check quantity
|
||||
oldQty := ex.Quantity
|
||||
newQty := entry.Quantity
|
||||
if quantitiesEqual(oldQty, newQty) {
|
||||
diff.Unchanged = append(diff.Unchanged, MergeDiffEntry{
|
||||
PartNumber: entry.ChildPartNumber,
|
||||
Quantity: newQty,
|
||||
})
|
||||
} else {
|
||||
// Update quantity
|
||||
if err := s.relationships.Update(ctx, ex.RelationshipID, nil, newQty, nil, nil, nil, nil, username); err != nil {
|
||||
s.logger.Error().Err(err).Str("child", entry.ChildPartNumber).Msg("failed to update quantity")
|
||||
warnings = append(warnings, MergeWarning{
|
||||
Type: "error",
|
||||
PartNumber: entry.ChildPartNumber,
|
||||
Message: fmt.Sprintf("Failed to update quantity: %s", err.Error()),
|
||||
})
|
||||
} else {
|
||||
diff.QuantityChanged = append(diff.QuantityChanged, MergeQtyChange{
|
||||
PartNumber: entry.ChildPartNumber,
|
||||
OldQuantity: oldQty,
|
||||
NewQuantity: newQty,
|
||||
})
|
||||
}
|
||||
}
|
||||
delete(existingMap, entry.ChildPartNumber)
|
||||
} else {
|
||||
// New entry — create
|
||||
rel := &db.Relationship{
|
||||
ParentItemID: parent.ID,
|
||||
ChildItemID: child.ID,
|
||||
RelType: "component",
|
||||
Quantity: entry.Quantity,
|
||||
Source: "assembly",
|
||||
CreatedBy: username,
|
||||
}
|
||||
if err := s.relationships.Create(ctx, rel); err != nil {
|
||||
if strings.Contains(err.Error(), "cycle") {
|
||||
warnings = append(warnings, MergeWarning{
|
||||
Type: "cycle",
|
||||
PartNumber: entry.ChildPartNumber,
|
||||
Message: fmt.Sprintf("Adding '%s' would create a cycle", entry.ChildPartNumber),
|
||||
})
|
||||
} else {
|
||||
s.logger.Error().Err(err).Str("child", entry.ChildPartNumber).Msg("failed to create relationship")
|
||||
warnings = append(warnings, MergeWarning{
|
||||
Type: "error",
|
||||
PartNumber: entry.ChildPartNumber,
|
||||
Message: fmt.Sprintf("Failed to create: %s", err.Error()),
|
||||
})
|
||||
}
|
||||
continue
|
||||
}
|
||||
diff.Added = append(diff.Added, MergeDiffEntry{
|
||||
PartNumber: entry.ChildPartNumber,
|
||||
Quantity: entry.Quantity,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Remaining entries in existingMap are not in the merge request
|
||||
for pn, e := range existingMap {
|
||||
if e.Source == "assembly" {
|
||||
diff.Removed = append(diff.Removed, MergeDiffEntry{
|
||||
PartNumber: pn,
|
||||
Quantity: e.Quantity,
|
||||
})
|
||||
warnings = append(warnings, MergeWarning{
|
||||
Type: "unreferenced",
|
||||
PartNumber: pn,
|
||||
Message: "Present in server BOM but not in assembly",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
resp := MergeBOMResponse{
|
||||
Status: "merged",
|
||||
Diff: diff,
|
||||
Warnings: warnings,
|
||||
ResolveURL: fmt.Sprintf("/items/%s/bom", partNumber),
|
||||
}
|
||||
|
||||
s.logger.Info().
|
||||
Str("parent", partNumber).
|
||||
Int("added", len(diff.Added)).
|
||||
Int("updated", len(diff.QuantityChanged)).
|
||||
Int("unchanged", len(diff.Unchanged)).
|
||||
Int("unreferenced", len(diff.Removed)).
|
||||
Int("warnings", len(warnings)).
|
||||
Msg("BOM merge completed")
|
||||
|
||||
s.broker.Publish("bom.merged", mustMarshal(map[string]any{
|
||||
"part_number": partNumber,
|
||||
"added": len(diff.Added),
|
||||
"quantity_changed": len(diff.QuantityChanged),
|
||||
"unchanged": len(diff.Unchanged),
|
||||
"unreferenced": len(diff.Removed),
|
||||
}))
|
||||
|
||||
writeJSON(w, http.StatusOK, resp)
|
||||
}
|
||||
|
||||
// quantitiesEqual reports whether two nullable float64 quantities are equal.
// Two nil pointers are equal; a nil and a non-nil pointer are not; otherwise
// the pointed-to values are compared.
func quantitiesEqual(a, b *float64) bool {
	switch {
	case a == nil:
		return b == nil
	case b == nil:
		return false
	default:
		return *a == *b
	}
}
|
||||
|
||||
@@ -260,6 +260,8 @@ type ItemResponse struct {
|
||||
LongDescription *string `json:"long_description,omitempty"`
|
||||
StandardCost *float64 `json:"standard_cost,omitempty"`
|
||||
ThumbnailKey *string `json:"thumbnail_key,omitempty"`
|
||||
FileCount int `json:"file_count"`
|
||||
FilesTotalSize int64 `json:"files_total_size"`
|
||||
Properties map[string]any `json:"properties,omitempty"`
|
||||
}
|
||||
|
||||
@@ -304,9 +306,20 @@ func (s *Server) HandleListItems(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
// Batch-fetch file attachment stats
|
||||
ids := make([]string, len(items))
|
||||
for i, item := range items {
|
||||
ids[i] = item.ID
|
||||
}
|
||||
fileStats, _ := s.items.BatchGetFileStats(ctx, ids)
|
||||
|
||||
response := make([]ItemResponse, len(items))
|
||||
for i, item := range items {
|
||||
response[i] = itemToResponse(item)
|
||||
if fs, ok := fileStats[item.ID]; ok {
|
||||
response[i].FileCount = fs.Count
|
||||
response[i].FilesTotalSize = fs.TotalSize
|
||||
}
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, response)
|
||||
@@ -482,7 +495,15 @@ func (s *Server) HandleGetItemByUUID(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, itemToResponse(item))
|
||||
response := itemToResponse(item)
|
||||
if fileStats, err := s.items.BatchGetFileStats(ctx, []string{item.ID}); err == nil {
|
||||
if fs, ok := fileStats[item.ID]; ok {
|
||||
response.FileCount = fs.Count
|
||||
response.FilesTotalSize = fs.TotalSize
|
||||
}
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, response)
|
||||
}
|
||||
|
||||
// HandleGetItem retrieves an item by part number.
|
||||
@@ -504,6 +525,14 @@ func (s *Server) HandleGetItem(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
response := itemToResponse(item)
|
||||
|
||||
// File attachment stats
|
||||
if fileStats, err := s.items.BatchGetFileStats(ctx, []string{item.ID}); err == nil {
|
||||
if fs, ok := fileStats[item.ID]; ok {
|
||||
response.FileCount = fs.Count
|
||||
response.FilesTotalSize = fs.TotalSize
|
||||
}
|
||||
}
|
||||
|
||||
// Include properties from current revision if requested
|
||||
if r.URL.Query().Get("include") == "properties" {
|
||||
revisions, err := s.items.GetRevisions(ctx, item.ID)
|
||||
|
||||
@@ -166,6 +166,7 @@ func NewRouter(server *Server, logger zerolog.Logger) http.Handler {
|
||||
r.Put("/thumbnail", server.HandleSetItemThumbnail)
|
||||
r.Post("/bom", server.HandleAddBOMEntry)
|
||||
r.Post("/bom/import", server.HandleImportBOMCSV)
|
||||
r.Post("/bom/merge", server.HandleMergeBOM)
|
||||
r.Put("/bom/{childPartNumber}", server.HandleUpdateBOMEntry)
|
||||
r.Delete("/bom/{childPartNumber}", server.HandleDeleteBOMEntry)
|
||||
})
|
||||
|
||||
@@ -16,9 +16,12 @@ func (s *Server) HandleEvents(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
// Disable the write deadline for this long-lived connection.
|
||||
// The server's WriteTimeout (15s) would otherwise kill it.
|
||||
// Disable read and write deadlines for this long-lived connection.
|
||||
// The server's ReadTimeout/WriteTimeout (15s) would otherwise kill it.
|
||||
rc := http.NewResponseController(w)
|
||||
if err := rc.SetReadDeadline(time.Time{}); err != nil {
|
||||
s.logger.Warn().Err(err).Msg("failed to disable read deadline for SSE")
|
||||
}
|
||||
if err := rc.SetWriteDeadline(time.Time{}); err != nil {
|
||||
s.logger.Warn().Err(err).Msg("failed to disable write deadline for SSE")
|
||||
}
|
||||
|
||||
@@ -134,6 +134,43 @@ func (r *ItemRepository) BatchCheckBOM(ctx context.Context, itemIDs []string) (m
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// FileStats holds aggregated file attachment statistics for an item.
|
||||
type FileStats struct {
|
||||
Count int
|
||||
TotalSize int64
|
||||
}
|
||||
|
||||
// BatchGetFileStats returns a map of item ID to file attachment statistics
|
||||
// for the given item IDs. Items not in the map have no files.
|
||||
func (r *ItemRepository) BatchGetFileStats(ctx context.Context, itemIDs []string) (map[string]FileStats, error) {
|
||||
if len(itemIDs) == 0 {
|
||||
return map[string]FileStats{}, nil
|
||||
}
|
||||
|
||||
rows, err := r.db.pool.Query(ctx, `
|
||||
SELECT item_id, COUNT(*), COALESCE(SUM(size), 0)
|
||||
FROM item_files
|
||||
WHERE item_id = ANY($1)
|
||||
GROUP BY item_id
|
||||
`, itemIDs)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("batch getting file stats: %w", err)
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
result := make(map[string]FileStats)
|
||||
for rows.Next() {
|
||||
var itemID string
|
||||
var fs FileStats
|
||||
if err := rows.Scan(&itemID, &fs.Count, &fs.TotalSize); err != nil {
|
||||
return nil, fmt.Errorf("scanning file stats: %w", err)
|
||||
}
|
||||
result[itemID] = fs
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// BatchGetProjectCodes returns a map of item ID to project code list for
|
||||
// the given item IDs.
|
||||
func (r *ItemRepository) BatchGetProjectCodes(ctx context.Context, itemIDs []string) (map[string][]string, error) {
|
||||
|
||||
@@ -19,6 +19,8 @@ export interface Item {
|
||||
sourcing_link?: string;
|
||||
long_description?: string;
|
||||
standard_cost?: number;
|
||||
file_count: number;
|
||||
files_total_size: number;
|
||||
properties?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
@@ -210,6 +212,38 @@ export interface UpdateBOMEntryRequest {
|
||||
metadata?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
// BOM Merge
|
||||
export interface MergeBOMResponse {
|
||||
status: string;
|
||||
diff: MergeBOMDiff;
|
||||
warnings: MergeWarning[];
|
||||
resolve_url: string;
|
||||
}
|
||||
|
||||
export interface MergeBOMDiff {
|
||||
added: MergeDiffEntry[];
|
||||
removed: MergeDiffEntry[];
|
||||
quantity_changed: MergeQtyChange[];
|
||||
unchanged: MergeDiffEntry[];
|
||||
}
|
||||
|
||||
export interface MergeDiffEntry {
|
||||
part_number: string;
|
||||
quantity: number | null;
|
||||
}
|
||||
|
||||
export interface MergeQtyChange {
|
||||
part_number: string;
|
||||
old_quantity: number | null;
|
||||
new_quantity: number | null;
|
||||
}
|
||||
|
||||
export interface MergeWarning {
|
||||
type: string;
|
||||
part_number: string;
|
||||
message: string;
|
||||
}
|
||||
|
||||
// Schema properties
|
||||
export interface PropertyDef {
|
||||
type: string;
|
||||
|
||||
@@ -46,6 +46,7 @@ export function BOMTab({ partNumber, isEditor }: BOMTabProps) {
|
||||
const unitCost = (e: BOMEntry) => Number(meta(e).unit_cost) || 0;
|
||||
const extCost = (e: BOMEntry) => unitCost(e) * (e.quantity ?? 0);
|
||||
const totalCost = entries.reduce((sum, e) => sum + extCost(e), 0);
|
||||
const assemblyCount = entries.filter((e) => e.source === "assembly").length;
|
||||
|
||||
const formToRequest = () => ({
|
||||
child_part_number: form.child_part_number,
|
||||
@@ -139,12 +140,15 @@ export function BOMTab({ partNumber, isEditor }: BOMTabProps) {
|
||||
/>
|
||||
</td>
|
||||
<td style={tdStyle}>
|
||||
<input
|
||||
<select
|
||||
value={form.source}
|
||||
onChange={(e) => setForm({ ...form, source: e.target.value })}
|
||||
placeholder="Source"
|
||||
style={inputStyle}
|
||||
/>
|
||||
>
|
||||
<option value="">—</option>
|
||||
<option value="manual">manual</option>
|
||||
<option value="assembly">assembly</option>
|
||||
</select>
|
||||
</td>
|
||||
<td style={tdStyle}>
|
||||
<input
|
||||
@@ -247,6 +251,24 @@ export function BOMTab({ partNumber, isEditor }: BOMTabProps) {
|
||||
)}
|
||||
</div>
|
||||
|
||||
{isEditor && assemblyCount > 0 && (
|
||||
<div
|
||||
style={{
|
||||
padding: "0.35rem 0.6rem",
|
||||
marginBottom: "0.5rem",
|
||||
borderRadius: "0.3rem",
|
||||
backgroundColor: "rgba(148,226,213,0.1)",
|
||||
border: "1px solid rgba(148,226,213,0.3)",
|
||||
fontSize: "0.75rem",
|
||||
color: "var(--ctp-subtext1)",
|
||||
}}
|
||||
>
|
||||
{assemblyCount} assembly-sourced{" "}
|
||||
{assemblyCount === 1 ? "entry" : "entries"}. Entries removed from the
|
||||
FreeCAD assembly will remain here until manually deleted.
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div style={{ overflow: "auto" }}>
|
||||
<table
|
||||
style={{
|
||||
@@ -289,7 +311,15 @@ export function BOMTab({ partNumber, isEditor }: BOMTabProps) {
|
||||
>
|
||||
{e.child_part_number}
|
||||
</td>
|
||||
<td style={tdStyle}>{e.source ?? ""}</td>
|
||||
<td style={tdStyle}>
|
||||
{e.source === "assembly" ? (
|
||||
<span style={assemblyBadge}>assembly</span>
|
||||
) : e.source === "manual" ? (
|
||||
<span style={manualBadge}>manual</span>
|
||||
) : (
|
||||
"—"
|
||||
)}
|
||||
</td>
|
||||
<td
|
||||
style={{
|
||||
...tdStyle,
|
||||
@@ -420,6 +450,25 @@ const saveBtnStyle: React.CSSProperties = {
|
||||
marginRight: "0.25rem",
|
||||
};
|
||||
|
||||
const sourceBadgeBase: React.CSSProperties = {
|
||||
padding: "0.1rem 0.4rem",
|
||||
borderRadius: "1rem",
|
||||
fontSize: "0.7rem",
|
||||
fontWeight: 500,
|
||||
};
|
||||
|
||||
const assemblyBadge: React.CSSProperties = {
|
||||
...sourceBadgeBase,
|
||||
backgroundColor: "rgba(148,226,213,0.2)",
|
||||
color: "var(--ctp-teal)",
|
||||
};
|
||||
|
||||
const manualBadge: React.CSSProperties = {
|
||||
...sourceBadgeBase,
|
||||
backgroundColor: "rgba(137,180,250,0.2)",
|
||||
color: "var(--ctp-blue)",
|
||||
};
|
||||
|
||||
const cancelBtnStyle: React.CSSProperties = {
|
||||
padding: "0.2rem 0.4rem",
|
||||
fontSize: "0.75rem",
|
||||
|
||||
@@ -12,6 +12,7 @@ export const ALL_COLUMNS: ColumnDef[] = [
|
||||
{ key: "item_type", label: "Type" },
|
||||
{ key: "description", label: "Description" },
|
||||
{ key: "revision", label: "Rev" },
|
||||
{ key: "files", label: "Files" },
|
||||
{ key: "projects", label: "Projects" },
|
||||
{ key: "created", label: "Created" },
|
||||
{ key: "actions", label: "Actions" },
|
||||
@@ -28,6 +29,7 @@ export const DEFAULT_COLUMNS_V = [
|
||||
"item_type",
|
||||
"description",
|
||||
"revision",
|
||||
"files",
|
||||
"created",
|
||||
"actions",
|
||||
];
|
||||
@@ -67,6 +69,12 @@ function copyPN(pn: string) {
|
||||
void navigator.clipboard.writeText(pn);
|
||||
}
|
||||
|
||||
function formatSize(bytes: number): string {
|
||||
if (bytes < 1024) return `${bytes} B`;
|
||||
if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;
|
||||
return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
|
||||
}
|
||||
|
||||
export function ItemTable({
|
||||
items,
|
||||
loading,
|
||||
@@ -120,6 +128,10 @@ export function ItemTable({
|
||||
av = a.current_revision;
|
||||
bv = b.current_revision;
|
||||
break;
|
||||
case "files":
|
||||
av = a.file_count;
|
||||
bv = b.file_count;
|
||||
break;
|
||||
case "created":
|
||||
av = a.created_at;
|
||||
bv = b.created_at;
|
||||
@@ -271,6 +283,20 @@ export function ItemTable({
|
||||
Rev {item.current_revision}
|
||||
</td>
|
||||
);
|
||||
case "files":
|
||||
return (
|
||||
<td
|
||||
key={col.key}
|
||||
style={{ ...tdStyle, textAlign: "center" }}
|
||||
title={
|
||||
item.file_count > 0
|
||||
? `${item.file_count} file${item.file_count !== 1 ? "s" : ""}, ${formatSize(item.files_total_size)}`
|
||||
: "No files"
|
||||
}
|
||||
>
|
||||
{item.file_count > 0 ? item.file_count : "—"}
|
||||
</td>
|
||||
);
|
||||
case "projects":
|
||||
return (
|
||||
<td key={col.key} style={tdStyle}>
|
||||
|
||||
Reference in New Issue
Block a user