Compare commits
6 Commits
issue-31-d
...
fix-sse-re
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f7aa673d2c | ||
| 2157b40d06 | |||
|
|
25c42bd70b | ||
| 8d88f77ff6 | |||
|
|
50985ed805 | ||
| 9be6f45f09 |
@@ -101,6 +101,8 @@ var manufacturedWeights = map[string]float64{
|
||||
// Weight 1: engineering detail (category-specific default)
|
||||
"sourcing_type": 1,
|
||||
"lifecycle_status": 1,
|
||||
// Weight 1: engineering detail
|
||||
"has_files": 1,
|
||||
// Weight 0.5: less relevant for in-house
|
||||
"manufacturer": 0.5,
|
||||
"supplier": 0.5,
|
||||
@@ -207,6 +209,7 @@ func scoreItem(
|
||||
categoryProps map[string]schema.PropertyDefinition,
|
||||
hasBOM bool,
|
||||
bomChildCount int,
|
||||
hasFiles bool,
|
||||
categoryName string,
|
||||
projects []string,
|
||||
includeFields bool,
|
||||
@@ -276,6 +279,7 @@ func scoreItem(
|
||||
// Score has_bom for manufactured/assembly items.
|
||||
if sourcingType == "manufactured" || isAssembly {
|
||||
processField("has_bom", "computed", "boolean", hasBOM)
|
||||
processField("has_files", "computed", "boolean", hasFiles)
|
||||
}
|
||||
|
||||
// Score property fields from schema.
|
||||
@@ -412,6 +416,13 @@ func (s *Server) HandleAuditCompleteness(w http.ResponseWriter, r *http.Request)
|
||||
return
|
||||
}
|
||||
|
||||
fileStats, err := s.items.BatchGetFileStats(ctx, itemIDs)
|
||||
if err != nil {
|
||||
s.logger.Error().Err(err).Msg("failed to batch get file stats")
|
||||
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to load file stats")
|
||||
return
|
||||
}
|
||||
|
||||
// Look up the schema for category resolution.
|
||||
sch := s.schemas["kindred-rd"]
|
||||
var catSegment *schema.Segment
|
||||
@@ -440,9 +451,10 @@ func (s *Server) HandleAuditCompleteness(w http.ResponseWriter, r *http.Request)
|
||||
|
||||
bomCount := bomCounts[item.ID]
|
||||
hasBOM := bomCount > 0
|
||||
hasFiles := fileStats[item.ID].Count > 0
|
||||
projects := projectCodes[item.ID]
|
||||
|
||||
result := scoreItem(item, categoryProps, hasBOM, bomCount, categoryName, projects, false)
|
||||
result := scoreItem(item, categoryProps, hasBOM, bomCount, hasFiles, categoryName, projects, false)
|
||||
allResults = append(allResults, *result)
|
||||
}
|
||||
|
||||
@@ -544,6 +556,15 @@ func (s *Server) HandleAuditItemDetail(w http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
projects := projectCodes[item.ID]
|
||||
|
||||
// Get file stats.
|
||||
fileStats, err := s.items.BatchGetFileStats(ctx, []string{item.ID})
|
||||
if err != nil {
|
||||
s.logger.Error().Err(err).Str("pn", partNumber).Msg("failed to get file stats for audit")
|
||||
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to load file stats")
|
||||
return
|
||||
}
|
||||
hasFiles := fileStats[item.ID].Count > 0
|
||||
|
||||
// Category resolution.
|
||||
cat := extractCategory(item.PartNumber)
|
||||
categoryName := cat
|
||||
@@ -561,7 +582,7 @@ func (s *Server) HandleAuditItemDetail(w http.ResponseWriter, r *http.Request) {
|
||||
categoryProps = sch.PropertySchemas.GetPropertiesForCategory(cat)
|
||||
}
|
||||
|
||||
result := scoreItem(iwp, categoryProps, hasBOM, bomCount, categoryName, projects, true)
|
||||
result := scoreItem(iwp, categoryProps, hasBOM, bomCount, hasFiles, categoryName, projects, true)
|
||||
|
||||
writeJSON(w, http.StatusOK, result)
|
||||
}
|
||||
|
||||
@@ -260,6 +260,8 @@ type ItemResponse struct {
|
||||
LongDescription *string `json:"long_description,omitempty"`
|
||||
StandardCost *float64 `json:"standard_cost,omitempty"`
|
||||
ThumbnailKey *string `json:"thumbnail_key,omitempty"`
|
||||
FileCount int `json:"file_count"`
|
||||
FilesTotalSize int64 `json:"files_total_size"`
|
||||
Properties map[string]any `json:"properties,omitempty"`
|
||||
}
|
||||
|
||||
@@ -304,9 +306,20 @@ func (s *Server) HandleListItems(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
// Batch-fetch file attachment stats
|
||||
ids := make([]string, len(items))
|
||||
for i, item := range items {
|
||||
ids[i] = item.ID
|
||||
}
|
||||
fileStats, _ := s.items.BatchGetFileStats(ctx, ids)
|
||||
|
||||
response := make([]ItemResponse, len(items))
|
||||
for i, item := range items {
|
||||
response[i] = itemToResponse(item)
|
||||
if fs, ok := fileStats[item.ID]; ok {
|
||||
response[i].FileCount = fs.Count
|
||||
response[i].FilesTotalSize = fs.TotalSize
|
||||
}
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, response)
|
||||
@@ -482,7 +495,15 @@ func (s *Server) HandleGetItemByUUID(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, itemToResponse(item))
|
||||
response := itemToResponse(item)
|
||||
if fileStats, err := s.items.BatchGetFileStats(ctx, []string{item.ID}); err == nil {
|
||||
if fs, ok := fileStats[item.ID]; ok {
|
||||
response.FileCount = fs.Count
|
||||
response.FilesTotalSize = fs.TotalSize
|
||||
}
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, response)
|
||||
}
|
||||
|
||||
// HandleGetItem retrieves an item by part number.
|
||||
@@ -504,6 +525,14 @@ func (s *Server) HandleGetItem(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
response := itemToResponse(item)
|
||||
|
||||
// File attachment stats
|
||||
if fileStats, err := s.items.BatchGetFileStats(ctx, []string{item.ID}); err == nil {
|
||||
if fs, ok := fileStats[item.ID]; ok {
|
||||
response.FileCount = fs.Count
|
||||
response.FilesTotalSize = fs.TotalSize
|
||||
}
|
||||
}
|
||||
|
||||
// Include properties from current revision if requested
|
||||
if r.URL.Query().Get("include") == "properties" {
|
||||
revisions, err := s.items.GetRevisions(ctx, item.ID)
|
||||
|
||||
@@ -16,9 +16,12 @@ func (s *Server) HandleEvents(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
// Disable the write deadline for this long-lived connection.
|
||||
// The server's WriteTimeout (15s) would otherwise kill it.
|
||||
// Disable read and write deadlines for this long-lived connection.
|
||||
// The server's ReadTimeout/WriteTimeout (15s) would otherwise kill it.
|
||||
rc := http.NewResponseController(w)
|
||||
if err := rc.SetReadDeadline(time.Time{}); err != nil {
|
||||
s.logger.Warn().Err(err).Msg("failed to disable read deadline for SSE")
|
||||
}
|
||||
if err := rc.SetWriteDeadline(time.Time{}); err != nil {
|
||||
s.logger.Warn().Err(err).Msg("failed to disable write deadline for SSE")
|
||||
}
|
||||
|
||||
@@ -134,6 +134,43 @@ func (r *ItemRepository) BatchCheckBOM(ctx context.Context, itemIDs []string) (m
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// FileStats holds aggregated file attachment statistics for an item.
type FileStats struct {
	Count     int   // number of files attached to the item
	TotalSize int64 // combined size of those files, in bytes (COALESCEd to 0 when no rows)
}
|
||||
|
||||
// BatchGetFileStats returns a map of item ID to file attachment statistics
|
||||
// for the given item IDs. Items not in the map have no files.
|
||||
func (r *ItemRepository) BatchGetFileStats(ctx context.Context, itemIDs []string) (map[string]FileStats, error) {
|
||||
if len(itemIDs) == 0 {
|
||||
return map[string]FileStats{}, nil
|
||||
}
|
||||
|
||||
rows, err := r.db.pool.Query(ctx, `
|
||||
SELECT item_id, COUNT(*), COALESCE(SUM(size), 0)
|
||||
FROM item_files
|
||||
WHERE item_id = ANY($1)
|
||||
GROUP BY item_id
|
||||
`, itemIDs)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("batch getting file stats: %w", err)
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
result := make(map[string]FileStats)
|
||||
for rows.Next() {
|
||||
var itemID string
|
||||
var fs FileStats
|
||||
if err := rows.Scan(&itemID, &fs.Count, &fs.TotalSize); err != nil {
|
||||
return nil, fmt.Errorf("scanning file stats: %w", err)
|
||||
}
|
||||
result[itemID] = fs
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// BatchGetProjectCodes returns a map of item ID to project code list for
|
||||
// the given item IDs.
|
||||
func (r *ItemRepository) BatchGetProjectCodes(ctx context.Context, itemIDs []string) (map[string][]string, error) {
|
||||
|
||||
@@ -19,6 +19,8 @@ export interface Item {
|
||||
sourcing_link?: string;
|
||||
long_description?: string;
|
||||
standard_cost?: number;
|
||||
file_count: number;
|
||||
files_total_size: number;
|
||||
properties?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
@@ -210,6 +212,38 @@ export interface UpdateBOMEntryRequest {
|
||||
metadata?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
// BOM Merge
|
||||
export interface MergeBOMResponse {
|
||||
status: string;
|
||||
diff: MergeBOMDiff;
|
||||
warnings: MergeWarning[];
|
||||
resolve_url: string;
|
||||
}
|
||||
|
||||
export interface MergeBOMDiff {
|
||||
added: MergeDiffEntry[];
|
||||
removed: MergeDiffEntry[];
|
||||
quantity_changed: MergeQtyChange[];
|
||||
unchanged: MergeDiffEntry[];
|
||||
}
|
||||
|
||||
export interface MergeDiffEntry {
|
||||
part_number: string;
|
||||
quantity: number | null;
|
||||
}
|
||||
|
||||
export interface MergeQtyChange {
|
||||
part_number: string;
|
||||
old_quantity: number | null;
|
||||
new_quantity: number | null;
|
||||
}
|
||||
|
||||
export interface MergeWarning {
|
||||
type: string;
|
||||
part_number: string;
|
||||
message: string;
|
||||
}
|
||||
|
||||
// Schema properties
|
||||
export interface PropertyDef {
|
||||
type: string;
|
||||
|
||||
@@ -46,6 +46,7 @@ export function BOMTab({ partNumber, isEditor }: BOMTabProps) {
|
||||
const unitCost = (e: BOMEntry) => Number(meta(e).unit_cost) || 0;
|
||||
const extCost = (e: BOMEntry) => unitCost(e) * (e.quantity ?? 0);
|
||||
const totalCost = entries.reduce((sum, e) => sum + extCost(e), 0);
|
||||
const assemblyCount = entries.filter((e) => e.source === "assembly").length;
|
||||
|
||||
const formToRequest = () => ({
|
||||
child_part_number: form.child_part_number,
|
||||
@@ -139,12 +140,15 @@ export function BOMTab({ partNumber, isEditor }: BOMTabProps) {
|
||||
/>
|
||||
</td>
|
||||
<td style={tdStyle}>
|
||||
<input
|
||||
<select
|
||||
value={form.source}
|
||||
onChange={(e) => setForm({ ...form, source: e.target.value })}
|
||||
placeholder="Source"
|
||||
style={inputStyle}
|
||||
/>
|
||||
>
|
||||
<option value="">—</option>
|
||||
<option value="manual">manual</option>
|
||||
<option value="assembly">assembly</option>
|
||||
</select>
|
||||
</td>
|
||||
<td style={tdStyle}>
|
||||
<input
|
||||
@@ -247,6 +251,24 @@ export function BOMTab({ partNumber, isEditor }: BOMTabProps) {
|
||||
)}
|
||||
</div>
|
||||
|
||||
{isEditor && assemblyCount > 0 && (
|
||||
<div
|
||||
style={{
|
||||
padding: "0.35rem 0.6rem",
|
||||
marginBottom: "0.5rem",
|
||||
borderRadius: "0.3rem",
|
||||
backgroundColor: "rgba(148,226,213,0.1)",
|
||||
border: "1px solid rgba(148,226,213,0.3)",
|
||||
fontSize: "0.75rem",
|
||||
color: "var(--ctp-subtext1)",
|
||||
}}
|
||||
>
|
||||
{assemblyCount} assembly-sourced{" "}
|
||||
{assemblyCount === 1 ? "entry" : "entries"}. Entries removed from the
|
||||
FreeCAD assembly will remain here until manually deleted.
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div style={{ overflow: "auto" }}>
|
||||
<table
|
||||
style={{
|
||||
@@ -289,7 +311,15 @@ export function BOMTab({ partNumber, isEditor }: BOMTabProps) {
|
||||
>
|
||||
{e.child_part_number}
|
||||
</td>
|
||||
<td style={tdStyle}>{e.source ?? ""}</td>
|
||||
<td style={tdStyle}>
|
||||
{e.source === "assembly" ? (
|
||||
<span style={assemblyBadge}>assembly</span>
|
||||
) : e.source === "manual" ? (
|
||||
<span style={manualBadge}>manual</span>
|
||||
) : (
|
||||
"—"
|
||||
)}
|
||||
</td>
|
||||
<td
|
||||
style={{
|
||||
...tdStyle,
|
||||
@@ -420,6 +450,25 @@ const saveBtnStyle: React.CSSProperties = {
|
||||
marginRight: "0.25rem",
|
||||
};
|
||||
|
||||
const sourceBadgeBase: React.CSSProperties = {
|
||||
padding: "0.1rem 0.4rem",
|
||||
borderRadius: "1rem",
|
||||
fontSize: "0.7rem",
|
||||
fontWeight: 500,
|
||||
};
|
||||
|
||||
const assemblyBadge: React.CSSProperties = {
|
||||
...sourceBadgeBase,
|
||||
backgroundColor: "rgba(148,226,213,0.2)",
|
||||
color: "var(--ctp-teal)",
|
||||
};
|
||||
|
||||
const manualBadge: React.CSSProperties = {
|
||||
...sourceBadgeBase,
|
||||
backgroundColor: "rgba(137,180,250,0.2)",
|
||||
color: "var(--ctp-blue)",
|
||||
};
|
||||
|
||||
const cancelBtnStyle: React.CSSProperties = {
|
||||
padding: "0.2rem 0.4rem",
|
||||
fontSize: "0.75rem",
|
||||
|
||||
@@ -12,6 +12,7 @@ export const ALL_COLUMNS: ColumnDef[] = [
|
||||
{ key: "item_type", label: "Type" },
|
||||
{ key: "description", label: "Description" },
|
||||
{ key: "revision", label: "Rev" },
|
||||
{ key: "files", label: "Files" },
|
||||
{ key: "projects", label: "Projects" },
|
||||
{ key: "created", label: "Created" },
|
||||
{ key: "actions", label: "Actions" },
|
||||
@@ -28,6 +29,7 @@ export const DEFAULT_COLUMNS_V = [
|
||||
"item_type",
|
||||
"description",
|
||||
"revision",
|
||||
"files",
|
||||
"created",
|
||||
"actions",
|
||||
];
|
||||
@@ -67,6 +69,12 @@ function copyPN(pn: string) {
|
||||
void navigator.clipboard.writeText(pn);
|
||||
}
|
||||
|
||||
function formatSize(bytes: number): string {
|
||||
if (bytes < 1024) return `${bytes} B`;
|
||||
if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;
|
||||
return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
|
||||
}
|
||||
|
||||
export function ItemTable({
|
||||
items,
|
||||
loading,
|
||||
@@ -120,6 +128,10 @@ export function ItemTable({
|
||||
av = a.current_revision;
|
||||
bv = b.current_revision;
|
||||
break;
|
||||
case "files":
|
||||
av = a.file_count;
|
||||
bv = b.file_count;
|
||||
break;
|
||||
case "created":
|
||||
av = a.created_at;
|
||||
bv = b.created_at;
|
||||
@@ -271,6 +283,20 @@ export function ItemTable({
|
||||
Rev {item.current_revision}
|
||||
</td>
|
||||
);
|
||||
case "files":
|
||||
return (
|
||||
<td
|
||||
key={col.key}
|
||||
style={{ ...tdStyle, textAlign: "center" }}
|
||||
title={
|
||||
item.file_count > 0
|
||||
? `${item.file_count} file${item.file_count !== 1 ? "s" : ""}, ${formatSize(item.files_total_size)}`
|
||||
: "No files"
|
||||
}
|
||||
>
|
||||
{item.file_count > 0 ? item.file_count : "—"}
|
||||
</td>
|
||||
);
|
||||
case "projects":
|
||||
return (
|
||||
<td key={col.key} style={tdStyle}>
|
||||
|
||||
Reference in New Issue
Block a user