Files
silo/internal/api/csv.go
Zoe Forbes 1bd29e6a6a feat: add sourcing type, extended fields, and inline project tagging
- Add migration 009: sourcing_type (manufactured/purchased), sourcing_link,
  long_description, and standard_cost columns on items table
- Update Item struct, repository queries, and API handlers for new fields
- Add sourcing badge, long description block, standard cost, and sourcing
  link display to item detail panel
- Add inline project tag editor in detail panel (add/remove via dropdown)
- Add new fields to create and edit modals
- Update CSV import/export for new columns
- Merge with auth CreatedBy/UpdatedBy changes from stash
2026-01-31 14:27:11 -06:00

587 lines
15 KiB
Go

package api
import (
"encoding/csv"
"encoding/json"
"fmt"
"io"
"net/http"
"sort"
"strconv"
"strings"
"time"
"github.com/kindredsystems/silo/internal/auth"
"github.com/kindredsystems/silo/internal/db"
"github.com/kindredsystems/silo/internal/partnum"
)
// CSV Export/Import handlers for bulk data management
// CSVExportOptions controls what fields to include in export.
//
// NOTE(review): not referenced within this file — HandleExportCSV reads its
// options directly from query parameters. Confirm whether callers elsewhere
// use this type or it is dead code.
type CSVExportOptions struct {
	IncludeProperties bool // export property columns from each item's current revision
	IncludeRevisions  bool // presumably emits per-revision data — unused here; confirm intent
}
// CSVImportResult represents the result of an import operation.
type CSVImportResult struct {
	TotalRows    int            `json:"total_rows"`              // data rows successfully parsed (header and malformed rows excluded)
	SuccessCount int            `json:"success_count"`           // rows imported, or merely validated when dry_run is set
	ErrorCount   int            `json:"error_count"`             // rows rejected; one entry per row in Errors
	Errors       []CSVImportErr `json:"errors,omitempty"`        // per-row failure details
	CreatedItems []string       `json:"created_items,omitempty"` // part numbers of created items (empty in dry-run mode)
}
// CSVImportErr represents an error on a specific row.
type CSVImportErr struct {
	Row     int    `json:"row"`             // 1-based data row number (the header is row 0)
	Field   string `json:"field,omitempty"` // offending column, when the error is attributable to one
	Message string `json:"message"`         // human-readable description of the failure
}
// Standard CSV columns for export/import.
//
// NOTE: order is significant — HandleExportCSV fills row cells by positional
// index into this slice, so appending new columns at the end is safe but
// reordering or removing existing entries is not.
var csvColumns = []string{
	"part_number",
	"item_type",
	"description",
	"current_revision",
	"created_at",
	"updated_at",
	"category",
	"projects", // comma-separated project codes
	"sourcing_type",
	"sourcing_link",
	"long_description",
	"standard_cost",
}
// HandleExportCSV exports items to CSV format.
//
// Query parameters:
//   - type:               filter by item type
//   - search:             free-text search filter
//   - project:            filter by project
//   - include_properties: "true" adds one column per property key found on
//     the current revision of any exported item
//
// The response is streamed as a CSV attachment; at most 10000 items are
// exported per request.
func (s *Server) HandleExportCSV(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()

	// Parse query options
	opts := db.ListOptions{
		ItemType: r.URL.Query().Get("type"),
		Search:   r.URL.Query().Get("search"),
		Project:  r.URL.Query().Get("project"),
		Limit:    10000, // Max export limit
	}
	includeProps := r.URL.Query().Get("include_properties") == "true"

	// Fetch items
	items, err := s.items.List(ctx, opts)
	if err != nil {
		s.logger.Error().Err(err).Msg("failed to list items for export")
		writeError(w, http.StatusInternalServerError, "export_failed", err.Error())
		return
	}

	// Collect the union of property keys across all exported items' current
	// revisions, plus each item's current-revision properties by part number.
	propKeys := make(map[string]bool)
	itemProps := make(map[string]map[string]any) // part_number -> properties
	if includeProps {
		for _, item := range items {
			revisions, err := s.items.GetRevisions(ctx, item.ID)
			if err != nil {
				// Best effort: an item whose revisions can't be loaded still
				// exports, just with empty property cells.
				continue
			}
			for _, rev := range revisions {
				if rev.RevisionNumber == item.CurrentRevision && rev.Properties != nil {
					itemProps[item.PartNumber] = rev.Properties
					for k := range rev.Properties {
						propKeys[k] = true
					}
					break
				}
			}
		}
	}

	// Build header row: standard columns first, then property columns.
	headers := make([]string, len(csvColumns))
	copy(headers, csvColumns)
	// Add property columns (sorted for consistency)
	sortedPropKeys := make([]string, 0, len(propKeys))
	for k := range propKeys {
		// Skip internal/system properties
		if !strings.HasPrefix(k, "_") {
			sortedPropKeys = append(sortedPropKeys, k)
		}
	}
	sort.Strings(sortedPropKeys)
	headers = append(headers, sortedPropKeys...)

	// Set response headers for CSV download
	filename := fmt.Sprintf("silo-export-%s.csv", time.Now().Format("2006-01-02"))
	w.Header().Set("Content-Type", "text/csv")
	w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filename))

	// Write CSV
	writer := csv.NewWriter(w)
	defer writer.Flush()

	// Write header
	if err := writer.Write(headers); err != nil {
		s.logger.Error().Err(err).Msg("failed to write CSV header")
		return
	}

	// Write data rows. Cell indices 0..11 must line up with csvColumns.
	for _, item := range items {
		row := make([]string, len(headers))
		// Extract category from part number (format: CCC-NNNN)
		category := parseCategory(item.PartNumber)

		// Get projects for this item; a lookup error degrades to an empty cell.
		projects, err := s.projects.GetProjectsForItem(ctx, item.ID)
		projectCodes := ""
		if err == nil && len(projects) > 0 {
			codes := make([]string, len(projects))
			for i, p := range projects {
				codes[i] = p.Code
			}
			projectCodes = strings.Join(codes, ",")
		}

		// Standard columns
		row[0] = item.PartNumber
		row[1] = item.ItemType
		row[2] = item.Description
		row[3] = strconv.Itoa(item.CurrentRevision)
		row[4] = item.CreatedAt.Format(time.RFC3339)
		row[5] = item.UpdatedAt.Format(time.RFC3339)
		row[6] = category
		row[7] = projectCodes
		row[8] = item.SourcingType
		// Optional (pointer) fields export as empty cells when nil.
		if item.SourcingLink != nil {
			row[9] = *item.SourcingLink
		}
		if item.LongDescription != nil {
			row[10] = *item.LongDescription
		}
		if item.StandardCost != nil {
			row[11] = strconv.FormatFloat(*item.StandardCost, 'f', -1, 64)
		}

		// Property columns, in the same sorted order as the header row.
		if includeProps {
			props := itemProps[item.PartNumber]
			for i, key := range sortedPropKeys {
				colIdx := len(csvColumns) + i
				if props != nil {
					if val, ok := props[key]; ok {
						row[colIdx] = formatPropertyValue(val)
					}
				}
			}
		}

		if err := writer.Write(row); err != nil {
			s.logger.Error().Err(err).Str("part_number", item.PartNumber).Msg("failed to write CSV row")
			continue
		}
	}

	s.logger.Info().Int("count", len(items)).Msg("exported items to CSV")
}
// HandleImportCSV imports items from a CSV file.
//
// Expects a multipart form (max 10MB) with a "file" field holding the CSV.
// Optional form values:
//   - dry_run:       "true" validates rows without creating anything
//   - skip_existing: "true" silently skips rows whose part_number already exists
//   - schema:        part-number generation schema (default "kindred-rd")
//
// Only the "category" column is required. Columns not in the standard set
// become item properties; "projects" is a comma-separated list of project
// codes used to tag the created item. Responds with a CSVImportResult.
func (s *Server) HandleImportCSV(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()

	// Parse multipart form (max 10MB)
	if err := r.ParseMultipartForm(10 << 20); err != nil {
		writeError(w, http.StatusBadRequest, "invalid_form", err.Error())
		return
	}

	// Get file
	file, _, err := r.FormFile("file")
	if err != nil {
		writeError(w, http.StatusBadRequest, "missing_file", "CSV file is required")
		return
	}
	defer file.Close()

	// Get options
	dryRun := r.FormValue("dry_run") == "true"
	skipExisting := r.FormValue("skip_existing") == "true"
	schemaName := r.FormValue("schema")
	if schemaName == "" {
		schemaName = "kindred-rd"
	}

	// Parse CSV
	reader := csv.NewReader(file)
	reader.TrimLeadingSpace = true

	// Read header
	headers, err := reader.Read()
	if err != nil {
		writeError(w, http.StatusBadRequest, "invalid_csv", "Failed to read CSV header")
		return
	}

	// Build column index map (headers matched case-insensitively).
	colIndex := make(map[string]int, len(headers))
	for i, h := range headers {
		colIndex[strings.ToLower(strings.TrimSpace(h))] = i
	}

	// Validate required columns - only category is required now (projects are optional tags)
	requiredCols := []string{"category"}
	for _, col := range requiredCols {
		if _, ok := colIndex[col]; !ok {
			writeError(w, http.StatusBadRequest, "missing_column", fmt.Sprintf("Required column '%s' not found", col))
			return
		}
	}

	result := CSVImportResult{
		Errors:       make([]CSVImportErr, 0),
		CreatedItems: make([]string, 0),
	}

	// rowNum is the 1-based index of the current data row (header is row 0).
	// It is incremented exactly once per record, immediately after the read,
	// so every error below reports the row it actually came from. (The
	// previous version incremented at two different points, making
	// validation/creation errors report one row too high.)
	rowNum := 0
	for {
		record, err := reader.Read()
		if err == io.EOF {
			break
		}
		rowNum++
		if err != nil {
			result.Errors = append(result.Errors, CSVImportErr{
				Row:     rowNum,
				Message: fmt.Sprintf("Failed to parse row: %v", err),
			})
			result.ErrorCount++
			continue
		}
		result.TotalRows++

		// Extract values
		category := getCSVValue(record, colIndex, "category")
		description := getCSVValue(record, colIndex, "description")
		partNumber := getCSVValue(record, colIndex, "part_number")
		projectsStr := getCSVValue(record, colIndex, "projects")

		// Parse project codes (comma-separated, normalized to upper case).
		var projectCodes []string
		if projectsStr != "" {
			for _, code := range strings.Split(projectsStr, ",") {
				code = strings.TrimSpace(strings.ToUpper(code))
				if code != "" {
					projectCodes = append(projectCodes, code)
				}
			}
		}

		// Validate category
		if category == "" {
			result.Errors = append(result.Errors, CSVImportErr{
				Row:     rowNum,
				Field:   "category",
				Message: "Category code is required",
			})
			result.ErrorCount++
			continue
		}

		// Build properties from extra (non-standard) columns.
		properties := make(map[string]any)
		properties["category"] = strings.ToUpper(category)
		for col, idx := range colIndex {
			// Skip standard columns
			if isStandardColumn(col) {
				continue
			}
			if idx < len(record) && record[idx] != "" {
				properties[col] = parsePropertyValue(record[idx])
			}
		}

		// Parse extended fields before the dry-run check so a dry run also
		// validates standard_cost.
		sourcingType := getCSVValue(record, colIndex, "sourcing_type")
		sourcingLink := getCSVValue(record, colIndex, "sourcing_link")
		longDesc := getCSVValue(record, colIndex, "long_description")
		stdCostStr := getCSVValue(record, colIndex, "standard_cost")
		var stdCost *float64
		if stdCostStr != "" {
			cost, err := strconv.ParseFloat(stdCostStr, 64)
			if err != nil {
				// A malformed cost used to be silently dropped; reject the
				// row instead so the bad value is surfaced to the user.
				result.Errors = append(result.Errors, CSVImportErr{
					Row:     rowNum,
					Field:   "standard_cost",
					Message: fmt.Sprintf("Invalid standard_cost '%s'", stdCostStr),
				})
				result.ErrorCount++
				continue
			}
			stdCost = &cost
		}

		// If part_number is provided, check if it exists
		if partNumber != "" {
			existing, _ := s.items.GetByPartNumber(ctx, partNumber)
			if existing != nil {
				if skipExisting {
					// Silently skip existing items
					continue
				}
				result.Errors = append(result.Errors, CSVImportErr{
					Row:     rowNum,
					Field:   "part_number",
					Message: fmt.Sprintf("Part number '%s' already exists", partNumber),
				})
				result.ErrorCount++
				continue
			}
		}

		if dryRun {
			// In dry-run mode, just validate
			result.SuccessCount++
			continue
		}

		// Generate part number if not provided
		if partNumber == "" {
			input := partnum.Input{
				SchemaName: schemaName,
				Values: map[string]string{
					"category": strings.ToUpper(category),
				},
			}
			partNumber, err = s.partgen.Generate(ctx, input)
			if err != nil {
				result.Errors = append(result.Errors, CSVImportErr{
					Row:     rowNum,
					Message: fmt.Sprintf("Failed to generate part number: %v", err),
				})
				result.ErrorCount++
				continue
			}
		}

		// Determine item type from the category's leading letter.
		itemType := "part"
		if len(category) > 0 {
			switch category[0] {
			case 'A':
				itemType = "assembly"
			case 'T':
				itemType = "tooling"
			}
		}

		// Create item
		item := &db.Item{
			PartNumber:   partNumber,
			ItemType:     itemType,
			Description:  description,
			StandardCost: stdCost,
		}
		// Record the importing user when authenticated.
		if user := auth.UserFromContext(ctx); user != nil {
			item.CreatedBy = &user.Username
		}
		if sourcingType != "" {
			item.SourcingType = sourcingType
		}
		if sourcingLink != "" {
			item.SourcingLink = &sourcingLink
		}
		if longDesc != "" {
			item.LongDescription = &longDesc
		}
		if err := s.items.Create(ctx, item, properties); err != nil {
			result.Errors = append(result.Errors, CSVImportErr{
				Row:     rowNum,
				Message: fmt.Sprintf("Failed to create item: %v", err),
			})
			result.ErrorCount++
			continue
		}

		// Tag item with projects
		if len(projectCodes) > 0 {
			if err := s.projects.SetItemProjects(ctx, item.ID, projectCodes); err != nil {
				// Item was created but tagging failed - log warning but don't fail the row
				s.logger.Warn().
					Err(err).
					Str("part_number", partNumber).
					Strs("projects", projectCodes).
					Msg("failed to tag item with projects")
			}
		}

		result.SuccessCount++
		result.CreatedItems = append(result.CreatedItems, partNumber)
	}

	s.logger.Info().
		Int("total", result.TotalRows).
		Int("success", result.SuccessCount).
		Int("errors", result.ErrorCount).
		Bool("dry_run", dryRun).
		Msg("CSV import completed")

	writeJSON(w, http.StatusOK, result)
}
// HandleCSVTemplate returns an empty CSV template with headers.
//
// The template contains the standard import columns plus the selected
// schema's default property columns, followed by one example data row.
func (s *Server) HandleCSVTemplate(w http.ResponseWriter, r *http.Request) {
	name := r.URL.Query().Get("schema")
	if name == "" {
		name = "kindred-rd"
	}

	schema, found := s.schemas[name]
	if !found {
		writeError(w, http.StatusNotFound, "not_found", "Schema not found")
		return
	}

	// Standard columns first; "projects" is a comma-separated list of
	// project codes and is optional on import.
	cols := []string{"category", "description", "projects"}

	// Then the schema's default property columns, sorted so the template
	// is stable across requests.
	if schema.PropertySchemas != nil && schema.PropertySchemas.Defaults != nil {
		defaults := make([]string, 0, len(schema.PropertySchemas.Defaults))
		for prop := range schema.PropertySchemas.Defaults {
			defaults = append(defaults, prop)
		}
		sort.Strings(defaults)
		cols = append(cols, defaults...)
	}

	// Response headers for a CSV attachment.
	attachment := fmt.Sprintf("silo-import-template-%s.csv", name)
	w.Header().Set("Content-Type", "text/csv")
	w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", attachment))

	out := csv.NewWriter(w)
	defer out.Flush()

	if err := out.Write(cols); err != nil {
		s.logger.Error().Err(err).Msg("failed to write CSV template header")
		return
	}

	// Example data row; property columns stay empty.
	sample := make([]string, len(cols))
	sample[0] = "F01"
	sample[1] = "Example Item Description"
	sample[2] = "PROJ1,PROJ2"
	if err := out.Write(sample); err != nil {
		s.logger.Error().Err(err).Msg("failed to write CSV template example")
		return
	}
}
// Helper functions

// parseCategory extracts the category code from a part number
// (format: CCC-NNNN): everything before the first '-'. A part number
// without a dash is returned whole; empty input yields "".
func parseCategory(pn string) string {
	// strings.Split never returns an empty slice, so the previous
	// len(parts) >= 1 guard was dead code. strings.Cut states the
	// intent directly and avoids allocating the tail segments.
	category, _, _ := strings.Cut(pn, "-")
	return category
}
// formatPropertyValue renders a property value as a CSV cell string.
// Scalars get their natural text form (whole-number floats drop the
// decimal point); nil becomes ""; anything else falls back to JSON.
func formatPropertyValue(v any) string {
	if v == nil {
		return ""
	}
	switch val := v.(type) {
	case string:
		return val
	case bool:
		return strconv.FormatBool(val)
	case int:
		return strconv.Itoa(val)
	case int64:
		return strconv.FormatInt(val, 10)
	case float64:
		// Integral floats (common after JSON decoding) print without
		// a fractional part.
		if i := int64(val); float64(i) == val {
			return strconv.FormatInt(i, 10)
		}
		return strconv.FormatFloat(val, 'f', -1, 64)
	}
	// Complex types (maps, slices, ...) are serialized as JSON.
	b, _ := json.Marshal(v)
	return string(b)
}
// parsePropertyValue converts a CSV cell into a typed property value.
// Recognized forms, in order: empty -> nil, "true"/"false" -> bool,
// integer -> int64, float -> float64, bracketed JSON -> decoded value.
// Everything else stays a string.
func parsePropertyValue(s string) any {
	s = strings.TrimSpace(s)

	switch s {
	case "":
		return nil
	case "true":
		return true
	case "false":
		return false
	}

	if n, err := strconv.ParseInt(s, 10, 64); err == nil {
		return n
	}
	if f, err := strconv.ParseFloat(s, 64); err == nil {
		return f
	}

	// Bracket-delimited cells are tried as JSON arrays/objects; on
	// decode failure they fall through to the string default.
	looksJSON := (strings.HasPrefix(s, "[") && strings.HasSuffix(s, "]")) ||
		(strings.HasPrefix(s, "{") && strings.HasSuffix(s, "}"))
	if looksJSON {
		var decoded any
		if json.Unmarshal([]byte(s), &decoded) == nil {
			return decoded
		}
	}

	return s
}
// getCSVValue returns the trimmed cell for column in record, using the
// header-to-index map. Missing columns and short records yield "".
func getCSVValue(record []string, colIndex map[string]int, column string) string {
	idx, ok := colIndex[column]
	if !ok || idx >= len(record) {
		return ""
	}
	return strings.TrimSpace(record[idx])
}
// isStandardColumn reports whether col (already lower-cased by the caller)
// is one of the standard CSV columns handled explicitly on import; any
// other column is treated as an item property.
//
// Uses a switch over constant keys instead of the previous per-call map
// literal, which allocated and populated a 14-entry map on every invocation
// (this runs once per column per imported row).
func isStandardColumn(col string) bool {
	switch col {
	case "part_number",
		"item_type",
		"description",
		"current_revision",
		"created_at",
		"updated_at",
		"category",
		"projects",
		"objects", // FreeCAD objects data - skip on import
		"archived_at",
		"sourcing_type",
		"sourcing_link",
		"long_description",
		"standard_cost":
		return true
	}
	return false
}