Compare commits
54 Commits
feat-dag-w
...
feat/kc-de
| Author | SHA1 | Date | |
|---|---|---|---|
| f8b8eda973 | |||
|
|
cffcf56085 | ||
| 1a34455ad5 | |||
|
|
c216d64702 | ||
| 28f133411e | |||
| 6528df0461 | |||
|
|
dd010331c0 | ||
| 628cd1d252 | |||
|
|
8d777e83bb | ||
| d96ba8d394 | |||
|
|
56c76940ed | ||
| 9dabaf5796 | |||
|
|
3bb335397c | ||
| 344a0cd0a0 | |||
|
|
f5b03989ff | ||
| 8cd92a4025 | |||
| ffa01ebeb7 | |||
| 9181673554 | |||
| 8cef4fa55f | |||
| 7a9dd057a5 | |||
| 9f347e7898 | |||
| b531617e39 | |||
| 906277149e | |||
|
|
fc4826f576 | ||
| fbfc955ccc | |||
| e0295e7180 | |||
|
|
7fec219152 | ||
| fa069eb05c | |||
|
|
8735c8341b | ||
| 7a172ce34c | |||
|
|
da65d4bc1a | ||
| 57d5a786d0 | |||
|
|
42a901f39c | ||
| 666cc2b23b | |||
|
|
747bae8354 | ||
| 71603bb6d7 | |||
|
|
4ef912cf4b | ||
| decb32c3e7 | |||
|
|
0be39065ac | ||
|
|
101d04ab6f | ||
|
|
8167d9c216 | ||
|
|
319a739adb | ||
| e20252a993 | |||
|
|
138ce16010 | ||
|
|
690ad73161 | ||
|
|
b8abd8859d | ||
|
|
4fd4013360 | ||
|
|
3adc155b14 | ||
|
|
9d8afa5981 | ||
|
|
f91cf2bc6f | ||
| ef44523ae8 | |||
|
|
ba92dd363c | ||
|
|
c7857fdfc9 | ||
| defb3af56f |
1
.gitignore
vendored
1
.gitignore
vendored
@@ -1,6 +1,7 @@
|
|||||||
# Binaries
|
# Binaries
|
||||||
/silo
|
/silo
|
||||||
/silod
|
/silod
|
||||||
|
/migrate-storage
|
||||||
*.exe
|
*.exe
|
||||||
*.dll
|
*.dll
|
||||||
*.so
|
*.so
|
||||||
|
|||||||
10
Makefile
10
Makefile
@@ -1,7 +1,8 @@
|
|||||||
.PHONY: build run test test-integration clean migrate fmt lint \
|
.PHONY: build run test test-integration clean migrate fmt lint \
|
||||||
docker-build docker-up docker-down docker-logs docker-ps \
|
docker-build docker-up docker-down docker-logs docker-ps \
|
||||||
docker-clean docker-rebuild \
|
docker-clean docker-rebuild \
|
||||||
web-install web-dev web-build
|
web-install web-dev web-build \
|
||||||
|
migrate-storage
|
||||||
|
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
# Local Development
|
# Local Development
|
||||||
@@ -56,6 +57,13 @@ tidy:
|
|||||||
migrate:
|
migrate:
|
||||||
./scripts/init-db.sh
|
./scripts/init-db.sh
|
||||||
|
|
||||||
|
# Build and run MinIO → filesystem migration tool
|
||||||
|
# Usage: make migrate-storage DEST=/opt/silo/data [ARGS="--dry-run --verbose"]
|
||||||
|
migrate-storage:
|
||||||
|
go build -o migrate-storage ./cmd/migrate-storage
|
||||||
|
@echo "Built ./migrate-storage"
|
||||||
|
@echo "Run: ./migrate-storage -config <config.yaml> -dest <dir> [-dry-run] [-verbose]"
|
||||||
|
|
||||||
# Connect to database (requires psql)
|
# Connect to database (requires psql)
|
||||||
db-shell:
|
db-shell:
|
||||||
PGPASSWORD=$${SILO_DB_PASSWORD:-silodev} psql -h $${SILO_DB_HOST:-localhost} -U $${SILO_DB_USER:-silo} -d $${SILO_DB_NAME:-silo}
|
PGPASSWORD=$${SILO_DB_PASSWORD:-silodev} psql -h $${SILO_DB_HOST:-localhost} -U $${SILO_DB_USER:-silo} -d $${SILO_DB_NAME:-silo}
|
||||||
|
|||||||
288
cmd/migrate-storage/main.go
Normal file
288
cmd/migrate-storage/main.go
Normal file
@@ -0,0 +1,288 @@
|
|||||||
|
// Command migrate-storage downloads files from MinIO and writes them to the
|
||||||
|
// local filesystem. It is a one-shot migration tool for moving off MinIO.
|
||||||
|
//
|
||||||
|
// Usage:
|
||||||
|
//
|
||||||
|
// migrate-storage -config config.yaml -dest /opt/silo/data [-dry-run] [-verbose]
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/kindredsystems/silo/internal/config"
|
||||||
|
"github.com/kindredsystems/silo/internal/db"
|
||||||
|
"github.com/kindredsystems/silo/internal/storage"
|
||||||
|
"github.com/rs/zerolog"
|
||||||
|
)
|
||||||
|
|
||||||
|
// fileEntry represents a single file to migrate.
|
||||||
|
type fileEntry struct {
|
||||||
|
key string
|
||||||
|
versionID string // MinIO version ID; empty if not versioned
|
||||||
|
size int64 // expected size from DB; 0 if unknown
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
configPath := flag.String("config", "config.yaml", "Path to configuration file")
|
||||||
|
dest := flag.String("dest", "", "Destination root directory (required)")
|
||||||
|
dryRun := flag.Bool("dry-run", false, "Preview what would be migrated without downloading")
|
||||||
|
verbose := flag.Bool("verbose", false, "Log every file, not just errors and summary")
|
||||||
|
flag.Parse()
|
||||||
|
|
||||||
|
logger := zerolog.New(os.Stdout).With().Timestamp().Logger()
|
||||||
|
|
||||||
|
if *dest == "" {
|
||||||
|
logger.Fatal().Msg("-dest is required")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load config (reuses existing config for DB + MinIO credentials).
|
||||||
|
cfg, err := config.Load(*configPath)
|
||||||
|
if err != nil {
|
||||||
|
logger.Fatal().Err(err).Msg("failed to load configuration")
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx := context.Background()
|
||||||
|
|
||||||
|
// Connect to PostgreSQL.
|
||||||
|
database, err := db.Connect(ctx, db.Config{
|
||||||
|
Host: cfg.Database.Host,
|
||||||
|
Port: cfg.Database.Port,
|
||||||
|
Name: cfg.Database.Name,
|
||||||
|
User: cfg.Database.User,
|
||||||
|
Password: cfg.Database.Password,
|
||||||
|
SSLMode: cfg.Database.SSLMode,
|
||||||
|
MaxConnections: cfg.Database.MaxConnections,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
logger.Fatal().Err(err).Msg("failed to connect to database")
|
||||||
|
}
|
||||||
|
defer database.Close()
|
||||||
|
logger.Info().Msg("connected to database")
|
||||||
|
|
||||||
|
// Connect to MinIO.
|
||||||
|
store, err := storage.Connect(ctx, storage.Config{
|
||||||
|
Endpoint: cfg.Storage.Endpoint,
|
||||||
|
AccessKey: cfg.Storage.AccessKey,
|
||||||
|
SecretKey: cfg.Storage.SecretKey,
|
||||||
|
Bucket: cfg.Storage.Bucket,
|
||||||
|
UseSSL: cfg.Storage.UseSSL,
|
||||||
|
Region: cfg.Storage.Region,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
logger.Fatal().Err(err).Msg("failed to connect to MinIO")
|
||||||
|
}
|
||||||
|
logger.Info().Str("bucket", cfg.Storage.Bucket).Msg("connected to MinIO")
|
||||||
|
|
||||||
|
// Collect all file references from the database.
|
||||||
|
entries, err := collectEntries(ctx, logger, database)
|
||||||
|
if err != nil {
|
||||||
|
logger.Fatal().Err(err).Msg("failed to collect file entries from database")
|
||||||
|
}
|
||||||
|
logger.Info().Int("total", len(entries)).Msg("file entries found")
|
||||||
|
|
||||||
|
if len(entries) == 0 {
|
||||||
|
logger.Info().Msg("nothing to migrate")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Migrate.
|
||||||
|
var migrated, skipped, failed int
|
||||||
|
start := time.Now()
|
||||||
|
|
||||||
|
for i, e := range entries {
|
||||||
|
destPath := filepath.Join(*dest, e.key)
|
||||||
|
|
||||||
|
// Check if already migrated.
|
||||||
|
if info, err := os.Stat(destPath); err == nil {
|
||||||
|
if e.size > 0 && info.Size() == e.size {
|
||||||
|
if *verbose {
|
||||||
|
logger.Info().Str("key", e.key).Msg("skipped (already exists)")
|
||||||
|
}
|
||||||
|
skipped++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// Size mismatch or unknown size — re-download.
|
||||||
|
}
|
||||||
|
|
||||||
|
if *dryRun {
|
||||||
|
logger.Info().
|
||||||
|
Str("key", e.key).
|
||||||
|
Int64("size", e.size).
|
||||||
|
Str("version", e.versionID).
|
||||||
|
Msgf("[%d/%d] would migrate", i+1, len(entries))
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := migrateFile(ctx, store, e, destPath); err != nil {
|
||||||
|
logger.Error().Err(err).Str("key", e.key).Msg("failed to migrate")
|
||||||
|
failed++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
migrated++
|
||||||
|
if *verbose {
|
||||||
|
logger.Info().
|
||||||
|
Str("key", e.key).
|
||||||
|
Int64("size", e.size).
|
||||||
|
Msgf("[%d/%d] migrated", i+1, len(entries))
|
||||||
|
} else if (i+1)%50 == 0 {
|
||||||
|
logger.Info().Msgf("progress: %d/%d", i+1, len(entries))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
elapsed := time.Since(start)
|
||||||
|
ev := logger.Info().
|
||||||
|
Int("total", len(entries)).
|
||||||
|
Int("migrated", migrated).
|
||||||
|
Int("skipped", skipped).
|
||||||
|
Int("failed", failed).
|
||||||
|
Dur("elapsed", elapsed)
|
||||||
|
if *dryRun {
|
||||||
|
ev.Msg("dry run complete")
|
||||||
|
} else {
|
||||||
|
ev.Msg("migration complete")
|
||||||
|
}
|
||||||
|
|
||||||
|
if failed > 0 {
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// collectEntries queries the database for all file references across the three
|
||||||
|
// storage domains: revision files, item file attachments, and item thumbnails.
|
||||||
|
// It deduplicates by key.
|
||||||
|
func collectEntries(ctx context.Context, logger zerolog.Logger, database *db.DB) ([]fileEntry, error) {
|
||||||
|
pool := database.Pool()
|
||||||
|
seen := make(map[string]struct{})
|
||||||
|
var entries []fileEntry
|
||||||
|
|
||||||
|
add := func(key, versionID string, size int64) {
|
||||||
|
if key == "" {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if _, ok := seen[key]; ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
seen[key] = struct{}{}
|
||||||
|
entries = append(entries, fileEntry{key: key, versionID: versionID, size: size})
|
||||||
|
}
|
||||||
|
|
||||||
|
// 1. Revision files.
|
||||||
|
rows, err := pool.Query(ctx,
|
||||||
|
`SELECT file_key, COALESCE(file_version, ''), COALESCE(file_size, 0)
|
||||||
|
FROM revisions WHERE file_key IS NOT NULL`)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("querying revisions: %w", err)
|
||||||
|
}
|
||||||
|
for rows.Next() {
|
||||||
|
var key, version string
|
||||||
|
var size int64
|
||||||
|
if err := rows.Scan(&key, &version, &size); err != nil {
|
||||||
|
rows.Close()
|
||||||
|
return nil, fmt.Errorf("scanning revision row: %w", err)
|
||||||
|
}
|
||||||
|
add(key, version, size)
|
||||||
|
}
|
||||||
|
rows.Close()
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, fmt.Errorf("iterating revisions: %w", err)
|
||||||
|
}
|
||||||
|
logger.Info().Int("count", len(entries)).Msg("revision files found")
|
||||||
|
|
||||||
|
// 2. Item file attachments.
|
||||||
|
countBefore := len(entries)
|
||||||
|
rows, err = pool.Query(ctx,
|
||||||
|
`SELECT object_key, size FROM item_files`)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("querying item_files: %w", err)
|
||||||
|
}
|
||||||
|
for rows.Next() {
|
||||||
|
var key string
|
||||||
|
var size int64
|
||||||
|
if err := rows.Scan(&key, &size); err != nil {
|
||||||
|
rows.Close()
|
||||||
|
return nil, fmt.Errorf("scanning item_files row: %w", err)
|
||||||
|
}
|
||||||
|
add(key, "", size)
|
||||||
|
}
|
||||||
|
rows.Close()
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, fmt.Errorf("iterating item_files: %w", err)
|
||||||
|
}
|
||||||
|
logger.Info().Int("count", len(entries)-countBefore).Msg("item file attachments found")
|
||||||
|
|
||||||
|
// 3. Item thumbnails.
|
||||||
|
countBefore = len(entries)
|
||||||
|
rows, err = pool.Query(ctx,
|
||||||
|
`SELECT thumbnail_key FROM items WHERE thumbnail_key IS NOT NULL`)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("querying item thumbnails: %w", err)
|
||||||
|
}
|
||||||
|
for rows.Next() {
|
||||||
|
var key string
|
||||||
|
if err := rows.Scan(&key); err != nil {
|
||||||
|
rows.Close()
|
||||||
|
return nil, fmt.Errorf("scanning thumbnail row: %w", err)
|
||||||
|
}
|
||||||
|
add(key, "", 0)
|
||||||
|
}
|
||||||
|
rows.Close()
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, fmt.Errorf("iterating thumbnails: %w", err)
|
||||||
|
}
|
||||||
|
logger.Info().Int("count", len(entries)-countBefore).Msg("item thumbnails found")
|
||||||
|
|
||||||
|
return entries, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// migrateFile downloads a single file from MinIO and writes it atomically to destPath.
|
||||||
|
func migrateFile(ctx context.Context, store *storage.Storage, e fileEntry, destPath string) error {
|
||||||
|
// Ensure parent directory exists.
|
||||||
|
if err := os.MkdirAll(filepath.Dir(destPath), 0755); err != nil {
|
||||||
|
return fmt.Errorf("creating directory: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Download from MinIO.
|
||||||
|
var reader io.ReadCloser
|
||||||
|
var err error
|
||||||
|
if e.versionID != "" {
|
||||||
|
reader, err = store.GetVersion(ctx, e.key, e.versionID)
|
||||||
|
} else {
|
||||||
|
reader, err = store.Get(ctx, e.key)
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("downloading from MinIO: %w", err)
|
||||||
|
}
|
||||||
|
defer reader.Close()
|
||||||
|
|
||||||
|
// Write to temp file then rename for atomicity.
|
||||||
|
tmpPath := destPath + ".tmp"
|
||||||
|
f, err := os.Create(tmpPath)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("creating temp file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, err := io.Copy(f, reader); err != nil {
|
||||||
|
f.Close()
|
||||||
|
os.Remove(tmpPath)
|
||||||
|
return fmt.Errorf("writing file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := f.Close(); err != nil {
|
||||||
|
os.Remove(tmpPath)
|
||||||
|
return fmt.Errorf("closing temp file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := os.Rename(tmpPath, destPath); err != nil {
|
||||||
|
os.Remove(tmpPath)
|
||||||
|
return fmt.Errorf("renaming temp file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
@@ -20,6 +20,7 @@ import (
|
|||||||
"github.com/kindredsystems/silo/internal/config"
|
"github.com/kindredsystems/silo/internal/config"
|
||||||
"github.com/kindredsystems/silo/internal/db"
|
"github.com/kindredsystems/silo/internal/db"
|
||||||
"github.com/kindredsystems/silo/internal/jobdef"
|
"github.com/kindredsystems/silo/internal/jobdef"
|
||||||
|
"github.com/kindredsystems/silo/internal/modules"
|
||||||
"github.com/kindredsystems/silo/internal/schema"
|
"github.com/kindredsystems/silo/internal/schema"
|
||||||
"github.com/kindredsystems/silo/internal/storage"
|
"github.com/kindredsystems/silo/internal/storage"
|
||||||
"github.com/rs/zerolog"
|
"github.com/rs/zerolog"
|
||||||
@@ -64,24 +65,39 @@ func main() {
|
|||||||
logger.Info().Msg("connected to database")
|
logger.Info().Msg("connected to database")
|
||||||
|
|
||||||
// Connect to storage (optional - may be externally managed)
|
// Connect to storage (optional - may be externally managed)
|
||||||
var store *storage.Storage
|
var store storage.FileStore
|
||||||
if cfg.Storage.Endpoint != "" {
|
switch cfg.Storage.Backend {
|
||||||
store, err = storage.Connect(ctx, storage.Config{
|
case "minio", "":
|
||||||
Endpoint: cfg.Storage.Endpoint,
|
if cfg.Storage.Endpoint != "" {
|
||||||
AccessKey: cfg.Storage.AccessKey,
|
s, connErr := storage.Connect(ctx, storage.Config{
|
||||||
SecretKey: cfg.Storage.SecretKey,
|
Endpoint: cfg.Storage.Endpoint,
|
||||||
Bucket: cfg.Storage.Bucket,
|
AccessKey: cfg.Storage.AccessKey,
|
||||||
UseSSL: cfg.Storage.UseSSL,
|
SecretKey: cfg.Storage.SecretKey,
|
||||||
Region: cfg.Storage.Region,
|
Bucket: cfg.Storage.Bucket,
|
||||||
})
|
UseSSL: cfg.Storage.UseSSL,
|
||||||
if err != nil {
|
Region: cfg.Storage.Region,
|
||||||
logger.Warn().Err(err).Msg("failed to connect to storage - file operations disabled")
|
})
|
||||||
store = nil
|
if connErr != nil {
|
||||||
|
logger.Warn().Err(connErr).Msg("failed to connect to storage - file operations disabled")
|
||||||
|
} else {
|
||||||
|
store = s
|
||||||
|
logger.Info().Msg("connected to storage")
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
logger.Info().Msg("connected to storage")
|
logger.Info().Msg("storage not configured - file operations disabled")
|
||||||
}
|
}
|
||||||
} else {
|
case "filesystem":
|
||||||
logger.Info().Msg("storage not configured - file operations disabled")
|
if cfg.Storage.Filesystem.RootDir == "" {
|
||||||
|
logger.Fatal().Msg("storage.filesystem.root_dir is required when backend is \"filesystem\"")
|
||||||
|
}
|
||||||
|
s, fsErr := storage.NewFilesystemStore(cfg.Storage.Filesystem.RootDir)
|
||||||
|
if fsErr != nil {
|
||||||
|
logger.Fatal().Err(fsErr).Msg("failed to initialize filesystem storage")
|
||||||
|
}
|
||||||
|
store = s
|
||||||
|
logger.Info().Str("root", cfg.Storage.Filesystem.RootDir).Msg("connected to filesystem storage")
|
||||||
|
default:
|
||||||
|
logger.Fatal().Str("backend", cfg.Storage.Backend).Msg("unknown storage backend")
|
||||||
}
|
}
|
||||||
|
|
||||||
// Load schemas
|
// Load schemas
|
||||||
@@ -219,6 +235,16 @@ func main() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Initialize module registry
|
||||||
|
registry := modules.NewRegistry()
|
||||||
|
if err := modules.LoadState(registry, cfg, database.Pool()); err != nil {
|
||||||
|
logger.Fatal().Err(err).Msg("failed to load module state")
|
||||||
|
}
|
||||||
|
for _, m := range registry.All() {
|
||||||
|
logger.Info().Str("module", m.ID).Bool("enabled", registry.IsEnabled(m.ID)).
|
||||||
|
Bool("required", m.Required).Msg("module")
|
||||||
|
}
|
||||||
|
|
||||||
// Create SSE broker and server state
|
// Create SSE broker and server state
|
||||||
broker := api.NewBroker(logger)
|
broker := api.NewBroker(logger)
|
||||||
serverState := api.NewServerState(logger, store, broker)
|
serverState := api.NewServerState(logger, store, broker)
|
||||||
@@ -232,27 +258,30 @@ func main() {
|
|||||||
// Create API server
|
// Create API server
|
||||||
server := api.NewServer(logger, database, schemas, cfg.Schemas.Directory, store,
|
server := api.NewServer(logger, database, schemas, cfg.Schemas.Directory, store,
|
||||||
authService, sessionManager, oidcBackend, &cfg.Auth, broker, serverState,
|
authService, sessionManager, oidcBackend, &cfg.Auth, broker, serverState,
|
||||||
jobDefs, cfg.Jobs.Directory)
|
jobDefs, cfg.Jobs.Directory, registry, cfg)
|
||||||
router := api.NewRouter(server, logger)
|
router := api.NewRouter(server, logger)
|
||||||
|
|
||||||
// Start background sweepers for job/runner timeouts
|
// Start background sweepers for job/runner timeouts (only when jobs module enabled)
|
||||||
go func() {
|
if registry.IsEnabled(modules.Jobs) {
|
||||||
ticker := time.NewTicker(time.Duration(cfg.Jobs.JobTimeoutCheck) * time.Second)
|
go func() {
|
||||||
defer ticker.Stop()
|
ticker := time.NewTicker(time.Duration(cfg.Jobs.JobTimeoutCheck) * time.Second)
|
||||||
for range ticker.C {
|
defer ticker.Stop()
|
||||||
if n, err := jobRepo.TimeoutExpiredJobs(ctx); err != nil {
|
for range ticker.C {
|
||||||
logger.Error().Err(err).Msg("job timeout sweep failed")
|
if n, err := jobRepo.TimeoutExpiredJobs(ctx); err != nil {
|
||||||
} else if n > 0 {
|
logger.Error().Err(err).Msg("job timeout sweep failed")
|
||||||
logger.Info().Int64("count", n).Msg("timed out expired jobs")
|
} else if n > 0 {
|
||||||
}
|
logger.Info().Int64("count", n).Msg("timed out expired jobs")
|
||||||
|
}
|
||||||
|
|
||||||
if n, err := jobRepo.ExpireStaleRunners(ctx, time.Duration(cfg.Jobs.RunnerTimeout)*time.Second); err != nil {
|
if n, err := jobRepo.ExpireStaleRunners(ctx, time.Duration(cfg.Jobs.RunnerTimeout)*time.Second); err != nil {
|
||||||
logger.Error().Err(err).Msg("runner expiry sweep failed")
|
logger.Error().Err(err).Msg("runner expiry sweep failed")
|
||||||
} else if n > 0 {
|
} else if n > 0 {
|
||||||
logger.Info().Int64("count", n).Msg("expired stale runners")
|
logger.Info().Int64("count", n).Msg("expired stale runners")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}()
|
||||||
}()
|
logger.Info().Msg("job/runner sweepers started")
|
||||||
|
}
|
||||||
|
|
||||||
// Create HTTP server
|
// Create HTTP server
|
||||||
addr := fmt.Sprintf("%s:%d", cfg.Server.Host, cfg.Server.Port)
|
addr := fmt.Sprintf("%s:%d", cfg.Server.Host, cfg.Server.Port)
|
||||||
|
|||||||
@@ -17,12 +17,17 @@ database:
|
|||||||
max_connections: 10
|
max_connections: 10
|
||||||
|
|
||||||
storage:
|
storage:
|
||||||
|
backend: "minio" # "minio" (default) or "filesystem"
|
||||||
|
# MinIO/S3 settings (used when backend: "minio")
|
||||||
endpoint: "localhost:9000" # Use "minio:9000" for Docker Compose
|
endpoint: "localhost:9000" # Use "minio:9000" for Docker Compose
|
||||||
access_key: "" # Use SILO_MINIO_ACCESS_KEY env var
|
access_key: "" # Use SILO_MINIO_ACCESS_KEY env var
|
||||||
secret_key: "" # Use SILO_MINIO_SECRET_KEY env var
|
secret_key: "" # Use SILO_MINIO_SECRET_KEY env var
|
||||||
bucket: "silo-files"
|
bucket: "silo-files"
|
||||||
use_ssl: true # Use false for Docker Compose (internal network)
|
use_ssl: true # Use false for Docker Compose (internal network)
|
||||||
region: "us-east-1"
|
region: "us-east-1"
|
||||||
|
# Filesystem settings (used when backend: "filesystem")
|
||||||
|
# filesystem:
|
||||||
|
# root_dir: "/var/lib/silo/objects"
|
||||||
|
|
||||||
schemas:
|
schemas:
|
||||||
# Directory containing YAML schema files
|
# Directory containing YAML schema files
|
||||||
|
|||||||
@@ -10,8 +10,6 @@
|
|||||||
#
|
#
|
||||||
# Credentials via environment variables (set in /etc/silo/silod.env):
|
# Credentials via environment variables (set in /etc/silo/silod.env):
|
||||||
# SILO_DB_PASSWORD
|
# SILO_DB_PASSWORD
|
||||||
# SILO_MINIO_ACCESS_KEY
|
|
||||||
# SILO_MINIO_SECRET_KEY
|
|
||||||
# SILO_SESSION_SECRET
|
# SILO_SESSION_SECRET
|
||||||
# SILO_ADMIN_PASSWORD
|
# SILO_ADMIN_PASSWORD
|
||||||
|
|
||||||
@@ -30,12 +28,9 @@ database:
|
|||||||
max_connections: 20
|
max_connections: 20
|
||||||
|
|
||||||
storage:
|
storage:
|
||||||
endpoint: "minio.example.internal:9000"
|
backend: "filesystem"
|
||||||
access_key: "" # Set via SILO_MINIO_ACCESS_KEY
|
filesystem:
|
||||||
secret_key: "" # Set via SILO_MINIO_SECRET_KEY
|
root_dir: "/opt/silo/data"
|
||||||
bucket: "silo-files"
|
|
||||||
use_ssl: true
|
|
||||||
region: "us-east-1"
|
|
||||||
|
|
||||||
schemas:
|
schemas:
|
||||||
directory: "/opt/silo/schemas"
|
directory: "/opt/silo/schemas"
|
||||||
|
|||||||
@@ -6,10 +6,6 @@
|
|||||||
# Database: silo, User: silo
|
# Database: silo, User: silo
|
||||||
SILO_DB_PASSWORD=
|
SILO_DB_PASSWORD=
|
||||||
|
|
||||||
# MinIO credentials (minio.example.internal)
|
|
||||||
# User: silouser
|
|
||||||
SILO_MINIO_ACCESS_KEY=silouser
|
|
||||||
SILO_MINIO_SECRET_KEY=
|
|
||||||
|
|
||||||
# Authentication
|
# Authentication
|
||||||
# Session secret (required when auth is enabled)
|
# Session secret (required when auth is enabled)
|
||||||
|
|||||||
@@ -73,25 +73,27 @@ database:
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Storage (MinIO/S3)
|
## Storage (Filesystem)
|
||||||
|
|
||||||
| Key | Type | Default | Env Override | Description |
|
Files are stored on the local filesystem under a configurable root directory.
|
||||||
|-----|------|---------|-------------|-------------|
|
|
||||||
| `storage.endpoint` | string | — | `SILO_MINIO_ENDPOINT` | MinIO/S3 endpoint (`host:port`) |
|
| Key | Type | Default | Description |
|
||||||
| `storage.access_key` | string | — | `SILO_MINIO_ACCESS_KEY` | Access key |
|
|-----|------|---------|-------------|
|
||||||
| `storage.secret_key` | string | — | `SILO_MINIO_SECRET_KEY` | Secret key |
|
| `storage.backend` | string | `"filesystem"` | Storage backend (`filesystem`) |
|
||||||
| `storage.bucket` | string | — | — | S3 bucket name (created automatically if missing) |
|
| `storage.filesystem.root_dir` | string | — | Root directory for file storage (required) |
|
||||||
| `storage.use_ssl` | bool | `false` | — | Use HTTPS for MinIO connections |
|
|
||||||
| `storage.region` | string | `"us-east-1"` | — | S3 region |
|
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
storage:
|
storage:
|
||||||
endpoint: "localhost:9000"
|
backend: "filesystem"
|
||||||
access_key: "" # use SILO_MINIO_ACCESS_KEY env var
|
filesystem:
|
||||||
secret_key: "" # use SILO_MINIO_SECRET_KEY env var
|
root_dir: "/opt/silo/data"
|
||||||
bucket: "silo-files"
|
```
|
||||||
use_ssl: false
|
|
||||||
region: "us-east-1"
|
Ensure the directory exists and is writable by the `silo` user:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sudo mkdir -p /opt/silo/data
|
||||||
|
sudo chown silo:silo /opt/silo/data
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
@@ -264,9 +266,6 @@ All environment variable overrides. These take precedence over values in `config
|
|||||||
| `SILO_DB_NAME` | `database.name` | PostgreSQL database name |
|
| `SILO_DB_NAME` | `database.name` | PostgreSQL database name |
|
||||||
| `SILO_DB_USER` | `database.user` | PostgreSQL user |
|
| `SILO_DB_USER` | `database.user` | PostgreSQL user |
|
||||||
| `SILO_DB_PASSWORD` | `database.password` | PostgreSQL password |
|
| `SILO_DB_PASSWORD` | `database.password` | PostgreSQL password |
|
||||||
| `SILO_MINIO_ENDPOINT` | `storage.endpoint` | MinIO endpoint |
|
|
||||||
| `SILO_MINIO_ACCESS_KEY` | `storage.access_key` | MinIO access key |
|
|
||||||
| `SILO_MINIO_SECRET_KEY` | `storage.secret_key` | MinIO secret key |
|
|
||||||
| `SILO_SESSION_SECRET` | `auth.session_secret` | Session cookie signing secret |
|
| `SILO_SESSION_SECRET` | `auth.session_secret` | Session cookie signing secret |
|
||||||
| `SILO_ADMIN_USERNAME` | `auth.local.default_admin_username` | Default admin username |
|
| `SILO_ADMIN_USERNAME` | `auth.local.default_admin_username` | Default admin username |
|
||||||
| `SILO_ADMIN_PASSWORD` | `auth.local.default_admin_password` | Default admin password |
|
| `SILO_ADMIN_PASSWORD` | `auth.local.default_admin_password` | Default admin password |
|
||||||
@@ -296,11 +295,9 @@ database:
|
|||||||
sslmode: "disable"
|
sslmode: "disable"
|
||||||
|
|
||||||
storage:
|
storage:
|
||||||
endpoint: "localhost:9000"
|
backend: "filesystem"
|
||||||
access_key: "minioadmin"
|
filesystem:
|
||||||
secret_key: "minioadmin"
|
root_dir: "./data"
|
||||||
bucket: "silo-files"
|
|
||||||
use_ssl: false
|
|
||||||
|
|
||||||
schemas:
|
schemas:
|
||||||
directory: "./schemas"
|
directory: "./schemas"
|
||||||
|
|||||||
@@ -4,7 +4,7 @@
|
|||||||
> instructions. This document covers ongoing maintenance and operations for an
|
> instructions. This document covers ongoing maintenance and operations for an
|
||||||
> existing deployment.
|
> existing deployment.
|
||||||
|
|
||||||
This guide covers deploying Silo to a dedicated VM using external PostgreSQL and MinIO services.
|
This guide covers deploying Silo to a dedicated VM using external PostgreSQL and local filesystem storage.
|
||||||
|
|
||||||
## Table of Contents
|
## Table of Contents
|
||||||
|
|
||||||
@@ -26,28 +26,25 @@ This guide covers deploying Silo to a dedicated VM using external PostgreSQL and
|
|||||||
│ │ silod │ │
|
│ │ silod │ │
|
||||||
│ │ (Silo API Server) │ │
|
│ │ (Silo API Server) │ │
|
||||||
│ │ :8080 │ │
|
│ │ :8080 │ │
|
||||||
|
│ │ Files: /opt/silo/data │ │
|
||||||
│ └───────────────────────────────────────────────────────────┘ │
|
│ └───────────────────────────────────────────────────────────┘ │
|
||||||
└─────────────────────────────────────────────────────────────────┘
|
└─────────────────────────────────────────────────────────────────┘
|
||||||
│ │
|
│
|
||||||
▼ ▼
|
▼
|
||||||
┌─────────────────────────┐ ┌─────────────────────────────────┐
|
┌─────────────────────────┐
|
||||||
│ psql.example.internal │ │ minio.example.internal │
|
│ psql.example.internal │
|
||||||
│ PostgreSQL 16 │ │ MinIO S3 │
|
│ PostgreSQL 16 │
|
||||||
│ :5432 │ │ :9000 (API) │
|
│ :5432 │
|
||||||
│ │ │ :9001 (Console) │
|
└─────────────────────────┘
|
||||||
└─────────────────────────┘ └─────────────────────────────────┘
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## External Services
|
## External Services
|
||||||
|
|
||||||
The following external services are already configured:
|
| Service | Host | Database | User |
|
||||||
|
|---------|------|----------|------|
|
||||||
| Service | Host | Database/Bucket | User |
|
|
||||||
|---------|------|-----------------|------|
|
|
||||||
| PostgreSQL | psql.example.internal:5432 | silo | silo |
|
| PostgreSQL | psql.example.internal:5432 | silo | silo |
|
||||||
| MinIO | minio.example.internal:9000 | silo-files | silouser |
|
|
||||||
|
|
||||||
Migrations have been applied to the database.
|
Files are stored on the local filesystem at `/opt/silo/data`. Migrations have been applied to the database.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -107,21 +104,15 @@ Fill in the values:
|
|||||||
# Database credentials (psql.example.internal)
|
# Database credentials (psql.example.internal)
|
||||||
SILO_DB_PASSWORD=your-database-password
|
SILO_DB_PASSWORD=your-database-password
|
||||||
|
|
||||||
# MinIO credentials (minio.example.internal)
|
|
||||||
SILO_MINIO_ACCESS_KEY=silouser
|
|
||||||
SILO_MINIO_SECRET_KEY=your-minio-secret-key
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Verify External Services
|
### Verify External Services
|
||||||
|
|
||||||
Before deploying, verify connectivity to external services:
|
Before deploying, verify connectivity to PostgreSQL:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Test PostgreSQL
|
|
||||||
psql -h psql.example.internal -U silo -d silo -c 'SELECT 1'
|
psql -h psql.example.internal -U silo -d silo -c 'SELECT 1'
|
||||||
|
|
||||||
# Test MinIO
|
|
||||||
curl -I http://minio.example.internal:9000/minio/health/live
|
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
@@ -183,6 +174,7 @@ sudo -E /opt/silo/src/scripts/deploy.sh
|
|||||||
| File | Purpose |
|
| File | Purpose |
|
||||||
|------|---------|
|
|------|---------|
|
||||||
| `/opt/silo/bin/silod` | Server binary |
|
| `/opt/silo/bin/silod` | Server binary |
|
||||||
|
| `/opt/silo/data/` | File storage root |
|
||||||
| `/opt/silo/src/` | Git repository checkout |
|
| `/opt/silo/src/` | Git repository checkout |
|
||||||
| `/etc/silo/config.yaml` | Server configuration |
|
| `/etc/silo/config.yaml` | Server configuration |
|
||||||
| `/etc/silo/silod.env` | Environment variables (secrets) |
|
| `/etc/silo/silod.env` | Environment variables (secrets) |
|
||||||
@@ -242,7 +234,7 @@ sudo journalctl -u silod --since "2024-01-15 10:00:00"
|
|||||||
# Basic health check
|
# Basic health check
|
||||||
curl http://localhost:8080/health
|
curl http://localhost:8080/health
|
||||||
|
|
||||||
# Full readiness check (includes DB and MinIO)
|
# Full readiness check (includes DB)
|
||||||
curl http://localhost:8080/ready
|
curl http://localhost:8080/ready
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -318,24 +310,6 @@ psql -h psql.example.internal -U silo -d silo -f /opt/silo/src/migrations/008_ne
|
|||||||
|
|
||||||
3. Check `pg_hba.conf` on PostgreSQL server allows connections from this host.
|
3. Check `pg_hba.conf` on PostgreSQL server allows connections from this host.
|
||||||
|
|
||||||
### Connection Refused to MinIO
|
|
||||||
|
|
||||||
1. Test network connectivity:
|
|
||||||
```bash
|
|
||||||
nc -zv minio.example.internal 9000
|
|
||||||
```
|
|
||||||
|
|
||||||
2. Test with curl:
|
|
||||||
```bash
|
|
||||||
curl -I http://minio.example.internal:9000/minio/health/live
|
|
||||||
```
|
|
||||||
|
|
||||||
3. Check SSL settings in config match MinIO setup:
|
|
||||||
```yaml
|
|
||||||
storage:
|
|
||||||
use_ssl: true # or false
|
|
||||||
```
|
|
||||||
|
|
||||||
### Health Check Fails
|
### Health Check Fails
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
@@ -345,7 +319,9 @@ curl -v http://localhost:8080/ready
|
|||||||
|
|
||||||
# If ready fails but health passes, check external services
|
# If ready fails but health passes, check external services
|
||||||
psql -h psql.example.internal -U silo -d silo -c 'SELECT 1'
|
psql -h psql.example.internal -U silo -d silo -c 'SELECT 1'
|
||||||
curl http://minio.example.internal:9000/minio/health/live
|
|
||||||
|
# Check file storage directory
|
||||||
|
ls -la /opt/silo/data
|
||||||
```
|
```
|
||||||
|
|
||||||
### Build Fails
|
### Build Fails
|
||||||
@@ -460,10 +436,9 @@ sudo systemctl reload nginx
|
|||||||
|
|
||||||
- [ ] `/etc/silo/silod.env` has mode 600 (`chmod 600`)
|
- [ ] `/etc/silo/silod.env` has mode 600 (`chmod 600`)
|
||||||
- [ ] Database password is strong and unique
|
- [ ] Database password is strong and unique
|
||||||
- [ ] MinIO credentials are specific to silo (not admin)
|
|
||||||
- [ ] SSL/TLS enabled for PostgreSQL (`sslmode: require`)
|
- [ ] SSL/TLS enabled for PostgreSQL (`sslmode: require`)
|
||||||
- [ ] SSL/TLS enabled for MinIO (`use_ssl: true`) if available
|
|
||||||
- [ ] HTTPS enabled via nginx reverse proxy
|
- [ ] HTTPS enabled via nginx reverse proxy
|
||||||
|
- [ ] File storage directory (`/opt/silo/data`) owned by `silo` user with mode 750
|
||||||
- [ ] Silod listens on localhost only (`host: 127.0.0.1`)
|
- [ ] Silod listens on localhost only (`host: 127.0.0.1`)
|
||||||
- [ ] Firewall allows only ports 80, 443 (not 8080)
|
- [ ] Firewall allows only ports 80, 443 (not 8080)
|
||||||
- [ ] Service runs as non-root `silo` user
|
- [ ] Service runs as non-root `silo` user
|
||||||
|
|||||||
@@ -76,7 +76,7 @@ See [ROADMAP.md](ROADMAP.md) for the platform roadmap and dependency tier struct
|
|||||||
| Append-only revision history | Complete | `internal/db/items.go` |
|
| Append-only revision history | Complete | `internal/db/items.go` |
|
||||||
| Sequential revision numbering | Complete | Database trigger |
|
| Sequential revision numbering | Complete | Database trigger |
|
||||||
| Property snapshots (JSONB) | Complete | `revisions.properties` |
|
| Property snapshots (JSONB) | Complete | `revisions.properties` |
|
||||||
| File versioning (MinIO) | Complete | `internal/storage/` |
|
| File storage (filesystem) | Complete | `internal/storage/` |
|
||||||
| SHA256 checksums | Complete | Captured on upload |
|
| SHA256 checksums | Complete | Captured on upload |
|
||||||
| Revision comments | Complete | `revisions.comment` |
|
| Revision comments | Complete | `revisions.comment` |
|
||||||
| User attribution | Complete | `revisions.created_by` |
|
| User attribution | Complete | `revisions.created_by` |
|
||||||
@@ -93,7 +93,7 @@ CREATE TABLE revisions (
|
|||||||
revision_number INTEGER NOT NULL,
|
revision_number INTEGER NOT NULL,
|
||||||
properties JSONB NOT NULL DEFAULT '{}',
|
properties JSONB NOT NULL DEFAULT '{}',
|
||||||
file_key TEXT,
|
file_key TEXT,
|
||||||
file_version TEXT, -- MinIO version ID
|
file_version TEXT, -- storage version ID
|
||||||
file_checksum TEXT, -- SHA256
|
file_checksum TEXT, -- SHA256
|
||||||
file_size BIGINT,
|
file_size BIGINT,
|
||||||
thumbnail_key TEXT,
|
thumbnail_key TEXT,
|
||||||
@@ -283,7 +283,7 @@ Effort: Medium | Priority: Low | Risk: Low
|
|||||||
|
|
||||||
**Changes:**
|
**Changes:**
|
||||||
- Add thumbnail generation on file upload
|
- Add thumbnail generation on file upload
|
||||||
- Store in MinIO at `thumbnails/{part_number}/rev{n}.png`
|
- Store at `thumbnails/{part_number}/rev{n}.png`
|
||||||
- Expose via `GET /api/items/{pn}/thumbnail/{rev}`
|
- Expose via `GET /api/items/{pn}/thumbnail/{rev}`
|
||||||
|
|
||||||
---
|
---
|
||||||
@@ -377,7 +377,7 @@ internal/
|
|||||||
relationships.go # BOM repository
|
relationships.go # BOM repository
|
||||||
projects.go # Project repository
|
projects.go # Project repository
|
||||||
storage/
|
storage/
|
||||||
storage.go # MinIO file storage helpers
|
storage.go # File storage helpers
|
||||||
migrations/
|
migrations/
|
||||||
001_initial.sql # Core schema
|
001_initial.sql # Core schema
|
||||||
...
|
...
|
||||||
@@ -572,7 +572,7 @@ Reporting capabilities are absent. Basic reports (item counts, revision activity
|
|||||||
|
|
||||||
| Feature | SOLIDWORKS PDM | Silo Status | Priority | Complexity |
|
| Feature | SOLIDWORKS PDM | Silo Status | Priority | Complexity |
|
||||||
|---------|---------------|-------------|----------|------------|
|
|---------|---------------|-------------|----------|------------|
|
||||||
| File versioning | Automatic | Full (MinIO) | - | - |
|
| File versioning | Automatic | Full (filesystem) | - | - |
|
||||||
| File preview | Thumbnails, 3D preview | None | Medium | Complex |
|
| File preview | Thumbnails, 3D preview | None | Medium | Complex |
|
||||||
| File conversion | PDF, DXF generation | None | Medium | Complex |
|
| File conversion | PDF, DXF generation | None | Medium | Complex |
|
||||||
| Replication | Multi-site sync | None | Low | Complex |
|
| Replication | Multi-site sync | None | Low | Complex |
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
This guide covers two installation methods:
|
This guide covers two installation methods:
|
||||||
|
|
||||||
- **[Option A: Docker Compose](#option-a-docker-compose)** — self-contained stack with all services. Recommended for evaluation, small teams, and environments where Docker is the standard.
|
- **[Option A: Docker Compose](#option-a-docker-compose)** — self-contained stack with all services. Recommended for evaluation, small teams, and environments where Docker is the standard.
|
||||||
- **[Option B: Daemon Install](#option-b-daemon-install-systemd--external-services)** — systemd service with external PostgreSQL, MinIO, and optional LDAP/nginx. Recommended for production deployments integrated with existing infrastructure.
|
- **[Option B: Daemon Install](#option-b-daemon-install-systemd--external-services)** — systemd service with external PostgreSQL and optional LDAP/nginx. Files are stored on the local filesystem. Recommended for production deployments integrated with existing infrastructure.
|
||||||
|
|
||||||
Both methods produce the same result: a running Silo server with a web UI, REST API, and authentication.
|
Both methods produce the same result: a running Silo server with a web UI, REST API, and authentication.
|
||||||
|
|
||||||
@@ -48,7 +48,7 @@ Regardless of which method you choose:
|
|||||||
|
|
||||||
## Option A: Docker Compose
|
## Option A: Docker Compose
|
||||||
|
|
||||||
A single Docker Compose file runs everything: PostgreSQL, MinIO, OpenLDAP, and Silo. An optional nginx container can be enabled for reverse proxying.
|
A single Docker Compose file runs everything: PostgreSQL, OpenLDAP, and Silo. Files are stored on the local filesystem. An optional nginx container can be enabled for reverse proxying.
|
||||||
|
|
||||||
### A.1 Prerequisites
|
### A.1 Prerequisites
|
||||||
|
|
||||||
@@ -80,7 +80,6 @@ The setup script generates credentials and configuration files:
|
|||||||
It prompts for:
|
It prompts for:
|
||||||
- Server domain (default: `localhost`)
|
- Server domain (default: `localhost`)
|
||||||
- PostgreSQL password (auto-generated if you press Enter)
|
- PostgreSQL password (auto-generated if you press Enter)
|
||||||
- MinIO credentials (auto-generated)
|
|
||||||
- OpenLDAP admin password and initial user (auto-generated)
|
- OpenLDAP admin password and initial user (auto-generated)
|
||||||
- Silo local admin account (fallback when LDAP is unavailable)
|
- Silo local admin account (fallback when LDAP is unavailable)
|
||||||
|
|
||||||
@@ -106,7 +105,7 @@ Wait for all services to become healthy:
|
|||||||
docker compose -f deployments/docker-compose.allinone.yaml ps
|
docker compose -f deployments/docker-compose.allinone.yaml ps
|
||||||
```
|
```
|
||||||
|
|
||||||
You should see `silo-postgres`, `silo-minio`, `silo-openldap`, and `silo-api` all in a healthy state.
|
You should see `silo-postgres`, `silo-openldap`, and `silo-api` all in a healthy state.
|
||||||
|
|
||||||
View logs:
|
View logs:
|
||||||
|
|
||||||
@@ -124,7 +123,7 @@ docker compose -f deployments/docker-compose.allinone.yaml logs -f silo
|
|||||||
# Health check
|
# Health check
|
||||||
curl http://localhost:8080/health
|
curl http://localhost:8080/health
|
||||||
|
|
||||||
# Readiness check (includes database and storage connectivity)
|
# Readiness check (includes database connectivity)
|
||||||
curl http://localhost:8080/ready
|
curl http://localhost:8080/ready
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -226,7 +225,7 @@ The Silo container is rebuilt from the updated source. Database migrations in `m
|
|||||||
|
|
||||||
## Option B: Daemon Install (systemd + External Services)
|
## Option B: Daemon Install (systemd + External Services)
|
||||||
|
|
||||||
This method runs Silo as a systemd service on a dedicated host, connecting to externally managed PostgreSQL, MinIO, and optionally LDAP services.
|
This method runs Silo as a systemd service on a dedicated host, connecting to externally managed PostgreSQL and optionally LDAP services. Files are stored on the local filesystem.
|
||||||
|
|
||||||
### B.1 Architecture Overview
|
### B.1 Architecture Overview
|
||||||
|
|
||||||
@@ -240,21 +239,22 @@ This method runs Silo as a systemd service on a dedicated host, connecting to ex
|
|||||||
│ ┌───────▼────────┐ │
|
│ ┌───────▼────────┐ │
|
||||||
│ │ silod │ │
|
│ │ silod │ │
|
||||||
│ │ (API server) │ │
|
│ │ (API server) │ │
|
||||||
│ └──┬─────────┬───┘ │
|
│ │ Files: /opt/ │ │
|
||||||
└─────┼─────────┼──────┘
|
│ │ silo/data │ │
|
||||||
│ │
|
│ └──────┬─────────┘ │
|
||||||
┌───────────▼──┐ ┌───▼──────────────┐
|
└─────────┼────────────┘
|
||||||
│ PostgreSQL 16│ │ MinIO (S3) │
|
│
|
||||||
│ :5432 │ │ :9000 API │
|
┌───────────▼──┐
|
||||||
└──────────────┘ │ :9001 Console │
|
│ PostgreSQL 16│
|
||||||
└──────────────────┘
|
│ :5432 │
|
||||||
|
└──────────────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
### B.2 Prerequisites
|
### B.2 Prerequisites
|
||||||
|
|
||||||
- Linux host (Debian/Ubuntu or RHEL/Fedora/AlmaLinux)
|
- Linux host (Debian/Ubuntu or RHEL/Fedora/AlmaLinux)
|
||||||
- Root or sudo access
|
- Root or sudo access
|
||||||
- Network access to your PostgreSQL and MinIO servers
|
- Network access to your PostgreSQL server
|
||||||
|
|
||||||
The setup script installs Go and other build dependencies automatically.
|
The setup script installs Go and other build dependencies automatically.
|
||||||
|
|
||||||
@@ -281,26 +281,6 @@ Verify:
|
|||||||
psql -h YOUR_PG_HOST -U silo -d silo -c 'SELECT 1'
|
psql -h YOUR_PG_HOST -U silo -d silo -c 'SELECT 1'
|
||||||
```
|
```
|
||||||
|
|
||||||
#### MinIO
|
|
||||||
|
|
||||||
Install MinIO and create a bucket and service account:
|
|
||||||
|
|
||||||
- [MinIO quickstart](https://min.io/docs/minio/linux/index.html)
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Using the MinIO client (mc):
|
|
||||||
mc alias set local http://YOUR_MINIO_HOST:9000 minioadmin minioadmin
|
|
||||||
mc mb local/silo-files
|
|
||||||
mc admin user add local silouser YOUR_MINIO_SECRET
|
|
||||||
mc admin policy attach local readwrite --user silouser
|
|
||||||
```
|
|
||||||
|
|
||||||
Verify:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
curl -I http://YOUR_MINIO_HOST:9000/minio/health/live
|
|
||||||
```
|
|
||||||
|
|
||||||
#### LDAP / FreeIPA (Optional)
|
#### LDAP / FreeIPA (Optional)
|
||||||
|
|
||||||
For LDAP authentication, you need an LDAP server with user and group entries. Options:
|
For LDAP authentication, you need an LDAP server with user and group entries. Options:
|
||||||
@@ -339,10 +319,10 @@ The script:
|
|||||||
4. Clones the repository
|
4. Clones the repository
|
||||||
5. Creates the environment file template
|
5. Creates the environment file template
|
||||||
|
|
||||||
To override the default service hostnames:
|
To override the default database hostname:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
SILO_DB_HOST=db.example.com SILO_MINIO_HOST=s3.example.com sudo -E bash scripts/setup-host.sh
|
SILO_DB_HOST=db.example.com sudo -E bash scripts/setup-host.sh
|
||||||
```
|
```
|
||||||
|
|
||||||
### B.5 Configure Credentials
|
### B.5 Configure Credentials
|
||||||
@@ -357,10 +337,6 @@ sudo nano /etc/silo/silod.env
|
|||||||
# Database
|
# Database
|
||||||
SILO_DB_PASSWORD=your-database-password
|
SILO_DB_PASSWORD=your-database-password
|
||||||
|
|
||||||
# MinIO
|
|
||||||
SILO_MINIO_ACCESS_KEY=silouser
|
|
||||||
SILO_MINIO_SECRET_KEY=your-minio-secret
|
|
||||||
|
|
||||||
# Authentication
|
# Authentication
|
||||||
SILO_SESSION_SECRET=generate-a-long-random-string
|
SILO_SESSION_SECRET=generate-a-long-random-string
|
||||||
SILO_ADMIN_USERNAME=admin
|
SILO_ADMIN_USERNAME=admin
|
||||||
@@ -379,7 +355,7 @@ Review the server configuration:
|
|||||||
sudo nano /etc/silo/config.yaml
|
sudo nano /etc/silo/config.yaml
|
||||||
```
|
```
|
||||||
|
|
||||||
Update `database.host`, `storage.endpoint`, `server.base_url`, and authentication settings for your environment. See [CONFIGURATION.md](CONFIGURATION.md) for all options.
|
Update `database.host`, `storage.filesystem.root_dir`, `server.base_url`, and authentication settings for your environment. See [CONFIGURATION.md](CONFIGURATION.md) for all options.
|
||||||
|
|
||||||
### B.6 Deploy
|
### B.6 Deploy
|
||||||
|
|
||||||
@@ -412,10 +388,10 @@ sudo /opt/silo/src/scripts/deploy.sh --restart-only
|
|||||||
sudo /opt/silo/src/scripts/deploy.sh --status
|
sudo /opt/silo/src/scripts/deploy.sh --status
|
||||||
```
|
```
|
||||||
|
|
||||||
To override the target host or database host:
|
To override the target host:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
SILO_DEPLOY_TARGET=silo.example.com SILO_DB_HOST=db.example.com sudo -E scripts/deploy.sh
|
SILO_DEPLOY_TARGET=silo.example.com sudo -E scripts/deploy.sh
|
||||||
```
|
```
|
||||||
|
|
||||||
### B.7 Set Up Nginx and TLS
|
### B.7 Set Up Nginx and TLS
|
||||||
|
|||||||
485
docs/KC_SERVER.md
Normal file
485
docs/KC_SERVER.md
Normal file
@@ -0,0 +1,485 @@
|
|||||||
|
# .kc Server-Side Metadata Integration
|
||||||
|
|
||||||
|
**Status:** Draft
|
||||||
|
**Date:** February 2026
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 1. Purpose
|
||||||
|
|
||||||
|
When a `.kc` file is committed to Silo, the server extracts and indexes the `silo/` directory contents so that metadata is queryable, diffable, and streamable without downloading the full file. This document specifies the server-side processing pipeline, database storage, API endpoints, and SSE events that support the Create viewport widgets defined in [SILO_VIEWPORT.md](SILO_VIEWPORT.md).
|
||||||
|
|
||||||
|
The core principle: **the `.kc` file is the transport format; Silo is the index.** The `silo/` directory entries are extracted into database columns on commit and packed back into the ZIP on checkout. The server never modifies the FreeCAD standard zone (`Document.xml`, `.brp` files, `thumbnails/`).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 2. Commit Pipeline
|
||||||
|
|
||||||
|
When a `.kc` file is uploaded via `POST /api/items/{partNumber}/file`, the server runs an extraction pipeline before returning success.
|
||||||
|
|
||||||
|
### 2.1 Pipeline Steps
|
||||||
|
|
||||||
|
```
|
||||||
|
Client uploads .kc file
|
||||||
|
|
|
||||||
|
v
|
||||||
|
+-----------------------------+
|
||||||
|
| 1. Store file to disk | (existing behavior -- unchanged)
|
||||||
|
| items/{pn}/rev{N}.kc |
|
||||||
|
+-----------------------------+
|
||||||
|
|
|
||||||
|
v
|
||||||
|
+-----------------------------+
|
||||||
|
| 2. Open ZIP, read silo/ |
|
||||||
|
| Parse each entry |
|
||||||
|
+-----------------------------+
|
||||||
|
|
|
||||||
|
v
|
||||||
|
+-----------------------------+
|
||||||
|
| 3. Validate manifest.json |
|
||||||
|
| - UUID matches item |
|
||||||
|
| - kc_version supported |
|
||||||
|
| - revision_hash present |
|
||||||
|
+-----------------------------+
|
||||||
|
|
|
||||||
|
v
|
||||||
|
+-----------------------------+
|
||||||
|
| 4. Index metadata |
|
||||||
|
| - Upsert item_metadata |
|
||||||
|
| - Upsert dependencies |
|
||||||
|
| - Append history entry |
|
||||||
|
| - Snapshot approvals |
|
||||||
|
| - Register macros |
|
||||||
|
| - Register job defs |
|
||||||
|
+-----------------------------+
|
||||||
|
|
|
||||||
|
v
|
||||||
|
+-----------------------------+
|
||||||
|
| 5. Broadcast SSE events |
|
||||||
|
| - revision.created |
|
||||||
|
| - metadata.updated |
|
||||||
|
| - bom.changed (if deps |
|
||||||
|
| differ from previous) |
|
||||||
|
+-----------------------------+
|
||||||
|
|
|
||||||
|
v
|
||||||
|
Return 201 Created
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2.2 Validation Rules
|
||||||
|
|
||||||
|
| Check | Failure response |
|
||||||
|
|-------|-----------------|
|
||||||
|
| `silo/manifest.json` missing | `400 Bad Request` -- file is `.fcstd` not `.kc` |
|
||||||
|
| `manifest.uuid` doesn't match item's UUID | `409 Conflict` -- wrong item |
|
||||||
|
| `manifest.kc_version` > server's supported version | `422 Unprocessable` -- client newer than server |
|
||||||
|
| `manifest.revision_hash` matches current head | `200 OK` (no-op, file unchanged) |
|
||||||
|
| Any `silo/` JSON fails to parse | `422 Unprocessable` with path and parse error |
|
||||||
|
|
||||||
|
If validation fails, the blob is still stored (the user uploaded it), but no metadata indexing occurs. The item's revision is created with a `metadata_error` flag so the web UI can surface the problem.
|
||||||
|
|
||||||
|
### 2.3 Backward Compatibility
|
||||||
|
|
||||||
|
Plain `.fcstd` files (no `silo/` directory) continue to work exactly as today -- stored on disk, revision created, no metadata extraction. The pipeline short-circuits at step 2 when no `silo/` directory is found.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 3. Database Schema
|
||||||
|
|
||||||
|
### 3.1 `item_metadata` Table
|
||||||
|
|
||||||
|
Stores the indexed contents of `silo/metadata.json` as structured JSONB, searchable and filterable via the existing item query endpoints.
|
||||||
|
|
||||||
|
```sql
|
||||||
|
CREATE TABLE item_metadata (
|
||||||
|
item_id UUID PRIMARY KEY REFERENCES items(id) ON DELETE CASCADE,
|
||||||
|
schema_name TEXT,
|
||||||
|
tags TEXT[] NOT NULL DEFAULT '{}',
|
||||||
|
lifecycle_state TEXT NOT NULL DEFAULT 'draft',
|
||||||
|
fields JSONB NOT NULL DEFAULT '{}',
|
||||||
|
kc_version TEXT,
|
||||||
|
manifest_uuid UUID,
|
||||||
|
silo_instance TEXT,
|
||||||
|
revision_hash TEXT,
|
||||||
|
updated_at TIMESTAMPTZ DEFAULT now(),
|
||||||
|
updated_by TEXT
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_item_metadata_tags ON item_metadata USING GIN (tags);
|
||||||
|
CREATE INDEX idx_item_metadata_lifecycle ON item_metadata (lifecycle_state);
|
||||||
|
CREATE INDEX idx_item_metadata_fields ON item_metadata USING GIN (fields);
|
||||||
|
```
|
||||||
|
|
||||||
|
On commit, the server upserts this row from `silo/manifest.json` and `silo/metadata.json`. The `fields` column contains the schema-driven key-value pairs exactly as they appear in the JSON.
|
||||||
|
|
||||||
|
### 3.2 `item_dependencies` Table
|
||||||
|
|
||||||
|
Stores the indexed contents of `silo/dependencies.json`. Replaces the BOM for assembly relationships that originate from the CAD model.
|
||||||
|
|
||||||
|
```sql
|
||||||
|
CREATE TABLE item_dependencies (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
parent_item_id UUID REFERENCES items(id) ON DELETE CASCADE,
|
||||||
|
child_uuid UUID NOT NULL,
|
||||||
|
child_part_number TEXT,
|
||||||
|
child_revision INTEGER,
|
||||||
|
quantity DECIMAL,
|
||||||
|
label TEXT,
|
||||||
|
relationship TEXT NOT NULL DEFAULT 'component',
|
||||||
|
revision_number INTEGER NOT NULL,
|
||||||
|
created_at TIMESTAMPTZ DEFAULT now()
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_item_deps_parent ON item_dependencies (parent_item_id);
|
||||||
|
CREATE INDEX idx_item_deps_child ON item_dependencies (child_uuid);
|
||||||
|
```
|
||||||
|
|
||||||
|
This table complements the existing `relationships` table. The `relationships` table is the server-authoritative BOM (editable via the web UI and API). The `item_dependencies` table is the CAD-authoritative record extracted from the file. BOM merge (per [BOM_MERGE.md](BOM_MERGE.md)) reconciles the two.
|
||||||
|
|
||||||
|
### 3.3 `item_approvals` Table
|
||||||
|
|
||||||
|
Stores the indexed contents of `silo/approvals.json`. Server-authoritative -- the `.kc` snapshot is a read cache.
|
||||||
|
|
||||||
|
```sql
|
||||||
|
CREATE TABLE item_approvals (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
item_id UUID REFERENCES items(id) ON DELETE CASCADE,
|
||||||
|
eco_number TEXT,
|
||||||
|
state TEXT NOT NULL DEFAULT 'draft',
|
||||||
|
updated_at TIMESTAMPTZ DEFAULT now(),
|
||||||
|
updated_by TEXT
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE approval_signatures (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
approval_id UUID REFERENCES item_approvals(id) ON DELETE CASCADE,
|
||||||
|
username TEXT NOT NULL,
|
||||||
|
role TEXT NOT NULL,
|
||||||
|
status TEXT NOT NULL DEFAULT 'pending',
|
||||||
|
signed_at TIMESTAMPTZ,
|
||||||
|
comment TEXT
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
These tables exist independent of `.kc` commits -- approvals are created and managed through the web UI and API. On `.kc` checkout, the current approval state is serialized into `silo/approvals.json` for offline display.
|
||||||
|
|
||||||
|
### 3.4 `item_macros` Table
|
||||||
|
|
||||||
|
Registers macros from `silo/macros/` for server-side discoverability and the future Macro Store module.
|
||||||
|
|
||||||
|
```sql
|
||||||
|
CREATE TABLE item_macros (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
item_id UUID REFERENCES items(id) ON DELETE CASCADE,
|
||||||
|
filename TEXT NOT NULL,
|
||||||
|
trigger TEXT NOT NULL DEFAULT 'manual',
|
||||||
|
content TEXT NOT NULL,
|
||||||
|
revision_number INTEGER NOT NULL,
|
||||||
|
created_at TIMESTAMPTZ DEFAULT now(),
|
||||||
|
UNIQUE(item_id, filename)
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 4. API Endpoints
|
||||||
|
|
||||||
|
These endpoints serve the viewport widgets in Create. All are under `/api/items/{partNumber}` and follow the existing auth model.
|
||||||
|
|
||||||
|
### 4.1 Metadata
|
||||||
|
|
||||||
|
| Method | Path | Auth | Description |
|
||||||
|
|--------|------|------|-------------|
|
||||||
|
| `GET` | `/metadata` | viewer | Get indexed metadata (schema fields, tags, lifecycle) |
|
||||||
|
| `PUT` | `/metadata` | editor | Update metadata fields from client |
|
||||||
|
| `PATCH` | `/metadata/lifecycle` | editor | Transition lifecycle state |
|
||||||
|
| `PATCH` | `/metadata/tags` | editor | Add/remove tags |
|
||||||
|
|
||||||
|
**`GET /api/items/{partNumber}/metadata`**
|
||||||
|
|
||||||
|
Returns the indexed metadata for viewport display. This is the fast path -- reads from `item_metadata` rather than downloading and parsing the `.kc` ZIP.
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"schema_name": "mechanical-part-v2",
|
||||||
|
"lifecycle_state": "draft",
|
||||||
|
"tags": ["structural", "aluminum"],
|
||||||
|
"fields": {
|
||||||
|
"material": "6061-T6",
|
||||||
|
"finish": "anodized",
|
||||||
|
"weight_kg": 0.34,
|
||||||
|
"category": "bracket"
|
||||||
|
},
|
||||||
|
"manifest": {
|
||||||
|
"uuid": "550e8400-e29b-41d4-a716-446655440000",
|
||||||
|
"silo_instance": "https://silo.example.com",
|
||||||
|
"revision_hash": "a1b2c3d4e5f6",
|
||||||
|
"kc_version": "1.0"
|
||||||
|
},
|
||||||
|
"updated_at": "2026-02-13T20:30:00Z",
|
||||||
|
"updated_by": "joseph"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**`PUT /api/items/{partNumber}/metadata`**
|
||||||
|
|
||||||
|
Accepts a partial update of schema fields. The server merges into the existing `fields` JSONB. This is the write-back path for the Metadata Editor widget.
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"fields": {
|
||||||
|
"material": "7075-T6",
|
||||||
|
"weight_kg": 0.31
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
The server validates field names against the schema descriptor. Unknown fields are rejected with `422`.
|
||||||
|
|
||||||
|
**`PATCH /api/items/{partNumber}/metadata/lifecycle`**
|
||||||
|
|
||||||
|
Transitions lifecycle state. The server validates the transition is permitted (e.g., `draft` -> `review` is allowed, `released` -> `draft` is not without admin override).
|
||||||
|
|
||||||
|
```json
|
||||||
|
{ "state": "review" }
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4.2 Dependencies
|
||||||
|
|
||||||
|
| Method | Path | Auth | Description |
|
||||||
|
|--------|------|------|-------------|
|
||||||
|
| `GET` | `/dependencies` | viewer | Get CAD-extracted dependency list |
|
||||||
|
| `GET` | `/dependencies/resolve` | viewer | Resolve UUIDs to current part numbers and file status |
|
||||||
|
|
||||||
|
**`GET /api/items/{partNumber}/dependencies`**
|
||||||
|
|
||||||
|
Returns the raw dependency list from the last `.kc` commit.
|
||||||
|
|
||||||
|
**`GET /api/items/{partNumber}/dependencies/resolve`**
|
||||||
|
|
||||||
|
Returns the dependency list with each UUID resolved to its current part number, revision, and whether the file exists on disk. This is what the Dependency Table widget calls to populate the status column.
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"links": [
|
||||||
|
{
|
||||||
|
"uuid": "660e8400-...",
|
||||||
|
"part_number": "KC-BRK-0042",
|
||||||
|
"label": "Base Plate",
|
||||||
|
"revision": 2,
|
||||||
|
"quantity": 1,
|
||||||
|
"resolved": true,
|
||||||
|
"file_available": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"uuid": "770e8400-...",
|
||||||
|
"part_number": "KC-HDW-0108",
|
||||||
|
"label": "M6 SHCS",
|
||||||
|
"revision": 1,
|
||||||
|
"quantity": 4,
|
||||||
|
"resolved": true,
|
||||||
|
"file_available": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"uuid": "880e8400-...",
|
||||||
|
"part_number": null,
|
||||||
|
"label": "Cover Panel",
|
||||||
|
"revision": 1,
|
||||||
|
"quantity": 1,
|
||||||
|
"resolved": false,
|
||||||
|
"file_available": false
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4.3 Approvals
|
||||||
|
|
||||||
|
| Method | Path | Auth | Description |
|
||||||
|
|--------|------|------|-------------|
|
||||||
|
| `GET` | `/approvals` | viewer | Get current approval state |
|
||||||
|
| `POST` | `/approvals` | editor | Create ECO / start approval workflow |
|
||||||
|
| `POST` | `/approvals/{id}/sign` | editor | Sign (approve/reject) |
|
||||||
|
|
||||||
|
These endpoints power the Approvals Viewer widget. The viewer is read-only in Create -- sign actions happen in the web UI, but the API exists for both.
|
||||||
|
|
||||||
|
### 4.4 Macros
|
||||||
|
|
||||||
|
| Method | Path | Auth | Description |
|
||||||
|
|--------|------|------|-------------|
|
||||||
|
| `GET` | `/macros` | viewer | List registered macros |
|
||||||
|
| `GET` | `/macros/{filename}` | viewer | Get macro source |
|
||||||
|
|
||||||
|
Read-only server-side. Macros are authored in Create and committed inside the `.kc`. The server indexes them for discoverability in the future Macro Store.
|
||||||
|
|
||||||
|
### 4.5 Existing Endpoints (unchanged)
|
||||||
|
|
||||||
|
The viewport widgets also consume these existing endpoints:
|
||||||
|
|
||||||
|
| Widget | Endpoint | Purpose |
|
||||||
|
|--------|----------|---------|
|
||||||
|
| History Viewer | `GET /api/items/{pn}/revisions` | Full revision list |
|
||||||
|
| History Viewer | `GET /api/items/{pn}/revisions/compare` | Property diff |
|
||||||
|
| Job Viewer | `GET /api/jobs?item={pn}&definition={name}&limit=1` | Last job run |
|
||||||
|
| Job Viewer | `POST /api/jobs` | Trigger job |
|
||||||
|
| Job Viewer | `GET /api/jobs/{id}/logs` | Job log |
|
||||||
|
| Manifest Viewer | `GET /api/items/{pn}` | Item details (UUID, etc.) |
|
||||||
|
|
||||||
|
No changes needed to these -- they already exist and return the data the widgets need.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 5. Checkout Pipeline
|
||||||
|
|
||||||
|
When a client downloads a `.kc` via `GET /api/items/{partNumber}/file`, the server packs current server-side state into the `silo/` directory before serving the file. This ensures the client always gets the latest metadata, even if it was edited via the web UI since the last commit.
|
||||||
|
|
||||||
|
### 5.1 Pipeline Steps
|
||||||
|
|
||||||
|
```
|
||||||
|
Client requests file download
|
||||||
|
|
|
||||||
|
v
|
||||||
|
+-----------------------------+
|
||||||
|
| 1. Read .kc from disk |
|
||||||
|
+-----------------------------+
|
||||||
|
|
|
||||||
|
v
|
||||||
|
+-----------------------------+
|
||||||
|
| 2. Pack silo/ from DB |
|
||||||
|
| - manifest.json (item) |
|
||||||
|
| - metadata.json (index) |
|
||||||
|
| - history.json (revs) |
|
||||||
|
| - approvals.json (ECO) |
|
||||||
|
| - dependencies.json |
|
||||||
|
| - macros/ (index) |
|
||||||
|
| - jobs/ (job defs) |
|
||||||
|
+-----------------------------+
|
||||||
|
|
|
||||||
|
v
|
||||||
|
+-----------------------------+
|
||||||
|
| 3. Replace silo/ in ZIP |
|
||||||
|
| Remove old entries |
|
||||||
|
| Write packed entries |
|
||||||
|
+-----------------------------+
|
||||||
|
|
|
||||||
|
v
|
||||||
|
Stream .kc to client
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5.2 Packing Rules
|
||||||
|
|
||||||
|
| `silo/` entry | Source | Notes |
|
||||||
|
|---------------|--------|-------|
|
||||||
|
| `manifest.json` | `item_metadata` + `items` table | UUID from item, revision_hash from latest revision |
|
||||||
|
| `metadata.json` | `item_metadata.fields` + tags + lifecycle | Serialized from indexed columns |
|
||||||
|
| `history.json` | `revisions` table | Last 20 revisions for this item |
|
||||||
|
| `approvals.json` | `item_approvals` + `approval_signatures` | Current ECO state, omitted if no active ECO |
|
||||||
|
| `dependencies.json` | `item_dependencies` | Current revision's dependency list |
|
||||||
|
| `macros/*.py` | `item_macros` | All registered macros |
|
||||||
|
| `jobs/*.yaml` | `job_definitions` filtered by item type | Job definitions matching this item's trigger filters |
|
||||||
|
|
||||||
|
### 5.3 Caching
|
||||||
|
|
||||||
|
Packing the `silo/` directory on every download has a cost. To mitigate:
|
||||||
|
|
||||||
|
- **ETag header**: The response includes an ETag computed from the revision number + metadata `updated_at`. If the client sends `If-None-Match`, the server can return `304 Not Modified`.
|
||||||
|
- **Lazy packing**: If the `.kc` blob's `silo/manifest.json` revision_hash matches the current head *and* `item_metadata.updated_at` is older than the blob's upload time, skip repacking entirely -- the blob is already current.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 6. SSE Events
|
||||||
|
|
||||||
|
The viewport widgets subscribe to SSE for live updates. These events are broadcast when server-side metadata changes, whether via `.kc` commit, web UI edit, or API call.
|
||||||
|
|
||||||
|
| Event | Payload | Trigger |
|
||||||
|
|-------|---------|---------|
|
||||||
|
| `metadata.updated` | `{part_number, changed_fields[], lifecycle_state, updated_by}` | Metadata PUT/PATCH |
|
||||||
|
| `metadata.lifecycle` | `{part_number, from_state, to_state, updated_by}` | Lifecycle transition |
|
||||||
|
| `metadata.tags` | `{part_number, added[], removed[]}` | Tag add/remove |
|
||||||
|
| `approval.created` | `{part_number, eco_number, state}` | ECO created |
|
||||||
|
| `approval.signed` | `{part_number, eco_number, user, role, status}` | Approver action |
|
||||||
|
| `approval.completed` | `{part_number, eco_number, final_state}` | All approvers acted |
|
||||||
|
| `dependencies.changed` | `{part_number, added[], removed[], changed[]}` | Dependency diff on commit |
|
||||||
|
|
||||||
|
Existing events (`revision.created`, `job.*`, `bom.changed`) continue to work as documented in [SPECIFICATION.md](SPECIFICATION.md) and [WORKERS.md](WORKERS.md).
|
||||||
|
|
||||||
|
### 6.1 Widget Subscription Map
|
||||||
|
|
||||||
|
| Viewport widget | Subscribes to |
|
||||||
|
|-----------------|---------------|
|
||||||
|
| Manifest Viewer | -- (read-only, no live updates) |
|
||||||
|
| Metadata Editor | `metadata.updated`, `metadata.lifecycle`, `metadata.tags` |
|
||||||
|
| History Viewer | `revision.created` |
|
||||||
|
| Approvals Viewer | `approval.created`, `approval.signed`, `approval.completed` |
|
||||||
|
| Dependency Table | `dependencies.changed` |
|
||||||
|
| Job Viewer | `job.created`, `job.progress`, `job.completed`, `job.failed` |
|
||||||
|
| Macro Editor | -- (local-only until committed) |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 7. Web UI Integration
|
||||||
|
|
||||||
|
The Silo web UI also benefits from indexed metadata. These are additions to existing pages, not new pages.
|
||||||
|
|
||||||
|
### 7.1 Items Page
|
||||||
|
|
||||||
|
The item detail panel gains a **Metadata** tab (alongside Main, Properties, Revisions, BOM, Where Used) showing the schema-driven form from `GET /api/items/{pn}/metadata`. Editable for editors.
|
||||||
|
|
||||||
|
### 7.2 Items List
|
||||||
|
|
||||||
|
New filterable columns: `lifecycle_state`, `tags`. The existing search endpoint gains metadata-aware filtering:
|
||||||
|
|
||||||
|
```
|
||||||
|
GET /api/items?lifecycle=released&tag=aluminum
|
||||||
|
GET /api/items/search?q=bracket&lifecycle=draft
|
||||||
|
```
|
||||||
|
|
||||||
|
### 7.3 Approvals Page
|
||||||
|
|
||||||
|
A new page accessible from the top navigation (visible when a future `approvals` module is enabled). Lists all active ECOs with their approval progress.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 8. Migration
|
||||||
|
|
||||||
|
### 8.1 Database Migration
|
||||||
|
|
||||||
|
A single migration adds the `item_metadata`, `item_dependencies`, `item_approvals`, `approval_signatures`, and `item_macros` tables. Existing items have no metadata rows -- they're created on first `.kc` commit or via `PUT /api/items/{pn}/metadata`.
|
||||||
|
|
||||||
|
### 8.2 Backfill
|
||||||
|
|
||||||
|
For items that already have `.kc` files stored on disk (committed before this feature), an admin endpoint re-runs the extraction pipeline:
|
||||||
|
|
||||||
|
```
|
||||||
|
POST /api/admin/reindex-metadata
|
||||||
|
```
|
||||||
|
|
||||||
|
This iterates all items with `.kc` files, opens each ZIP, and indexes the `silo/` contents. Idempotent -- safe to run multiple times.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 9. Implementation Order
|
||||||
|
|
||||||
|
| Phase | Server work | Supports client phase |
|
||||||
|
|-------|------------|----------------------|
|
||||||
|
| 1 | `item_metadata` table + `GET/PUT /metadata` + commit extraction | SILO_VIEWPORT Phase 1-2 (Manifest, Metadata) |
|
||||||
|
| 2 | Pack `silo/` on checkout + ETag caching | SILO_VIEWPORT Phase 1-3 |
|
||||||
|
| 3 | `item_dependencies` table + `/dependencies/resolve` | SILO_VIEWPORT Phase 5 (Dependency Table) |
|
||||||
|
| 4 | `item_macros` table + `/macros` endpoints | SILO_VIEWPORT Phase 6 (Macro Editor) |
|
||||||
|
| 5 | `item_approvals` tables + `/approvals` endpoints | SILO_VIEWPORT Phase 7 (Approvals Viewer) |
|
||||||
|
| 6 | SSE events for metadata/approvals/dependencies | SILO_VIEWPORT Phase 8 (Live integration) |
|
||||||
|
| 7 | Web UI metadata tab + list filters | Independent of client |
|
||||||
|
|
||||||
|
Phases 1-2 are prerequisite for the viewport to work with live data. Phases 3-6 can be built in parallel with client widget development. Phase 7 is web-UI-only and independent.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 10. References
|
||||||
|
|
||||||
|
- [SILO_VIEWPORT.md](SILO_VIEWPORT.md) -- Client-side viewport widget specification
|
||||||
|
- [KC_SPECIFICATION.md](KC_SPECIFICATION.md) -- .kc file format specification
|
||||||
|
- [SPECIFICATION.md](SPECIFICATION.md) -- Silo server API reference
|
||||||
|
- [BOM_MERGE.md](BOM_MERGE.md) -- BOM merge rules (dependency reconciliation)
|
||||||
|
- [WORKERS.md](WORKERS.md) -- Job queue (job viewer data source)
|
||||||
|
- [MODULES.md](MODULES.md) -- Module system (approval module gating)
|
||||||
|
- [ROADMAP.md](ROADMAP.md) -- Platform roadmap tiers
|
||||||
@@ -313,7 +313,7 @@ For full SOLIDWORKS PDM comparison tables, see [GAP_ANALYSIS.md Appendix C](GAP_
|
|||||||
- Rollback functionality
|
- Rollback functionality
|
||||||
|
|
||||||
#### File Management
|
#### File Management
|
||||||
- MinIO integration with versioning
|
- Filesystem-based file storage
|
||||||
- File upload/download via REST API
|
- File upload/download via REST API
|
||||||
- SHA256 checksums for integrity
|
- SHA256 checksums for integrity
|
||||||
- Storage path: `items/{partNumber}/rev{N}.FCStd`
|
- Storage path: `items/{partNumber}/rev{N}.FCStd`
|
||||||
@@ -377,8 +377,8 @@ For full SOLIDWORKS PDM comparison tables, see [GAP_ANALYSIS.md Appendix C](GAP_
|
|||||||
|
|
||||||
## Appendix B: Phase 1 Detailed Tasks
|
## Appendix B: Phase 1 Detailed Tasks
|
||||||
|
|
||||||
### 1.1 MinIO Integration -- COMPLETE
|
### 1.1 File Storage -- COMPLETE
|
||||||
- [x] MinIO service configured in Docker Compose
|
- [x] Filesystem storage backend
|
||||||
- [x] File upload via REST API
|
- [x] File upload via REST API
|
||||||
- [x] File download via REST API (latest and by revision)
|
- [x] File download via REST API (latest and by revision)
|
||||||
- [x] SHA256 checksums on upload
|
- [x] SHA256 checksums on upload
|
||||||
|
|||||||
@@ -49,9 +49,9 @@ Silo treats **part numbering schemas as configuration, not code**. Multiple numb
|
|||||||
┌───────────────┴───────────────┐
|
┌───────────────┴───────────────┐
|
||||||
▼ ▼
|
▼ ▼
|
||||||
┌─────────────────────────┐ ┌─────────────────────────────┐
|
┌─────────────────────────┐ ┌─────────────────────────────┐
|
||||||
│ PostgreSQL │ │ MinIO │
|
│ PostgreSQL │ │ Local Filesystem │
|
||||||
│ (psql.example.internal)│ │ - File storage │
|
│ (psql.example.internal)│ │ - File storage │
|
||||||
│ - Item metadata │ │ - Versioned objects │
|
│ - Item metadata │ │ - Revision files │
|
||||||
│ - Relationships │ │ - Thumbnails │
|
│ - Relationships │ │ - Thumbnails │
|
||||||
│ - Revision history │ │ │
|
│ - Revision history │ │ │
|
||||||
│ - Auth / Sessions │ │ │
|
│ - Auth / Sessions │ │ │
|
||||||
@@ -64,7 +64,7 @@ Silo treats **part numbering schemas as configuration, not code**. Multiple numb
|
|||||||
| Component | Technology | Notes |
|
| Component | Technology | Notes |
|
||||||
|-----------|------------|-------|
|
|-----------|------------|-------|
|
||||||
| Database | PostgreSQL 16 | Existing instance at psql.example.internal |
|
| Database | PostgreSQL 16 | Existing instance at psql.example.internal |
|
||||||
| File Storage | MinIO | S3-compatible, versioning enabled |
|
| File Storage | Local filesystem | Files stored under configurable root directory |
|
||||||
| CLI & API Server | Go (1.24) | chi/v5 router, pgx/v5 driver, zerolog |
|
| CLI & API Server | Go (1.24) | chi/v5 router, pgx/v5 driver, zerolog |
|
||||||
| Authentication | Multi-backend | Local (bcrypt), LDAP/FreeIPA, OIDC/Keycloak |
|
| Authentication | Multi-backend | Local (bcrypt), LDAP/FreeIPA, OIDC/Keycloak |
|
||||||
| Sessions | PostgreSQL pgxstore | alexedwards/scs, 24h lifetime |
|
| Sessions | PostgreSQL pgxstore | alexedwards/scs, 24h lifetime |
|
||||||
@@ -83,7 +83,7 @@ An **item** is the fundamental entity. Items have:
|
|||||||
- **Properties** (key-value pairs, schema-defined and custom)
|
- **Properties** (key-value pairs, schema-defined and custom)
|
||||||
- **Relationships** to other items
|
- **Relationships** to other items
|
||||||
- **Revisions** (append-only history)
|
- **Revisions** (append-only history)
|
||||||
- **Files** (optional, stored in MinIO)
|
- **Files** (optional, stored on the local filesystem)
|
||||||
- **Location** (optional physical inventory location)
|
- **Location** (optional physical inventory location)
|
||||||
|
|
||||||
### 3.2 Database Schema (Conceptual)
|
### 3.2 Database Schema (Conceptual)
|
||||||
@@ -115,7 +115,7 @@ CREATE TABLE revisions (
|
|||||||
item_id UUID REFERENCES items(id) NOT NULL,
|
item_id UUID REFERENCES items(id) NOT NULL,
|
||||||
revision_number INTEGER NOT NULL,
|
revision_number INTEGER NOT NULL,
|
||||||
properties JSONB NOT NULL, -- all properties at this revision
|
properties JSONB NOT NULL, -- all properties at this revision
|
||||||
file_version TEXT, -- MinIO version ID if applicable
|
file_version TEXT, -- storage version ID if applicable
|
||||||
created_at TIMESTAMPTZ DEFAULT now(),
|
created_at TIMESTAMPTZ DEFAULT now(),
|
||||||
created_by TEXT, -- user identifier (future: LDAP DN)
|
created_by TEXT, -- user identifier (future: LDAP DN)
|
||||||
comment TEXT,
|
comment TEXT,
|
||||||
@@ -345,7 +345,7 @@ CAD workbench and spreadsheet extension implementations are maintained in separa
|
|||||||
|
|
||||||
### 5.1 File Storage Strategy
|
### 5.1 File Storage Strategy
|
||||||
|
|
||||||
Files are stored as whole objects in MinIO with versioning enabled. Storage path convention: `items/{partNumber}/rev{N}.ext`. SHA-256 checksums are captured on upload for integrity verification.
|
Files are stored on the local filesystem under a configurable root directory. Storage path convention: `items/{partNumber}/rev{N}.ext`. SHA-256 checksums are captured on upload for integrity verification.
|
||||||
|
|
||||||
Future option: exploded storage (unpack ZIP-based CAD archives for better diffing).
|
Future option: exploded storage (unpack ZIP-based CAD archives for better diffing).
|
||||||
|
|
||||||
@@ -439,7 +439,7 @@ Revisions are created explicitly by user action (not automatic):
|
|||||||
### 7.3 Revision vs. File Version
|
### 7.3 Revision vs. File Version
|
||||||
|
|
||||||
- **Revision**: Silo metadata revision (tracked in PostgreSQL)
|
- **Revision**: Silo metadata revision (tracked in PostgreSQL)
|
||||||
- **File Version**: MinIO object version (automatic on upload)
|
- **File Version**: File on disk corresponding to a revision
|
||||||
|
|
||||||
A single Silo revision may span multiple file uploads during editing. Only committed revisions create formal revision records.
|
A single Silo revision may span multiple file uploads during editing. Only committed revisions create formal revision records.
|
||||||
|
|
||||||
@@ -603,7 +603,7 @@ See [AUTH.md](AUTH.md) for full architecture details and [AUTH_USER_GUIDE.md](AU
|
|||||||
```
|
```
|
||||||
# Health (no auth)
|
# Health (no auth)
|
||||||
GET /health # Basic health check
|
GET /health # Basic health check
|
||||||
GET /ready # Readiness (DB + MinIO)
|
GET /ready # Readiness (DB)
|
||||||
|
|
||||||
# Auth (no auth required)
|
# Auth (no auth required)
|
||||||
GET /login # Login page
|
GET /login # Login page
|
||||||
@@ -624,8 +624,8 @@ GET /api/auth/tokens # List user's API to
|
|||||||
POST /api/auth/tokens # Create API token
|
POST /api/auth/tokens # Create API token
|
||||||
DELETE /api/auth/tokens/{id} # Revoke API token
|
DELETE /api/auth/tokens/{id} # Revoke API token
|
||||||
|
|
||||||
# Presigned Uploads (editor)
|
# Direct Uploads (editor)
|
||||||
POST /api/uploads/presign # Get presigned MinIO upload URL [editor]
|
POST /api/uploads/presign # Get upload URL [editor]
|
||||||
|
|
||||||
# Schemas (read: viewer, write: editor)
|
# Schemas (read: viewer, write: editor)
|
||||||
GET /api/schemas # List all schemas
|
GET /api/schemas # List all schemas
|
||||||
@@ -744,7 +744,7 @@ POST /api/inventory/{partNumber}/move
|
|||||||
- [x] Part number generation engine
|
- [x] Part number generation engine
|
||||||
- [x] CLI tool (`cmd/silo`)
|
- [x] CLI tool (`cmd/silo`)
|
||||||
- [x] API server (`cmd/silod`) with 78 endpoints
|
- [x] API server (`cmd/silod`) with 78 endpoints
|
||||||
- [x] MinIO integration for file storage with versioning
|
- [x] Filesystem-based file storage
|
||||||
- [x] BOM relationships (component, alternate, reference)
|
- [x] BOM relationships (component, alternate, reference)
|
||||||
- [x] Multi-level BOM (recursive expansion with configurable depth)
|
- [x] Multi-level BOM (recursive expansion with configurable depth)
|
||||||
- [x] Where-used queries (reverse parent lookup)
|
- [x] Where-used queries (reverse parent lookup)
|
||||||
|
|||||||
@@ -15,7 +15,7 @@
|
|||||||
| Part number generator | Complete | Scoped sequences, category-based format |
|
| Part number generator | Complete | Scoped sequences, category-based format |
|
||||||
| API server (`silod`) | Complete | 78 REST endpoints via chi/v5 |
|
| API server (`silod`) | Complete | 78 REST endpoints via chi/v5 |
|
||||||
| CLI tool (`silo`) | Complete | Item registration and management |
|
| CLI tool (`silo`) | Complete | Item registration and management |
|
||||||
| MinIO file storage | Complete | Upload, download, versioning, checksums |
|
| Filesystem file storage | Complete | Upload, download, checksums |
|
||||||
| Revision control | Complete | Append-only history, rollback, comparison, status/labels |
|
| Revision control | Complete | Append-only history, rollback, comparison, status/labels |
|
||||||
| Project management | Complete | CRUD, many-to-many item tagging |
|
| Project management | Complete | CRUD, many-to-many item tagging |
|
||||||
| CSV import/export | Complete | Dry-run validation, template generation |
|
| CSV import/export | Complete | Dry-run validation, template generation |
|
||||||
@@ -29,7 +29,7 @@
|
|||||||
| CSRF protection | Complete | nosurf on web forms |
|
| CSRF protection | Complete | nosurf on web forms |
|
||||||
| Fuzzy search | Complete | sahilm/fuzzy library |
|
| Fuzzy search | Complete | sahilm/fuzzy library |
|
||||||
| Web UI | Complete | React SPA (Vite + TypeScript), 6 pages, Catppuccin Mocha theme |
|
| Web UI | Complete | React SPA (Vite + TypeScript), 6 pages, Catppuccin Mocha theme |
|
||||||
| File attachments | Complete | Presigned uploads, item file association, thumbnails |
|
| File attachments | Complete | Direct uploads, item file association, thumbnails |
|
||||||
| Odoo ERP integration | Partial | Config and sync-log CRUD functional; push/pull are stubs |
|
| Odoo ERP integration | Partial | Config and sync-log CRUD functional; push/pull are stubs |
|
||||||
| Docker Compose | Complete | Dev and production configurations |
|
| Docker Compose | Complete | Dev and production configurations |
|
||||||
| Deployment scripts | Complete | setup-host, deploy, init-db, setup-ipa-nginx |
|
| Deployment scripts | Complete | setup-host, deploy, init-db, setup-ipa-nginx |
|
||||||
@@ -56,7 +56,7 @@ FreeCAD workbench and LibreOffice Calc extension are maintained in separate repo
|
|||||||
| Service | Host | Status |
|
| Service | Host | Status |
|
||||||
|---------|------|--------|
|
|---------|------|--------|
|
||||||
| PostgreSQL | psql.example.internal:5432 | Running |
|
| PostgreSQL | psql.example.internal:5432 | Running |
|
||||||
| MinIO | localhost:9000 (API) / :9001 (console) | Configured |
|
| File Storage | /opt/silo/data (filesystem) | Configured |
|
||||||
| Silo API | localhost:8080 | Builds successfully |
|
| Silo API | localhost:8080 | Builds successfully |
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|||||||
@@ -11,6 +11,7 @@ import (
|
|||||||
"github.com/go-chi/chi/v5"
|
"github.com/go-chi/chi/v5"
|
||||||
"github.com/kindredsystems/silo/internal/auth"
|
"github.com/kindredsystems/silo/internal/auth"
|
||||||
"github.com/kindredsystems/silo/internal/db"
|
"github.com/kindredsystems/silo/internal/db"
|
||||||
|
"github.com/kindredsystems/silo/internal/modules"
|
||||||
"github.com/kindredsystems/silo/internal/schema"
|
"github.com/kindredsystems/silo/internal/schema"
|
||||||
"github.com/kindredsystems/silo/internal/testutil"
|
"github.com/kindredsystems/silo/internal/testutil"
|
||||||
"github.com/rs/zerolog"
|
"github.com/rs/zerolog"
|
||||||
@@ -38,8 +39,10 @@ func newAuthTestServer(t *testing.T) *Server {
|
|||||||
nil, // authConfig
|
nil, // authConfig
|
||||||
broker,
|
broker,
|
||||||
state,
|
state,
|
||||||
nil, // jobDefs
|
nil, // jobDefs
|
||||||
"", // jobDefsDir
|
"", // jobDefsDir
|
||||||
|
modules.NewRegistry(), // modules
|
||||||
|
nil, // cfg
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -285,6 +285,8 @@ func (s *Server) HandleAddBOMEntry(w http.ResponseWriter, r *http.Request) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
writeJSON(w, http.StatusCreated, entry)
|
writeJSON(w, http.StatusCreated, entry)
|
||||||
|
|
||||||
|
go s.triggerJobs(context.Background(), "bom_changed", parent.ID, parent)
|
||||||
}
|
}
|
||||||
|
|
||||||
// HandleUpdateBOMEntry updates an existing BOM relationship.
|
// HandleUpdateBOMEntry updates an existing BOM relationship.
|
||||||
@@ -353,6 +355,8 @@ func (s *Server) HandleUpdateBOMEntry(w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
go s.triggerJobs(context.Background(), "bom_changed", parent.ID, parent)
|
||||||
|
|
||||||
// Reload and return updated entry
|
// Reload and return updated entry
|
||||||
entries, err := s.relationships.GetBOM(ctx, parent.ID)
|
entries, err := s.relationships.GetBOM(ctx, parent.ID)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
@@ -419,6 +423,8 @@ func (s *Server) HandleDeleteBOMEntry(w http.ResponseWriter, r *http.Request) {
|
|||||||
Msg("BOM entry removed")
|
Msg("BOM entry removed")
|
||||||
|
|
||||||
w.WriteHeader(http.StatusNoContent)
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
|
||||||
|
go s.triggerJobs(context.Background(), "bom_changed", parent.ID, parent)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Helper functions
|
// Helper functions
|
||||||
|
|||||||
@@ -11,6 +11,7 @@ import (
|
|||||||
"github.com/go-chi/chi/v5"
|
"github.com/go-chi/chi/v5"
|
||||||
"github.com/kindredsystems/silo/internal/auth"
|
"github.com/kindredsystems/silo/internal/auth"
|
||||||
"github.com/kindredsystems/silo/internal/db"
|
"github.com/kindredsystems/silo/internal/db"
|
||||||
|
"github.com/kindredsystems/silo/internal/modules"
|
||||||
"github.com/kindredsystems/silo/internal/schema"
|
"github.com/kindredsystems/silo/internal/schema"
|
||||||
"github.com/kindredsystems/silo/internal/testutil"
|
"github.com/kindredsystems/silo/internal/testutil"
|
||||||
"github.com/rs/zerolog"
|
"github.com/rs/zerolog"
|
||||||
@@ -35,8 +36,10 @@ func newTestServer(t *testing.T) *Server {
|
|||||||
nil, // authConfig (nil = dev mode)
|
nil, // authConfig (nil = dev mode)
|
||||||
broker,
|
broker,
|
||||||
state,
|
state,
|
||||||
nil, // jobDefs
|
nil, // jobDefs
|
||||||
"", // jobDefsDir
|
"", // jobDefsDir
|
||||||
|
modules.NewRegistry(), // modules
|
||||||
|
nil, // cfg
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -13,6 +13,7 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/go-chi/chi/v5"
|
"github.com/go-chi/chi/v5"
|
||||||
|
"github.com/kindredsystems/silo/internal/modules"
|
||||||
"github.com/kindredsystems/silo/internal/schema"
|
"github.com/kindredsystems/silo/internal/schema"
|
||||||
"github.com/kindredsystems/silo/internal/testutil"
|
"github.com/kindredsystems/silo/internal/testutil"
|
||||||
"github.com/rs/zerolog"
|
"github.com/rs/zerolog"
|
||||||
@@ -64,8 +65,10 @@ func newTestServerWithSchemas(t *testing.T) *Server {
|
|||||||
nil, // authConfig
|
nil, // authConfig
|
||||||
broker,
|
broker,
|
||||||
state,
|
state,
|
||||||
nil, // jobDefs
|
nil, // jobDefs
|
||||||
"", // jobDefsDir
|
"", // jobDefsDir
|
||||||
|
modules.NewRegistry(), // modules
|
||||||
|
nil, // cfg
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -10,6 +10,7 @@ import (
|
|||||||
|
|
||||||
"github.com/go-chi/chi/v5"
|
"github.com/go-chi/chi/v5"
|
||||||
"github.com/kindredsystems/silo/internal/db"
|
"github.com/kindredsystems/silo/internal/db"
|
||||||
|
"github.com/kindredsystems/silo/internal/modules"
|
||||||
"github.com/kindredsystems/silo/internal/schema"
|
"github.com/kindredsystems/silo/internal/schema"
|
||||||
"github.com/kindredsystems/silo/internal/testutil"
|
"github.com/kindredsystems/silo/internal/testutil"
|
||||||
"github.com/rs/zerolog"
|
"github.com/rs/zerolog"
|
||||||
@@ -29,6 +30,7 @@ func newDAGTestServer(t *testing.T) *Server {
|
|||||||
nil, nil, nil, nil, nil,
|
nil, nil, nil, nil, nil,
|
||||||
broker, state,
|
broker, state,
|
||||||
nil, "",
|
nil, "",
|
||||||
|
modules.NewRegistry(), nil,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
125
internal/api/dependency_handlers.go
Normal file
125
internal/api/dependency_handlers.go
Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/go-chi/chi/v5"
|
||||||
|
"github.com/kindredsystems/silo/internal/storage"
|
||||||
|
)
|
||||||
|
|
||||||
|
// DependencyResponse is the JSON representation for GET /dependencies.
//
// Pointer fields are nullable in the JSON output: nil encodes as null,
// meaning the value was not provided in the source dependency record.
// NOTE: encoding/json emits fields in declaration order — keep this order
// stable for API clients.
type DependencyResponse struct {
	// UUID identifies the child item (taken from the dependency's ChildUUID).
	UUID string `json:"uuid"`
	// PartNumber is the child part number as recorded in the dependency; nil when unknown.
	PartNumber *string `json:"part_number"`
	// Revision is the child revision recorded in the dependency; nil when not specified.
	Revision *int `json:"revision"`
	// Quantity of the child used by the parent; nil when not specified.
	Quantity *float64 `json:"quantity"`
	// Label is an optional free-form designator attached to the dependency.
	Label *string `json:"label"`
	// Relationship describes how the child relates to the parent item.
	Relationship string `json:"relationship"`
}
|
||||||
|
|
||||||
|
// ResolvedDependencyResponse is the JSON representation for GET /dependencies/resolve.
//
// Unlike DependencyResponse, PartNumber and Revision here may come from the
// server-side resolution of the child UUID rather than the raw dependency
// record (see HandleResolveDependencies). Pointer fields encode as null when
// absent. NOTE: encoding/json emits fields in declaration order — keep this
// order stable for API clients.
type ResolvedDependencyResponse struct {
	// UUID identifies the child item (taken from the dependency's ChildUUID).
	UUID string `json:"uuid"`
	// PartNumber is the resolved part number when resolution succeeded,
	// otherwise the value recorded in the dependency; nil when unknown.
	PartNumber *string `json:"part_number"`
	// Label is an optional free-form designator attached to the dependency.
	Label *string `json:"label"`
	// Revision is the resolved revision when resolution succeeded, otherwise
	// the value recorded in the dependency; nil when not specified.
	Revision *int `json:"revision"`
	// Quantity of the child used by the parent; nil when not specified.
	Quantity *float64 `json:"quantity"`
	// Resolved reports whether the child UUID was matched to a known item.
	Resolved bool `json:"resolved"`
	// FileAvailable reports whether a stored file exists for the resolved
	// part number + revision (best-effort; false when storage is unavailable).
	FileAvailable bool `json:"file_available"`
}
|
||||||
|
|
||||||
|
// HandleGetDependencies returns the raw dependency list for an item.
|
||||||
|
// GET /api/items/{partNumber}/dependencies
|
||||||
|
func (s *Server) HandleGetDependencies(w http.ResponseWriter, r *http.Request) {
|
||||||
|
ctx := r.Context()
|
||||||
|
partNumber := chi.URLParam(r, "partNumber")
|
||||||
|
|
||||||
|
item, err := s.items.GetByPartNumber(ctx, partNumber)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error().Err(err).Msg("failed to get item")
|
||||||
|
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get item")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if item == nil {
|
||||||
|
writeError(w, http.StatusNotFound, "not_found", "Item not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
deps, err := s.deps.ListByItem(ctx, item.ID)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error().Err(err).Msg("failed to list dependencies")
|
||||||
|
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to list dependencies")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
resp := make([]DependencyResponse, len(deps))
|
||||||
|
for i, d := range deps {
|
||||||
|
resp[i] = DependencyResponse{
|
||||||
|
UUID: d.ChildUUID,
|
||||||
|
PartNumber: d.ChildPartNumber,
|
||||||
|
Revision: d.ChildRevision,
|
||||||
|
Quantity: d.Quantity,
|
||||||
|
Label: d.Label,
|
||||||
|
Relationship: d.Relationship,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
writeJSON(w, http.StatusOK, resp)
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleResolveDependencies returns dependencies with UUIDs resolved to part numbers
// and file availability status.
// GET /api/items/{partNumber}/dependencies/resolve
//
// Responds 404 when the item does not exist, 500 on lookup/resolution
// failures, otherwise 200 with a JSON array of ResolvedDependencyResponse.
// FileAvailable is computed best-effort: storage lookup errors leave it false.
func (s *Server) HandleResolveDependencies(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	partNumber := chi.URLParam(r, "partNumber")

	// A nil item (with nil error) means the part number is unknown.
	item, err := s.items.GetByPartNumber(ctx, partNumber)
	if err != nil {
		s.logger.Error().Err(err).Msg("failed to get item")
		writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get item")
		return
	}
	if item == nil {
		writeError(w, http.StatusNotFound, "not_found", "Item not found")
		return
	}

	deps, err := s.deps.Resolve(ctx, item.ID)
	if err != nil {
		s.logger.Error().Err(err).Msg("failed to resolve dependencies")
		writeError(w, http.StatusInternalServerError, "internal_error", "Failed to resolve dependencies")
		return
	}

	resp := make([]ResolvedDependencyResponse, len(deps))
	for i, d := range deps {
		// Use resolved part number if available, fall back to .kc-provided value.
		// Both pn and rev are pointers and may be nil even after resolution.
		pn := d.ChildPartNumber
		rev := d.ChildRevision
		if d.Resolved {
			pn = d.ResolvedPartNumber
			rev = d.ResolvedRevision
		}

		// Only probe storage when we have a concrete part+revision to build a
		// key from and a storage backend is configured. Exists errors are
		// deliberately swallowed: availability is advisory, not load-bearing.
		fileAvailable := false
		if d.Resolved && pn != nil && rev != nil && s.storage != nil {
			key := storage.FileKey(*pn, *rev)
			if exists, err := s.storage.Exists(ctx, key); err == nil {
				fileAvailable = exists
			}
		}

		resp[i] = ResolvedDependencyResponse{
			UUID:          d.ChildUUID,
			PartNumber:    pn,
			Label:         d.Label,
			Revision:      rev,
			Quantity:      d.Quantity,
			Resolved:      d.Resolved,
			FileAvailable: fileAvailable,
		}
	}

	writeJSON(w, http.StatusOK, resp)
}
|
||||||
@@ -3,7 +3,9 @@ package api
|
|||||||
import (
|
import (
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"io"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
@@ -314,3 +316,188 @@ func (s *Server) HandleSetItemThumbnail(w http.ResponseWriter, r *http.Request)
|
|||||||
|
|
||||||
w.WriteHeader(http.StatusNoContent)
|
w.WriteHeader(http.StatusNoContent)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// storageBackend returns the configured storage backend name, defaulting to "minio".
|
||||||
|
func (s *Server) storageBackend() string {
|
||||||
|
if s.cfg != nil && s.cfg.Storage.Backend != "" {
|
||||||
|
return s.cfg.Storage.Backend
|
||||||
|
}
|
||||||
|
return "minio"
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleUploadItemFile accepts a multipart file upload and stores it as an item attachment.
//
// POST flow: validate item -> parse multipart form (500MB cap) -> write the
// blob to storage under a freshly generated key -> persist an ItemFile DB
// record. Responds 201 with the file record on success; 404 for unknown
// items; 503 when no storage backend is configured.
//
// NOTE(review): if the DB insert fails after the storage Put succeeds, the
// stored object is orphaned — there is no compensating delete here. Confirm
// whether the storage interface offers a Delete for cleanup.
func (s *Server) HandleUploadItemFile(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	partNumber := chi.URLParam(r, "partNumber")

	if s.storage == nil {
		writeError(w, http.StatusServiceUnavailable, "storage_unavailable", "File storage not configured")
		return
	}

	// A nil item (with nil error) means the part number is unknown.
	item, err := s.items.GetByPartNumber(ctx, partNumber)
	if err != nil {
		s.logger.Error().Err(err).Msg("failed to get item")
		writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get item")
		return
	}
	if item == nil {
		writeError(w, http.StatusNotFound, "not_found", "Item not found")
		return
	}

	// Parse multipart form (max 500MB)
	if err := r.ParseMultipartForm(500 << 20); err != nil {
		writeError(w, http.StatusBadRequest, "invalid_form", err.Error())
		return
	}

	file, header, err := r.FormFile("file")
	if err != nil {
		writeError(w, http.StatusBadRequest, "missing_file", "File is required")
		return
	}
	defer file.Close()

	// Fall back to a generic content type when the client supplied none.
	contentType := header.Header.Get("Content-Type")
	if contentType == "" {
		contentType = "application/octet-stream"
	}

	// Generate permanent key
	// The random fileID segment keeps repeated uploads of the same filename
	// from colliding under one item.
	fileID := uuid.New().String()
	permanentKey := fmt.Sprintf("items/%s/files/%s/%s", item.ID, fileID, header.Filename)

	// Write directly to storage
	result, err := s.storage.Put(ctx, permanentKey, file, header.Size, contentType)
	if err != nil {
		s.logger.Error().Err(err).Msg("failed to upload file")
		writeError(w, http.StatusInternalServerError, "upload_failed", "Failed to store file")
		return
	}

	// Create DB record
	// Size comes from the storage result (actual bytes written), not the
	// client-declared header size.
	itemFile := &db.ItemFile{
		ItemID:         item.ID,
		Filename:       header.Filename,
		ContentType:    contentType,
		Size:           result.Size,
		ObjectKey:      permanentKey,
		StorageBackend: s.storageBackend(),
	}
	if err := s.itemFiles.Create(ctx, itemFile); err != nil {
		s.logger.Error().Err(err).Msg("failed to create item file record")
		writeError(w, http.StatusInternalServerError, "internal_error", "Failed to save file record")
		return
	}

	s.logger.Info().
		Str("part_number", partNumber).
		Str("file_id", itemFile.ID).
		Str("filename", header.Filename).
		Int64("size", result.Size).
		Msg("file uploaded to item")

	writeJSON(w, http.StatusCreated, itemFileToResponse(itemFile))
}
|
||||||
|
|
||||||
|
// HandleUploadItemThumbnail accepts a multipart file upload and sets it as the item thumbnail.
|
||||||
|
func (s *Server) HandleUploadItemThumbnail(w http.ResponseWriter, r *http.Request) {
|
||||||
|
ctx := r.Context()
|
||||||
|
partNumber := chi.URLParam(r, "partNumber")
|
||||||
|
|
||||||
|
if s.storage == nil {
|
||||||
|
writeError(w, http.StatusServiceUnavailable, "storage_unavailable", "File storage not configured")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
item, err := s.items.GetByPartNumber(ctx, partNumber)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error().Err(err).Msg("failed to get item")
|
||||||
|
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get item")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if item == nil {
|
||||||
|
writeError(w, http.StatusNotFound, "not_found", "Item not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse multipart form (max 10MB for thumbnails)
|
||||||
|
if err := r.ParseMultipartForm(10 << 20); err != nil {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid_form", err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
file, header, err := r.FormFile("file")
|
||||||
|
if err != nil {
|
||||||
|
writeError(w, http.StatusBadRequest, "missing_file", "File is required")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
defer file.Close()
|
||||||
|
|
||||||
|
contentType := header.Header.Get("Content-Type")
|
||||||
|
if contentType == "" {
|
||||||
|
contentType = "image/png"
|
||||||
|
}
|
||||||
|
|
||||||
|
thumbnailKey := fmt.Sprintf("items/%s/thumbnail.png", item.ID)
|
||||||
|
|
||||||
|
if _, err := s.storage.Put(ctx, thumbnailKey, file, header.Size, contentType); err != nil {
|
||||||
|
s.logger.Error().Err(err).Msg("failed to upload thumbnail")
|
||||||
|
writeError(w, http.StatusInternalServerError, "upload_failed", "Failed to store thumbnail")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := s.items.SetThumbnailKey(ctx, item.ID, thumbnailKey); err != nil {
|
||||||
|
s.logger.Error().Err(err).Msg("failed to update thumbnail key")
|
||||||
|
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to save thumbnail")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleDownloadItemFile streams an item file attachment to the client.
|
||||||
|
func (s *Server) HandleDownloadItemFile(w http.ResponseWriter, r *http.Request) {
|
||||||
|
ctx := r.Context()
|
||||||
|
partNumber := chi.URLParam(r, "partNumber")
|
||||||
|
fileID := chi.URLParam(r, "fileId")
|
||||||
|
|
||||||
|
if s.storage == nil {
|
||||||
|
writeError(w, http.StatusServiceUnavailable, "storage_unavailable", "File storage not configured")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
item, err := s.items.GetByPartNumber(ctx, partNumber)
|
||||||
|
if err != nil || item == nil {
|
||||||
|
writeError(w, http.StatusNotFound, "not_found", "Item not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
file, err := s.itemFiles.Get(ctx, fileID)
|
||||||
|
if err != nil {
|
||||||
|
writeError(w, http.StatusNotFound, "not_found", "File not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if file.ItemID != item.ID {
|
||||||
|
writeError(w, http.StatusNotFound, "not_found", "File not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
reader, err := s.storage.Get(ctx, file.ObjectKey)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error().Err(err).Str("key", file.ObjectKey).Msg("failed to get file")
|
||||||
|
writeError(w, http.StatusInternalServerError, "download_failed", "Failed to retrieve file")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
defer reader.Close()
|
||||||
|
|
||||||
|
w.Header().Set("Content-Type", file.ContentType)
|
||||||
|
w.Header().Set("Content-Disposition", fmt.Sprintf(`attachment; filename="%s"`, file.Filename))
|
||||||
|
if file.Size > 0 {
|
||||||
|
w.Header().Set("Content-Length", strconv.FormatInt(file.Size, 10))
|
||||||
|
}
|
||||||
|
|
||||||
|
io.Copy(w, reader)
|
||||||
|
}
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ import (
|
|||||||
"encoding/json"
|
"encoding/json"
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"io"
|
||||||
"net/http"
|
"net/http"
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
@@ -19,6 +20,8 @@ import (
|
|||||||
"github.com/kindredsystems/silo/internal/config"
|
"github.com/kindredsystems/silo/internal/config"
|
||||||
"github.com/kindredsystems/silo/internal/db"
|
"github.com/kindredsystems/silo/internal/db"
|
||||||
"github.com/kindredsystems/silo/internal/jobdef"
|
"github.com/kindredsystems/silo/internal/jobdef"
|
||||||
|
"github.com/kindredsystems/silo/internal/kc"
|
||||||
|
"github.com/kindredsystems/silo/internal/modules"
|
||||||
"github.com/kindredsystems/silo/internal/partnum"
|
"github.com/kindredsystems/silo/internal/partnum"
|
||||||
"github.com/kindredsystems/silo/internal/schema"
|
"github.com/kindredsystems/silo/internal/schema"
|
||||||
"github.com/kindredsystems/silo/internal/storage"
|
"github.com/kindredsystems/silo/internal/storage"
|
||||||
@@ -36,7 +39,7 @@ type Server struct {
|
|||||||
schemas map[string]*schema.Schema
|
schemas map[string]*schema.Schema
|
||||||
schemasDir string
|
schemasDir string
|
||||||
partgen *partnum.Generator
|
partgen *partnum.Generator
|
||||||
storage *storage.Storage
|
storage storage.FileStore
|
||||||
auth *auth.Service
|
auth *auth.Service
|
||||||
sessions *scs.SessionManager
|
sessions *scs.SessionManager
|
||||||
oidc *auth.OIDCBackend
|
oidc *auth.OIDCBackend
|
||||||
@@ -46,8 +49,14 @@ type Server struct {
|
|||||||
serverState *ServerState
|
serverState *ServerState
|
||||||
dag *db.DAGRepository
|
dag *db.DAGRepository
|
||||||
jobs *db.JobRepository
|
jobs *db.JobRepository
|
||||||
|
locations *db.LocationRepository
|
||||||
jobDefs map[string]*jobdef.Definition
|
jobDefs map[string]*jobdef.Definition
|
||||||
jobDefsDir string
|
jobDefsDir string
|
||||||
|
modules *modules.Registry
|
||||||
|
cfg *config.Config
|
||||||
|
settings *db.SettingsRepository
|
||||||
|
metadata *db.ItemMetadataRepository
|
||||||
|
deps *db.ItemDependencyRepository
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewServer creates a new API server.
|
// NewServer creates a new API server.
|
||||||
@@ -56,7 +65,7 @@ func NewServer(
|
|||||||
database *db.DB,
|
database *db.DB,
|
||||||
schemas map[string]*schema.Schema,
|
schemas map[string]*schema.Schema,
|
||||||
schemasDir string,
|
schemasDir string,
|
||||||
store *storage.Storage,
|
store storage.FileStore,
|
||||||
authService *auth.Service,
|
authService *auth.Service,
|
||||||
sessionManager *scs.SessionManager,
|
sessionManager *scs.SessionManager,
|
||||||
oidcBackend *auth.OIDCBackend,
|
oidcBackend *auth.OIDCBackend,
|
||||||
@@ -65,6 +74,8 @@ func NewServer(
|
|||||||
state *ServerState,
|
state *ServerState,
|
||||||
jobDefs map[string]*jobdef.Definition,
|
jobDefs map[string]*jobdef.Definition,
|
||||||
jobDefsDir string,
|
jobDefsDir string,
|
||||||
|
registry *modules.Registry,
|
||||||
|
cfg *config.Config,
|
||||||
) *Server {
|
) *Server {
|
||||||
items := db.NewItemRepository(database)
|
items := db.NewItemRepository(database)
|
||||||
projects := db.NewProjectRepository(database)
|
projects := db.NewProjectRepository(database)
|
||||||
@@ -72,6 +83,10 @@ func NewServer(
|
|||||||
itemFiles := db.NewItemFileRepository(database)
|
itemFiles := db.NewItemFileRepository(database)
|
||||||
dag := db.NewDAGRepository(database)
|
dag := db.NewDAGRepository(database)
|
||||||
jobs := db.NewJobRepository(database)
|
jobs := db.NewJobRepository(database)
|
||||||
|
settings := db.NewSettingsRepository(database)
|
||||||
|
locations := db.NewLocationRepository(database)
|
||||||
|
metadata := db.NewItemMetadataRepository(database)
|
||||||
|
itemDeps := db.NewItemDependencyRepository(database)
|
||||||
seqStore := &dbSequenceStore{db: database, schemas: schemas}
|
seqStore := &dbSequenceStore{db: database, schemas: schemas}
|
||||||
partgen := partnum.NewGenerator(schemas, seqStore)
|
partgen := partnum.NewGenerator(schemas, seqStore)
|
||||||
|
|
||||||
@@ -94,8 +109,14 @@ func NewServer(
|
|||||||
serverState: state,
|
serverState: state,
|
||||||
dag: dag,
|
dag: dag,
|
||||||
jobs: jobs,
|
jobs: jobs,
|
||||||
|
locations: locations,
|
||||||
jobDefs: jobDefs,
|
jobDefs: jobDefs,
|
||||||
jobDefsDir: jobDefsDir,
|
jobDefsDir: jobDefsDir,
|
||||||
|
modules: registry,
|
||||||
|
cfg: cfg,
|
||||||
|
settings: settings,
|
||||||
|
metadata: metadata,
|
||||||
|
deps: itemDeps,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -166,6 +187,54 @@ func (s *Server) HandleReady(w http.ResponseWriter, r *http.Request) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// HandleGetModules returns the public module discovery response.
|
||||||
|
// No authentication required — clients call this pre-login.
|
||||||
|
func (s *Server) HandleGetModules(w http.ResponseWriter, r *http.Request) {
|
||||||
|
mods := make(map[string]any, 10)
|
||||||
|
for _, m := range s.modules.All() {
|
||||||
|
entry := map[string]any{
|
||||||
|
"enabled": s.modules.IsEnabled(m.ID),
|
||||||
|
"required": m.Required,
|
||||||
|
"name": m.Name,
|
||||||
|
}
|
||||||
|
if m.Version != "" {
|
||||||
|
entry["version"] = m.Version
|
||||||
|
}
|
||||||
|
if len(m.DependsOn) > 0 {
|
||||||
|
entry["depends_on"] = m.DependsOn
|
||||||
|
}
|
||||||
|
|
||||||
|
// Public config (non-secret) for specific modules.
|
||||||
|
switch m.ID {
|
||||||
|
case "auth":
|
||||||
|
if s.cfg != nil {
|
||||||
|
entry["config"] = map[string]any{
|
||||||
|
"local_enabled": s.cfg.Auth.Local.Enabled,
|
||||||
|
"ldap_enabled": s.cfg.Auth.LDAP.Enabled,
|
||||||
|
"oidc_enabled": s.cfg.Auth.OIDC.Enabled,
|
||||||
|
"oidc_issuer_url": s.cfg.Auth.OIDC.IssuerURL,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case "freecad":
|
||||||
|
if s.cfg != nil {
|
||||||
|
entry["config"] = map[string]any{
|
||||||
|
"uri_scheme": s.cfg.FreeCAD.URIScheme,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
mods[m.ID] = entry
|
||||||
|
}
|
||||||
|
|
||||||
|
writeJSON(w, http.StatusOK, map[string]any{
|
||||||
|
"modules": mods,
|
||||||
|
"server": map[string]any{
|
||||||
|
"version": "0.2",
|
||||||
|
"read_only": s.serverState.IsReadOnly(),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
// Schema handlers
|
// Schema handlers
|
||||||
|
|
||||||
// SchemaResponse represents a schema in API responses.
|
// SchemaResponse represents a schema in API responses.
|
||||||
@@ -1591,10 +1660,14 @@ func (s *Server) HandleUploadFile(w http.ResponseWriter, r *http.Request) {
|
|||||||
Int64("size", result.Size).
|
Int64("size", result.Size).
|
||||||
Msg("file uploaded")
|
Msg("file uploaded")
|
||||||
|
|
||||||
|
// .kc metadata extraction (best-effort)
|
||||||
|
s.extractKCMetadata(ctx, item, fileKey, rev)
|
||||||
|
|
||||||
writeJSON(w, http.StatusCreated, revisionToResponse(rev))
|
writeJSON(w, http.StatusCreated, revisionToResponse(rev))
|
||||||
}
|
}
|
||||||
|
|
||||||
// HandleDownloadFile downloads the file for a specific revision.
|
// HandleDownloadFile downloads the file for a specific revision.
|
||||||
|
// For .kc files, silo/ entries are repacked with current DB state.
|
||||||
func (s *Server) HandleDownloadFile(w http.ResponseWriter, r *http.Request) {
|
func (s *Server) HandleDownloadFile(w http.ResponseWriter, r *http.Request) {
|
||||||
ctx := r.Context()
|
ctx := r.Context()
|
||||||
partNumber := chi.URLParam(r, "partNumber")
|
partNumber := chi.URLParam(r, "partNumber")
|
||||||
@@ -1649,18 +1722,23 @@ func (s *Server) HandleDownloadFile(w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get file from storage
|
// ETag: computed from revision + metadata freshness.
|
||||||
var reader interface {
|
meta, _ := s.metadata.Get(ctx, item.ID) // nil is ok (plain .fcstd)
|
||||||
Read(p []byte) (n int, err error)
|
etag := computeETag(revision, meta)
|
||||||
Close() error
|
|
||||||
|
if match := r.Header.Get("If-None-Match"); match == etag {
|
||||||
|
w.Header().Set("ETag", etag)
|
||||||
|
w.WriteHeader(http.StatusNotModified)
|
||||||
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Get file from storage
|
||||||
|
var reader io.ReadCloser
|
||||||
if revision.FileVersion != nil && *revision.FileVersion != "" {
|
if revision.FileVersion != nil && *revision.FileVersion != "" {
|
||||||
reader, err = s.storage.GetVersion(ctx, *revision.FileKey, *revision.FileVersion)
|
reader, err = s.storage.GetVersion(ctx, *revision.FileKey, *revision.FileVersion)
|
||||||
} else {
|
} else {
|
||||||
reader, err = s.storage.Get(ctx, *revision.FileKey)
|
reader, err = s.storage.Get(ctx, *revision.FileKey)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
s.logger.Error().Err(err).Str("key", *revision.FileKey).Msg("failed to get file")
|
s.logger.Error().Err(err).Str("key", *revision.FileKey).Msg("failed to get file")
|
||||||
writeError(w, http.StatusInternalServerError, "download_failed", err.Error())
|
writeError(w, http.StatusInternalServerError, "download_failed", err.Error())
|
||||||
@@ -1668,28 +1746,37 @@ func (s *Server) HandleDownloadFile(w http.ResponseWriter, r *http.Request) {
|
|||||||
}
|
}
|
||||||
defer reader.Close()
|
defer reader.Close()
|
||||||
|
|
||||||
|
// Read entire file for potential .kc repacking.
|
||||||
|
data, err := io.ReadAll(reader)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error().Err(err).Msg("failed to read file")
|
||||||
|
writeError(w, http.StatusInternalServerError, "download_failed", "Failed to read file")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Repack silo/ entries for .kc files with indexed metadata.
|
||||||
|
output := data
|
||||||
|
if meta != nil {
|
||||||
|
if hasSilo, chkErr := kc.HasSiloDir(data); chkErr == nil && hasSilo {
|
||||||
|
if !canSkipRepack(revision, meta) {
|
||||||
|
if packed, packErr := s.packKCFile(ctx, data, item, revision, meta); packErr != nil {
|
||||||
|
s.logger.Warn().Err(packErr).Str("part_number", partNumber).Msg("kc: packing failed, serving original")
|
||||||
|
} else {
|
||||||
|
output = packed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Set response headers
|
// Set response headers
|
||||||
filename := partNumber + "_rev" + strconv.Itoa(revNum) + ".FCStd"
|
filename := partNumber + "_rev" + strconv.Itoa(revNum) + ".FCStd"
|
||||||
w.Header().Set("Content-Type", "application/octet-stream")
|
w.Header().Set("Content-Type", "application/octet-stream")
|
||||||
w.Header().Set("Content-Disposition", "attachment; filename=\""+filename+"\"")
|
w.Header().Set("Content-Disposition", "attachment; filename=\""+filename+"\"")
|
||||||
if revision.FileSize != nil {
|
w.Header().Set("Content-Length", strconv.Itoa(len(output)))
|
||||||
w.Header().Set("Content-Length", strconv.FormatInt(*revision.FileSize, 10))
|
w.Header().Set("ETag", etag)
|
||||||
}
|
w.Header().Set("Cache-Control", "private, must-revalidate")
|
||||||
|
|
||||||
// Stream file to response
|
w.Write(output)
|
||||||
buf := make([]byte, 32*1024)
|
|
||||||
for {
|
|
||||||
n, readErr := reader.Read(buf)
|
|
||||||
if n > 0 {
|
|
||||||
if _, writeErr := w.Write(buf[:n]); writeErr != nil {
|
|
||||||
s.logger.Error().Err(writeErr).Msg("failed to write response")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if readErr != nil {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// HandleDownloadLatestFile downloads the file for the latest revision.
|
// HandleDownloadLatestFile downloads the file for the latest revision.
|
||||||
|
|||||||
@@ -326,6 +326,10 @@ func (s *Server) HandleDeleteRunner(w http.ResponseWriter, r *http.Request) {
|
|||||||
// triggerJobs creates jobs for all enabled definitions matching the trigger type.
|
// triggerJobs creates jobs for all enabled definitions matching the trigger type.
|
||||||
// It applies trigger filters (e.g. item_type) before creating each job.
|
// It applies trigger filters (e.g. item_type) before creating each job.
|
||||||
func (s *Server) triggerJobs(ctx context.Context, triggerType string, itemID string, item *db.Item) {
|
func (s *Server) triggerJobs(ctx context.Context, triggerType string, itemID string, item *db.Item) {
|
||||||
|
if !s.modules.IsEnabled("jobs") {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
defs, err := s.jobs.GetDefinitionsByTrigger(ctx, triggerType)
|
defs, err := s.jobs.GetDefinitionsByTrigger(ctx, triggerType)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
s.logger.Error().Err(err).Str("trigger", triggerType).Msg("failed to get job definitions for trigger")
|
s.logger.Error().Err(err).Str("trigger", triggerType).Msg("failed to get job definitions for trigger")
|
||||||
|
|||||||
@@ -7,9 +7,11 @@ import (
|
|||||||
"net/http/httptest"
|
"net/http/httptest"
|
||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/go-chi/chi/v5"
|
"github.com/go-chi/chi/v5"
|
||||||
"github.com/kindredsystems/silo/internal/db"
|
"github.com/kindredsystems/silo/internal/db"
|
||||||
|
"github.com/kindredsystems/silo/internal/modules"
|
||||||
"github.com/kindredsystems/silo/internal/schema"
|
"github.com/kindredsystems/silo/internal/schema"
|
||||||
"github.com/kindredsystems/silo/internal/testutil"
|
"github.com/kindredsystems/silo/internal/testutil"
|
||||||
"github.com/rs/zerolog"
|
"github.com/rs/zerolog"
|
||||||
@@ -29,6 +31,7 @@ func newJobTestServer(t *testing.T) *Server {
|
|||||||
nil, nil, nil, nil, nil,
|
nil, nil, nil, nil, nil,
|
||||||
broker, state,
|
broker, state,
|
||||||
nil, "",
|
nil, "",
|
||||||
|
modules.NewRegistry(), nil,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -317,6 +320,260 @@ func TestHandleDeleteRunner(t *testing.T) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// --- Trigger integration tests ---
|
||||||
|
|
||||||
|
// newTriggerRouter builds a router with items, revisions, BOM, and jobs routes
|
||||||
|
// so that HTTP-based actions can fire triggerJobs via goroutine.
|
||||||
|
func newTriggerRouter(s *Server) http.Handler {
|
||||||
|
r := chi.NewRouter()
|
||||||
|
r.Route("/api/items", func(r chi.Router) {
|
||||||
|
r.Post("/", s.HandleCreateItem)
|
||||||
|
r.Route("/{partNumber}", func(r chi.Router) {
|
||||||
|
r.Post("/revisions", s.HandleCreateRevision)
|
||||||
|
r.Post("/bom", s.HandleAddBOMEntry)
|
||||||
|
r.Put("/bom/{childPartNumber}", s.HandleUpdateBOMEntry)
|
||||||
|
r.Delete("/bom/{childPartNumber}", s.HandleDeleteBOMEntry)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
r.Route("/api/jobs", func(r chi.Router) {
|
||||||
|
r.Get("/", s.HandleListJobs)
|
||||||
|
})
|
||||||
|
return r
|
||||||
|
}
|
||||||
|
|
||||||
|
func waitForJobs(t *testing.T, s *Server, itemID string, wantCount int) []*db.Job {
|
||||||
|
t.Helper()
|
||||||
|
// triggerJobs runs in a goroutine; poll up to 2 seconds.
|
||||||
|
for i := 0; i < 20; i++ {
|
||||||
|
jobs, err := s.jobs.ListJobs(context.Background(), "", itemID, 50, 0)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("listing jobs: %v", err)
|
||||||
|
}
|
||||||
|
if len(jobs) >= wantCount {
|
||||||
|
return jobs
|
||||||
|
}
|
||||||
|
time.Sleep(100 * time.Millisecond)
|
||||||
|
}
|
||||||
|
jobs, _ := s.jobs.ListJobs(context.Background(), "", itemID, 50, 0)
|
||||||
|
return jobs
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTriggerJobsOnRevisionCreate(t *testing.T) {
|
||||||
|
s := newJobTestServer(t)
|
||||||
|
if err := s.modules.SetEnabled("jobs", true); err != nil {
|
||||||
|
t.Fatalf("enabling jobs module: %v", err)
|
||||||
|
}
|
||||||
|
router := newTriggerRouter(s)
|
||||||
|
|
||||||
|
// Create an item.
|
||||||
|
createItemDirect(t, s, "TRIG-REV-001", "trigger test item", nil)
|
||||||
|
|
||||||
|
// Seed a job definition that triggers on revision_created.
|
||||||
|
def := &db.JobDefinitionRecord{
|
||||||
|
Name: "rev-trigger-test",
|
||||||
|
Version: 1,
|
||||||
|
TriggerType: "revision_created",
|
||||||
|
ScopeType: "item",
|
||||||
|
ComputeType: "validate",
|
||||||
|
RunnerTags: []string{"test"},
|
||||||
|
TimeoutSeconds: 60,
|
||||||
|
MaxRetries: 0,
|
||||||
|
Priority: 100,
|
||||||
|
Enabled: true,
|
||||||
|
}
|
||||||
|
if err := s.jobs.UpsertDefinition(context.Background(), def); err != nil {
|
||||||
|
t.Fatalf("seeding definition: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create a revision via HTTP (fires triggerJobs in goroutine).
|
||||||
|
body := `{"properties":{"material":"steel"},"comment":"trigger test"}`
|
||||||
|
req := authRequest(httptest.NewRequest("POST", "/api/items/TRIG-REV-001/revisions", strings.NewReader(body)))
|
||||||
|
req.Header.Set("Content-Type", "application/json")
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
if w.Code != http.StatusCreated {
|
||||||
|
t.Fatalf("create revision: expected 201, got %d: %s", w.Code, w.Body.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the item ID to filter jobs.
|
||||||
|
item, _ := s.items.GetByPartNumber(context.Background(), "TRIG-REV-001")
|
||||||
|
if item == nil {
|
||||||
|
t.Fatal("item not found after creation")
|
||||||
|
}
|
||||||
|
|
||||||
|
jobs := waitForJobs(t, s, item.ID, 1)
|
||||||
|
if len(jobs) == 0 {
|
||||||
|
t.Fatal("expected at least 1 triggered job, got 0")
|
||||||
|
}
|
||||||
|
if jobs[0].DefinitionName != "rev-trigger-test" {
|
||||||
|
t.Errorf("expected definition name rev-trigger-test, got %s", jobs[0].DefinitionName)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTriggerJobsOnBOMChange(t *testing.T) {
|
||||||
|
s := newJobTestServer(t)
|
||||||
|
if err := s.modules.SetEnabled("jobs", true); err != nil {
|
||||||
|
t.Fatalf("enabling jobs module: %v", err)
|
||||||
|
}
|
||||||
|
router := newTriggerRouter(s)
|
||||||
|
|
||||||
|
// Create parent and child items.
|
||||||
|
createItemDirect(t, s, "TRIG-BOM-P", "parent", nil)
|
||||||
|
createItemDirect(t, s, "TRIG-BOM-C", "child", nil)
|
||||||
|
|
||||||
|
// Seed a bom_changed job definition.
|
||||||
|
def := &db.JobDefinitionRecord{
|
||||||
|
Name: "bom-trigger-test",
|
||||||
|
Version: 1,
|
||||||
|
TriggerType: "bom_changed",
|
||||||
|
ScopeType: "item",
|
||||||
|
ComputeType: "validate",
|
||||||
|
RunnerTags: []string{"test"},
|
||||||
|
TimeoutSeconds: 60,
|
||||||
|
MaxRetries: 0,
|
||||||
|
Priority: 100,
|
||||||
|
Enabled: true,
|
||||||
|
}
|
||||||
|
if err := s.jobs.UpsertDefinition(context.Background(), def); err != nil {
|
||||||
|
t.Fatalf("seeding definition: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add a BOM entry via HTTP.
|
||||||
|
body := `{"child_part_number":"TRIG-BOM-C","rel_type":"component","quantity":2}`
|
||||||
|
req := authRequest(httptest.NewRequest("POST", "/api/items/TRIG-BOM-P/bom", strings.NewReader(body)))
|
||||||
|
req.Header.Set("Content-Type", "application/json")
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
if w.Code != http.StatusCreated {
|
||||||
|
t.Fatalf("add BOM entry: expected 201, got %d: %s", w.Code, w.Body.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the parent item ID.
|
||||||
|
parent, _ := s.items.GetByPartNumber(context.Background(), "TRIG-BOM-P")
|
||||||
|
if parent == nil {
|
||||||
|
t.Fatal("parent item not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
jobs := waitForJobs(t, s, parent.ID, 1)
|
||||||
|
if len(jobs) == 0 {
|
||||||
|
t.Fatal("expected at least 1 triggered job, got 0")
|
||||||
|
}
|
||||||
|
if jobs[0].DefinitionName != "bom-trigger-test" {
|
||||||
|
t.Errorf("expected definition name bom-trigger-test, got %s", jobs[0].DefinitionName)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTriggerJobsFilterMismatch(t *testing.T) {
|
||||||
|
s := newJobTestServer(t)
|
||||||
|
if err := s.modules.SetEnabled("jobs", true); err != nil {
|
||||||
|
t.Fatalf("enabling jobs module: %v", err)
|
||||||
|
}
|
||||||
|
router := newTriggerRouter(s)
|
||||||
|
|
||||||
|
// Create a "part" type item (not "assembly").
|
||||||
|
createItemDirect(t, s, "TRIG-FILT-P", "filter parent", nil)
|
||||||
|
createItemDirect(t, s, "TRIG-FILT-C", "filter child", nil)
|
||||||
|
|
||||||
|
// Seed a definition that only triggers for assembly items.
|
||||||
|
def := &db.JobDefinitionRecord{
|
||||||
|
Name: "assembly-only-test",
|
||||||
|
Version: 1,
|
||||||
|
TriggerType: "bom_changed",
|
||||||
|
ScopeType: "item",
|
||||||
|
ComputeType: "validate",
|
||||||
|
RunnerTags: []string{"test"},
|
||||||
|
TimeoutSeconds: 60,
|
||||||
|
MaxRetries: 0,
|
||||||
|
Priority: 100,
|
||||||
|
Enabled: true,
|
||||||
|
Definition: map[string]any{
|
||||||
|
"trigger": map[string]any{
|
||||||
|
"filter": map[string]any{
|
||||||
|
"item_type": "assembly",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
if err := s.jobs.UpsertDefinition(context.Background(), def); err != nil {
|
||||||
|
t.Fatalf("seeding definition: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add a BOM entry on a "part" item (should NOT match assembly filter).
|
||||||
|
body := `{"child_part_number":"TRIG-FILT-C","rel_type":"component","quantity":1}`
|
||||||
|
req := authRequest(httptest.NewRequest("POST", "/api/items/TRIG-FILT-P/bom", strings.NewReader(body)))
|
||||||
|
req.Header.Set("Content-Type", "application/json")
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
if w.Code != http.StatusCreated {
|
||||||
|
t.Fatalf("add BOM entry: expected 201, got %d: %s", w.Code, w.Body.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wait briefly, then verify no jobs were created.
|
||||||
|
parent, _ := s.items.GetByPartNumber(context.Background(), "TRIG-FILT-P")
|
||||||
|
time.Sleep(500 * time.Millisecond)
|
||||||
|
|
||||||
|
jobs, err := s.jobs.ListJobs(context.Background(), "", parent.ID, 50, 0)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("listing jobs: %v", err)
|
||||||
|
}
|
||||||
|
if len(jobs) != 0 {
|
||||||
|
t.Errorf("expected 0 jobs (filter mismatch), got %d", len(jobs))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTriggerJobsModuleDisabled(t *testing.T) {
|
||||||
|
s := newJobTestServer(t)
|
||||||
|
// Jobs module is disabled by default in NewRegistry().
|
||||||
|
router := newTriggerRouter(s)
|
||||||
|
|
||||||
|
// Create items.
|
||||||
|
createItemDirect(t, s, "TRIG-DIS-P", "disabled parent", nil)
|
||||||
|
createItemDirect(t, s, "TRIG-DIS-C", "disabled child", nil)
|
||||||
|
|
||||||
|
// Seed a bom_changed definition (it exists in DB but module is off).
|
||||||
|
def := &db.JobDefinitionRecord{
|
||||||
|
Name: "disabled-trigger-test",
|
||||||
|
Version: 1,
|
||||||
|
TriggerType: "bom_changed",
|
||||||
|
ScopeType: "item",
|
||||||
|
ComputeType: "validate",
|
||||||
|
RunnerTags: []string{"test"},
|
||||||
|
TimeoutSeconds: 60,
|
||||||
|
MaxRetries: 0,
|
||||||
|
Priority: 100,
|
||||||
|
Enabled: true,
|
||||||
|
}
|
||||||
|
if err := s.jobs.UpsertDefinition(context.Background(), def); err != nil {
|
||||||
|
t.Fatalf("seeding definition: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add a BOM entry with jobs module disabled.
|
||||||
|
body := `{"child_part_number":"TRIG-DIS-C","rel_type":"component","quantity":1}`
|
||||||
|
req := authRequest(httptest.NewRequest("POST", "/api/items/TRIG-DIS-P/bom", strings.NewReader(body)))
|
||||||
|
req.Header.Set("Content-Type", "application/json")
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
if w.Code != http.StatusCreated {
|
||||||
|
t.Fatalf("add BOM entry: expected 201, got %d: %s", w.Code, w.Body.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wait briefly, then verify no jobs were created.
|
||||||
|
parent, _ := s.items.GetByPartNumber(context.Background(), "TRIG-DIS-P")
|
||||||
|
time.Sleep(500 * time.Millisecond)
|
||||||
|
|
||||||
|
jobs, err := s.jobs.ListJobs(context.Background(), "", parent.ID, 50, 0)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("listing jobs: %v", err)
|
||||||
|
}
|
||||||
|
if len(jobs) != 0 {
|
||||||
|
t.Errorf("expected 0 jobs (module disabled), got %d", len(jobs))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func TestGenerateRunnerToken(t *testing.T) {
|
func TestGenerateRunnerToken(t *testing.T) {
|
||||||
raw, hash, prefix := generateRunnerToken()
|
raw, hash, prefix := generateRunnerToken()
|
||||||
|
|
||||||
|
|||||||
234
internal/api/location_handlers.go
Normal file
234
internal/api/location_handlers.go
Normal file
@@ -0,0 +1,234 @@
|
|||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/go-chi/chi/v5"
|
||||||
|
"github.com/kindredsystems/silo/internal/db"
|
||||||
|
)
|
||||||
|
|
||||||
|
// LocationResponse is the API representation of a location.
|
||||||
|
type LocationResponse struct {
|
||||||
|
ID string `json:"id"`
|
||||||
|
Path string `json:"path"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
ParentID *string `json:"parent_id,omitempty"`
|
||||||
|
LocationType string `json:"location_type"`
|
||||||
|
Depth int `json:"depth"`
|
||||||
|
Metadata map[string]any `json:"metadata,omitempty"`
|
||||||
|
CreatedAt string `json:"created_at"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateLocationRequest represents a request to create a location.
|
||||||
|
type CreateLocationRequest struct {
|
||||||
|
Path string `json:"path"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
LocationType string `json:"location_type"`
|
||||||
|
Metadata map[string]any `json:"metadata,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateLocationRequest represents a request to update a location.
|
||||||
|
type UpdateLocationRequest struct {
|
||||||
|
Name string `json:"name"`
|
||||||
|
LocationType string `json:"location_type"`
|
||||||
|
Metadata map[string]any `json:"metadata,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func locationToResponse(loc *db.Location) LocationResponse {
|
||||||
|
return LocationResponse{
|
||||||
|
ID: loc.ID,
|
||||||
|
Path: loc.Path,
|
||||||
|
Name: loc.Name,
|
||||||
|
ParentID: loc.ParentID,
|
||||||
|
LocationType: loc.LocationType,
|
||||||
|
Depth: loc.Depth,
|
||||||
|
Metadata: loc.Metadata,
|
||||||
|
CreatedAt: loc.CreatedAt.Format("2006-01-02T15:04:05Z07:00"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleListLocations lists all locations. If ?tree={path} is set, returns that
|
||||||
|
// subtree. If ?root=true, returns only root-level locations (depth 0).
|
||||||
|
func (s *Server) HandleListLocations(w http.ResponseWriter, r *http.Request) {
|
||||||
|
ctx := r.Context()
|
||||||
|
|
||||||
|
treePath := r.URL.Query().Get("tree")
|
||||||
|
if treePath != "" {
|
||||||
|
locs, err := s.locations.GetTree(ctx, treePath)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error().Err(err).Str("tree", treePath).Msg("failed to get location tree")
|
||||||
|
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get location tree")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
writeJSON(w, http.StatusOK, locationsToResponse(locs))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
locs, err := s.locations.List(ctx)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error().Err(err).Msg("failed to list locations")
|
||||||
|
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to list locations")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
writeJSON(w, http.StatusOK, locationsToResponse(locs))
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleCreateLocation creates a new location.
|
||||||
|
func (s *Server) HandleCreateLocation(w http.ResponseWriter, r *http.Request) {
|
||||||
|
ctx := r.Context()
|
||||||
|
|
||||||
|
var req CreateLocationRequest
|
||||||
|
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid_json", err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if req.Path == "" {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid_request", "Path is required")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if req.Name == "" {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid_request", "Name is required")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if req.LocationType == "" {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid_request", "Location type is required")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Normalize: trim slashes
|
||||||
|
req.Path = strings.Trim(req.Path, "/")
|
||||||
|
|
||||||
|
loc := &db.Location{
|
||||||
|
Path: req.Path,
|
||||||
|
Name: req.Name,
|
||||||
|
LocationType: req.LocationType,
|
||||||
|
Metadata: req.Metadata,
|
||||||
|
}
|
||||||
|
if loc.Metadata == nil {
|
||||||
|
loc.Metadata = map[string]any{}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := s.locations.Create(ctx, loc); err != nil {
|
||||||
|
if strings.Contains(err.Error(), "parent location") || strings.Contains(err.Error(), "does not exist") {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid_parent", err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if strings.Contains(err.Error(), "duplicate key") || strings.Contains(err.Error(), "unique") {
|
||||||
|
writeError(w, http.StatusConflict, "already_exists", "Location path already exists")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
s.logger.Error().Err(err).Str("path", req.Path).Msg("failed to create location")
|
||||||
|
writeError(w, http.StatusInternalServerError, "create_failed", err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
writeJSON(w, http.StatusCreated, locationToResponse(loc))
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleGetLocation retrieves a location by path. The path is the rest of the
|
||||||
|
// URL after /api/locations/, which chi captures as a wildcard.
|
||||||
|
func (s *Server) HandleGetLocation(w http.ResponseWriter, r *http.Request) {
|
||||||
|
ctx := r.Context()
|
||||||
|
path := strings.Trim(chi.URLParam(r, "*"), "/")
|
||||||
|
|
||||||
|
if path == "" {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid_request", "Location path is required")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
loc, err := s.locations.GetByPath(ctx, path)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error().Err(err).Str("path", path).Msg("failed to get location")
|
||||||
|
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get location")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if loc == nil {
|
||||||
|
writeError(w, http.StatusNotFound, "not_found", "Location not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
writeJSON(w, http.StatusOK, locationToResponse(loc))
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleUpdateLocation updates a location by path.
|
||||||
|
func (s *Server) HandleUpdateLocation(w http.ResponseWriter, r *http.Request) {
|
||||||
|
ctx := r.Context()
|
||||||
|
path := strings.Trim(chi.URLParam(r, "*"), "/")
|
||||||
|
|
||||||
|
if path == "" {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid_request", "Location path is required")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req UpdateLocationRequest
|
||||||
|
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid_json", err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if req.Name == "" {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid_request", "Name is required")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if req.LocationType == "" {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid_request", "Location type is required")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
meta := req.Metadata
|
||||||
|
if meta == nil {
|
||||||
|
meta = map[string]any{}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := s.locations.Update(ctx, path, req.Name, req.LocationType, meta); err != nil {
|
||||||
|
if strings.Contains(err.Error(), "not found") {
|
||||||
|
writeError(w, http.StatusNotFound, "not_found", "Location not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
s.logger.Error().Err(err).Str("path", path).Msg("failed to update location")
|
||||||
|
writeError(w, http.StatusInternalServerError, "update_failed", err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
loc, _ := s.locations.GetByPath(ctx, path)
|
||||||
|
writeJSON(w, http.StatusOK, locationToResponse(loc))
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleDeleteLocation deletes a location by path. Rejects if inventory exists.
|
||||||
|
func (s *Server) HandleDeleteLocation(w http.ResponseWriter, r *http.Request) {
|
||||||
|
ctx := r.Context()
|
||||||
|
path := strings.Trim(chi.URLParam(r, "*"), "/")
|
||||||
|
|
||||||
|
if path == "" {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid_request", "Location path is required")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := s.locations.Delete(ctx, path); err != nil {
|
||||||
|
if strings.Contains(err.Error(), "inventory record") {
|
||||||
|
writeError(w, http.StatusConflict, "has_inventory", err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if strings.Contains(err.Error(), "not found") {
|
||||||
|
writeError(w, http.StatusNotFound, "not_found", "Location not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
s.logger.Error().Err(err).Str("path", path).Msg("failed to delete location")
|
||||||
|
writeError(w, http.StatusInternalServerError, "delete_failed", err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
}
|
||||||
|
|
||||||
|
func locationsToResponse(locs []*db.Location) []LocationResponse {
|
||||||
|
result := make([]LocationResponse, len(locs))
|
||||||
|
for i, l := range locs {
|
||||||
|
result[i] = locationToResponse(l)
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
323
internal/api/location_handlers_test.go
Normal file
323
internal/api/location_handlers_test.go
Normal file
@@ -0,0 +1,323 @@
|
|||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/go-chi/chi/v5"
|
||||||
|
)
|
||||||
|
|
||||||
|
func newLocationRouter(s *Server) http.Handler {
|
||||||
|
r := chi.NewRouter()
|
||||||
|
r.Get("/api/locations", s.HandleListLocations)
|
||||||
|
r.Post("/api/locations", s.HandleCreateLocation)
|
||||||
|
r.Get("/api/locations/*", s.HandleGetLocation)
|
||||||
|
r.Put("/api/locations/*", s.HandleUpdateLocation)
|
||||||
|
r.Delete("/api/locations/*", s.HandleDeleteLocation)
|
||||||
|
return r
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleListLocationsEmpty(t *testing.T) {
|
||||||
|
s := newTestServer(t)
|
||||||
|
router := newLocationRouter(s)
|
||||||
|
|
||||||
|
req := httptest.NewRequest("GET", "/api/locations", nil)
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
if w.Code != http.StatusOK {
|
||||||
|
t.Fatalf("status: got %d, want %d; body: %s", w.Code, http.StatusOK, w.Body.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
var locs []LocationResponse
|
||||||
|
if err := json.Unmarshal(w.Body.Bytes(), &locs); err != nil {
|
||||||
|
t.Fatalf("decoding response: %v", err)
|
||||||
|
}
|
||||||
|
if len(locs) != 0 {
|
||||||
|
t.Fatalf("expected 0 locations, got %d", len(locs))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleCreateAndGetLocation(t *testing.T) {
|
||||||
|
s := newTestServer(t)
|
||||||
|
router := newLocationRouter(s)
|
||||||
|
|
||||||
|
// Create root location
|
||||||
|
body := `{"path": "lab", "name": "Lab", "location_type": "building"}`
|
||||||
|
req := httptest.NewRequest("POST", "/api/locations", strings.NewReader(body))
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
if w.Code != http.StatusCreated {
|
||||||
|
t.Fatalf("create status: got %d, want %d; body: %s", w.Code, http.StatusCreated, w.Body.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
var created LocationResponse
|
||||||
|
if err := json.Unmarshal(w.Body.Bytes(), &created); err != nil {
|
||||||
|
t.Fatalf("decoding create response: %v", err)
|
||||||
|
}
|
||||||
|
if created.Path != "lab" {
|
||||||
|
t.Errorf("path: got %q, want %q", created.Path, "lab")
|
||||||
|
}
|
||||||
|
if created.Name != "Lab" {
|
||||||
|
t.Errorf("name: got %q, want %q", created.Name, "Lab")
|
||||||
|
}
|
||||||
|
if created.Depth != 0 {
|
||||||
|
t.Errorf("depth: got %d, want 0", created.Depth)
|
||||||
|
}
|
||||||
|
if created.ID == "" {
|
||||||
|
t.Error("expected ID to be set")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get by path
|
||||||
|
req = httptest.NewRequest("GET", "/api/locations/lab", nil)
|
||||||
|
w = httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
if w.Code != http.StatusOK {
|
||||||
|
t.Fatalf("get status: got %d, want %d; body: %s", w.Code, http.StatusOK, w.Body.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
var got LocationResponse
|
||||||
|
if err := json.Unmarshal(w.Body.Bytes(), &got); err != nil {
|
||||||
|
t.Fatalf("decoding get response: %v", err)
|
||||||
|
}
|
||||||
|
if got.ID != created.ID {
|
||||||
|
t.Errorf("ID mismatch: got %q, want %q", got.ID, created.ID)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleCreateNestedLocation(t *testing.T) {
|
||||||
|
s := newTestServer(t)
|
||||||
|
router := newLocationRouter(s)
|
||||||
|
|
||||||
|
// Create root
|
||||||
|
body := `{"path": "warehouse", "name": "Warehouse", "location_type": "building"}`
|
||||||
|
req := httptest.NewRequest("POST", "/api/locations", strings.NewReader(body))
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
if w.Code != http.StatusCreated {
|
||||||
|
t.Fatalf("create root: got %d; body: %s", w.Code, w.Body.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create child
|
||||||
|
body = `{"path": "warehouse/shelf-a", "name": "Shelf A", "location_type": "shelf"}`
|
||||||
|
req = httptest.NewRequest("POST", "/api/locations", strings.NewReader(body))
|
||||||
|
w = httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
if w.Code != http.StatusCreated {
|
||||||
|
t.Fatalf("create child: got %d; body: %s", w.Code, w.Body.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
var child LocationResponse
|
||||||
|
json.Unmarshal(w.Body.Bytes(), &child)
|
||||||
|
if child.Depth != 1 {
|
||||||
|
t.Errorf("child depth: got %d, want 1", child.Depth)
|
||||||
|
}
|
||||||
|
if child.ParentID == nil {
|
||||||
|
t.Error("expected parent_id to be set")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create grandchild
|
||||||
|
body = `{"path": "warehouse/shelf-a/bin-3", "name": "Bin 3", "location_type": "bin"}`
|
||||||
|
req = httptest.NewRequest("POST", "/api/locations", strings.NewReader(body))
|
||||||
|
w = httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
if w.Code != http.StatusCreated {
|
||||||
|
t.Fatalf("create grandchild: got %d; body: %s", w.Code, w.Body.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
var gc LocationResponse
|
||||||
|
json.Unmarshal(w.Body.Bytes(), &gc)
|
||||||
|
if gc.Depth != 2 {
|
||||||
|
t.Errorf("grandchild depth: got %d, want 2", gc.Depth)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get nested path
|
||||||
|
req = httptest.NewRequest("GET", "/api/locations/warehouse/shelf-a/bin-3", nil)
|
||||||
|
w = httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
if w.Code != http.StatusOK {
|
||||||
|
t.Fatalf("get nested: got %d; body: %s", w.Code, w.Body.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleCreateLocationMissingParent(t *testing.T) {
|
||||||
|
s := newTestServer(t)
|
||||||
|
router := newLocationRouter(s)
|
||||||
|
|
||||||
|
body := `{"path": "nonexistent/child", "name": "Child", "location_type": "shelf"}`
|
||||||
|
req := httptest.NewRequest("POST", "/api/locations", strings.NewReader(body))
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
if w.Code != http.StatusBadRequest {
|
||||||
|
t.Fatalf("expected 400, got %d; body: %s", w.Code, w.Body.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleUpdateLocation(t *testing.T) {
|
||||||
|
s := newTestServer(t)
|
||||||
|
router := newLocationRouter(s)
|
||||||
|
|
||||||
|
// Create
|
||||||
|
body := `{"path": "office", "name": "Office", "location_type": "room"}`
|
||||||
|
req := httptest.NewRequest("POST", "/api/locations", strings.NewReader(body))
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
if w.Code != http.StatusCreated {
|
||||||
|
t.Fatalf("create: got %d; body: %s", w.Code, w.Body.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update
|
||||||
|
body = `{"name": "Main Office", "location_type": "building", "metadata": {"floor": 2}}`
|
||||||
|
req = httptest.NewRequest("PUT", "/api/locations/office", strings.NewReader(body))
|
||||||
|
w = httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
if w.Code != http.StatusOK {
|
||||||
|
t.Fatalf("update: got %d; body: %s", w.Code, w.Body.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
var updated LocationResponse
|
||||||
|
json.Unmarshal(w.Body.Bytes(), &updated)
|
||||||
|
if updated.Name != "Main Office" {
|
||||||
|
t.Errorf("name: got %q, want %q", updated.Name, "Main Office")
|
||||||
|
}
|
||||||
|
if updated.LocationType != "building" {
|
||||||
|
t.Errorf("type: got %q, want %q", updated.LocationType, "building")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleDeleteLocation(t *testing.T) {
|
||||||
|
s := newTestServer(t)
|
||||||
|
router := newLocationRouter(s)
|
||||||
|
|
||||||
|
// Create
|
||||||
|
body := `{"path": "temp", "name": "Temp", "location_type": "area"}`
|
||||||
|
req := httptest.NewRequest("POST", "/api/locations", strings.NewReader(body))
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
if w.Code != http.StatusCreated {
|
||||||
|
t.Fatalf("create: got %d; body: %s", w.Code, w.Body.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete
|
||||||
|
req = httptest.NewRequest("DELETE", "/api/locations/temp", nil)
|
||||||
|
w = httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
if w.Code != http.StatusNoContent {
|
||||||
|
t.Fatalf("delete: got %d, want %d; body: %s", w.Code, http.StatusNoContent, w.Body.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify gone
|
||||||
|
req = httptest.NewRequest("GET", "/api/locations/temp", nil)
|
||||||
|
w = httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
if w.Code != http.StatusNotFound {
|
||||||
|
t.Fatalf("get after delete: got %d, want %d", w.Code, http.StatusNotFound)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleDeleteLocationNotFound(t *testing.T) {
|
||||||
|
s := newTestServer(t)
|
||||||
|
router := newLocationRouter(s)
|
||||||
|
|
||||||
|
req := httptest.NewRequest("DELETE", "/api/locations/doesnotexist", nil)
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
if w.Code != http.StatusNotFound {
|
||||||
|
t.Fatalf("delete missing: got %d, want %d; body: %s", w.Code, http.StatusNotFound, w.Body.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleListLocationsTree(t *testing.T) {
|
||||||
|
s := newTestServer(t)
|
||||||
|
router := newLocationRouter(s)
|
||||||
|
|
||||||
|
// Create hierarchy
|
||||||
|
for _, loc := range []string{
|
||||||
|
`{"path": "site", "name": "Site", "location_type": "site"}`,
|
||||||
|
`{"path": "site/bldg", "name": "Building", "location_type": "building"}`,
|
||||||
|
`{"path": "site/bldg/room1", "name": "Room 1", "location_type": "room"}`,
|
||||||
|
`{"path": "other", "name": "Other", "location_type": "site"}`,
|
||||||
|
} {
|
||||||
|
req := httptest.NewRequest("POST", "/api/locations", strings.NewReader(loc))
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
if w.Code != http.StatusCreated {
|
||||||
|
t.Fatalf("create: got %d; body: %s", w.Code, w.Body.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// List tree under "site"
|
||||||
|
req := httptest.NewRequest("GET", "/api/locations?tree=site", nil)
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
if w.Code != http.StatusOK {
|
||||||
|
t.Fatalf("tree: got %d; body: %s", w.Code, w.Body.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
var locs []LocationResponse
|
||||||
|
json.Unmarshal(w.Body.Bytes(), &locs)
|
||||||
|
if len(locs) != 3 {
|
||||||
|
t.Fatalf("tree count: got %d, want 3 (site + bldg + room1)", len(locs))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Full list should have 4
|
||||||
|
req = httptest.NewRequest("GET", "/api/locations", nil)
|
||||||
|
w = httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
json.Unmarshal(w.Body.Bytes(), &locs)
|
||||||
|
if len(locs) != 4 {
|
||||||
|
t.Fatalf("full list: got %d, want 4", len(locs))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleCreateLocationDuplicate(t *testing.T) {
|
||||||
|
s := newTestServer(t)
|
||||||
|
router := newLocationRouter(s)
|
||||||
|
|
||||||
|
body := `{"path": "dup", "name": "Dup", "location_type": "area"}`
|
||||||
|
req := httptest.NewRequest("POST", "/api/locations", strings.NewReader(body))
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
if w.Code != http.StatusCreated {
|
||||||
|
t.Fatalf("first create: got %d; body: %s", w.Code, w.Body.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Duplicate
|
||||||
|
req = httptest.NewRequest("POST", "/api/locations", strings.NewReader(body))
|
||||||
|
w = httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
if w.Code != http.StatusConflict {
|
||||||
|
t.Fatalf("duplicate: got %d, want %d; body: %s", w.Code, http.StatusConflict, w.Body.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleCreateLocationValidation(t *testing.T) {
|
||||||
|
s := newTestServer(t)
|
||||||
|
router := newLocationRouter(s)
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
body string
|
||||||
|
}{
|
||||||
|
{"missing path", `{"name": "X", "location_type": "area"}`},
|
||||||
|
{"missing name", `{"path": "x", "location_type": "area"}`},
|
||||||
|
{"missing type", `{"path": "x", "name": "X"}`},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tc := range tests {
|
||||||
|
t.Run(tc.name, func(t *testing.T) {
|
||||||
|
req := httptest.NewRequest("POST", "/api/locations", strings.NewReader(tc.body))
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
if w.Code != http.StatusBadRequest {
|
||||||
|
t.Fatalf("got %d, want 400; body: %s", w.Code, w.Body.String())
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
451
internal/api/metadata_handlers.go
Normal file
451
internal/api/metadata_handlers.go
Normal file
@@ -0,0 +1,451 @@
|
|||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/go-chi/chi/v5"
|
||||||
|
"github.com/kindredsystems/silo/internal/auth"
|
||||||
|
"github.com/kindredsystems/silo/internal/db"
|
||||||
|
"github.com/kindredsystems/silo/internal/kc"
|
||||||
|
)
|
||||||
|
|
||||||
|
// validTransitions defines allowed lifecycle state transitions for Phase 1.
// Keys are current states; values list the states reachable from each.
// "obsolete" is terminal (no outgoing transitions), and any state missing
// from the map permits no transitions at all (lookup yields a nil slice).
var validTransitions = map[string][]string{
	"draft":    {"review"},
	"review":   {"draft", "released"},
	"released": {"obsolete"},
	"obsolete": {},
}
|
||||||
|
|
||||||
|
// MetadataResponse is the JSON representation returned by GET /metadata.
type MetadataResponse struct {
	// SchemaName names the schema the fields conform to; nil when none was
	// recorded for the item.
	SchemaName *string `json:"schema_name"`
	// LifecycleState is the item's current lifecycle state (e.g. "draft").
	LifecycleState string `json:"lifecycle_state"`
	// Tags is the item's tag set.
	Tags []string `json:"tags"`
	// Fields holds the free-form indexed metadata key/value pairs.
	Fields map[string]any `json:"fields"`
	// Manifest carries manifest details when any were indexed; omitted
	// from the JSON otherwise.
	Manifest *ManifestInfo `json:"manifest,omitempty"`
	// UpdatedAt is the last-update timestamp formatted in UTC
	// ("2006-01-02T15:04:05Z").
	UpdatedAt string `json:"updated_at"`
	// UpdatedBy is the username of the last editor, when known.
	UpdatedBy *string `json:"updated_by,omitempty"`
}
|
||||||
|
|
||||||
|
// ManifestInfo is the manifest subset included in MetadataResponse.
// Every field is optional and omitted from the JSON when nil.
type ManifestInfo struct {
	// UUID is the item identifier declared in the file's manifest.
	UUID *string `json:"uuid,omitempty"`
	// SiloInstance identifies the silo instance named in the manifest.
	SiloInstance *string `json:"silo_instance,omitempty"`
	// RevisionHash is the manifest's revision content hash.
	RevisionHash *string `json:"revision_hash,omitempty"`
	// KCVersion is the .kc format version declared in the manifest.
	KCVersion *string `json:"kc_version,omitempty"`
}
|
||||||
|
|
||||||
|
func metadataToResponse(m *db.ItemMetadata) MetadataResponse {
|
||||||
|
resp := MetadataResponse{
|
||||||
|
SchemaName: m.SchemaName,
|
||||||
|
LifecycleState: m.LifecycleState,
|
||||||
|
Tags: m.Tags,
|
||||||
|
Fields: m.Fields,
|
||||||
|
UpdatedAt: m.UpdatedAt.UTC().Format("2006-01-02T15:04:05Z"),
|
||||||
|
UpdatedBy: m.UpdatedBy,
|
||||||
|
}
|
||||||
|
if m.ManifestUUID != nil || m.SiloInstance != nil || m.RevisionHash != nil || m.KCVersion != nil {
|
||||||
|
resp.Manifest = &ManifestInfo{
|
||||||
|
UUID: m.ManifestUUID,
|
||||||
|
SiloInstance: m.SiloInstance,
|
||||||
|
RevisionHash: m.RevisionHash,
|
||||||
|
KCVersion: m.KCVersion,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return resp
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleGetMetadata returns indexed metadata for an item.
|
||||||
|
// GET /api/items/{partNumber}/metadata
|
||||||
|
func (s *Server) HandleGetMetadata(w http.ResponseWriter, r *http.Request) {
|
||||||
|
ctx := r.Context()
|
||||||
|
partNumber := chi.URLParam(r, "partNumber")
|
||||||
|
|
||||||
|
item, err := s.items.GetByPartNumber(ctx, partNumber)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error().Err(err).Msg("failed to get item")
|
||||||
|
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get item")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if item == nil {
|
||||||
|
writeError(w, http.StatusNotFound, "not_found", "Item not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
meta, err := s.metadata.Get(ctx, item.ID)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error().Err(err).Msg("failed to get metadata")
|
||||||
|
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get metadata")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if meta == nil {
|
||||||
|
writeError(w, http.StatusNotFound, "not_found", "No metadata indexed for this item")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
writeJSON(w, http.StatusOK, metadataToResponse(meta))
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleUpdateMetadata merges fields into the metadata JSONB.
|
||||||
|
// PUT /api/items/{partNumber}/metadata
|
||||||
|
func (s *Server) HandleUpdateMetadata(w http.ResponseWriter, r *http.Request) {
|
||||||
|
ctx := r.Context()
|
||||||
|
partNumber := chi.URLParam(r, "partNumber")
|
||||||
|
|
||||||
|
item, err := s.items.GetByPartNumber(ctx, partNumber)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error().Err(err).Msg("failed to get item")
|
||||||
|
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get item")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if item == nil {
|
||||||
|
writeError(w, http.StatusNotFound, "not_found", "Item not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req struct {
|
||||||
|
Fields map[string]any `json:"fields"`
|
||||||
|
}
|
||||||
|
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid_body", "Invalid JSON body")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if len(req.Fields) == 0 {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid_body", "Fields must not be empty")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
username := ""
|
||||||
|
if user := auth.UserFromContext(ctx); user != nil {
|
||||||
|
username = user.Username
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := s.metadata.UpdateFields(ctx, item.ID, req.Fields, username); err != nil {
|
||||||
|
s.logger.Error().Err(err).Msg("failed to update metadata fields")
|
||||||
|
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to update metadata")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
meta, err := s.metadata.Get(ctx, item.ID)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error().Err(err).Msg("failed to read back metadata")
|
||||||
|
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to read metadata")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
s.broker.Publish("metadata.updated", mustMarshal(map[string]any{
|
||||||
|
"part_number": partNumber,
|
||||||
|
"changed_fields": fieldKeys(req.Fields),
|
||||||
|
"lifecycle_state": meta.LifecycleState,
|
||||||
|
"updated_by": username,
|
||||||
|
}))
|
||||||
|
|
||||||
|
writeJSON(w, http.StatusOK, metadataToResponse(meta))
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleUpdateLifecycle transitions the lifecycle state.
|
||||||
|
// PATCH /api/items/{partNumber}/metadata/lifecycle
|
||||||
|
func (s *Server) HandleUpdateLifecycle(w http.ResponseWriter, r *http.Request) {
|
||||||
|
ctx := r.Context()
|
||||||
|
partNumber := chi.URLParam(r, "partNumber")
|
||||||
|
|
||||||
|
item, err := s.items.GetByPartNumber(ctx, partNumber)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error().Err(err).Msg("failed to get item")
|
||||||
|
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get item")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if item == nil {
|
||||||
|
writeError(w, http.StatusNotFound, "not_found", "Item not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req struct {
|
||||||
|
State string `json:"state"`
|
||||||
|
}
|
||||||
|
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid_body", "Invalid JSON body")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if req.State == "" {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid_body", "State is required")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
meta, err := s.metadata.Get(ctx, item.ID)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error().Err(err).Msg("failed to get metadata")
|
||||||
|
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get metadata")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if meta == nil {
|
||||||
|
writeError(w, http.StatusNotFound, "not_found", "No metadata indexed for this item")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate transition
|
||||||
|
allowed := validTransitions[meta.LifecycleState]
|
||||||
|
valid := false
|
||||||
|
for _, s := range allowed {
|
||||||
|
if s == req.State {
|
||||||
|
valid = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !valid {
|
||||||
|
writeError(w, http.StatusUnprocessableEntity, "invalid_transition",
|
||||||
|
"Cannot transition from '"+meta.LifecycleState+"' to '"+req.State+"'")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
username := ""
|
||||||
|
if user := auth.UserFromContext(ctx); user != nil {
|
||||||
|
username = user.Username
|
||||||
|
}
|
||||||
|
|
||||||
|
fromState := meta.LifecycleState
|
||||||
|
if err := s.metadata.UpdateLifecycle(ctx, item.ID, req.State, username); err != nil {
|
||||||
|
s.logger.Error().Err(err).Msg("failed to update lifecycle")
|
||||||
|
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to update lifecycle")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
s.broker.Publish("metadata.lifecycle", mustMarshal(map[string]any{
|
||||||
|
"part_number": partNumber,
|
||||||
|
"from_state": fromState,
|
||||||
|
"to_state": req.State,
|
||||||
|
"updated_by": username,
|
||||||
|
}))
|
||||||
|
|
||||||
|
writeJSON(w, http.StatusOK, map[string]string{"lifecycle_state": req.State})
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleUpdateTags adds/removes tags.
|
||||||
|
// PATCH /api/items/{partNumber}/metadata/tags
|
||||||
|
func (s *Server) HandleUpdateTags(w http.ResponseWriter, r *http.Request) {
|
||||||
|
ctx := r.Context()
|
||||||
|
partNumber := chi.URLParam(r, "partNumber")
|
||||||
|
|
||||||
|
item, err := s.items.GetByPartNumber(ctx, partNumber)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error().Err(err).Msg("failed to get item")
|
||||||
|
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get item")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if item == nil {
|
||||||
|
writeError(w, http.StatusNotFound, "not_found", "Item not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req struct {
|
||||||
|
Add []string `json:"add"`
|
||||||
|
Remove []string `json:"remove"`
|
||||||
|
}
|
||||||
|
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid_body", "Invalid JSON body")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if len(req.Add) == 0 && len(req.Remove) == 0 {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid_body", "Must provide 'add' or 'remove'")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
meta, err := s.metadata.Get(ctx, item.ID)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Error().Err(err).Msg("failed to get metadata")
|
||||||
|
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to get metadata")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if meta == nil {
|
||||||
|
writeError(w, http.StatusNotFound, "not_found", "No metadata indexed for this item")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compute new tag set: (existing + add) - remove
|
||||||
|
tagSet := make(map[string]struct{})
|
||||||
|
for _, t := range meta.Tags {
|
||||||
|
tagSet[t] = struct{}{}
|
||||||
|
}
|
||||||
|
for _, t := range req.Add {
|
||||||
|
tagSet[t] = struct{}{}
|
||||||
|
}
|
||||||
|
removeSet := make(map[string]struct{})
|
||||||
|
for _, t := range req.Remove {
|
||||||
|
removeSet[t] = struct{}{}
|
||||||
|
}
|
||||||
|
var newTags []string
|
||||||
|
for t := range tagSet {
|
||||||
|
if _, removed := removeSet[t]; !removed {
|
||||||
|
newTags = append(newTags, t)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if newTags == nil {
|
||||||
|
newTags = []string{}
|
||||||
|
}
|
||||||
|
|
||||||
|
username := ""
|
||||||
|
if user := auth.UserFromContext(ctx); user != nil {
|
||||||
|
username = user.Username
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := s.metadata.SetTags(ctx, item.ID, newTags, username); err != nil {
|
||||||
|
s.logger.Error().Err(err).Msg("failed to update tags")
|
||||||
|
writeError(w, http.StatusInternalServerError, "internal_error", "Failed to update tags")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
s.broker.Publish("metadata.tags", mustMarshal(map[string]any{
|
||||||
|
"part_number": partNumber,
|
||||||
|
"added": req.Add,
|
||||||
|
"removed": req.Remove,
|
||||||
|
}))
|
||||||
|
|
||||||
|
writeJSON(w, http.StatusOK, map[string]any{"tags": newTags})
|
||||||
|
}
|
||||||
|
|
||||||
|
// extractKCMetadata attempts to extract and index silo/ metadata from an
|
||||||
|
// uploaded .kc file. Failures are logged but non-fatal for Phase 1.
|
||||||
|
func (s *Server) extractKCMetadata(ctx context.Context, item *db.Item, fileKey string, rev *db.Revision) {
|
||||||
|
if s.storage == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
reader, err := s.storage.Get(ctx, fileKey)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Warn().Err(err).Str("file_key", fileKey).Msg("kc: failed to read back file for extraction")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
defer reader.Close()
|
||||||
|
|
||||||
|
data, err := io.ReadAll(reader)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Warn().Err(err).Msg("kc: failed to read file bytes")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
result, err := kc.Extract(data)
|
||||||
|
if err != nil {
|
||||||
|
s.logger.Warn().Err(err).Str("part_number", item.PartNumber).Msg("kc: extraction failed")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if result == nil {
|
||||||
|
return // plain .fcstd, no silo/ directory
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate manifest UUID matches item
|
||||||
|
if result.Manifest != nil && result.Manifest.UUID != "" && result.Manifest.UUID != item.ID {
|
||||||
|
s.logger.Warn().
|
||||||
|
Str("manifest_uuid", result.Manifest.UUID).
|
||||||
|
Str("item_id", item.ID).
|
||||||
|
Msg("kc: manifest UUID does not match item, skipping indexing")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for no-op (revision_hash unchanged)
|
||||||
|
if result.Manifest != nil && result.Manifest.RevisionHash != "" {
|
||||||
|
existing, _ := s.metadata.Get(ctx, item.ID)
|
||||||
|
if existing != nil && existing.RevisionHash != nil && *existing.RevisionHash == result.Manifest.RevisionHash {
|
||||||
|
s.logger.Debug().Str("part_number", item.PartNumber).Msg("kc: revision_hash unchanged, skipping")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
username := ""
|
||||||
|
if rev.CreatedBy != nil {
|
||||||
|
username = *rev.CreatedBy
|
||||||
|
}
|
||||||
|
|
||||||
|
meta := &db.ItemMetadata{
|
||||||
|
ItemID: item.ID,
|
||||||
|
LifecycleState: "draft",
|
||||||
|
Fields: make(map[string]any),
|
||||||
|
Tags: []string{},
|
||||||
|
UpdatedBy: strPtr(username),
|
||||||
|
}
|
||||||
|
|
||||||
|
if result.Manifest != nil {
|
||||||
|
meta.KCVersion = strPtr(result.Manifest.KCVersion)
|
||||||
|
meta.ManifestUUID = strPtr(result.Manifest.UUID)
|
||||||
|
meta.SiloInstance = strPtr(result.Manifest.SiloInstance)
|
||||||
|
meta.RevisionHash = strPtr(result.Manifest.RevisionHash)
|
||||||
|
}
|
||||||
|
|
||||||
|
if result.Metadata != nil {
|
||||||
|
meta.SchemaName = strPtr(result.Metadata.SchemaName)
|
||||||
|
if result.Metadata.Tags != nil {
|
||||||
|
meta.Tags = result.Metadata.Tags
|
||||||
|
}
|
||||||
|
if result.Metadata.LifecycleState != "" {
|
||||||
|
meta.LifecycleState = result.Metadata.LifecycleState
|
||||||
|
}
|
||||||
|
if result.Metadata.Fields != nil {
|
||||||
|
meta.Fields = result.Metadata.Fields
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := s.metadata.Upsert(ctx, meta); err != nil {
|
||||||
|
s.logger.Warn().Err(err).Str("part_number", item.PartNumber).Msg("kc: failed to upsert metadata")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
s.broker.Publish("metadata.updated", mustMarshal(map[string]any{
|
||||||
|
"part_number": item.PartNumber,
|
||||||
|
"lifecycle_state": meta.LifecycleState,
|
||||||
|
"updated_by": username,
|
||||||
|
}))
|
||||||
|
|
||||||
|
// Index dependencies from silo/dependencies.json.
|
||||||
|
if result.Dependencies != nil {
|
||||||
|
dbDeps := make([]*db.ItemDependency, len(result.Dependencies))
|
||||||
|
for i, d := range result.Dependencies {
|
||||||
|
pn := d.PartNumber
|
||||||
|
rev := d.Revision
|
||||||
|
qty := d.Quantity
|
||||||
|
label := d.Label
|
||||||
|
rel := d.Relationship
|
||||||
|
if rel == "" {
|
||||||
|
rel = "component"
|
||||||
|
}
|
||||||
|
dbDeps[i] = &db.ItemDependency{
|
||||||
|
ParentItemID: item.ID,
|
||||||
|
ChildUUID: d.UUID,
|
||||||
|
ChildPartNumber: &pn,
|
||||||
|
ChildRevision: &rev,
|
||||||
|
Quantity: &qty,
|
||||||
|
Label: &label,
|
||||||
|
Relationship: rel,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err := s.deps.ReplaceForRevision(ctx, item.ID, rev.RevisionNumber, dbDeps); err != nil {
|
||||||
|
s.logger.Warn().Err(err).Str("part_number", item.PartNumber).Msg("kc: failed to index dependencies")
|
||||||
|
} else {
|
||||||
|
s.broker.Publish("dependencies.changed", mustMarshal(map[string]any{
|
||||||
|
"part_number": item.PartNumber,
|
||||||
|
"count": len(dbDeps),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
s.logger.Info().Str("part_number", item.PartNumber).Msg("kc: metadata indexed successfully")
|
||||||
|
}
|
||||||
|
|
||||||
|
// strPtr returns a pointer to s, or nil if s is empty.
// Useful for populating optional (*string) DB columns.
func strPtr(s string) *string {
	if s != "" {
		return &s
	}
	return nil
}
|
||||||
|
|
||||||
|
// fieldKeys returns the keys from a map.
// The order of the returned slice is unspecified (map iteration order).
func fieldKeys(m map[string]any) (keys []string) {
	keys = make([]string, 0, len(m))
	for name := range m {
		keys = append(keys, name)
	}
	return keys
}
|
||||||
@@ -183,6 +183,22 @@ func (s *Server) RequireRunnerAuth(next http.Handler) http.Handler {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// RequireModule returns middleware that rejects requests with 404 when
|
||||||
|
// the named module is not enabled.
|
||||||
|
func (s *Server) RequireModule(id string) func(http.Handler) http.Handler {
|
||||||
|
return func(next http.Handler) http.Handler {
|
||||||
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if !s.modules.IsEnabled(id) {
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
w.WriteHeader(http.StatusNotFound)
|
||||||
|
w.Write([]byte(`{"error":"module '` + id + `' is not enabled"}`))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
next.ServeHTTP(w, r)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func extractBearerToken(r *http.Request) string {
|
func extractBearerToken(r *http.Request) string {
|
||||||
h := r.Header.Get("Authorization")
|
h := r.Header.Get("Authorization")
|
||||||
if strings.HasPrefix(h, "Bearer ") {
|
if strings.HasPrefix(h, "Bearer ") {
|
||||||
|
|||||||
135
internal/api/pack_handlers.go
Normal file
135
internal/api/pack_handlers.go
Normal file
@@ -0,0 +1,135 @@
|
|||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"crypto/sha256"
|
||||||
|
"encoding/hex"
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/kindredsystems/silo/internal/db"
|
||||||
|
"github.com/kindredsystems/silo/internal/kc"
|
||||||
|
)
|
||||||
|
|
||||||
|
// packKCFile gathers DB state and repacks silo/ entries in a .kc file.
//
// data holds the raw archive bytes pulled from storage; the return value is
// the same archive with its silo/ manifest, metadata, history, and dependency
// entries rebuilt from the current database rows for item, rev, and meta.
// meta must be non-nil: its fields seed the manifest and metadata sections.
// Returns an error only when revision lookup or the final kc.Pack fails;
// a dependency query failure is logged and packed as an empty list instead.
func (s *Server) packKCFile(ctx context.Context, data []byte, item *db.Item, rev *db.Revision, meta *db.ItemMetadata) ([]byte, error) {
	manifest := &kc.Manifest{
		UUID:         item.ID,
		KCVersion:    derefStr(meta.KCVersion, "1.0"), // default when the item was never indexed
		RevisionHash: derefStr(meta.RevisionHash, ""),
		SiloInstance: derefStr(meta.SiloInstance, ""),
	}

	metadata := &kc.Metadata{
		SchemaName:     derefStr(meta.SchemaName, ""),
		Tags:           meta.Tags,
		LifecycleState: meta.LifecycleState,
		Fields:         meta.Fields,
	}

	// Build history from last 20 revisions.
	// NOTE(review): this takes the FIRST 20 entries of GetRevisions — assumes
	// the repository returns newest-first; confirm against the query's ORDER BY.
	revisions, err := s.items.GetRevisions(ctx, item.ID)
	if err != nil {
		return nil, fmt.Errorf("getting revisions: %w", err)
	}
	limit := 20
	if len(revisions) < limit {
		limit = len(revisions)
	}
	history := make([]kc.HistoryEntry, limit)
	for i, r := range revisions[:limit] {
		// Normalize nil labels so the packed JSON encodes [] rather than null.
		labels := r.Labels
		if labels == nil {
			labels = []string{}
		}
		history[i] = kc.HistoryEntry{
			RevisionNumber: r.RevisionNumber,
			CreatedAt:      r.CreatedAt.UTC().Format(time.RFC3339),
			CreatedBy:      r.CreatedBy,
			Comment:        r.Comment,
			Status:         r.Status,
			Labels:         labels,
		}
	}

	// Build dependencies from item_dependencies table.
	var deps []kc.Dependency
	dbDeps, err := s.deps.ListByItem(ctx, item.ID)
	if err != nil {
		// Best-effort: a failed dependency query degrades to an empty list
		// below rather than aborting the whole pack.
		s.logger.Warn().Err(err).Str("part_number", item.PartNumber).Msg("kc: failed to query dependencies for packing")
	} else {
		deps = make([]kc.Dependency, len(dbDeps))
		for i, d := range dbDeps {
			deps[i] = kc.Dependency{
				UUID:         d.ChildUUID,
				PartNumber:   derefStr(d.ChildPartNumber, ""),
				Revision:     derefInt(d.ChildRevision, 0),
				Quantity:     derefFloat(d.Quantity, 0),
				Label:        derefStr(d.Label, ""),
				Relationship: d.Relationship,
			}
		}
	}
	// Keep deps non-nil so the packed JSON encodes [] rather than null.
	if deps == nil {
		deps = []kc.Dependency{}
	}

	input := &kc.PackInput{
		Manifest:     manifest,
		Metadata:     metadata,
		History:      history,
		Dependencies: deps,
	}

	return kc.Pack(data, input)
}
|
||||||
|
|
||||||
|
// computeETag generates a quoted ETag from the revision number and metadata freshness.
|
||||||
|
func computeETag(rev *db.Revision, meta *db.ItemMetadata) string {
|
||||||
|
var ts int64
|
||||||
|
if meta != nil {
|
||||||
|
ts = meta.UpdatedAt.UnixNano()
|
||||||
|
} else {
|
||||||
|
ts = rev.CreatedAt.UnixNano()
|
||||||
|
}
|
||||||
|
raw := fmt.Sprintf("%d:%d", rev.RevisionNumber, ts)
|
||||||
|
h := sha256.Sum256([]byte(raw))
|
||||||
|
return `"` + hex.EncodeToString(h[:8]) + `"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// canSkipRepack returns true if the stored blob already has up-to-date silo/ data.
//
// With no metadata row the blob is treated as a plain .fcstd that needs no
// repacking. Otherwise skipping requires both the indexed revision hash to
// match the stored file's checksum and the metadata row to predate the
// revision — i.e. nothing has been edited since the silo/ entries were packed
// at upload time.
// NOTE(review): UpdatedAt.Before(CreatedAt) means metadata touched at the
// exact same instant as the revision forces a repack — confirm that edge is
// intended.
func canSkipRepack(rev *db.Revision, meta *db.ItemMetadata) bool {
	if meta == nil {
		return true // no metadata row = plain .fcstd
	}
	if meta.RevisionHash != nil && rev.FileChecksum != nil &&
		*meta.RevisionHash == *rev.FileChecksum &&
		meta.UpdatedAt.Before(rev.CreatedAt) {
		return true
	}
	return false
}
|
||||||
|
|
||||||
|
// derefStr returns the value of a *string pointer, or fallback if nil.
func derefStr(p *string, fallback string) string {
	if p == nil {
		return fallback
	}
	return *p
}
|
||||||
|
|
||||||
|
// derefInt returns the value of a *int pointer, or fallback if nil.
func derefInt(p *int, fallback int) int {
	if p == nil {
		return fallback
	}
	return *p
}
|
||||||
|
|
||||||
|
// derefFloat returns the value of a *float64 pointer, or fallback if nil.
func derefFloat(p *float64, fallback float64) float64 {
	if p == nil {
		return fallback
	}
	return *p
}
|
||||||
@@ -58,6 +58,7 @@ func NewRouter(server *Server, logger zerolog.Logger) http.Handler {
|
|||||||
r.Get("/auth/callback", server.HandleOIDCCallback)
|
r.Get("/auth/callback", server.HandleOIDCCallback)
|
||||||
|
|
||||||
// Public API endpoints (no auth required)
|
// Public API endpoints (no auth required)
|
||||||
|
r.Get("/api/modules", server.HandleGetModules)
|
||||||
r.Get("/api/auth/config", server.HandleAuthConfig)
|
r.Get("/api/auth/config", server.HandleAuthConfig)
|
||||||
|
|
||||||
// API routes (require auth, no CSRF — token auth instead)
|
// API routes (require auth, no CSRF — token auth instead)
|
||||||
@@ -101,6 +102,7 @@ func NewRouter(server *Server, logger zerolog.Logger) http.Handler {
|
|||||||
|
|
||||||
// Projects (read: viewer, write: editor)
|
// Projects (read: viewer, write: editor)
|
||||||
r.Route("/projects", func(r chi.Router) {
|
r.Route("/projects", func(r chi.Router) {
|
||||||
|
r.Use(server.RequireModule("projects"))
|
||||||
r.Get("/", server.HandleListProjects)
|
r.Get("/", server.HandleListProjects)
|
||||||
r.Get("/{code}", server.HandleGetProject)
|
r.Get("/{code}", server.HandleGetProject)
|
||||||
r.Get("/{code}/items", server.HandleGetProjectItems)
|
r.Get("/{code}/items", server.HandleGetProjectItems)
|
||||||
@@ -115,6 +117,26 @@ func NewRouter(server *Server, logger zerolog.Logger) http.Handler {
|
|||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
// Locations (read: viewer, write: editor)
|
||||||
|
r.Route("/locations", func(r chi.Router) {
|
||||||
|
r.Get("/", server.HandleListLocations)
|
||||||
|
|
||||||
|
r.Group(func(r chi.Router) {
|
||||||
|
r.Use(server.RequireWritable)
|
||||||
|
r.Use(server.RequireRole(auth.RoleEditor))
|
||||||
|
r.Post("/", server.HandleCreateLocation)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Wildcard routes for path-based lookup (e.g., /api/locations/lab/shelf-a/bin-3)
|
||||||
|
r.Get("/*", server.HandleGetLocation)
|
||||||
|
r.Group(func(r chi.Router) {
|
||||||
|
r.Use(server.RequireWritable)
|
||||||
|
r.Use(server.RequireRole(auth.RoleEditor))
|
||||||
|
r.Put("/*", server.HandleUpdateLocation)
|
||||||
|
r.Delete("/*", server.HandleDeleteLocation)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
// Items (read: viewer, write: editor)
|
// Items (read: viewer, write: editor)
|
||||||
r.Route("/items", func(r chi.Router) {
|
r.Route("/items", func(r chi.Router) {
|
||||||
r.Get("/", server.HandleListItems)
|
r.Get("/", server.HandleListItems)
|
||||||
@@ -140,6 +162,7 @@ func NewRouter(server *Server, logger zerolog.Logger) http.Handler {
|
|||||||
r.Get("/revisions/compare", server.HandleCompareRevisions)
|
r.Get("/revisions/compare", server.HandleCompareRevisions)
|
||||||
r.Get("/revisions/{revision}", server.HandleGetRevision)
|
r.Get("/revisions/{revision}", server.HandleGetRevision)
|
||||||
r.Get("/files", server.HandleListItemFiles)
|
r.Get("/files", server.HandleListItemFiles)
|
||||||
|
r.Get("/files/{fileId}/download", server.HandleDownloadItemFile)
|
||||||
r.Get("/file", server.HandleDownloadLatestFile)
|
r.Get("/file", server.HandleDownloadLatestFile)
|
||||||
r.Get("/file/{revision}", server.HandleDownloadFile)
|
r.Get("/file/{revision}", server.HandleDownloadFile)
|
||||||
r.Get("/bom", server.HandleGetBOM)
|
r.Get("/bom", server.HandleGetBOM)
|
||||||
@@ -149,11 +172,24 @@ func NewRouter(server *Server, logger zerolog.Logger) http.Handler {
|
|||||||
r.Get("/bom/where-used", server.HandleGetWhereUsed)
|
r.Get("/bom/where-used", server.HandleGetWhereUsed)
|
||||||
r.Get("/bom/export.csv", server.HandleExportBOMCSV)
|
r.Get("/bom/export.csv", server.HandleExportBOMCSV)
|
||||||
r.Get("/bom/export.ods", server.HandleExportBOMODS)
|
r.Get("/bom/export.ods", server.HandleExportBOMODS)
|
||||||
|
r.Get("/metadata", server.HandleGetMetadata)
|
||||||
|
r.Get("/dependencies", server.HandleGetDependencies)
|
||||||
|
r.Get("/dependencies/resolve", server.HandleResolveDependencies)
|
||||||
|
|
||||||
// DAG (read: viewer, write: editor)
|
// DAG (gated by dag module)
|
||||||
r.Get("/dag", server.HandleGetDAG)
|
r.Route("/dag", func(r chi.Router) {
|
||||||
r.Get("/dag/forward-cone/{nodeKey}", server.HandleGetForwardCone)
|
r.Use(server.RequireModule("dag"))
|
||||||
r.Get("/dag/dirty", server.HandleGetDirtySubgraph)
|
r.Get("/", server.HandleGetDAG)
|
||||||
|
r.Get("/forward-cone/{nodeKey}", server.HandleGetForwardCone)
|
||||||
|
r.Get("/dirty", server.HandleGetDirtySubgraph)
|
||||||
|
|
||||||
|
r.Group(func(r chi.Router) {
|
||||||
|
r.Use(server.RequireWritable)
|
||||||
|
r.Use(server.RequireRole(auth.RoleEditor))
|
||||||
|
r.Put("/", server.HandleSyncDAG)
|
||||||
|
r.Post("/mark-dirty/{nodeKey}", server.HandleMarkDirty)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
r.Group(func(r chi.Router) {
|
r.Group(func(r chi.Router) {
|
||||||
r.Use(server.RequireWritable)
|
r.Use(server.RequireWritable)
|
||||||
@@ -167,27 +203,32 @@ func NewRouter(server *Server, logger zerolog.Logger) http.Handler {
|
|||||||
r.Post("/revisions/{revision}/rollback", server.HandleRollbackRevision)
|
r.Post("/revisions/{revision}/rollback", server.HandleRollbackRevision)
|
||||||
r.Post("/file", server.HandleUploadFile)
|
r.Post("/file", server.HandleUploadFile)
|
||||||
r.Post("/files", server.HandleAssociateItemFile)
|
r.Post("/files", server.HandleAssociateItemFile)
|
||||||
|
r.Post("/files/upload", server.HandleUploadItemFile)
|
||||||
r.Delete("/files/{fileId}", server.HandleDeleteItemFile)
|
r.Delete("/files/{fileId}", server.HandleDeleteItemFile)
|
||||||
r.Put("/thumbnail", server.HandleSetItemThumbnail)
|
r.Put("/thumbnail", server.HandleSetItemThumbnail)
|
||||||
|
r.Post("/thumbnail/upload", server.HandleUploadItemThumbnail)
|
||||||
r.Post("/bom", server.HandleAddBOMEntry)
|
r.Post("/bom", server.HandleAddBOMEntry)
|
||||||
r.Post("/bom/import", server.HandleImportBOMCSV)
|
r.Post("/bom/import", server.HandleImportBOMCSV)
|
||||||
r.Post("/bom/merge", server.HandleMergeBOM)
|
r.Post("/bom/merge", server.HandleMergeBOM)
|
||||||
r.Put("/bom/{childPartNumber}", server.HandleUpdateBOMEntry)
|
r.Put("/bom/{childPartNumber}", server.HandleUpdateBOMEntry)
|
||||||
r.Delete("/bom/{childPartNumber}", server.HandleDeleteBOMEntry)
|
r.Delete("/bom/{childPartNumber}", server.HandleDeleteBOMEntry)
|
||||||
r.Put("/dag", server.HandleSyncDAG)
|
r.Put("/metadata", server.HandleUpdateMetadata)
|
||||||
r.Post("/dag/mark-dirty/{nodeKey}", server.HandleMarkDirty)
|
r.Patch("/metadata/lifecycle", server.HandleUpdateLifecycle)
|
||||||
|
r.Patch("/metadata/tags", server.HandleUpdateTags)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
// Audit (read-only, viewer role)
|
// Audit (read-only, viewer role)
|
||||||
r.Route("/audit", func(r chi.Router) {
|
r.Route("/audit", func(r chi.Router) {
|
||||||
|
r.Use(server.RequireModule("audit"))
|
||||||
r.Get("/completeness", server.HandleAuditCompleteness)
|
r.Get("/completeness", server.HandleAuditCompleteness)
|
||||||
r.Get("/completeness/{partNumber}", server.HandleAuditItemDetail)
|
r.Get("/completeness/{partNumber}", server.HandleAuditItemDetail)
|
||||||
})
|
})
|
||||||
|
|
||||||
// Integrations (read: viewer, write: editor)
|
// Integrations (read: viewer, write: editor)
|
||||||
r.Route("/integrations/odoo", func(r chi.Router) {
|
r.Route("/integrations/odoo", func(r chi.Router) {
|
||||||
|
r.Use(server.RequireModule("odoo"))
|
||||||
r.Get("/config", server.HandleGetOdooConfig)
|
r.Get("/config", server.HandleGetOdooConfig)
|
||||||
r.Get("/sync-log", server.HandleGetOdooSyncLog)
|
r.Get("/sync-log", server.HandleGetOdooSyncLog)
|
||||||
|
|
||||||
@@ -210,6 +251,7 @@ func NewRouter(server *Server, logger zerolog.Logger) http.Handler {
|
|||||||
|
|
||||||
// Jobs (read: viewer, write: editor)
|
// Jobs (read: viewer, write: editor)
|
||||||
r.Route("/jobs", func(r chi.Router) {
|
r.Route("/jobs", func(r chi.Router) {
|
||||||
|
r.Use(server.RequireModule("jobs"))
|
||||||
r.Get("/", server.HandleListJobs)
|
r.Get("/", server.HandleListJobs)
|
||||||
r.Get("/{jobID}", server.HandleGetJob)
|
r.Get("/{jobID}", server.HandleGetJob)
|
||||||
r.Get("/{jobID}/logs", server.HandleGetJobLogs)
|
r.Get("/{jobID}/logs", server.HandleGetJobLogs)
|
||||||
@@ -224,6 +266,7 @@ func NewRouter(server *Server, logger zerolog.Logger) http.Handler {
|
|||||||
|
|
||||||
// Job definitions (read: viewer, reload: admin)
|
// Job definitions (read: viewer, reload: admin)
|
||||||
r.Route("/job-definitions", func(r chi.Router) {
|
r.Route("/job-definitions", func(r chi.Router) {
|
||||||
|
r.Use(server.RequireModule("jobs"))
|
||||||
r.Get("/", server.HandleListJobDefinitions)
|
r.Get("/", server.HandleListJobDefinitions)
|
||||||
r.Get("/{name}", server.HandleGetJobDefinition)
|
r.Get("/{name}", server.HandleGetJobDefinition)
|
||||||
|
|
||||||
@@ -235,6 +278,7 @@ func NewRouter(server *Server, logger zerolog.Logger) http.Handler {
|
|||||||
|
|
||||||
// Runners (admin)
|
// Runners (admin)
|
||||||
r.Route("/runners", func(r chi.Router) {
|
r.Route("/runners", func(r chi.Router) {
|
||||||
|
r.Use(server.RequireModule("jobs"))
|
||||||
r.Use(server.RequireRole(auth.RoleAdmin))
|
r.Use(server.RequireRole(auth.RoleAdmin))
|
||||||
r.Get("/", server.HandleListRunners)
|
r.Get("/", server.HandleListRunners)
|
||||||
r.Post("/", server.HandleRegisterRunner)
|
r.Post("/", server.HandleRegisterRunner)
|
||||||
@@ -247,10 +291,20 @@ func NewRouter(server *Server, logger zerolog.Logger) http.Handler {
|
|||||||
r.Use(server.RequireRole(auth.RoleEditor))
|
r.Use(server.RequireRole(auth.RoleEditor))
|
||||||
r.Post("/generate-part-number", server.HandleGeneratePartNumber)
|
r.Post("/generate-part-number", server.HandleGeneratePartNumber)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
// Admin settings (admin only)
|
||||||
|
r.Route("/admin/settings", func(r chi.Router) {
|
||||||
|
r.Use(server.RequireRole(auth.RoleAdmin))
|
||||||
|
r.Get("/", server.HandleGetAllSettings)
|
||||||
|
r.Get("/{module}", server.HandleGetModuleSettings)
|
||||||
|
r.Put("/{module}", server.HandleUpdateModuleSettings)
|
||||||
|
r.Post("/{module}/test", server.HandleTestModuleConnectivity)
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
// Runner-facing API (runner token auth, not user auth)
|
// Runner-facing API (runner token auth, not user auth)
|
||||||
r.Route("/api/runner", func(r chi.Router) {
|
r.Route("/api/runner", func(r chi.Router) {
|
||||||
|
r.Use(server.RequireModule("jobs"))
|
||||||
r.Use(server.RequireRunnerAuth)
|
r.Use(server.RequireRunnerAuth)
|
||||||
r.Post("/heartbeat", server.HandleRunnerHeartbeat)
|
r.Post("/heartbeat", server.HandleRunnerHeartbeat)
|
||||||
r.Post("/claim", server.HandleRunnerClaim)
|
r.Post("/claim", server.HandleRunnerClaim)
|
||||||
|
|||||||
@@ -26,13 +26,13 @@ type ServerState struct {
|
|||||||
mu sync.RWMutex
|
mu sync.RWMutex
|
||||||
readOnly bool
|
readOnly bool
|
||||||
storageOK bool
|
storageOK bool
|
||||||
storage *storage.Storage
|
storage storage.FileStore
|
||||||
broker *Broker
|
broker *Broker
|
||||||
done chan struct{}
|
done chan struct{}
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewServerState creates a new server state tracker.
|
// NewServerState creates a new server state tracker.
|
||||||
func NewServerState(logger zerolog.Logger, store *storage.Storage, broker *Broker) *ServerState {
|
func NewServerState(logger zerolog.Logger, store storage.FileStore, broker *Broker) *ServerState {
|
||||||
return &ServerState{
|
return &ServerState{
|
||||||
logger: logger.With().Str("component", "server-state").Logger(),
|
logger: logger.With().Str("component", "server-state").Logger(),
|
||||||
storageOK: store != nil, // assume healthy if configured
|
storageOK: store != nil, // assume healthy if configured
|
||||||
|
|||||||
316
internal/api/settings_handlers.go
Normal file
316
internal/api/settings_handlers.go
Normal file
@@ -0,0 +1,316 @@
|
|||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/go-chi/chi/v5"
|
||||||
|
"github.com/kindredsystems/silo/internal/auth"
|
||||||
|
)
|
||||||
|
|
||||||
|
// HandleGetAllSettings returns the full config grouped by module with secrets redacted.
//
// Response is a JSON object keyed by module id ("core", "storage", ...). The
// per-module build helpers redact passwords/keys and, for storage/database,
// include a live health probe using the request context.
func (s *Server) HandleGetAllSettings(w http.ResponseWriter, r *http.Request) {
	resp := map[string]any{
		"core":     s.buildCoreSettings(),
		"schemas":  s.buildSchemasSettings(),
		"storage":  s.buildStorageSettings(r.Context()),
		"database": s.buildDatabaseSettings(r.Context()),
		"auth":     s.buildAuthSettings(),
		"projects": map[string]any{"enabled": s.modules.IsEnabled("projects")},
		"audit":    map[string]any{"enabled": s.modules.IsEnabled("audit")},
		"odoo":     s.buildOdooSettings(),
		"freecad":  s.buildFreecadSettings(),
		"jobs":     s.buildJobsSettings(),
		"dag":      map[string]any{"enabled": s.modules.IsEnabled("dag")},
	}
	writeJSON(w, http.StatusOK, resp)
}
|
||||||
|
|
||||||
|
// HandleGetModuleSettings returns settings for a single module.
|
||||||
|
func (s *Server) HandleGetModuleSettings(w http.ResponseWriter, r *http.Request) {
|
||||||
|
module := chi.URLParam(r, "module")
|
||||||
|
|
||||||
|
var settings any
|
||||||
|
switch module {
|
||||||
|
case "core":
|
||||||
|
settings = s.buildCoreSettings()
|
||||||
|
case "schemas":
|
||||||
|
settings = s.buildSchemasSettings()
|
||||||
|
case "storage":
|
||||||
|
settings = s.buildStorageSettings(r.Context())
|
||||||
|
case "database":
|
||||||
|
settings = s.buildDatabaseSettings(r.Context())
|
||||||
|
case "auth":
|
||||||
|
settings = s.buildAuthSettings()
|
||||||
|
case "projects":
|
||||||
|
settings = map[string]any{"enabled": s.modules.IsEnabled("projects")}
|
||||||
|
case "audit":
|
||||||
|
settings = map[string]any{"enabled": s.modules.IsEnabled("audit")}
|
||||||
|
case "odoo":
|
||||||
|
settings = s.buildOdooSettings()
|
||||||
|
case "freecad":
|
||||||
|
settings = s.buildFreecadSettings()
|
||||||
|
case "jobs":
|
||||||
|
settings = s.buildJobsSettings()
|
||||||
|
case "dag":
|
||||||
|
settings = map[string]any{"enabled": s.modules.IsEnabled("dag")}
|
||||||
|
default:
|
||||||
|
writeError(w, http.StatusNotFound, "not_found", "Unknown module: "+module)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
writeJSON(w, http.StatusOK, settings)
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleUpdateModuleSettings handles module toggle and config overrides.
//
// The request body is a flat JSON object. An "enabled" key (bool) toggles the
// module at runtime and persists the state; every other key is persisted as a
// "<module>.<key>" override (not hot-reloaded). On success the response lists
// the changed keys plus whether a restart is required, then a
// "settings.changed" SSE event is published.
func (s *Server) HandleUpdateModuleSettings(w http.ResponseWriter, r *http.Request) {
	module := chi.URLParam(r, "module")

	// Validate module exists
	if s.modules.Get(module) == nil {
		writeError(w, http.StatusNotFound, "not_found", "Unknown module: "+module)
		return
	}

	var body map[string]any
	if err := json.NewDecoder(r.Body).Decode(&body); err != nil {
		writeError(w, http.StatusBadRequest, "invalid_json", err.Error())
		return
	}

	// Attribute the change to the authenticated user, or "system" when absent.
	user := auth.UserFromContext(r.Context())
	username := "system"
	if user != nil {
		username = user.Username
	}

	var updated []string
	restartRequired := false

	// Handle module toggle
	if enabledVal, ok := body["enabled"]; ok {
		enabled, ok := enabledVal.(bool)
		if !ok {
			writeError(w, http.StatusBadRequest, "invalid_value", "'enabled' must be a boolean")
			return
		}

		// In-memory toggle first so route gating reflects the change immediately.
		if err := s.modules.SetEnabled(module, enabled); err != nil {
			writeError(w, http.StatusBadRequest, "toggle_failed", err.Error())
			return
		}

		// NOTE(review): if persistence fails here the in-memory toggle above
		// has already taken effect, so runtime and stored state diverge until
		// restart — confirm this is acceptable.
		if err := s.settings.SetModuleState(r.Context(), module, enabled, username); err != nil {
			s.logger.Error().Err(err).Str("module", module).Msg("failed to persist module state")
			writeError(w, http.StatusInternalServerError, "persist_failed", "Failed to save module state")
			return
		}

		updated = append(updated, module+".enabled")
	}

	// Handle config overrides (future use — persisted but not hot-reloaded)
	// NOTE(review): map iteration order is random, so an error mid-loop leaves
	// an arbitrary subset of overrides persisted.
	for key, value := range body {
		if key == "enabled" {
			continue
		}

		fullKey := module + "." + key
		if err := s.settings.SetOverride(r.Context(), fullKey, value, username); err != nil {
			s.logger.Error().Err(err).Str("key", fullKey).Msg("failed to persist setting override")
			writeError(w, http.StatusInternalServerError, "persist_failed", "Failed to save setting: "+key)
			return
		}
		updated = append(updated, fullKey)

		// These namespaces require a restart to take effect
		if strings.HasPrefix(fullKey, "database.") ||
			strings.HasPrefix(fullKey, "storage.") ||
			strings.HasPrefix(fullKey, "server.") ||
			strings.HasPrefix(fullKey, "schemas.") {
			restartRequired = true
		}
	}

	writeJSON(w, http.StatusOK, map[string]any{
		"updated":          updated,
		"restart_required": restartRequired,
	})

	// Publish SSE event
	s.broker.Publish("settings.changed", mustMarshal(map[string]any{
		"module":       module,
		"changed_keys": updated,
		"updated_by":   username,
	}))
}
|
||||||
|
|
||||||
|
// HandleTestModuleConnectivity tests external connectivity for a module.
//
// Supported modules: "database" (pool ping) and "storage" (backend ping).
// "auth" and "odoo" are recognized but report not-implemented; anything else
// is a 400. The response always carries success, a human-readable message,
// and the probe latency in milliseconds.
func (s *Server) HandleTestModuleConnectivity(w http.ResponseWriter, r *http.Request) {
	module := chi.URLParam(r, "module")

	start := time.Now()
	var success bool
	var message string

	switch module {
	case "database":
		if err := s.db.Pool().Ping(r.Context()); err != nil {
			success = false
			message = "Database ping failed: " + err.Error()
		} else {
			success = true
			message = "Database connection OK"
		}
	case "storage":
		if s.storage == nil {
			success = false
			message = "Storage not configured"
		} else if err := s.storage.Ping(r.Context()); err != nil {
			success = false
			message = "Storage ping failed: " + err.Error()
		} else {
			success = true
			message = "Storage connection OK"
		}
	case "auth", "odoo":
		// Recognized modules with no probe yet; reported as an unsuccessful test.
		success = false
		message = "Connectivity test not implemented for " + module
	default:
		writeError(w, http.StatusBadRequest, "not_testable", "No connectivity test available for module: "+module)
		return
	}

	// Latency covers only the probe itself (measured from before the switch).
	latency := time.Since(start).Milliseconds()

	writeJSON(w, http.StatusOK, map[string]any{
		"success":    success,
		"message":    message,
		"latency_ms": latency,
	})
}
|
||||||
|
|
||||||
|
// --- build helpers (read config, redact secrets) ---
|
||||||
|
|
||||||
|
// redact masks a non-empty secret with a fixed placeholder so neither the
// value nor its length leaks; an empty value stays empty (shows "unset").
func redact(s string) string {
	if len(s) == 0 {
		return ""
	}
	return "****"
}
|
||||||
|
|
||||||
|
// buildCoreSettings reports the HTTP server configuration. The core module
// is always on, hence the hard-coded enabled:true.
func (s *Server) buildCoreSettings() map[string]any {
	return map[string]any{
		"enabled":  true,
		"host":     s.cfg.Server.Host,
		"port":     s.cfg.Server.Port,
		"base_url": s.cfg.Server.BaseURL,
		"readonly": s.cfg.Server.ReadOnly,
	}
}
|
||||||
|
|
||||||
|
// buildSchemasSettings reports the schema directory configuration and how
// many schemas are currently loaded.
func (s *Server) buildSchemasSettings() map[string]any {
	return map[string]any{
		"enabled":   true,
		"directory": s.cfg.Schemas.Directory,
		"default":   s.cfg.Schemas.Default,
		"count":     len(s.schemas),
	}
}
|
||||||
|
|
||||||
|
func (s *Server) buildStorageSettings(ctx context.Context) map[string]any {
|
||||||
|
result := map[string]any{
|
||||||
|
"enabled": true,
|
||||||
|
"endpoint": s.cfg.Storage.Endpoint,
|
||||||
|
"bucket": s.cfg.Storage.Bucket,
|
||||||
|
"use_ssl": s.cfg.Storage.UseSSL,
|
||||||
|
"region": s.cfg.Storage.Region,
|
||||||
|
}
|
||||||
|
if s.storage != nil {
|
||||||
|
if err := s.storage.Ping(ctx); err != nil {
|
||||||
|
result["status"] = "unavailable"
|
||||||
|
} else {
|
||||||
|
result["status"] = "ok"
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
result["status"] = "not_configured"
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Server) buildDatabaseSettings(ctx context.Context) map[string]any {
|
||||||
|
result := map[string]any{
|
||||||
|
"enabled": true,
|
||||||
|
"host": s.cfg.Database.Host,
|
||||||
|
"port": s.cfg.Database.Port,
|
||||||
|
"name": s.cfg.Database.Name,
|
||||||
|
"user": s.cfg.Database.User,
|
||||||
|
"password": redact(s.cfg.Database.Password),
|
||||||
|
"sslmode": s.cfg.Database.SSLMode,
|
||||||
|
"max_connections": s.cfg.Database.MaxConnections,
|
||||||
|
}
|
||||||
|
if err := s.db.Pool().Ping(ctx); err != nil {
|
||||||
|
result["status"] = "unavailable"
|
||||||
|
} else {
|
||||||
|
result["status"] = "ok"
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildAuthSettings reports the auth configuration grouped by provider
// (local, LDAP, OIDC). All secrets are passed through redact so only
// presence, never value, is exposed.
func (s *Server) buildAuthSettings() map[string]any {
	return map[string]any{
		"enabled":        s.modules.IsEnabled("auth"),
		"session_secret": redact(s.cfg.Auth.SessionSecret),
		"local": map[string]any{
			"enabled":                s.cfg.Auth.Local.Enabled,
			"default_admin_username": s.cfg.Auth.Local.DefaultAdminUsername,
			"default_admin_password": redact(s.cfg.Auth.Local.DefaultAdminPassword),
		},
		"ldap": map[string]any{
			"enabled":       s.cfg.Auth.LDAP.Enabled,
			"url":           s.cfg.Auth.LDAP.URL,
			"base_dn":       s.cfg.Auth.LDAP.BaseDN,
			"bind_dn":       s.cfg.Auth.LDAP.BindDN,
			"bind_password": redact(s.cfg.Auth.LDAP.BindPassword),
		},
		"oidc": map[string]any{
			"enabled":       s.cfg.Auth.OIDC.Enabled,
			"issuer_url":    s.cfg.Auth.OIDC.IssuerURL,
			"client_id":     s.cfg.Auth.OIDC.ClientID,
			"client_secret": redact(s.cfg.Auth.OIDC.ClientSecret),
			"redirect_url":  s.cfg.Auth.OIDC.RedirectURL,
		},
	}
}
|
||||||
|
|
||||||
|
// buildOdooSettings reports the Odoo integration configuration with the API
// key redacted.
func (s *Server) buildOdooSettings() map[string]any {
	return map[string]any{
		"enabled":  s.modules.IsEnabled("odoo"),
		"url":      s.cfg.Odoo.URL,
		"database": s.cfg.Odoo.Database,
		"username": s.cfg.Odoo.Username,
		"api_key":  redact(s.cfg.Odoo.APIKey),
	}
}
|
||||||
|
|
||||||
|
// buildFreecadSettings reports the FreeCAD integration configuration
// (custom URI scheme and executable path).
func (s *Server) buildFreecadSettings() map[string]any {
	return map[string]any{
		"enabled":    s.modules.IsEnabled("freecad"),
		"uri_scheme": s.cfg.FreeCAD.URIScheme,
		"executable": s.cfg.FreeCAD.Executable,
	}
}
|
||||||
|
|
||||||
|
// buildJobsSettings reports the jobs-module configuration and the number of
// job definitions currently loaded.
func (s *Server) buildJobsSettings() map[string]any {
	return map[string]any{
		"enabled":           s.modules.IsEnabled("jobs"),
		"directory":         s.cfg.Jobs.Directory,
		"runner_timeout":    s.cfg.Jobs.RunnerTimeout,
		"job_timeout_check": s.cfg.Jobs.JobTimeoutCheck,
		"default_priority":  s.cfg.Jobs.DefaultPriority,
		"definitions_count": len(s.jobDefs),
	}
}
|
||||||
285
internal/api/settings_handlers_test.go
Normal file
285
internal/api/settings_handlers_test.go
Normal file
@@ -0,0 +1,285 @@
|
|||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/go-chi/chi/v5"
|
||||||
|
"github.com/kindredsystems/silo/internal/auth"
|
||||||
|
"github.com/kindredsystems/silo/internal/config"
|
||||||
|
"github.com/kindredsystems/silo/internal/db"
|
||||||
|
"github.com/kindredsystems/silo/internal/modules"
|
||||||
|
"github.com/kindredsystems/silo/internal/schema"
|
||||||
|
"github.com/kindredsystems/silo/internal/testutil"
|
||||||
|
"github.com/rs/zerolog"
|
||||||
|
)
|
||||||
|
|
||||||
|
// newSettingsTestServer builds a *Server wired against the shared test
// database pool with a fully populated config, suitable for exercising
// the admin settings handlers. Collaborators the settings handlers do
// not touch (storage, auth service, session manager, ...) are nil.
func newSettingsTestServer(t *testing.T) *Server {
	t.Helper()
	pool := testutil.MustConnectTestPool(t)
	database := db.NewFromPool(pool)
	broker := NewBroker(zerolog.Nop())
	state := NewServerState(zerolog.Nop(), nil, broker)
	// Secrets below are deliberately non-empty so tests can assert that
	// the settings endpoints redact them.
	cfg := &config.Config{
		Server: config.ServerConfig{Host: "0.0.0.0", Port: 8080},
		Database: config.DatabaseConfig{
			Host: "localhost", Port: 5432, Name: "silo_test",
			User: "silo", Password: "secret", SSLMode: "disable",
			MaxConnections: 10,
		},
		Storage: config.StorageConfig{
			Endpoint: "minio:9000", Bucket: "silo", Region: "us-east-1",
			AccessKey: "minioadmin", SecretKey: "miniosecret",
		},
		Schemas: config.SchemasConfig{Directory: "/etc/silo/schemas", Default: "kindred-rd"},
		Auth: config.AuthConfig{
			SessionSecret: "supersecret",
			Local:         config.LocalAuth{Enabled: true, DefaultAdminUsername: "admin", DefaultAdminPassword: "changeme"},
			LDAP:          config.LDAPAuth{Enabled: false, BindPassword: "ldapsecret"},
			OIDC:          config.OIDCAuth{Enabled: false, ClientSecret: "oidcsecret"},
		},
		FreeCAD: config.FreeCADConfig{URIScheme: "silo"},
		Odoo:    config.OdooConfig{URL: "https://odoo.example.com", APIKey: "odoo-api-key"},
		Jobs:    config.JobsConfig{Directory: "/etc/silo/jobdefs", RunnerTimeout: 90, JobTimeoutCheck: 30, DefaultPriority: 100},
	}
	// NewServer takes positional dependencies; the inline comments mark
	// which slot each nil fills.
	return NewServer(
		zerolog.Nop(),
		database,
		map[string]*schema.Schema{"test": {Name: "test"}},
		cfg.Schemas.Directory,
		nil, // storage
		nil, // authService
		nil, // sessionManager
		nil, // oidcBackend
		nil, // authConfig
		broker,
		state,
		nil, // jobDefs
		"",  // jobDefsDir
		modules.NewRegistry(), // modules
		cfg,
	)
}
|
||||||
|
|
||||||
|
func newSettingsRouter(s *Server) http.Handler {
|
||||||
|
r := chi.NewRouter()
|
||||||
|
r.Route("/api/admin/settings", func(r chi.Router) {
|
||||||
|
r.Get("/", s.HandleGetAllSettings)
|
||||||
|
r.Get("/{module}", s.HandleGetModuleSettings)
|
||||||
|
r.Put("/{module}", s.HandleUpdateModuleSettings)
|
||||||
|
r.Post("/{module}/test", s.HandleTestModuleConnectivity)
|
||||||
|
})
|
||||||
|
return r
|
||||||
|
}
|
||||||
|
|
||||||
|
func adminSettingsRequest(r *http.Request) *http.Request {
|
||||||
|
u := &auth.User{
|
||||||
|
ID: "admin-id",
|
||||||
|
Username: "testadmin",
|
||||||
|
Role: auth.RoleAdmin,
|
||||||
|
}
|
||||||
|
return r.WithContext(auth.ContextWithUser(r.Context(), u))
|
||||||
|
}
|
||||||
|
|
||||||
|
func viewerSettingsRequest(r *http.Request) *http.Request {
|
||||||
|
u := &auth.User{
|
||||||
|
ID: "viewer-id",
|
||||||
|
Username: "testviewer",
|
||||||
|
Role: auth.RoleViewer,
|
||||||
|
}
|
||||||
|
return r.WithContext(auth.ContextWithUser(r.Context(), u))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGetAllSettings(t *testing.T) {
|
||||||
|
s := newSettingsTestServer(t)
|
||||||
|
router := newSettingsRouter(s)
|
||||||
|
|
||||||
|
req := adminSettingsRequest(httptest.NewRequest("GET", "/api/admin/settings", nil))
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
if w.Code != http.StatusOK {
|
||||||
|
t.Fatalf("status: got %d, want %d; body: %s", w.Code, http.StatusOK, w.Body.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
var resp map[string]any
|
||||||
|
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
||||||
|
t.Fatalf("decoding: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify all module keys present
|
||||||
|
expectedModules := []string{"core", "schemas", "storage", "database", "auth", "projects", "audit", "odoo", "freecad", "jobs", "dag"}
|
||||||
|
for _, mod := range expectedModules {
|
||||||
|
if _, ok := resp[mod]; !ok {
|
||||||
|
t.Errorf("missing module key: %s", mod)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify secrets are redacted
|
||||||
|
dbSettings, _ := resp["database"].(map[string]any)
|
||||||
|
if dbSettings["password"] != "****" {
|
||||||
|
t.Errorf("database password not redacted: got %v", dbSettings["password"])
|
||||||
|
}
|
||||||
|
|
||||||
|
authSettings, _ := resp["auth"].(map[string]any)
|
||||||
|
if authSettings["session_secret"] != "****" {
|
||||||
|
t.Errorf("session_secret not redacted: got %v", authSettings["session_secret"])
|
||||||
|
}
|
||||||
|
|
||||||
|
ldap, _ := authSettings["ldap"].(map[string]any)
|
||||||
|
if ldap["bind_password"] != "****" {
|
||||||
|
t.Errorf("ldap bind_password not redacted: got %v", ldap["bind_password"])
|
||||||
|
}
|
||||||
|
|
||||||
|
oidc, _ := authSettings["oidc"].(map[string]any)
|
||||||
|
if oidc["client_secret"] != "****" {
|
||||||
|
t.Errorf("oidc client_secret not redacted: got %v", oidc["client_secret"])
|
||||||
|
}
|
||||||
|
|
||||||
|
odoo, _ := resp["odoo"].(map[string]any)
|
||||||
|
if odoo["api_key"] != "****" {
|
||||||
|
t.Errorf("odoo api_key not redacted: got %v", odoo["api_key"])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGetModuleSettings(t *testing.T) {
|
||||||
|
s := newSettingsTestServer(t)
|
||||||
|
router := newSettingsRouter(s)
|
||||||
|
|
||||||
|
req := adminSettingsRequest(httptest.NewRequest("GET", "/api/admin/settings/jobs", nil))
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
if w.Code != http.StatusOK {
|
||||||
|
t.Fatalf("status: got %d, want %d; body: %s", w.Code, http.StatusOK, w.Body.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
var resp map[string]any
|
||||||
|
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
||||||
|
t.Fatalf("decoding: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if resp["directory"] != "/etc/silo/jobdefs" {
|
||||||
|
t.Errorf("jobs directory: got %v, want /etc/silo/jobdefs", resp["directory"])
|
||||||
|
}
|
||||||
|
if resp["runner_timeout"] != float64(90) {
|
||||||
|
t.Errorf("runner_timeout: got %v, want 90", resp["runner_timeout"])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGetModuleSettings_Unknown(t *testing.T) {
|
||||||
|
s := newSettingsTestServer(t)
|
||||||
|
router := newSettingsRouter(s)
|
||||||
|
|
||||||
|
req := adminSettingsRequest(httptest.NewRequest("GET", "/api/admin/settings/nonexistent", nil))
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
if w.Code != http.StatusNotFound {
|
||||||
|
t.Errorf("status: got %d, want %d", w.Code, http.StatusNotFound)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestToggleModule(t *testing.T) {
|
||||||
|
s := newSettingsTestServer(t)
|
||||||
|
router := newSettingsRouter(s)
|
||||||
|
|
||||||
|
// Projects is enabled by default; disable it
|
||||||
|
body := `{"enabled": false}`
|
||||||
|
req := adminSettingsRequest(httptest.NewRequest("PUT", "/api/admin/settings/projects", strings.NewReader(body)))
|
||||||
|
req.Header.Set("Content-Type", "application/json")
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
if w.Code != http.StatusOK {
|
||||||
|
t.Fatalf("status: got %d, want %d; body: %s", w.Code, http.StatusOK, w.Body.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
var resp map[string]any
|
||||||
|
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
||||||
|
t.Fatalf("decoding: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
updated, _ := resp["updated"].([]any)
|
||||||
|
if len(updated) != 1 || updated[0] != "projects.enabled" {
|
||||||
|
t.Errorf("updated: got %v, want [projects.enabled]", updated)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify registry state
|
||||||
|
if s.modules.IsEnabled("projects") {
|
||||||
|
t.Error("projects should be disabled after toggle")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestToggleModule_DependencyError(t *testing.T) {
|
||||||
|
s := newSettingsTestServer(t)
|
||||||
|
router := newSettingsRouter(s)
|
||||||
|
|
||||||
|
// DAG depends on Jobs. Jobs is disabled by default.
|
||||||
|
// Enabling DAG without Jobs should fail.
|
||||||
|
body := `{"enabled": true}`
|
||||||
|
req := adminSettingsRequest(httptest.NewRequest("PUT", "/api/admin/settings/dag", strings.NewReader(body)))
|
||||||
|
req.Header.Set("Content-Type", "application/json")
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
if w.Code != http.StatusBadRequest {
|
||||||
|
t.Errorf("status: got %d, want %d; body: %s", w.Code, http.StatusBadRequest, w.Body.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestToggleRequiredModule(t *testing.T) {
|
||||||
|
s := newSettingsTestServer(t)
|
||||||
|
router := newSettingsRouter(s)
|
||||||
|
|
||||||
|
body := `{"enabled": false}`
|
||||||
|
req := adminSettingsRequest(httptest.NewRequest("PUT", "/api/admin/settings/core", strings.NewReader(body)))
|
||||||
|
req.Header.Set("Content-Type", "application/json")
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
if w.Code != http.StatusBadRequest {
|
||||||
|
t.Errorf("status: got %d, want %d; body: %s", w.Code, http.StatusBadRequest, w.Body.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTestConnectivity_Database(t *testing.T) {
|
||||||
|
s := newSettingsTestServer(t)
|
||||||
|
router := newSettingsRouter(s)
|
||||||
|
|
||||||
|
req := adminSettingsRequest(httptest.NewRequest("POST", "/api/admin/settings/database/test", nil))
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
if w.Code != http.StatusOK {
|
||||||
|
t.Fatalf("status: got %d, want %d; body: %s", w.Code, http.StatusOK, w.Body.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
var resp map[string]any
|
||||||
|
if err := json.Unmarshal(w.Body.Bytes(), &resp); err != nil {
|
||||||
|
t.Fatalf("decoding: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if resp["success"] != true {
|
||||||
|
t.Errorf("expected success=true, got %v; message: %v", resp["success"], resp["message"])
|
||||||
|
}
|
||||||
|
if resp["latency_ms"] == nil {
|
||||||
|
t.Error("expected latency_ms in response")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTestConnectivity_NotTestable(t *testing.T) {
|
||||||
|
s := newSettingsTestServer(t)
|
||||||
|
router := newSettingsRouter(s)
|
||||||
|
|
||||||
|
req := adminSettingsRequest(httptest.NewRequest("POST", "/api/admin/settings/core/test", nil))
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
router.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
if w.Code != http.StatusBadRequest {
|
||||||
|
t.Errorf("status: got %d, want %d; body: %s", w.Code, http.StatusBadRequest, w.Body.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -18,6 +18,25 @@ type Config struct {
|
|||||||
Odoo OdooConfig `yaml:"odoo"`
|
Odoo OdooConfig `yaml:"odoo"`
|
||||||
Auth AuthConfig `yaml:"auth"`
|
Auth AuthConfig `yaml:"auth"`
|
||||||
Jobs JobsConfig `yaml:"jobs"`
|
Jobs JobsConfig `yaml:"jobs"`
|
||||||
|
Modules ModulesConfig `yaml:"modules"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// ModulesConfig holds explicit enable/disable toggles for optional modules.
// A nil pointer means "use the module's default state".
type ModulesConfig struct {
	Auth     *ModuleToggle `yaml:"auth"`
	Projects *ModuleToggle `yaml:"projects"`
	Audit    *ModuleToggle `yaml:"audit"`
	Odoo     *ModuleToggle `yaml:"odoo"`
	FreeCAD  *ModuleToggle `yaml:"freecad"`
	Jobs     *ModuleToggle `yaml:"jobs"`
	DAG      *ModuleToggle `yaml:"dag"`
}

// ModuleToggle holds an optional enabled flag. The pointer allows
// distinguishing "not set" (nil) from "explicitly false".
type ModuleToggle struct {
	// Enabled is nil when the YAML omits the key entirely.
	Enabled *bool `yaml:"enabled"`
}
|
||||||
|
|
||||||
// AuthConfig holds authentication and authorization settings.
|
// AuthConfig holds authentication and authorization settings.
|
||||||
@@ -90,14 +109,21 @@ type DatabaseConfig struct {
|
|||||||
MaxConnections int `yaml:"max_connections"`
|
MaxConnections int `yaml:"max_connections"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// StorageConfig holds object storage settings.
type StorageConfig struct {
	// Backend selects the storage implementation: "minio" (default)
	// or "filesystem".
	Backend   string `yaml:"backend"`
	Endpoint  string `yaml:"endpoint"`
	AccessKey string `yaml:"access_key"`
	SecretKey string `yaml:"secret_key"`
	Bucket    string `yaml:"bucket"`
	UseSSL    bool   `yaml:"use_ssl"`
	Region    string `yaml:"region"`
	// Filesystem is only consulted when Backend is "filesystem".
	Filesystem FilesystemConfig `yaml:"filesystem"`
}

// FilesystemConfig holds local filesystem storage settings.
type FilesystemConfig struct {
	RootDir string `yaml:"root_dir"`
}
|
||||||
|
|
||||||
// SchemasConfig holds schema loading settings.
|
// SchemasConfig holds schema loading settings.
|
||||||
|
|||||||
127
internal/db/item_dependencies.go
Normal file
127
internal/db/item_dependencies.go
Normal file
@@ -0,0 +1,127 @@
|
|||||||
|
package db
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/jackc/pgx/v5"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ItemDependency represents a row in the item_dependencies table: one
// parent-to-child edge of an item's dependency set.
type ItemDependency struct {
	ID           string
	ParentItemID string
	// ChildUUID identifies the child; it is resolved against the items
	// table lazily (see Resolve) and may not match any local item.
	ChildUUID       string
	ChildPartNumber *string
	ChildRevision   *int
	Quantity        *float64
	Label           *string
	Relationship    string
	// RevisionNumber is the parent revision this edge was recorded for.
	RevisionNumber int
	CreatedAt      time.Time
}

// ResolvedDependency extends ItemDependency with resolution info from a
// LEFT JOIN against the items table.
type ResolvedDependency struct {
	ItemDependency
	ResolvedPartNumber *string
	ResolvedRevision   *int
	// Resolved is true when ChildUUID matched a non-archived local item.
	Resolved bool
}

// ItemDependencyRepository provides item_dependencies database operations.
type ItemDependencyRepository struct {
	db *DB
}

// NewItemDependencyRepository creates a new item dependency repository.
func NewItemDependencyRepository(db *DB) *ItemDependencyRepository {
	return &ItemDependencyRepository{db: db}
}
|
||||||
|
|
||||||
|
// ReplaceForRevision atomically replaces all dependencies for an item's revision.
|
||||||
|
// Deletes existing rows for the parent item and inserts the new set.
|
||||||
|
func (r *ItemDependencyRepository) ReplaceForRevision(ctx context.Context, parentItemID string, revisionNumber int, deps []*ItemDependency) error {
|
||||||
|
return r.db.Tx(ctx, func(tx pgx.Tx) error {
|
||||||
|
_, err := tx.Exec(ctx, `DELETE FROM item_dependencies WHERE parent_item_id = $1`, parentItemID)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("deleting old dependencies: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, d := range deps {
|
||||||
|
_, err := tx.Exec(ctx, `
|
||||||
|
INSERT INTO item_dependencies
|
||||||
|
(parent_item_id, child_uuid, child_part_number, child_revision,
|
||||||
|
quantity, label, relationship, revision_number)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
|
||||||
|
`, parentItemID, d.ChildUUID, d.ChildPartNumber, d.ChildRevision,
|
||||||
|
d.Quantity, d.Label, d.Relationship, revisionNumber)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("inserting dependency: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListByItem returns all dependencies for an item.
|
||||||
|
func (r *ItemDependencyRepository) ListByItem(ctx context.Context, parentItemID string) ([]*ItemDependency, error) {
|
||||||
|
rows, err := r.db.pool.Query(ctx, `
|
||||||
|
SELECT id, parent_item_id, child_uuid, child_part_number, child_revision,
|
||||||
|
quantity, label, relationship, revision_number, created_at
|
||||||
|
FROM item_dependencies
|
||||||
|
WHERE parent_item_id = $1
|
||||||
|
ORDER BY label NULLS LAST
|
||||||
|
`, parentItemID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("listing dependencies: %w", err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var deps []*ItemDependency
|
||||||
|
for rows.Next() {
|
||||||
|
d := &ItemDependency{}
|
||||||
|
if err := rows.Scan(
|
||||||
|
&d.ID, &d.ParentItemID, &d.ChildUUID, &d.ChildPartNumber, &d.ChildRevision,
|
||||||
|
&d.Quantity, &d.Label, &d.Relationship, &d.RevisionNumber, &d.CreatedAt,
|
||||||
|
); err != nil {
|
||||||
|
return nil, fmt.Errorf("scanning dependency: %w", err)
|
||||||
|
}
|
||||||
|
deps = append(deps, d)
|
||||||
|
}
|
||||||
|
return deps, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Resolve returns dependencies with child UUIDs resolved against the items table.
|
||||||
|
// Unresolvable UUIDs (external or deleted items) have Resolved=false.
|
||||||
|
func (r *ItemDependencyRepository) Resolve(ctx context.Context, parentItemID string) ([]*ResolvedDependency, error) {
|
||||||
|
rows, err := r.db.pool.Query(ctx, `
|
||||||
|
SELECT d.id, d.parent_item_id, d.child_uuid, d.child_part_number, d.child_revision,
|
||||||
|
d.quantity, d.label, d.relationship, d.revision_number, d.created_at,
|
||||||
|
i.part_number, i.current_revision
|
||||||
|
FROM item_dependencies d
|
||||||
|
LEFT JOIN items i ON i.id = d.child_uuid AND i.archived_at IS NULL
|
||||||
|
WHERE d.parent_item_id = $1
|
||||||
|
ORDER BY d.label NULLS LAST
|
||||||
|
`, parentItemID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("resolving dependencies: %w", err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var deps []*ResolvedDependency
|
||||||
|
for rows.Next() {
|
||||||
|
d := &ResolvedDependency{}
|
||||||
|
if err := rows.Scan(
|
||||||
|
&d.ID, &d.ParentItemID, &d.ChildUUID, &d.ChildPartNumber, &d.ChildRevision,
|
||||||
|
&d.Quantity, &d.Label, &d.Relationship, &d.RevisionNumber, &d.CreatedAt,
|
||||||
|
&d.ResolvedPartNumber, &d.ResolvedRevision,
|
||||||
|
); err != nil {
|
||||||
|
return nil, fmt.Errorf("scanning resolved dependency: %w", err)
|
||||||
|
}
|
||||||
|
d.Resolved = d.ResolvedPartNumber != nil
|
||||||
|
deps = append(deps, d)
|
||||||
|
}
|
||||||
|
return deps, nil
|
||||||
|
}
|
||||||
@@ -8,13 +8,14 @@ import (
|
|||||||
|
|
||||||
// ItemFile represents a file attachment on an item.
type ItemFile struct {
	ID          string
	ItemID      string
	Filename    string
	ContentType string
	Size        int64
	// ObjectKey locates the blob within the configured storage backend.
	ObjectKey string
	// StorageBackend records where the object lives: "minio" or "filesystem".
	StorageBackend string
	CreatedAt      time.Time
}
|
||||||
|
|
||||||
// ItemFileRepository provides item_files database operations.
|
// ItemFileRepository provides item_files database operations.
|
||||||
@@ -29,11 +30,14 @@ func NewItemFileRepository(db *DB) *ItemFileRepository {
|
|||||||
|
|
||||||
// Create inserts a new item file record.
|
// Create inserts a new item file record.
|
||||||
func (r *ItemFileRepository) Create(ctx context.Context, f *ItemFile) error {
|
func (r *ItemFileRepository) Create(ctx context.Context, f *ItemFile) error {
|
||||||
|
if f.StorageBackend == "" {
|
||||||
|
f.StorageBackend = "minio"
|
||||||
|
}
|
||||||
err := r.db.pool.QueryRow(ctx,
|
err := r.db.pool.QueryRow(ctx,
|
||||||
`INSERT INTO item_files (item_id, filename, content_type, size, object_key)
|
`INSERT INTO item_files (item_id, filename, content_type, size, object_key, storage_backend)
|
||||||
VALUES ($1, $2, $3, $4, $5)
|
VALUES ($1, $2, $3, $4, $5, $6)
|
||||||
RETURNING id, created_at`,
|
RETURNING id, created_at`,
|
||||||
f.ItemID, f.Filename, f.ContentType, f.Size, f.ObjectKey,
|
f.ItemID, f.Filename, f.ContentType, f.Size, f.ObjectKey, f.StorageBackend,
|
||||||
).Scan(&f.ID, &f.CreatedAt)
|
).Scan(&f.ID, &f.CreatedAt)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("creating item file: %w", err)
|
return fmt.Errorf("creating item file: %w", err)
|
||||||
@@ -44,7 +48,8 @@ func (r *ItemFileRepository) Create(ctx context.Context, f *ItemFile) error {
|
|||||||
// ListByItem returns all file attachments for an item.
|
// ListByItem returns all file attachments for an item.
|
||||||
func (r *ItemFileRepository) ListByItem(ctx context.Context, itemID string) ([]*ItemFile, error) {
|
func (r *ItemFileRepository) ListByItem(ctx context.Context, itemID string) ([]*ItemFile, error) {
|
||||||
rows, err := r.db.pool.Query(ctx,
|
rows, err := r.db.pool.Query(ctx,
|
||||||
`SELECT id, item_id, filename, content_type, size, object_key, created_at
|
`SELECT id, item_id, filename, content_type, size, object_key,
|
||||||
|
COALESCE(storage_backend, 'minio'), created_at
|
||||||
FROM item_files WHERE item_id = $1 ORDER BY created_at`,
|
FROM item_files WHERE item_id = $1 ORDER BY created_at`,
|
||||||
itemID,
|
itemID,
|
||||||
)
|
)
|
||||||
@@ -56,7 +61,7 @@ func (r *ItemFileRepository) ListByItem(ctx context.Context, itemID string) ([]*
|
|||||||
var files []*ItemFile
|
var files []*ItemFile
|
||||||
for rows.Next() {
|
for rows.Next() {
|
||||||
f := &ItemFile{}
|
f := &ItemFile{}
|
||||||
if err := rows.Scan(&f.ID, &f.ItemID, &f.Filename, &f.ContentType, &f.Size, &f.ObjectKey, &f.CreatedAt); err != nil {
|
if err := rows.Scan(&f.ID, &f.ItemID, &f.Filename, &f.ContentType, &f.Size, &f.ObjectKey, &f.StorageBackend, &f.CreatedAt); err != nil {
|
||||||
return nil, fmt.Errorf("scanning item file: %w", err)
|
return nil, fmt.Errorf("scanning item file: %w", err)
|
||||||
}
|
}
|
||||||
files = append(files, f)
|
files = append(files, f)
|
||||||
@@ -68,10 +73,11 @@ func (r *ItemFileRepository) ListByItem(ctx context.Context, itemID string) ([]*
|
|||||||
func (r *ItemFileRepository) Get(ctx context.Context, id string) (*ItemFile, error) {
|
func (r *ItemFileRepository) Get(ctx context.Context, id string) (*ItemFile, error) {
|
||||||
f := &ItemFile{}
|
f := &ItemFile{}
|
||||||
err := r.db.pool.QueryRow(ctx,
|
err := r.db.pool.QueryRow(ctx,
|
||||||
`SELECT id, item_id, filename, content_type, size, object_key, created_at
|
`SELECT id, item_id, filename, content_type, size, object_key,
|
||||||
|
COALESCE(storage_backend, 'minio'), created_at
|
||||||
FROM item_files WHERE id = $1`,
|
FROM item_files WHERE id = $1`,
|
||||||
id,
|
id,
|
||||||
).Scan(&f.ID, &f.ItemID, &f.Filename, &f.ContentType, &f.Size, &f.ObjectKey, &f.CreatedAt)
|
).Scan(&f.ID, &f.ItemID, &f.Filename, &f.ContentType, &f.Size, &f.ObjectKey, &f.StorageBackend, &f.CreatedAt)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("getting item file: %w", err)
|
return nil, fmt.Errorf("getting item file: %w", err)
|
||||||
}
|
}
|
||||||
|
|||||||
161
internal/db/item_metadata.go
Normal file
161
internal/db/item_metadata.go
Normal file
@@ -0,0 +1,161 @@
|
|||||||
|
package db
|
||||||
|
|
||||||
|
import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"time"

	"github.com/jackc/pgx/v5"
)
|
||||||
|
|
||||||
|
// ItemMetadata represents a row in the item_metadata table.
type ItemMetadata struct {
	ItemID         string
	SchemaName     *string
	Tags           []string
	LifecycleState string
	// Fields is the free-form JSONB payload of per-item field values.
	Fields map[string]any
	// KCVersion, ManifestUUID, SiloInstance and RevisionHash are written
	// by the commit extraction pipeline (see Upsert); nil when unknown.
	KCVersion    *string
	ManifestUUID *string
	SiloInstance *string
	RevisionHash *string
	UpdatedAt    time.Time
	UpdatedBy    *string
}

// ItemMetadataRepository provides item_metadata database operations.
type ItemMetadataRepository struct {
	db *DB
}

// NewItemMetadataRepository creates a new item metadata repository.
func NewItemMetadataRepository(db *DB) *ItemMetadataRepository {
	return &ItemMetadataRepository{db: db}
}
|
||||||
|
|
||||||
|
// Get returns metadata for an item, or nil if none exists.
|
||||||
|
func (r *ItemMetadataRepository) Get(ctx context.Context, itemID string) (*ItemMetadata, error) {
|
||||||
|
m := &ItemMetadata{}
|
||||||
|
var fieldsJSON []byte
|
||||||
|
err := r.db.pool.QueryRow(ctx, `
|
||||||
|
SELECT item_id, schema_name, tags, lifecycle_state, fields,
|
||||||
|
kc_version, manifest_uuid, silo_instance, revision_hash,
|
||||||
|
updated_at, updated_by
|
||||||
|
FROM item_metadata
|
||||||
|
WHERE item_id = $1
|
||||||
|
`, itemID).Scan(
|
||||||
|
&m.ItemID, &m.SchemaName, &m.Tags, &m.LifecycleState, &fieldsJSON,
|
||||||
|
&m.KCVersion, &m.ManifestUUID, &m.SiloInstance, &m.RevisionHash,
|
||||||
|
&m.UpdatedAt, &m.UpdatedBy,
|
||||||
|
)
|
||||||
|
if err == pgx.ErrNoRows {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("getting item metadata: %w", err)
|
||||||
|
}
|
||||||
|
if fieldsJSON != nil {
|
||||||
|
if err := json.Unmarshal(fieldsJSON, &m.Fields); err != nil {
|
||||||
|
return nil, fmt.Errorf("unmarshaling fields: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if m.Fields == nil {
|
||||||
|
m.Fields = make(map[string]any)
|
||||||
|
}
|
||||||
|
if m.Tags == nil {
|
||||||
|
m.Tags = []string{}
|
||||||
|
}
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Upsert inserts or updates the metadata row for an item.
|
||||||
|
// Used by the commit extraction pipeline.
|
||||||
|
func (r *ItemMetadataRepository) Upsert(ctx context.Context, m *ItemMetadata) error {
|
||||||
|
fieldsJSON, err := json.Marshal(m.Fields)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("marshaling fields: %w", err)
|
||||||
|
}
|
||||||
|
_, err = r.db.pool.Exec(ctx, `
|
||||||
|
INSERT INTO item_metadata
|
||||||
|
(item_id, schema_name, tags, lifecycle_state, fields,
|
||||||
|
kc_version, manifest_uuid, silo_instance, revision_hash,
|
||||||
|
updated_at, updated_by)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, now(), $10)
|
||||||
|
ON CONFLICT (item_id) DO UPDATE SET
|
||||||
|
schema_name = EXCLUDED.schema_name,
|
||||||
|
tags = EXCLUDED.tags,
|
||||||
|
lifecycle_state = EXCLUDED.lifecycle_state,
|
||||||
|
fields = EXCLUDED.fields,
|
||||||
|
kc_version = EXCLUDED.kc_version,
|
||||||
|
manifest_uuid = EXCLUDED.manifest_uuid,
|
||||||
|
silo_instance = EXCLUDED.silo_instance,
|
||||||
|
revision_hash = EXCLUDED.revision_hash,
|
||||||
|
updated_at = now(),
|
||||||
|
updated_by = EXCLUDED.updated_by
|
||||||
|
`, m.ItemID, m.SchemaName, m.Tags, m.LifecycleState, fieldsJSON,
|
||||||
|
m.KCVersion, m.ManifestUUID, m.SiloInstance, m.RevisionHash,
|
||||||
|
m.UpdatedBy)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("upserting item metadata: %w", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateFields merges the given fields into the existing JSONB fields column.
|
||||||
|
func (r *ItemMetadataRepository) UpdateFields(ctx context.Context, itemID string, fields map[string]any, updatedBy string) error {
|
||||||
|
fieldsJSON, err := json.Marshal(fields)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("marshaling fields: %w", err)
|
||||||
|
}
|
||||||
|
tag, err := r.db.pool.Exec(ctx, `
|
||||||
|
UPDATE item_metadata
|
||||||
|
SET fields = fields || $2::jsonb,
|
||||||
|
updated_at = now(),
|
||||||
|
updated_by = $3
|
||||||
|
WHERE item_id = $1
|
||||||
|
`, itemID, fieldsJSON, updatedBy)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("updating metadata fields: %w", err)
|
||||||
|
}
|
||||||
|
if tag.RowsAffected() == 0 {
|
||||||
|
return fmt.Errorf("item metadata not found")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateLifecycle sets the lifecycle_state column.
|
||||||
|
func (r *ItemMetadataRepository) UpdateLifecycle(ctx context.Context, itemID, state, updatedBy string) error {
|
||||||
|
tag, err := r.db.pool.Exec(ctx, `
|
||||||
|
UPDATE item_metadata
|
||||||
|
SET lifecycle_state = $2,
|
||||||
|
updated_at = now(),
|
||||||
|
updated_by = $3
|
||||||
|
WHERE item_id = $1
|
||||||
|
`, itemID, state, updatedBy)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("updating lifecycle state: %w", err)
|
||||||
|
}
|
||||||
|
if tag.RowsAffected() == 0 {
|
||||||
|
return fmt.Errorf("item metadata not found")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetTags replaces the tags array.
|
||||||
|
func (r *ItemMetadataRepository) SetTags(ctx context.Context, itemID string, tags []string, updatedBy string) error {
|
||||||
|
tag, err := r.db.pool.Exec(ctx, `
|
||||||
|
UPDATE item_metadata
|
||||||
|
SET tags = $2,
|
||||||
|
updated_at = now(),
|
||||||
|
updated_by = $3
|
||||||
|
WHERE item_id = $1
|
||||||
|
`, itemID, tags, updatedBy)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("updating tags: %w", err)
|
||||||
|
}
|
||||||
|
if tag.RowsAffected() == 0 {
|
||||||
|
return fmt.Errorf("item metadata not found")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
@@ -35,11 +35,12 @@ type Revision struct {
|
|||||||
ItemID string
|
ItemID string
|
||||||
RevisionNumber int
|
RevisionNumber int
|
||||||
Properties map[string]any
|
Properties map[string]any
|
||||||
FileKey *string
|
FileKey *string
|
||||||
FileVersion *string
|
FileVersion *string
|
||||||
FileChecksum *string
|
FileChecksum *string
|
||||||
FileSize *int64
|
FileSize *int64
|
||||||
ThumbnailKey *string
|
FileStorageBackend string // "minio" or "filesystem"
|
||||||
|
ThumbnailKey *string
|
||||||
CreatedAt time.Time
|
CreatedAt time.Time
|
||||||
CreatedBy *string
|
CreatedBy *string
|
||||||
Comment *string
|
Comment *string
|
||||||
@@ -306,16 +307,20 @@ func (r *ItemRepository) CreateRevision(ctx context.Context, rev *Revision) erro
|
|||||||
return fmt.Errorf("marshaling properties: %w", err)
|
return fmt.Errorf("marshaling properties: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if rev.FileStorageBackend == "" {
|
||||||
|
rev.FileStorageBackend = "minio"
|
||||||
|
}
|
||||||
|
|
||||||
err = r.db.pool.QueryRow(ctx, `
|
err = r.db.pool.QueryRow(ctx, `
|
||||||
INSERT INTO revisions (
|
INSERT INTO revisions (
|
||||||
item_id, revision_number, properties, file_key, file_version,
|
item_id, revision_number, properties, file_key, file_version,
|
||||||
file_checksum, file_size, thumbnail_key, created_by, comment
|
file_checksum, file_size, file_storage_backend, thumbnail_key, created_by, comment
|
||||||
)
|
)
|
||||||
SELECT $1, current_revision + 1, $2, $3, $4, $5, $6, $7, $8, $9
|
SELECT $1, current_revision + 1, $2, $3, $4, $5, $6, $7, $8, $9, $10
|
||||||
FROM items WHERE id = $1
|
FROM items WHERE id = $1
|
||||||
RETURNING id, revision_number, created_at
|
RETURNING id, revision_number, created_at
|
||||||
`, rev.ItemID, propsJSON, rev.FileKey, rev.FileVersion,
|
`, rev.ItemID, propsJSON, rev.FileKey, rev.FileVersion,
|
||||||
rev.FileChecksum, rev.FileSize, rev.ThumbnailKey, rev.CreatedBy, rev.Comment,
|
rev.FileChecksum, rev.FileSize, rev.FileStorageBackend, rev.ThumbnailKey, rev.CreatedBy, rev.Comment,
|
||||||
).Scan(&rev.ID, &rev.RevisionNumber, &rev.CreatedAt)
|
).Scan(&rev.ID, &rev.RevisionNumber, &rev.CreatedAt)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("inserting revision: %w", err)
|
return fmt.Errorf("inserting revision: %w", err)
|
||||||
@@ -342,7 +347,8 @@ func (r *ItemRepository) GetRevisions(ctx context.Context, itemID string) ([]*Re
|
|||||||
if hasStatusColumn {
|
if hasStatusColumn {
|
||||||
rows, err = r.db.pool.Query(ctx, `
|
rows, err = r.db.pool.Query(ctx, `
|
||||||
SELECT id, item_id, revision_number, properties, file_key, file_version,
|
SELECT id, item_id, revision_number, properties, file_key, file_version,
|
||||||
file_checksum, file_size, thumbnail_key, created_at, created_by, comment,
|
file_checksum, file_size, COALESCE(file_storage_backend, 'minio'),
|
||||||
|
thumbnail_key, created_at, created_by, comment,
|
||||||
COALESCE(status, 'draft') as status, COALESCE(labels, ARRAY[]::TEXT[]) as labels
|
COALESCE(status, 'draft') as status, COALESCE(labels, ARRAY[]::TEXT[]) as labels
|
||||||
FROM revisions
|
FROM revisions
|
||||||
WHERE item_id = $1
|
WHERE item_id = $1
|
||||||
@@ -369,7 +375,8 @@ func (r *ItemRepository) GetRevisions(ctx context.Context, itemID string) ([]*Re
|
|||||||
if hasStatusColumn {
|
if hasStatusColumn {
|
||||||
err = rows.Scan(
|
err = rows.Scan(
|
||||||
&rev.ID, &rev.ItemID, &rev.RevisionNumber, &propsJSON, &rev.FileKey, &rev.FileVersion,
|
&rev.ID, &rev.ItemID, &rev.RevisionNumber, &propsJSON, &rev.FileKey, &rev.FileVersion,
|
||||||
&rev.FileChecksum, &rev.FileSize, &rev.ThumbnailKey, &rev.CreatedAt, &rev.CreatedBy, &rev.Comment,
|
&rev.FileChecksum, &rev.FileSize, &rev.FileStorageBackend,
|
||||||
|
&rev.ThumbnailKey, &rev.CreatedAt, &rev.CreatedBy, &rev.Comment,
|
||||||
&rev.Status, &rev.Labels,
|
&rev.Status, &rev.Labels,
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
@@ -379,6 +386,7 @@ func (r *ItemRepository) GetRevisions(ctx context.Context, itemID string) ([]*Re
|
|||||||
)
|
)
|
||||||
rev.Status = "draft"
|
rev.Status = "draft"
|
||||||
rev.Labels = []string{}
|
rev.Labels = []string{}
|
||||||
|
rev.FileStorageBackend = "minio"
|
||||||
}
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("scanning revision: %w", err)
|
return nil, fmt.Errorf("scanning revision: %w", err)
|
||||||
@@ -412,13 +420,15 @@ func (r *ItemRepository) GetRevision(ctx context.Context, itemID string, revisio
|
|||||||
if hasStatusColumn {
|
if hasStatusColumn {
|
||||||
err = r.db.pool.QueryRow(ctx, `
|
err = r.db.pool.QueryRow(ctx, `
|
||||||
SELECT id, item_id, revision_number, properties, file_key, file_version,
|
SELECT id, item_id, revision_number, properties, file_key, file_version,
|
||||||
file_checksum, file_size, thumbnail_key, created_at, created_by, comment,
|
file_checksum, file_size, COALESCE(file_storage_backend, 'minio'),
|
||||||
|
thumbnail_key, created_at, created_by, comment,
|
||||||
COALESCE(status, 'draft') as status, COALESCE(labels, ARRAY[]::TEXT[]) as labels
|
COALESCE(status, 'draft') as status, COALESCE(labels, ARRAY[]::TEXT[]) as labels
|
||||||
FROM revisions
|
FROM revisions
|
||||||
WHERE item_id = $1 AND revision_number = $2
|
WHERE item_id = $1 AND revision_number = $2
|
||||||
`, itemID, revisionNumber).Scan(
|
`, itemID, revisionNumber).Scan(
|
||||||
&rev.ID, &rev.ItemID, &rev.RevisionNumber, &propsJSON, &rev.FileKey, &rev.FileVersion,
|
&rev.ID, &rev.ItemID, &rev.RevisionNumber, &propsJSON, &rev.FileKey, &rev.FileVersion,
|
||||||
&rev.FileChecksum, &rev.FileSize, &rev.ThumbnailKey, &rev.CreatedAt, &rev.CreatedBy, &rev.Comment,
|
&rev.FileChecksum, &rev.FileSize, &rev.FileStorageBackend,
|
||||||
|
&rev.ThumbnailKey, &rev.CreatedAt, &rev.CreatedBy, &rev.Comment,
|
||||||
&rev.Status, &rev.Labels,
|
&rev.Status, &rev.Labels,
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
@@ -433,6 +443,7 @@ func (r *ItemRepository) GetRevision(ctx context.Context, itemID string, revisio
|
|||||||
)
|
)
|
||||||
rev.Status = "draft"
|
rev.Status = "draft"
|
||||||
rev.Labels = []string{}
|
rev.Labels = []string{}
|
||||||
|
rev.FileStorageBackend = "minio"
|
||||||
}
|
}
|
||||||
|
|
||||||
if err == pgx.ErrNoRows {
|
if err == pgx.ErrNoRows {
|
||||||
@@ -606,15 +617,16 @@ func (r *ItemRepository) CreateRevisionFromExisting(ctx context.Context, itemID
|
|||||||
|
|
||||||
// Create new revision with copied properties (and optionally file reference)
|
// Create new revision with copied properties (and optionally file reference)
|
||||||
newRev := &Revision{
|
newRev := &Revision{
|
||||||
ItemID: itemID,
|
ItemID: itemID,
|
||||||
Properties: source.Properties,
|
Properties: source.Properties,
|
||||||
FileKey: source.FileKey,
|
FileKey: source.FileKey,
|
||||||
FileVersion: source.FileVersion,
|
FileVersion: source.FileVersion,
|
||||||
FileChecksum: source.FileChecksum,
|
FileChecksum: source.FileChecksum,
|
||||||
FileSize: source.FileSize,
|
FileSize: source.FileSize,
|
||||||
ThumbnailKey: source.ThumbnailKey,
|
FileStorageBackend: source.FileStorageBackend,
|
||||||
CreatedBy: createdBy,
|
ThumbnailKey: source.ThumbnailKey,
|
||||||
Comment: &comment,
|
CreatedBy: createdBy,
|
||||||
|
Comment: &comment,
|
||||||
}
|
}
|
||||||
|
|
||||||
// Insert the new revision
|
// Insert the new revision
|
||||||
@@ -626,13 +638,13 @@ func (r *ItemRepository) CreateRevisionFromExisting(ctx context.Context, itemID
|
|||||||
err = r.db.pool.QueryRow(ctx, `
|
err = r.db.pool.QueryRow(ctx, `
|
||||||
INSERT INTO revisions (
|
INSERT INTO revisions (
|
||||||
item_id, revision_number, properties, file_key, file_version,
|
item_id, revision_number, properties, file_key, file_version,
|
||||||
file_checksum, file_size, thumbnail_key, created_by, comment, status
|
file_checksum, file_size, file_storage_backend, thumbnail_key, created_by, comment, status
|
||||||
)
|
)
|
||||||
SELECT $1, current_revision + 1, $2, $3, $4, $5, $6, $7, $8, $9, 'draft'
|
SELECT $1, current_revision + 1, $2, $3, $4, $5, $6, $7, $8, $9, $10, 'draft'
|
||||||
FROM items WHERE id = $1
|
FROM items WHERE id = $1
|
||||||
RETURNING id, revision_number, created_at
|
RETURNING id, revision_number, created_at
|
||||||
`, newRev.ItemID, propsJSON, newRev.FileKey, newRev.FileVersion,
|
`, newRev.ItemID, propsJSON, newRev.FileKey, newRev.FileVersion,
|
||||||
newRev.FileChecksum, newRev.FileSize, newRev.ThumbnailKey, newRev.CreatedBy, newRev.Comment,
|
newRev.FileChecksum, newRev.FileSize, newRev.FileStorageBackend, newRev.ThumbnailKey, newRev.CreatedBy, newRev.Comment,
|
||||||
).Scan(&newRev.ID, &newRev.RevisionNumber, &newRev.CreatedAt)
|
).Scan(&newRev.ID, &newRev.RevisionNumber, &newRev.CreatedAt)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("inserting revision: %w", err)
|
return nil, fmt.Errorf("inserting revision: %w", err)
|
||||||
|
|||||||
230
internal/db/locations.go
Normal file
230
internal/db/locations.go
Normal file
@@ -0,0 +1,230 @@
|
|||||||
|
package db
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/jackc/pgx/v5"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Location represents a location in the hierarchy.
type Location struct {
	ID           string         // database-generated identifier (returned via RETURNING on insert)
	Path         string         // full slash-separated path; doubles as the natural key for lookups
	Name         string         // display name
	ParentID     *string        // ID of the parent location; nil for top-level locations
	LocationType string         // free-form classification (e.g. "room", "shelf") — stored verbatim
	Depth        int            // number of "/" separators in Path (0 for roots); derived in Create
	Metadata     map[string]any // arbitrary JSON metadata, persisted in the metadata column
	CreatedAt    time.Time      // set by the database on insert
}
|
||||||
|
|
||||||
|
// LocationRepository provides location database operations.
type LocationRepository struct {
	db *DB // shared connection-pool wrapper; all queries go through db.pool
}

// NewLocationRepository creates a new location repository.
func NewLocationRepository(db *DB) *LocationRepository {
	return &LocationRepository{db: db}
}
|
||||||
|
|
||||||
|
// List returns all locations ordered by path.
|
||||||
|
func (r *LocationRepository) List(ctx context.Context) ([]*Location, error) {
|
||||||
|
rows, err := r.db.pool.Query(ctx, `
|
||||||
|
SELECT id, path, name, parent_id, location_type, depth, metadata, created_at
|
||||||
|
FROM locations
|
||||||
|
ORDER BY path
|
||||||
|
`)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
return scanLocations(rows)
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetByPath returns a location by its path.
|
||||||
|
func (r *LocationRepository) GetByPath(ctx context.Context, path string) (*Location, error) {
|
||||||
|
loc := &Location{}
|
||||||
|
var meta []byte
|
||||||
|
err := r.db.pool.QueryRow(ctx, `
|
||||||
|
SELECT id, path, name, parent_id, location_type, depth, metadata, created_at
|
||||||
|
FROM locations
|
||||||
|
WHERE path = $1
|
||||||
|
`, path).Scan(&loc.ID, &loc.Path, &loc.Name, &loc.ParentID, &loc.LocationType, &loc.Depth, &meta, &loc.CreatedAt)
|
||||||
|
if err == pgx.ErrNoRows {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if meta != nil {
|
||||||
|
json.Unmarshal(meta, &loc.Metadata)
|
||||||
|
}
|
||||||
|
return loc, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetByID returns a location by its ID.
|
||||||
|
func (r *LocationRepository) GetByID(ctx context.Context, id string) (*Location, error) {
|
||||||
|
loc := &Location{}
|
||||||
|
var meta []byte
|
||||||
|
err := r.db.pool.QueryRow(ctx, `
|
||||||
|
SELECT id, path, name, parent_id, location_type, depth, metadata, created_at
|
||||||
|
FROM locations
|
||||||
|
WHERE id = $1
|
||||||
|
`, id).Scan(&loc.ID, &loc.Path, &loc.Name, &loc.ParentID, &loc.LocationType, &loc.Depth, &meta, &loc.CreatedAt)
|
||||||
|
if err == pgx.ErrNoRows {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if meta != nil {
|
||||||
|
json.Unmarshal(meta, &loc.Metadata)
|
||||||
|
}
|
||||||
|
return loc, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetChildren returns direct children of a location.
|
||||||
|
func (r *LocationRepository) GetChildren(ctx context.Context, parentID string) ([]*Location, error) {
|
||||||
|
rows, err := r.db.pool.Query(ctx, `
|
||||||
|
SELECT id, path, name, parent_id, location_type, depth, metadata, created_at
|
||||||
|
FROM locations
|
||||||
|
WHERE parent_id = $1
|
||||||
|
ORDER BY path
|
||||||
|
`, parentID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
return scanLocations(rows)
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetTree returns a location and all its descendants (by path prefix).
|
||||||
|
func (r *LocationRepository) GetTree(ctx context.Context, rootPath string) ([]*Location, error) {
|
||||||
|
rows, err := r.db.pool.Query(ctx, `
|
||||||
|
SELECT id, path, name, parent_id, location_type, depth, metadata, created_at
|
||||||
|
FROM locations
|
||||||
|
WHERE path = $1 OR path LIKE $2
|
||||||
|
ORDER BY path
|
||||||
|
`, rootPath, rootPath+"/%")
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
return scanLocations(rows)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create inserts a new location. ParentID and Depth are resolved from the path.
|
||||||
|
func (r *LocationRepository) Create(ctx context.Context, loc *Location) error {
|
||||||
|
// Auto-calculate depth from path segments
|
||||||
|
loc.Depth = strings.Count(loc.Path, "/")
|
||||||
|
|
||||||
|
// Resolve parent_id from path if not explicitly set
|
||||||
|
if loc.ParentID == nil && loc.Depth > 0 {
|
||||||
|
parentPath := loc.Path[:strings.LastIndex(loc.Path, "/")]
|
||||||
|
parent, err := r.GetByPath(ctx, parentPath)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("looking up parent %q: %w", parentPath, err)
|
||||||
|
}
|
||||||
|
if parent == nil {
|
||||||
|
return fmt.Errorf("parent location %q does not exist", parentPath)
|
||||||
|
}
|
||||||
|
loc.ParentID = &parent.ID
|
||||||
|
}
|
||||||
|
|
||||||
|
meta, err := json.Marshal(loc.Metadata)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("marshaling metadata: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return r.db.pool.QueryRow(ctx, `
|
||||||
|
INSERT INTO locations (path, name, parent_id, location_type, depth, metadata)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6)
|
||||||
|
RETURNING id, created_at
|
||||||
|
`, loc.Path, loc.Name, loc.ParentID, loc.LocationType, loc.Depth, meta).Scan(&loc.ID, &loc.CreatedAt)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update updates a location's name, type, and metadata.
|
||||||
|
func (r *LocationRepository) Update(ctx context.Context, path string, name, locationType string, metadata map[string]any) error {
|
||||||
|
meta, err := json.Marshal(metadata)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("marshaling metadata: %w", err)
|
||||||
|
}
|
||||||
|
tag, err := r.db.pool.Exec(ctx, `
|
||||||
|
UPDATE locations
|
||||||
|
SET name = $2, location_type = $3, metadata = $4
|
||||||
|
WHERE path = $1
|
||||||
|
`, path, name, locationType, meta)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if tag.RowsAffected() == 0 {
|
||||||
|
return fmt.Errorf("location %q not found", path)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete removes a location. Returns an error if inventory rows reference it.
|
||||||
|
func (r *LocationRepository) Delete(ctx context.Context, path string) error {
|
||||||
|
// Check for inventory references
|
||||||
|
var count int
|
||||||
|
err := r.db.pool.QueryRow(ctx, `
|
||||||
|
SELECT COUNT(*) FROM inventory
|
||||||
|
WHERE location_id = (SELECT id FROM locations WHERE path = $1)
|
||||||
|
`, path).Scan(&count)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if count > 0 {
|
||||||
|
return fmt.Errorf("cannot delete location %q: %d inventory record(s) exist", path, count)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete children first (cascade by path prefix), deepest first
|
||||||
|
_, err = r.db.pool.Exec(ctx, `
|
||||||
|
DELETE FROM locations
|
||||||
|
WHERE path LIKE $1
|
||||||
|
`, path+"/%")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
tag, err := r.db.pool.Exec(ctx, `DELETE FROM locations WHERE path = $1`, path)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if tag.RowsAffected() == 0 {
|
||||||
|
return fmt.Errorf("location %q not found", path)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// HasInventory checks if a location (or descendants) have inventory records.
|
||||||
|
func (r *LocationRepository) HasInventory(ctx context.Context, path string) (bool, error) {
|
||||||
|
var count int
|
||||||
|
err := r.db.pool.QueryRow(ctx, `
|
||||||
|
SELECT COUNT(*) FROM inventory i
|
||||||
|
JOIN locations l ON l.id = i.location_id
|
||||||
|
WHERE l.path = $1 OR l.path LIKE $2
|
||||||
|
`, path, path+"/%").Scan(&count)
|
||||||
|
return count > 0, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func scanLocations(rows pgx.Rows) ([]*Location, error) {
|
||||||
|
var locs []*Location
|
||||||
|
for rows.Next() {
|
||||||
|
loc := &Location{}
|
||||||
|
var meta []byte
|
||||||
|
if err := rows.Scan(&loc.ID, &loc.Path, &loc.Name, &loc.ParentID, &loc.LocationType, &loc.Depth, &meta, &loc.CreatedAt); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if meta != nil {
|
||||||
|
json.Unmarshal(meta, &loc.Metadata)
|
||||||
|
}
|
||||||
|
locs = append(locs, loc)
|
||||||
|
}
|
||||||
|
return locs, rows.Err()
|
||||||
|
}
|
||||||
105
internal/db/settings.go
Normal file
105
internal/db/settings.go
Normal file
@@ -0,0 +1,105 @@
|
|||||||
|
package db
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SettingsRepository provides access to module_state and settings_overrides tables.
type SettingsRepository struct {
	db *DB // shared connection-pool wrapper
}

// NewSettingsRepository creates a new SettingsRepository.
func NewSettingsRepository(db *DB) *SettingsRepository {
	return &SettingsRepository{db: db}
}
|
||||||
|
|
||||||
|
// GetModuleStates returns all module enabled/disabled states from the database.
|
||||||
|
func (r *SettingsRepository) GetModuleStates(ctx context.Context) (map[string]bool, error) {
|
||||||
|
rows, err := r.db.pool.Query(ctx,
|
||||||
|
`SELECT module_id, enabled FROM module_state`)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("querying module states: %w", err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
states := make(map[string]bool)
|
||||||
|
for rows.Next() {
|
||||||
|
var id string
|
||||||
|
var enabled bool
|
||||||
|
if err := rows.Scan(&id, &enabled); err != nil {
|
||||||
|
return nil, fmt.Errorf("scanning module state: %w", err)
|
||||||
|
}
|
||||||
|
states[id] = enabled
|
||||||
|
}
|
||||||
|
return states, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetModuleState persists a module's enabled state. Uses upsert semantics.
|
||||||
|
func (r *SettingsRepository) SetModuleState(ctx context.Context, moduleID string, enabled bool, updatedBy string) error {
|
||||||
|
_, err := r.db.pool.Exec(ctx,
|
||||||
|
`INSERT INTO module_state (module_id, enabled, updated_by, updated_at)
|
||||||
|
VALUES ($1, $2, $3, now())
|
||||||
|
ON CONFLICT (module_id) DO UPDATE
|
||||||
|
SET enabled = EXCLUDED.enabled,
|
||||||
|
updated_by = EXCLUDED.updated_by,
|
||||||
|
updated_at = now()`,
|
||||||
|
moduleID, enabled, updatedBy)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("setting module state: %w", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetOverrides returns all settings overrides from the database.
|
||||||
|
func (r *SettingsRepository) GetOverrides(ctx context.Context) (map[string]json.RawMessage, error) {
|
||||||
|
rows, err := r.db.pool.Query(ctx,
|
||||||
|
`SELECT key, value FROM settings_overrides`)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("querying settings overrides: %w", err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
overrides := make(map[string]json.RawMessage)
|
||||||
|
for rows.Next() {
|
||||||
|
var key string
|
||||||
|
var value json.RawMessage
|
||||||
|
if err := rows.Scan(&key, &value); err != nil {
|
||||||
|
return nil, fmt.Errorf("scanning settings override: %w", err)
|
||||||
|
}
|
||||||
|
overrides[key] = value
|
||||||
|
}
|
||||||
|
return overrides, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetOverride persists a settings override. Uses upsert semantics.
|
||||||
|
func (r *SettingsRepository) SetOverride(ctx context.Context, key string, value any, updatedBy string) error {
|
||||||
|
jsonVal, err := json.Marshal(value)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("marshaling override value: %w", err)
|
||||||
|
}
|
||||||
|
_, err = r.db.pool.Exec(ctx,
|
||||||
|
`INSERT INTO settings_overrides (key, value, updated_by, updated_at)
|
||||||
|
VALUES ($1, $2, $3, now())
|
||||||
|
ON CONFLICT (key) DO UPDATE
|
||||||
|
SET value = EXCLUDED.value,
|
||||||
|
updated_by = EXCLUDED.updated_by,
|
||||||
|
updated_at = now()`,
|
||||||
|
key, jsonVal, updatedBy)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("setting override: %w", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteOverride removes a settings override.
|
||||||
|
func (r *SettingsRepository) DeleteOverride(ctx context.Context, key string) error {
|
||||||
|
_, err := r.db.pool.Exec(ctx,
|
||||||
|
`DELETE FROM settings_overrides WHERE key = $1`, key)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("deleting override: %w", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
148
internal/kc/kc.go
Normal file
148
internal/kc/kc.go
Normal file
@@ -0,0 +1,148 @@
|
|||||||
|
// Package kc extracts and parses the silo/ metadata directory from .kc files.
|
||||||
|
//
|
||||||
|
// A .kc file is a ZIP archive (superset of .fcstd) that contains a silo/
|
||||||
|
// directory with JSON metadata entries. This package handles extraction and
|
||||||
|
// packing — no database or HTTP dependencies.
|
||||||
|
package kc
|
||||||
|
|
||||||
|
import (
|
||||||
|
"archive/zip"
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Manifest represents the contents of silo/manifest.json.
type Manifest struct {
	UUID         string `json:"uuid"`
	KCVersion    string `json:"kc_version"`
	RevisionHash string `json:"revision_hash"`
	SiloInstance string `json:"silo_instance"`
}

// Metadata represents the contents of silo/metadata.json.
type Metadata struct {
	SchemaName     string         `json:"schema_name"`
	Tags           []string       `json:"tags"`
	LifecycleState string         `json:"lifecycle_state"`
	Fields         map[string]any `json:"fields"`
}

// Dependency represents one entry in silo/dependencies.json.
type Dependency struct {
	UUID         string  `json:"uuid"`
	PartNumber   string  `json:"part_number"`
	Revision     int     `json:"revision"`
	Quantity     float64 `json:"quantity"`
	Label        string  `json:"label"`
	Relationship string  `json:"relationship"`
}

// ExtractResult holds the parsed silo/ directory contents from a .kc file.
// Any field may be nil/empty when the corresponding entry was absent.
type ExtractResult struct {
	Manifest     *Manifest
	Metadata     *Metadata
	Dependencies []Dependency
}

// HistoryEntry represents one entry in silo/history.json.
type HistoryEntry struct {
	RevisionNumber int      `json:"revision_number"`
	CreatedAt      string   `json:"created_at"`
	CreatedBy      *string  `json:"created_by,omitempty"`
	Comment        *string  `json:"comment,omitempty"`
	Status         string   `json:"status"`
	Labels         []string `json:"labels"`
}

// PackInput holds all the data needed to repack silo/ entries in a .kc file.
// Each field is optional — nil/empty means the entry is omitted from the ZIP.
type PackInput struct {
	Manifest     *Manifest
	Metadata     *Metadata
	History      []HistoryEntry
	Dependencies []Dependency
}

// Extract opens a ZIP archive from data and parses the silo/ directory.
// Returns nil, nil if no silo/ directory is found (plain .fcstd file).
// Returns nil, error if silo/ entries exist but fail to parse.
func Extract(data []byte) (*ExtractResult, error) {
	r, err := zip.NewReader(bytes.NewReader(data), int64(len(data)))
	if err != nil {
		return nil, fmt.Errorf("kc: open zip: %w", err)
	}

	var manifestFile, metadataFile, dependenciesFile *zip.File
	hasSiloDir := false

	for _, f := range r.File {
		// HasPrefix also matches the bare "silo/" directory entry itself,
		// so no separate equality check is needed.
		if strings.HasPrefix(f.Name, "silo/") {
			hasSiloDir = true
		}
		switch f.Name {
		case "silo/manifest.json":
			manifestFile = f
		case "silo/metadata.json":
			metadataFile = f
		case "silo/dependencies.json":
			dependenciesFile = f
		}
	}

	if !hasSiloDir {
		return nil, nil // plain .fcstd, no extraction
	}

	result := &ExtractResult{}

	if manifestFile != nil {
		m, err := readJSON[Manifest](manifestFile)
		if err != nil {
			return nil, fmt.Errorf("kc: parse manifest.json: %w", err)
		}
		result.Manifest = m
	}

	if metadataFile != nil {
		m, err := readJSON[Metadata](metadataFile)
		if err != nil {
			return nil, fmt.Errorf("kc: parse metadata.json: %w", err)
		}
		result.Metadata = m
	}

	if dependenciesFile != nil {
		deps, err := readJSON[[]Dependency](dependenciesFile)
		if err != nil {
			return nil, fmt.Errorf("kc: parse dependencies.json: %w", err)
		}
		if deps != nil {
			result.Dependencies = *deps
		}
	}

	return result, nil
}

// readJSON opens a zip.File and decodes its contents as JSON into T.
func readJSON[T any](f *zip.File) (*T, error) {
	rc, err := f.Open()
	if err != nil {
		return nil, err
	}
	defer rc.Close()

	data, err := io.ReadAll(rc)
	if err != nil {
		return nil, err
	}

	var v T
	if err := json.Unmarshal(data, &v); err != nil {
		return nil, err
	}
	return &v, nil
}
}
|
||||||
188
internal/kc/kc_test.go
Normal file
188
internal/kc/kc_test.go
Normal file
@@ -0,0 +1,188 @@
|
|||||||
|
package kc
|
||||||
|
|
||||||
|
import (
|
||||||
|
"archive/zip"
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
// buildZip creates a ZIP archive in memory from a map of filename → content.
// Fails the test immediately on any write error.
func buildZip(t *testing.T, files map[string][]byte) []byte {
	t.Helper()
	var buf bytes.Buffer
	w := zip.NewWriter(&buf)
	for name, content := range files {
		f, err := w.Create(name)
		if err != nil {
			t.Fatalf("creating zip entry %s: %v", name, err)
		}
		if _, err := f.Write(content); err != nil {
			t.Fatalf("writing zip entry %s: %v", name, err)
		}
	}
	// Close flushes the central directory; without it the archive is invalid.
	if err := w.Close(); err != nil {
		t.Fatalf("closing zip: %v", err)
	}
	return buf.Bytes()
}

// mustJSON marshals v to JSON, failing the test on error.
func mustJSON(t *testing.T, v any) []byte {
	t.Helper()
	data, err := json.Marshal(v)
	if err != nil {
		t.Fatalf("marshaling JSON: %v", err)
	}
	return data
}
|
||||||
|
|
||||||
|
func TestExtract_PlainFCStd(t *testing.T) {
|
||||||
|
data := buildZip(t, map[string][]byte{
|
||||||
|
"Document.xml": []byte("<xml/>"),
|
||||||
|
"thumbnails/a.png": []byte("png"),
|
||||||
|
})
|
||||||
|
|
||||||
|
result, err := Extract(data)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("unexpected error: %v", err)
|
||||||
|
}
|
||||||
|
if result != nil {
|
||||||
|
t.Fatalf("expected nil result for plain .fcstd, got %+v", result)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExtract_ValidKC(t *testing.T) {
|
||||||
|
manifest := Manifest{
|
||||||
|
UUID: "550e8400-e29b-41d4-a716-446655440000",
|
||||||
|
KCVersion: "1.0",
|
||||||
|
RevisionHash: "abc123",
|
||||||
|
SiloInstance: "https://silo.example.com",
|
||||||
|
}
|
||||||
|
metadata := Metadata{
|
||||||
|
SchemaName: "mechanical-part-v2",
|
||||||
|
Tags: []string{"structural", "aluminum"},
|
||||||
|
LifecycleState: "draft",
|
||||||
|
Fields: map[string]any{
|
||||||
|
"material": "6061-T6",
|
||||||
|
"weight_kg": 0.34,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
data := buildZip(t, map[string][]byte{
|
||||||
|
"Document.xml": []byte("<xml/>"),
|
||||||
|
"silo/manifest.json": mustJSON(t, manifest),
|
||||||
|
"silo/metadata.json": mustJSON(t, metadata),
|
||||||
|
})
|
||||||
|
|
||||||
|
result, err := Extract(data)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("unexpected error: %v", err)
|
||||||
|
}
|
||||||
|
if result == nil {
|
||||||
|
t.Fatal("expected non-nil result")
|
||||||
|
}
|
||||||
|
|
||||||
|
if result.Manifest == nil {
|
||||||
|
t.Fatal("expected manifest")
|
||||||
|
}
|
||||||
|
if result.Manifest.UUID != manifest.UUID {
|
||||||
|
t.Errorf("manifest UUID = %q, want %q", result.Manifest.UUID, manifest.UUID)
|
||||||
|
}
|
||||||
|
if result.Manifest.KCVersion != manifest.KCVersion {
|
||||||
|
t.Errorf("manifest KCVersion = %q, want %q", result.Manifest.KCVersion, manifest.KCVersion)
|
||||||
|
}
|
||||||
|
if result.Manifest.RevisionHash != manifest.RevisionHash {
|
||||||
|
t.Errorf("manifest RevisionHash = %q, want %q", result.Manifest.RevisionHash, manifest.RevisionHash)
|
||||||
|
}
|
||||||
|
if result.Manifest.SiloInstance != manifest.SiloInstance {
|
||||||
|
t.Errorf("manifest SiloInstance = %q, want %q", result.Manifest.SiloInstance, manifest.SiloInstance)
|
||||||
|
}
|
||||||
|
|
||||||
|
if result.Metadata == nil {
|
||||||
|
t.Fatal("expected metadata")
|
||||||
|
}
|
||||||
|
if result.Metadata.SchemaName != metadata.SchemaName {
|
||||||
|
t.Errorf("metadata SchemaName = %q, want %q", result.Metadata.SchemaName, metadata.SchemaName)
|
||||||
|
}
|
||||||
|
if result.Metadata.LifecycleState != metadata.LifecycleState {
|
||||||
|
t.Errorf("metadata LifecycleState = %q, want %q", result.Metadata.LifecycleState, metadata.LifecycleState)
|
||||||
|
}
|
||||||
|
if len(result.Metadata.Tags) != 2 {
|
||||||
|
t.Errorf("metadata Tags len = %d, want 2", len(result.Metadata.Tags))
|
||||||
|
}
|
||||||
|
if result.Metadata.Fields["material"] != "6061-T6" {
|
||||||
|
t.Errorf("metadata Fields[material] = %v, want 6061-T6", result.Metadata.Fields["material"])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExtract_ManifestOnly(t *testing.T) {
|
||||||
|
manifest := Manifest{
|
||||||
|
UUID: "550e8400-e29b-41d4-a716-446655440000",
|
||||||
|
KCVersion: "1.0",
|
||||||
|
}
|
||||||
|
|
||||||
|
data := buildZip(t, map[string][]byte{
|
||||||
|
"Document.xml": []byte("<xml/>"),
|
||||||
|
"silo/manifest.json": mustJSON(t, manifest),
|
||||||
|
})
|
||||||
|
|
||||||
|
result, err := Extract(data)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("unexpected error: %v", err)
|
||||||
|
}
|
||||||
|
if result == nil {
|
||||||
|
t.Fatal("expected non-nil result")
|
||||||
|
}
|
||||||
|
if result.Manifest == nil {
|
||||||
|
t.Fatal("expected manifest")
|
||||||
|
}
|
||||||
|
if result.Metadata != nil {
|
||||||
|
t.Errorf("expected nil metadata, got %+v", result.Metadata)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExtract_InvalidJSON(t *testing.T) {
|
||||||
|
data := buildZip(t, map[string][]byte{
|
||||||
|
"silo/manifest.json": []byte("{not valid json"),
|
||||||
|
})
|
||||||
|
|
||||||
|
result, err := Extract(data)
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("expected error for invalid JSON")
|
||||||
|
}
|
||||||
|
if result != nil {
|
||||||
|
t.Errorf("expected nil result on error, got %+v", result)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExtract_NotAZip(t *testing.T) {
|
||||||
|
result, err := Extract([]byte("this is not a zip file"))
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("expected error for non-ZIP data")
|
||||||
|
}
|
||||||
|
if result != nil {
|
||||||
|
t.Errorf("expected nil result on error, got %+v", result)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExtract_EmptySiloDir(t *testing.T) {
|
||||||
|
// silo/ directory entry exists but no manifest or metadata files
|
||||||
|
data := buildZip(t, map[string][]byte{
|
||||||
|
"Document.xml": []byte("<xml/>"),
|
||||||
|
"silo/": {},
|
||||||
|
})
|
||||||
|
|
||||||
|
result, err := Extract(data)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("unexpected error: %v", err)
|
||||||
|
}
|
||||||
|
if result == nil {
|
||||||
|
t.Fatal("expected non-nil result for silo/ dir")
|
||||||
|
}
|
||||||
|
if result.Manifest != nil {
|
||||||
|
t.Errorf("expected nil manifest, got %+v", result.Manifest)
|
||||||
|
}
|
||||||
|
if result.Metadata != nil {
|
||||||
|
t.Errorf("expected nil metadata, got %+v", result.Metadata)
|
||||||
|
}
|
||||||
|
}
|
||||||
131
internal/kc/pack.go
Normal file
131
internal/kc/pack.go
Normal file
@@ -0,0 +1,131 @@
|
|||||||
|
package kc
|
||||||
|
|
||||||
|
import (
|
||||||
|
"archive/zip"
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// HasSiloDir opens a ZIP archive and returns true if any entry starts with "silo/".
|
||||||
|
// This is a lightweight check used to short-circuit before gathering DB data.
|
||||||
|
func HasSiloDir(data []byte) (bool, error) {
|
||||||
|
r, err := zip.NewReader(bytes.NewReader(data), int64(len(data)))
|
||||||
|
if err != nil {
|
||||||
|
return false, fmt.Errorf("kc: open zip: %w", err)
|
||||||
|
}
|
||||||
|
for _, f := range r.File {
|
||||||
|
if f.Name == "silo/" || strings.HasPrefix(f.Name, "silo/") {
|
||||||
|
return true, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Pack takes original ZIP file bytes and a PackInput, and returns new ZIP bytes
|
||||||
|
// with all silo/ entries replaced by the data from input. Non-silo entries
|
||||||
|
// (FreeCAD Document.xml, thumbnails, etc.) are copied verbatim with their
|
||||||
|
// original compression method and timestamps preserved.
|
||||||
|
//
|
||||||
|
// If the original ZIP contains no silo/ directory, the original bytes are
|
||||||
|
// returned unchanged (plain .fcstd pass-through).
|
||||||
|
func Pack(original []byte, input *PackInput) ([]byte, error) {
|
||||||
|
r, err := zip.NewReader(bytes.NewReader(original), int64(len(original)))
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("kc: open zip: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Partition entries into silo/ vs non-silo.
|
||||||
|
hasSilo := false
|
||||||
|
for _, f := range r.File {
|
||||||
|
if f.Name == "silo/" || strings.HasPrefix(f.Name, "silo/") {
|
||||||
|
hasSilo = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !hasSilo {
|
||||||
|
return original, nil // plain .fcstd, no repacking needed
|
||||||
|
}
|
||||||
|
|
||||||
|
var buf bytes.Buffer
|
||||||
|
zw := zip.NewWriter(&buf)
|
||||||
|
|
||||||
|
// Copy all non-silo entries verbatim.
|
||||||
|
for _, f := range r.File {
|
||||||
|
if f.Name == "silo/" || strings.HasPrefix(f.Name, "silo/") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if err := copyZipEntry(zw, f); err != nil {
|
||||||
|
return nil, fmt.Errorf("kc: copying entry %s: %w", f.Name, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write new silo/ entries from PackInput.
|
||||||
|
if input.Manifest != nil {
|
||||||
|
if err := writeJSONEntry(zw, "silo/manifest.json", input.Manifest); err != nil {
|
||||||
|
return nil, fmt.Errorf("kc: writing manifest.json: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if input.Metadata != nil {
|
||||||
|
if err := writeJSONEntry(zw, "silo/metadata.json", input.Metadata); err != nil {
|
||||||
|
return nil, fmt.Errorf("kc: writing metadata.json: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if input.History != nil {
|
||||||
|
if err := writeJSONEntry(zw, "silo/history.json", input.History); err != nil {
|
||||||
|
return nil, fmt.Errorf("kc: writing history.json: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if input.Dependencies != nil {
|
||||||
|
if err := writeJSONEntry(zw, "silo/dependencies.json", input.Dependencies); err != nil {
|
||||||
|
return nil, fmt.Errorf("kc: writing dependencies.json: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := zw.Close(); err != nil {
|
||||||
|
return nil, fmt.Errorf("kc: closing zip writer: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return buf.Bytes(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// copyZipEntry copies a single entry from the original ZIP to the new writer,
|
||||||
|
// preserving the file header (compression method, timestamps, etc.).
|
||||||
|
func copyZipEntry(zw *zip.Writer, f *zip.File) error {
|
||||||
|
header := f.FileHeader
|
||||||
|
w, err := zw.CreateHeader(&header)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
rc, err := f.Open()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer rc.Close()
|
||||||
|
|
||||||
|
_, err = io.Copy(w, rc)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// writeJSONEntry writes a new silo/ entry as JSON with Deflate compression.
|
||||||
|
func writeJSONEntry(zw *zip.Writer, name string, v any) error {
|
||||||
|
data, err := json.MarshalIndent(v, "", " ")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
header := &zip.FileHeader{
|
||||||
|
Name: name,
|
||||||
|
Method: zip.Deflate,
|
||||||
|
}
|
||||||
|
w, err := zw.CreateHeader(header)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = w.Write(data)
|
||||||
|
return err
|
||||||
|
}
|
||||||
229
internal/kc/pack_test.go
Normal file
229
internal/kc/pack_test.go
Normal file
@@ -0,0 +1,229 @@
|
|||||||
|
package kc
|
||||||
|
|
||||||
|
import (
|
||||||
|
"archive/zip"
|
||||||
|
"bytes"
|
||||||
|
"io"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestHasSiloDir_PlainFCStd(t *testing.T) {
|
||||||
|
data := buildZip(t, map[string][]byte{
|
||||||
|
"Document.xml": []byte("<xml/>"),
|
||||||
|
})
|
||||||
|
has, err := HasSiloDir(data)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("unexpected error: %v", err)
|
||||||
|
}
|
||||||
|
if has {
|
||||||
|
t.Fatal("expected false for plain .fcstd")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHasSiloDir_KC(t *testing.T) {
|
||||||
|
data := buildZip(t, map[string][]byte{
|
||||||
|
"Document.xml": []byte("<xml/>"),
|
||||||
|
"silo/manifest.json": []byte("{}"),
|
||||||
|
})
|
||||||
|
has, err := HasSiloDir(data)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("unexpected error: %v", err)
|
||||||
|
}
|
||||||
|
if !has {
|
||||||
|
t.Fatal("expected true for .kc with silo/ dir")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHasSiloDir_NotAZip(t *testing.T) {
|
||||||
|
_, err := HasSiloDir([]byte("not a zip"))
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("expected error for non-ZIP data")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPack_PlainFCStd_Passthrough(t *testing.T) {
|
||||||
|
original := buildZip(t, map[string][]byte{
|
||||||
|
"Document.xml": []byte("<xml/>"),
|
||||||
|
"thumbnails/a.png": []byte("png-data"),
|
||||||
|
})
|
||||||
|
|
||||||
|
result, err := Pack(original, &PackInput{
|
||||||
|
Manifest: &Manifest{UUID: "test"},
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("unexpected error: %v", err)
|
||||||
|
}
|
||||||
|
if !bytes.Equal(result, original) {
|
||||||
|
t.Fatal("expected original bytes returned unchanged for plain .fcstd")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestPack_RoundTrip is the end-to-end check for Pack: an existing .kc
// archive with stale silo/ data is repacked with fresh manifest, metadata,
// history, and dependencies; the result is then re-read with Extract and
// the raw ZIP is inspected to confirm (a) the new silo/ data won, and
// (b) the non-silo FreeCAD entries survived byte-for-byte.
func TestPack_RoundTrip(t *testing.T) {
	// Build a .kc with old silo/ data
	oldManifest := Manifest{UUID: "old-uuid", KCVersion: "0.9", RevisionHash: "old-hash"}
	oldMetadata := Metadata{SchemaName: "old-schema", Tags: []string{"old"}, LifecycleState: "draft"}

	original := buildZip(t, map[string][]byte{
		"Document.xml":       []byte("<freecad/>"),
		"thumbnails/t.png":   []byte("thumb-data"),
		"silo/manifest.json": mustJSON(t, oldManifest),
		"silo/metadata.json": mustJSON(t, oldMetadata),
	})

	// Pack with new data
	newManifest := &Manifest{UUID: "new-uuid", KCVersion: "1.0", RevisionHash: "new-hash", SiloInstance: "https://silo.test"}
	newMetadata := &Metadata{SchemaName: "mechanical-part-v2", Tags: []string{"aluminum", "structural"}, LifecycleState: "review", Fields: map[string]any{"material": "7075-T6"}}
	comment := "initial commit"
	history := []HistoryEntry{
		{RevisionNumber: 1, CreatedAt: "2026-01-01T00:00:00Z", Comment: &comment, Status: "draft", Labels: []string{}},
	}

	packed, err := Pack(original, &PackInput{
		Manifest:     newManifest,
		Metadata:     newMetadata,
		History:      history,
		Dependencies: []Dependency{},
	})
	if err != nil {
		t.Fatalf("Pack error: %v", err)
	}

	// Extract and verify new silo/ data
	result, err := Extract(packed)
	if err != nil {
		t.Fatalf("Extract error: %v", err)
	}
	if result == nil {
		t.Fatal("expected non-nil extract result")
	}
	if result.Manifest.UUID != "new-uuid" {
		t.Errorf("manifest UUID = %q, want %q", result.Manifest.UUID, "new-uuid")
	}
	if result.Manifest.KCVersion != "1.0" {
		t.Errorf("manifest KCVersion = %q, want %q", result.Manifest.KCVersion, "1.0")
	}
	if result.Manifest.SiloInstance != "https://silo.test" {
		t.Errorf("manifest SiloInstance = %q, want %q", result.Manifest.SiloInstance, "https://silo.test")
	}
	if result.Metadata.SchemaName != "mechanical-part-v2" {
		t.Errorf("metadata SchemaName = %q, want %q", result.Metadata.SchemaName, "mechanical-part-v2")
	}
	if result.Metadata.LifecycleState != "review" {
		t.Errorf("metadata LifecycleState = %q, want %q", result.Metadata.LifecycleState, "review")
	}
	if len(result.Metadata.Tags) != 2 {
		t.Errorf("metadata Tags len = %d, want 2", len(result.Metadata.Tags))
	}
	if result.Metadata.Fields["material"] != "7075-T6" {
		t.Errorf("metadata Fields[material] = %v, want 7075-T6", result.Metadata.Fields["material"])
	}

	// Verify non-silo entries are preserved
	r, err := zip.NewReader(bytes.NewReader(packed), int64(len(packed)))
	if err != nil {
		t.Fatalf("opening packed ZIP: %v", err)
	}
	entryMap := make(map[string]bool)
	for _, f := range r.File {
		entryMap[f.Name] = true
	}
	if !entryMap["Document.xml"] {
		t.Error("Document.xml missing from packed ZIP")
	}
	if !entryMap["thumbnails/t.png"] {
		t.Error("thumbnails/t.png missing from packed ZIP")
	}

	// Verify non-silo content is byte-identical
	for _, f := range r.File {
		if f.Name == "Document.xml" {
			content := readZipEntry(t, f)
			if string(content) != "<freecad/>" {
				t.Errorf("Document.xml content = %q, want %q", content, "<freecad/>")
			}
		}
		if f.Name == "thumbnails/t.png" {
			content := readZipEntry(t, f)
			if string(content) != "thumb-data" {
				t.Errorf("thumbnails/t.png content = %q, want %q", content, "thumb-data")
			}
		}
	}
}
|
||||||
|
|
||||||
|
// TestPack_NilFields verifies that nil PackInput sections are simply
// omitted: the manifest is rewritten while metadata stays absent, and the
// stale silo/metadata.json from the original archive does not survive the
// repack.
func TestPack_NilFields(t *testing.T) {
	original := buildZip(t, map[string][]byte{
		"Document.xml":       []byte("<xml/>"),
		"silo/manifest.json": []byte(`{"uuid":"x"}`),
	})

	// Pack with only manifest, nil metadata/history/deps
	packed, err := Pack(original, &PackInput{
		Manifest: &Manifest{UUID: "updated"},
	})
	if err != nil {
		t.Fatalf("Pack error: %v", err)
	}

	// Extract — should have manifest but no metadata
	result, err := Extract(packed)
	if err != nil {
		t.Fatalf("Extract error: %v", err)
	}
	if result.Manifest == nil || result.Manifest.UUID != "updated" {
		t.Errorf("manifest UUID = %v, want updated", result.Manifest)
	}
	if result.Metadata != nil {
		t.Errorf("expected nil metadata, got %+v", result.Metadata)
	}

	// Verify no old silo/ entries leaked through
	r, _ := zip.NewReader(bytes.NewReader(packed), int64(len(packed)))
	for _, f := range r.File {
		if f.Name == "silo/metadata.json" {
			t.Error("old silo/metadata.json should have been removed")
		}
	}
}
|
||||||
|
|
||||||
|
func TestPack_EmptyDependencies(t *testing.T) {
|
||||||
|
original := buildZip(t, map[string][]byte{
|
||||||
|
"silo/manifest.json": []byte(`{"uuid":"x"}`),
|
||||||
|
})
|
||||||
|
|
||||||
|
packed, err := Pack(original, &PackInput{
|
||||||
|
Manifest: &Manifest{UUID: "x"},
|
||||||
|
Dependencies: []Dependency{},
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Pack error: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify dependencies.json exists and is []
|
||||||
|
r, _ := zip.NewReader(bytes.NewReader(packed), int64(len(packed)))
|
||||||
|
for _, f := range r.File {
|
||||||
|
if f.Name == "silo/dependencies.json" {
|
||||||
|
content := readZipEntry(t, f)
|
||||||
|
if string(content) != "[]" {
|
||||||
|
t.Errorf("dependencies.json = %q, want %q", content, "[]")
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
t.Error("silo/dependencies.json not found in packed ZIP")
|
||||||
|
}
|
||||||
|
|
||||||
|
// readZipEntry reads the full contents of a zip.File.
|
||||||
|
func readZipEntry(t *testing.T, f *zip.File) []byte {
|
||||||
|
t.Helper()
|
||||||
|
rc, err := f.Open()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("opening zip entry %s: %v", f.Name, err)
|
||||||
|
}
|
||||||
|
defer rc.Close()
|
||||||
|
data, err := io.ReadAll(rc)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("reading zip entry %s: %v", f.Name, err)
|
||||||
|
}
|
||||||
|
return data
|
||||||
|
}
|
||||||
84
internal/modules/loader.go
Normal file
84
internal/modules/loader.go
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
package modules
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"github.com/jackc/pgx/v5/pgxpool"
|
||||||
|
"github.com/kindredsystems/silo/internal/config"
|
||||||
|
)
|
||||||
|
|
||||||
|
// LoadState applies module state from config YAML and database overrides.
//
// Precedence (highest wins):
// 1. Database module_state table
// 2. YAML modules.* toggles
// 3. Backward-compat YAML fields (auth.enabled, odoo.enabled)
// 4. Module defaults (set by NewRegistry)
//
// The steps below run lowest-precedence first so that each later step
// simply overwrites the earlier one. pool may be nil (e.g. in tests), in
// which case the DB-override step is skipped. The assembled state is
// validated before returning; an error means the final combination of
// toggles violates a module dependency.
func LoadState(r *Registry, cfg *config.Config, pool *pgxpool.Pool) error {
	// Step 1: Apply backward-compat top-level YAML fields.
	// auth.enabled and odoo.enabled existed before the modules section.
	// Only apply if the new modules.* section doesn't override them.
	if cfg.Modules.Auth == nil {
		r.setEnabledUnchecked(Auth, cfg.Auth.Enabled)
	}
	if cfg.Modules.Odoo == nil {
		r.setEnabledUnchecked(Odoo, cfg.Odoo.Enabled)
	}

	// Step 2: Apply explicit modules.* YAML toggles (override defaults + compat).
	// Required modules (core, schemas, storage) have no toggles here —
	// setEnabledUnchecked ignores them anyway.
	applyToggle(r, Auth, cfg.Modules.Auth)
	applyToggle(r, Projects, cfg.Modules.Projects)
	applyToggle(r, Audit, cfg.Modules.Audit)
	applyToggle(r, Odoo, cfg.Modules.Odoo)
	applyToggle(r, FreeCAD, cfg.Modules.FreeCAD)
	applyToggle(r, Jobs, cfg.Modules.Jobs)
	applyToggle(r, DAG, cfg.Modules.DAG)

	// Step 3: Apply database overrides (highest precedence).
	if pool != nil {
		if err := loadFromDB(r, pool); err != nil {
			return err
		}
	}

	// Step 4: Validate the final state.
	return r.ValidateDependencies()
}
|
||||||
|
|
||||||
|
// applyToggle sets a module's state from a YAML ModuleToggle if present.
|
||||||
|
func applyToggle(r *Registry, id string, toggle *config.ModuleToggle) {
|
||||||
|
if toggle == nil || toggle.Enabled == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
r.setEnabledUnchecked(id, *toggle.Enabled)
|
||||||
|
}
|
||||||
|
|
||||||
|
// setEnabledUnchecked sets module state without dependency validation.
|
||||||
|
// Used during loading when the full state is being assembled incrementally.
|
||||||
|
func (r *Registry) setEnabledUnchecked(id string, enabled bool) {
|
||||||
|
r.mu.Lock()
|
||||||
|
defer r.mu.Unlock()
|
||||||
|
if m, ok := r.modules[id]; ok && !m.Required {
|
||||||
|
m.enabled = enabled
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// loadFromDB reads module_state rows and applies them to the registry.
// Rows reference modules by ID; unknown IDs and required modules are
// silently ignored by setEnabledUnchecked.
//
// NOTE(review): uses context.Background() because LoadState takes no ctx
// parameter — consider plumbing a context through if callers ever need
// cancellation or timeouts on startup.
func loadFromDB(r *Registry, pool *pgxpool.Pool) error {
	rows, err := pool.Query(context.Background(),
		`SELECT module_id, enabled FROM module_state`)
	if err != nil {
		return err
	}
	defer rows.Close()

	for rows.Next() {
		var id string
		var enabled bool
		if err := rows.Scan(&id, &enabled); err != nil {
			return err
		}
		r.setEnabledUnchecked(id, enabled)
	}
	// rows.Err surfaces any error hit during iteration (e.g. connection loss).
	return rows.Err()
}
|
||||||
88
internal/modules/loader_test.go
Normal file
88
internal/modules/loader_test.go
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
package modules
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/kindredsystems/silo/internal/config"
|
||||||
|
)
|
||||||
|
|
||||||
|
// boolPtr returns a pointer to a copy of v, for building ModuleToggle values.
func boolPtr(v bool) *bool {
	p := new(bool)
	*p = v
	return p
}
|
||||||
|
|
||||||
|
func TestLoadState_DefaultsOnly(t *testing.T) {
|
||||||
|
r := NewRegistry()
|
||||||
|
cfg := &config.Config{}
|
||||||
|
|
||||||
|
if err := LoadState(r, cfg, nil); err != nil {
|
||||||
|
t.Fatalf("LoadState: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Auth defaults to true from registry, but cfg.Auth.Enabled is false
|
||||||
|
// (zero value) and backward-compat applies, so auth ends up disabled.
|
||||||
|
if r.IsEnabled(Auth) {
|
||||||
|
t.Error("auth should be disabled (cfg.Auth.Enabled is false by default)")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLoadState_BackwardCompat(t *testing.T) {
|
||||||
|
r := NewRegistry()
|
||||||
|
cfg := &config.Config{}
|
||||||
|
cfg.Auth.Enabled = true
|
||||||
|
cfg.Odoo.Enabled = true
|
||||||
|
|
||||||
|
if err := LoadState(r, cfg, nil); err != nil {
|
||||||
|
t.Fatalf("LoadState: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if !r.IsEnabled(Auth) {
|
||||||
|
t.Error("auth should be enabled via cfg.Auth.Enabled")
|
||||||
|
}
|
||||||
|
if !r.IsEnabled(Odoo) {
|
||||||
|
t.Error("odoo should be enabled via cfg.Odoo.Enabled")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLoadState_YAMLModulesOverrideCompat(t *testing.T) {
|
||||||
|
r := NewRegistry()
|
||||||
|
cfg := &config.Config{}
|
||||||
|
cfg.Auth.Enabled = true // compat says enabled
|
||||||
|
cfg.Modules.Auth = &config.ModuleToggle{Enabled: boolPtr(false)} // explicit says disabled
|
||||||
|
|
||||||
|
if err := LoadState(r, cfg, nil); err != nil {
|
||||||
|
t.Fatalf("LoadState: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if r.IsEnabled(Auth) {
|
||||||
|
t.Error("modules.auth.enabled=false should override auth.enabled=true")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLoadState_EnableJobsAndDAG(t *testing.T) {
|
||||||
|
r := NewRegistry()
|
||||||
|
cfg := &config.Config{}
|
||||||
|
cfg.Auth.Enabled = true
|
||||||
|
cfg.Modules.Jobs = &config.ModuleToggle{Enabled: boolPtr(true)}
|
||||||
|
cfg.Modules.DAG = &config.ModuleToggle{Enabled: boolPtr(true)}
|
||||||
|
|
||||||
|
if err := LoadState(r, cfg, nil); err != nil {
|
||||||
|
t.Fatalf("LoadState: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if !r.IsEnabled(Jobs) {
|
||||||
|
t.Error("jobs should be enabled")
|
||||||
|
}
|
||||||
|
if !r.IsEnabled(DAG) {
|
||||||
|
t.Error("dag should be enabled")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLoadState_InvalidDependency(t *testing.T) {
|
||||||
|
r := NewRegistry()
|
||||||
|
cfg := &config.Config{}
|
||||||
|
// Auth disabled (default), but enable jobs which depends on auth.
|
||||||
|
cfg.Modules.Jobs = &config.ModuleToggle{Enabled: boolPtr(true)}
|
||||||
|
|
||||||
|
err := LoadState(r, cfg, nil)
|
||||||
|
if err == nil {
|
||||||
|
t.Error("should fail: jobs enabled but auth disabled")
|
||||||
|
}
|
||||||
|
}
|
||||||
163
internal/modules/modules.go
Normal file
163
internal/modules/modules.go
Normal file
@@ -0,0 +1,163 @@
|
|||||||
|
// Package modules provides the module registry for Silo.
|
||||||
|
// Each module groups API endpoints, UI views, and configuration.
|
||||||
|
// Modules can be required (always on) or optional (admin-toggleable).
|
||||||
|
package modules
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"sort"
|
||||||
|
"sync"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Module IDs. These are the stable string keys used by the config loader
// (modules.*), by the module_state DB table, and by the DependsOn lists in
// builtinModules below.
const (
	Core     = "core"
	Schemas  = "schemas"
	Storage  = "storage"
	Auth     = "auth"
	Projects = "projects"
	Audit    = "audit"
	Odoo     = "odoo"
	FreeCAD  = "freecad"
	Jobs     = "jobs"
	DAG      = "dag"
)

// ModuleInfo describes a module's metadata.
type ModuleInfo struct {
	ID             string   // stable identifier; one of the constants above
	Name           string   // human-readable display name
	Description    string   // short summary of what the module provides
	Required       bool     // cannot be disabled
	DefaultEnabled bool     // initial state for optional modules
	DependsOn      []string // module IDs this module requires
	Version        string   // optional version string; may be empty
}

// registry entries with their runtime enabled state.
type moduleState struct {
	ModuleInfo
	enabled bool // current runtime flag; guarded by Registry.mu
}

// Registry holds all module definitions and their enabled state.
// All access to the modules map is guarded by mu, so a Registry may be
// shared across goroutines.
type Registry struct {
	mu      sync.RWMutex
	modules map[string]*moduleState
}

// builtinModules defines the complete set of Silo modules.
// Required entries are always on regardless of DefaultEnabled; each
// DependsOn entry must name another ID in this table.
var builtinModules = []ModuleInfo{
	{ID: Core, Name: "Core PDM", Description: "Items, revisions, files, BOM, search, import/export", Required: true, Version: "0.2"},
	{ID: Schemas, Name: "Schemas", Description: "Part numbering schema parsing and segment management", Required: true},
	{ID: Storage, Name: "Storage", Description: "MinIO/S3 file storage, presigned uploads", Required: true},
	{ID: Auth, Name: "Authentication", Description: "Local, LDAP, OIDC authentication and RBAC", DefaultEnabled: true},
	{ID: Projects, Name: "Projects", Description: "Project management and item tagging", DefaultEnabled: true},
	{ID: Audit, Name: "Audit", Description: "Audit logging, completeness scoring", DefaultEnabled: true},
	{ID: Odoo, Name: "Odoo ERP", Description: "Odoo integration (config, sync-log, push/pull)", DependsOn: []string{Auth}},
	{ID: FreeCAD, Name: "Create Integration", Description: "URI scheme, executable path, client settings", DefaultEnabled: true},
	{ID: Jobs, Name: "Job Queue", Description: "Async compute jobs, runner management", DependsOn: []string{Auth}},
	{ID: DAG, Name: "Dependency DAG", Description: "Feature DAG sync, validation states, interference detection", DependsOn: []string{Jobs}},
}
|
||||||
|
|
||||||
|
// NewRegistry creates a registry with all builtin modules set to their default state.
|
||||||
|
func NewRegistry() *Registry {
|
||||||
|
r := &Registry{modules: make(map[string]*moduleState, len(builtinModules))}
|
||||||
|
for _, m := range builtinModules {
|
||||||
|
enabled := m.Required || m.DefaultEnabled
|
||||||
|
r.modules[m.ID] = &moduleState{ModuleInfo: m, enabled: enabled}
|
||||||
|
}
|
||||||
|
return r
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsEnabled returns whether a module is currently enabled.
|
||||||
|
func (r *Registry) IsEnabled(id string) bool {
|
||||||
|
r.mu.RLock()
|
||||||
|
defer r.mu.RUnlock()
|
||||||
|
if m, ok := r.modules[id]; ok {
|
||||||
|
return m.enabled
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetEnabled changes a module's enabled state with dependency validation.
|
||||||
|
func (r *Registry) SetEnabled(id string, enabled bool) error {
|
||||||
|
r.mu.Lock()
|
||||||
|
defer r.mu.Unlock()
|
||||||
|
|
||||||
|
m, ok := r.modules[id]
|
||||||
|
if !ok {
|
||||||
|
return fmt.Errorf("unknown module %q", id)
|
||||||
|
}
|
||||||
|
|
||||||
|
if m.Required {
|
||||||
|
return fmt.Errorf("module %q is required and cannot be disabled", id)
|
||||||
|
}
|
||||||
|
|
||||||
|
if enabled {
|
||||||
|
// Check that all dependencies are enabled.
|
||||||
|
for _, dep := range m.DependsOn {
|
||||||
|
if dm, ok := r.modules[dep]; ok && !dm.enabled {
|
||||||
|
return fmt.Errorf("cannot enable %q: dependency %q is disabled", id, dep)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Check that no enabled module depends on this one.
|
||||||
|
for _, other := range r.modules {
|
||||||
|
if !other.enabled || other.ID == id {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for _, dep := range other.DependsOn {
|
||||||
|
if dep == id {
|
||||||
|
return fmt.Errorf("cannot disable %q: module %q depends on it", id, other.ID)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
m.enabled = enabled
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// All returns info for every module, sorted by ID.
|
||||||
|
func (r *Registry) All() []ModuleInfo {
|
||||||
|
r.mu.RLock()
|
||||||
|
defer r.mu.RUnlock()
|
||||||
|
|
||||||
|
out := make([]ModuleInfo, 0, len(r.modules))
|
||||||
|
for _, m := range r.modules {
|
||||||
|
out = append(out, m.ModuleInfo)
|
||||||
|
}
|
||||||
|
sort.Slice(out, func(i, j int) bool { return out[i].ID < out[j].ID })
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get returns info for a single module, or nil if not found.
|
||||||
|
func (r *Registry) Get(id string) *ModuleInfo {
|
||||||
|
r.mu.RLock()
|
||||||
|
defer r.mu.RUnlock()
|
||||||
|
if m, ok := r.modules[id]; ok {
|
||||||
|
info := m.ModuleInfo
|
||||||
|
return &info
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidateDependencies checks that every enabled module's dependencies
|
||||||
|
// are also enabled. Returns the first violation found.
|
||||||
|
func (r *Registry) ValidateDependencies() error {
|
||||||
|
r.mu.RLock()
|
||||||
|
defer r.mu.RUnlock()
|
||||||
|
|
||||||
|
for _, m := range r.modules {
|
||||||
|
if !m.enabled {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for _, dep := range m.DependsOn {
|
||||||
|
if dm, ok := r.modules[dep]; ok && !dm.enabled {
|
||||||
|
return fmt.Errorf("module %q is enabled but its dependency %q is disabled", m.ID, dep)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
169
internal/modules/modules_test.go
Normal file
169
internal/modules/modules_test.go
Normal file
@@ -0,0 +1,169 @@
|
|||||||
|
package modules
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestNewRegistry_DefaultState(t *testing.T) {
|
||||||
|
r := NewRegistry()
|
||||||
|
|
||||||
|
// Required modules are always enabled.
|
||||||
|
for _, id := range []string{Core, Schemas, Storage} {
|
||||||
|
if !r.IsEnabled(id) {
|
||||||
|
t.Errorf("required module %q should be enabled by default", id)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Optional modules with DefaultEnabled=true.
|
||||||
|
for _, id := range []string{Auth, Projects, Audit, FreeCAD} {
|
||||||
|
if !r.IsEnabled(id) {
|
||||||
|
t.Errorf("module %q should be enabled by default", id)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Optional modules with DefaultEnabled=false.
|
||||||
|
for _, id := range []string{Odoo, Jobs, DAG} {
|
||||||
|
if r.IsEnabled(id) {
|
||||||
|
t.Errorf("module %q should be disabled by default", id)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSetEnabled_BasicToggle(t *testing.T) {
|
||||||
|
r := NewRegistry()
|
||||||
|
|
||||||
|
// Disable an optional module with no dependents.
|
||||||
|
if err := r.SetEnabled(Projects, false); err != nil {
|
||||||
|
t.Fatalf("disabling projects: %v", err)
|
||||||
|
}
|
||||||
|
if r.IsEnabled(Projects) {
|
||||||
|
t.Error("projects should be disabled after SetEnabled(false)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Re-enable it.
|
||||||
|
if err := r.SetEnabled(Projects, true); err != nil {
|
||||||
|
t.Fatalf("enabling projects: %v", err)
|
||||||
|
}
|
||||||
|
if !r.IsEnabled(Projects) {
|
||||||
|
t.Error("projects should be enabled after SetEnabled(true)")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCannotDisableRequired(t *testing.T) {
|
||||||
|
r := NewRegistry()
|
||||||
|
|
||||||
|
for _, id := range []string{Core, Schemas, Storage} {
|
||||||
|
if err := r.SetEnabled(id, false); err == nil {
|
||||||
|
t.Errorf("disabling required module %q should return error", id)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDependencyChain_EnableWithoutDep(t *testing.T) {
|
||||||
|
r := NewRegistry()
|
||||||
|
|
||||||
|
// Jobs depends on Auth. Auth is enabled by default, so enabling jobs works.
|
||||||
|
if err := r.SetEnabled(Jobs, true); err != nil {
|
||||||
|
t.Fatalf("enabling jobs (auth enabled): %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// DAG depends on Jobs. Jobs is now enabled, so enabling dag works.
|
||||||
|
if err := r.SetEnabled(DAG, true); err != nil {
|
||||||
|
t.Fatalf("enabling dag (jobs enabled): %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Now try with deps disabled. Start fresh.
|
||||||
|
r2 := NewRegistry()
|
||||||
|
|
||||||
|
// DAG depends on Jobs, which is disabled by default.
|
||||||
|
if err := r2.SetEnabled(DAG, true); err == nil {
|
||||||
|
t.Error("enabling dag without jobs should fail")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDisableDependedOn(t *testing.T) {
|
||||||
|
r := NewRegistry()
|
||||||
|
|
||||||
|
// Enable the full chain: auth (already on) → jobs → dag.
|
||||||
|
if err := r.SetEnabled(Jobs, true); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if err := r.SetEnabled(DAG, true); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cannot disable jobs while dag depends on it.
|
||||||
|
if err := r.SetEnabled(Jobs, false); err == nil {
|
||||||
|
t.Error("disabling jobs while dag is enabled should fail")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Disable dag first, then jobs should work.
|
||||||
|
if err := r.SetEnabled(DAG, false); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if err := r.SetEnabled(Jobs, false); err != nil {
|
||||||
|
t.Fatalf("disabling jobs after dag disabled: %v", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCannotDisableAuthWhileJobsEnabled(t *testing.T) {
|
||||||
|
r := NewRegistry()
|
||||||
|
|
||||||
|
if err := r.SetEnabled(Jobs, true); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Auth is depended on by jobs.
|
||||||
|
if err := r.SetEnabled(Auth, false); err == nil {
|
||||||
|
t.Error("disabling auth while jobs is enabled should fail")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestUnknownModule(t *testing.T) {
|
||||||
|
r := NewRegistry()
|
||||||
|
|
||||||
|
if r.IsEnabled("nonexistent") {
|
||||||
|
t.Error("unknown module should not be enabled")
|
||||||
|
}
|
||||||
|
if err := r.SetEnabled("nonexistent", true); err == nil {
|
||||||
|
t.Error("setting unknown module should return error")
|
||||||
|
}
|
||||||
|
if r.Get("nonexistent") != nil {
|
||||||
|
t.Error("getting unknown module should return nil")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestAll_ReturnsAllModules(t *testing.T) {
|
||||||
|
r := NewRegistry()
|
||||||
|
all := r.All()
|
||||||
|
|
||||||
|
if len(all) != 10 {
|
||||||
|
t.Errorf("expected 10 modules, got %d", len(all))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Should be sorted by ID.
|
||||||
|
for i := 1; i < len(all); i++ {
|
||||||
|
if all[i].ID < all[i-1].ID {
|
||||||
|
t.Errorf("modules not sorted: %s before %s", all[i-1].ID, all[i].ID)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidateDependencies(t *testing.T) {
|
||||||
|
r := NewRegistry()
|
||||||
|
|
||||||
|
// Default state should be valid.
|
||||||
|
if err := r.ValidateDependencies(); err != nil {
|
||||||
|
t.Fatalf("default state should be valid: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Force an invalid state by directly mutating (bypassing SetEnabled).
|
||||||
|
r.mu.Lock()
|
||||||
|
r.modules[Jobs].enabled = true
|
||||||
|
r.modules[Auth].enabled = false
|
||||||
|
r.mu.Unlock()
|
||||||
|
|
||||||
|
if err := r.ValidateDependencies(); err == nil {
|
||||||
|
t.Error("should detect jobs enabled without auth")
|
||||||
|
}
|
||||||
|
}
|
||||||
177
internal/storage/filesystem.go
Normal file
177
internal/storage/filesystem.go
Normal file
@@ -0,0 +1,177 @@
|
|||||||
|
package storage
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"crypto/sha256"
|
||||||
|
"encoding/hex"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/url"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ErrPresignNotSupported is returned when presigned URLs are requested from a
// backend that does not support them.
var ErrPresignNotSupported = errors.New("presigned URLs not supported by filesystem backend")

// Compile-time check: *FilesystemStore implements FileStore.
var _ FileStore = (*FilesystemStore)(nil)

// FilesystemStore stores objects as files under a root directory.
// Slash-separated storage keys are mapped to paths relative to root.
type FilesystemStore struct {
	root string // absolute path to the storage root directory
}
|
||||||
|
|
||||||
|
// NewFilesystemStore creates a new filesystem-backed store rooted at root.
|
||||||
|
// The directory is created if it does not exist.
|
||||||
|
func NewFilesystemStore(root string) (*FilesystemStore, error) {
|
||||||
|
abs, err := filepath.Abs(root)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("resolving root path: %w", err)
|
||||||
|
}
|
||||||
|
if err := os.MkdirAll(abs, 0o755); err != nil {
|
||||||
|
return nil, fmt.Errorf("creating root directory: %w", err)
|
||||||
|
}
|
||||||
|
return &FilesystemStore{root: abs}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// path returns the absolute filesystem path for a storage key.
|
||||||
|
func (fs *FilesystemStore) path(key string) string {
|
||||||
|
return filepath.Join(fs.root, filepath.FromSlash(key))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Put writes reader to the file at key using atomic rename.
|
||||||
|
// SHA-256 checksum is computed during write and returned in PutResult.
|
||||||
|
func (fs *FilesystemStore) Put(_ context.Context, key string, reader io.Reader, _ int64, _ string) (*PutResult, error) {
|
||||||
|
dest := fs.path(key)
|
||||||
|
|
||||||
|
if err := os.MkdirAll(filepath.Dir(dest), 0o755); err != nil {
|
||||||
|
return nil, fmt.Errorf("creating directories: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write to a temp file in the same directory so os.Rename is atomic.
|
||||||
|
tmp, err := os.CreateTemp(filepath.Dir(dest), ".silo-tmp-*")
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("creating temp file: %w", err)
|
||||||
|
}
|
||||||
|
tmpPath := tmp.Name()
|
||||||
|
defer func() {
|
||||||
|
// Clean up temp file on any failure path.
|
||||||
|
tmp.Close()
|
||||||
|
os.Remove(tmpPath)
|
||||||
|
}()
|
||||||
|
|
||||||
|
h := sha256.New()
|
||||||
|
w := io.MultiWriter(tmp, h)
|
||||||
|
|
||||||
|
n, err := io.Copy(w, reader)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("writing file: %w", err)
|
||||||
|
}
|
||||||
|
if err := tmp.Close(); err != nil {
|
||||||
|
return nil, fmt.Errorf("closing temp file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := os.Rename(tmpPath, dest); err != nil {
|
||||||
|
return nil, fmt.Errorf("renaming temp file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &PutResult{
|
||||||
|
Key: key,
|
||||||
|
Size: n,
|
||||||
|
Checksum: hex.EncodeToString(h.Sum(nil)),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get opens the file at key for reading.
|
||||||
|
func (fs *FilesystemStore) Get(_ context.Context, key string) (io.ReadCloser, error) {
|
||||||
|
f, err := os.Open(fs.path(key))
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("opening file: %w", err)
|
||||||
|
}
|
||||||
|
return f, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetVersion delegates to Get — filesystem storage keeps no version history,
// so the versionID argument is ignored and the current content is returned.
func (fs *FilesystemStore) GetVersion(ctx context.Context, key string, _ string) (io.ReadCloser, error) {
	return fs.Get(ctx, key)
}
|
||||||
|
|
||||||
|
// Delete removes the file at key. No error if already absent.
|
||||||
|
func (fs *FilesystemStore) Delete(_ context.Context, key string) error {
|
||||||
|
err := os.Remove(fs.path(key))
|
||||||
|
if err != nil && !errors.Is(err, os.ErrNotExist) {
|
||||||
|
return fmt.Errorf("removing file: %w", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exists reports whether the file at key exists.
|
||||||
|
func (fs *FilesystemStore) Exists(_ context.Context, key string) (bool, error) {
|
||||||
|
_, err := os.Stat(fs.path(key))
|
||||||
|
if err == nil {
|
||||||
|
return true, nil
|
||||||
|
}
|
||||||
|
if errors.Is(err, os.ErrNotExist) {
|
||||||
|
return false, nil
|
||||||
|
}
|
||||||
|
return false, fmt.Errorf("checking file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Copy duplicates a file from srcKey to dstKey using atomic rename.
|
||||||
|
func (fs *FilesystemStore) Copy(_ context.Context, srcKey, dstKey string) error {
|
||||||
|
srcPath := fs.path(srcKey)
|
||||||
|
dstPath := fs.path(dstKey)
|
||||||
|
|
||||||
|
src, err := os.Open(srcPath)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("opening source: %w", err)
|
||||||
|
}
|
||||||
|
defer src.Close()
|
||||||
|
|
||||||
|
if err := os.MkdirAll(filepath.Dir(dstPath), 0o755); err != nil {
|
||||||
|
return fmt.Errorf("creating directories: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
tmp, err := os.CreateTemp(filepath.Dir(dstPath), ".silo-tmp-*")
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("creating temp file: %w", err)
|
||||||
|
}
|
||||||
|
tmpPath := tmp.Name()
|
||||||
|
defer func() {
|
||||||
|
tmp.Close()
|
||||||
|
os.Remove(tmpPath)
|
||||||
|
}()
|
||||||
|
|
||||||
|
if _, err := io.Copy(tmp, src); err != nil {
|
||||||
|
return fmt.Errorf("copying file: %w", err)
|
||||||
|
}
|
||||||
|
if err := tmp.Close(); err != nil {
|
||||||
|
return fmt.Errorf("closing temp file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := os.Rename(tmpPath, dstPath); err != nil {
|
||||||
|
return fmt.Errorf("renaming temp file: %w", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// PresignPut is not supported by the filesystem backend; it always returns
// a nil URL and ErrPresignNotSupported, ignoring the key and expiry.
func (fs *FilesystemStore) PresignPut(_ context.Context, _ string, _ time.Duration) (*url.URL, error) {
	return nil, ErrPresignNotSupported
}
|
||||||
|
|
||||||
|
// Ping verifies the root directory is accessible and writable.
|
||||||
|
func (fs *FilesystemStore) Ping(_ context.Context) error {
|
||||||
|
tmp, err := os.CreateTemp(fs.root, ".silo-ping-*")
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("storage ping failed: %w", err)
|
||||||
|
}
|
||||||
|
name := tmp.Name()
|
||||||
|
tmp.Close()
|
||||||
|
os.Remove(name)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
277
internal/storage/filesystem_test.go
Normal file
277
internal/storage/filesystem_test.go
Normal file
@@ -0,0 +1,277 @@
|
|||||||
|
package storage
|
||||||
|
|
||||||
|
import (
	"bytes"
	"context"
	"crypto/sha256"
	"encoding/hex"
	"errors"
	"io"
	"os"
	"path/filepath"
	"strings"
	"testing"
	"time"
)
|
||||||
|
|
||||||
|
func newTestStore(t *testing.T) *FilesystemStore {
|
||||||
|
t.Helper()
|
||||||
|
fs, err := NewFilesystemStore(t.TempDir())
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("NewFilesystemStore: %v", err)
|
||||||
|
}
|
||||||
|
return fs
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNewFilesystemStore(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
sub := filepath.Join(dir, "a", "b")
|
||||||
|
fs, err := NewFilesystemStore(sub)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("unexpected error: %v", err)
|
||||||
|
}
|
||||||
|
if !filepath.IsAbs(fs.root) {
|
||||||
|
t.Errorf("root is not absolute: %s", fs.root)
|
||||||
|
}
|
||||||
|
info, err := os.Stat(sub)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("root dir missing: %v", err)
|
||||||
|
}
|
||||||
|
if !info.IsDir() {
|
||||||
|
t.Error("root is not a directory")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPut(t *testing.T) {
|
||||||
|
fs := newTestStore(t)
|
||||||
|
ctx := context.Background()
|
||||||
|
data := []byte("hello world")
|
||||||
|
h := sha256.Sum256(data)
|
||||||
|
wantChecksum := hex.EncodeToString(h[:])
|
||||||
|
|
||||||
|
result, err := fs.Put(ctx, "items/P001/rev1.FCStd", bytes.NewReader(data), int64(len(data)), "application/octet-stream")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Put: %v", err)
|
||||||
|
}
|
||||||
|
if result.Key != "items/P001/rev1.FCStd" {
|
||||||
|
t.Errorf("Key = %q, want %q", result.Key, "items/P001/rev1.FCStd")
|
||||||
|
}
|
||||||
|
if result.Size != int64(len(data)) {
|
||||||
|
t.Errorf("Size = %d, want %d", result.Size, len(data))
|
||||||
|
}
|
||||||
|
if result.Checksum != wantChecksum {
|
||||||
|
t.Errorf("Checksum = %q, want %q", result.Checksum, wantChecksum)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify file on disk.
|
||||||
|
got, err := os.ReadFile(fs.path("items/P001/rev1.FCStd"))
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("reading file: %v", err)
|
||||||
|
}
|
||||||
|
if !bytes.Equal(got, data) {
|
||||||
|
t.Error("file content mismatch")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPutAtomicity(t *testing.T) {
|
||||||
|
fs := newTestStore(t)
|
||||||
|
ctx := context.Background()
|
||||||
|
key := "test/atomic.bin"
|
||||||
|
|
||||||
|
// Write an initial file.
|
||||||
|
if _, err := fs.Put(ctx, key, strings.NewReader("original"), 8, ""); err != nil {
|
||||||
|
t.Fatalf("initial Put: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write with a reader that fails partway through.
|
||||||
|
failing := io.MultiReader(strings.NewReader("partial"), &errReader{})
|
||||||
|
_, err := fs.Put(ctx, key, failing, 100, "")
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("expected error from failing reader")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Original file should still be intact.
|
||||||
|
got, err := os.ReadFile(fs.path(key))
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("reading file after failed put: %v", err)
|
||||||
|
}
|
||||||
|
if string(got) != "original" {
|
||||||
|
t.Errorf("file content = %q, want %q", got, "original")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// errReader is an io.Reader stub whose Read always fails with
// io.ErrUnexpectedEOF, used to simulate a stream that breaks mid-transfer.
type errReader struct{}

// Read always returns 0 bytes and io.ErrUnexpectedEOF.
func (e *errReader) Read([]byte) (int, error) {
	return 0, io.ErrUnexpectedEOF
}
|
||||||
|
|
||||||
|
func TestGet(t *testing.T) {
|
||||||
|
fs := newTestStore(t)
|
||||||
|
ctx := context.Background()
|
||||||
|
data := []byte("test content")
|
||||||
|
|
||||||
|
if _, err := fs.Put(ctx, "f.txt", bytes.NewReader(data), int64(len(data)), ""); err != nil {
|
||||||
|
t.Fatalf("Put: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
rc, err := fs.Get(ctx, "f.txt")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Get: %v", err)
|
||||||
|
}
|
||||||
|
defer rc.Close()
|
||||||
|
|
||||||
|
got, err := io.ReadAll(rc)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadAll: %v", err)
|
||||||
|
}
|
||||||
|
if !bytes.Equal(got, data) {
|
||||||
|
t.Error("content mismatch")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGetMissing(t *testing.T) {
|
||||||
|
fs := newTestStore(t)
|
||||||
|
_, err := fs.Get(context.Background(), "no/such/file")
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("expected error for missing file")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGetVersion(t *testing.T) {
|
||||||
|
fs := newTestStore(t)
|
||||||
|
ctx := context.Background()
|
||||||
|
data := []byte("versioned")
|
||||||
|
|
||||||
|
if _, err := fs.Put(ctx, "v.txt", bytes.NewReader(data), int64(len(data)), ""); err != nil {
|
||||||
|
t.Fatalf("Put: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetVersion ignores versionID, returns same file.
|
||||||
|
rc, err := fs.GetVersion(ctx, "v.txt", "ignored-version-id")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("GetVersion: %v", err)
|
||||||
|
}
|
||||||
|
defer rc.Close()
|
||||||
|
|
||||||
|
got, err := io.ReadAll(rc)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ReadAll: %v", err)
|
||||||
|
}
|
||||||
|
if !bytes.Equal(got, data) {
|
||||||
|
t.Error("content mismatch")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDelete(t *testing.T) {
|
||||||
|
fs := newTestStore(t)
|
||||||
|
ctx := context.Background()
|
||||||
|
|
||||||
|
if _, err := fs.Put(ctx, "del.txt", strings.NewReader("x"), 1, ""); err != nil {
|
||||||
|
t.Fatalf("Put: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := fs.Delete(ctx, "del.txt"); err != nil {
|
||||||
|
t.Fatalf("Delete: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, err := os.Stat(fs.path("del.txt")); !os.IsNotExist(err) {
|
||||||
|
t.Error("file still exists after delete")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDeleteMissing(t *testing.T) {
|
||||||
|
fs := newTestStore(t)
|
||||||
|
if err := fs.Delete(context.Background(), "no/such/file"); err != nil {
|
||||||
|
t.Fatalf("Delete missing file should not error: %v", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExists(t *testing.T) {
|
||||||
|
fs := newTestStore(t)
|
||||||
|
ctx := context.Background()
|
||||||
|
|
||||||
|
ok, err := fs.Exists(ctx, "nope")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Exists: %v", err)
|
||||||
|
}
|
||||||
|
if ok {
|
||||||
|
t.Error("Exists returned true for missing file")
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, err := fs.Put(ctx, "yes.txt", strings.NewReader("y"), 1, ""); err != nil {
|
||||||
|
t.Fatalf("Put: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
ok, err = fs.Exists(ctx, "yes.txt")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Exists: %v", err)
|
||||||
|
}
|
||||||
|
if !ok {
|
||||||
|
t.Error("Exists returned false for existing file")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCopy(t *testing.T) {
|
||||||
|
fs := newTestStore(t)
|
||||||
|
ctx := context.Background()
|
||||||
|
data := []byte("copy me")
|
||||||
|
|
||||||
|
if _, err := fs.Put(ctx, "src.bin", bytes.NewReader(data), int64(len(data)), ""); err != nil {
|
||||||
|
t.Fatalf("Put: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := fs.Copy(ctx, "src.bin", "deep/nested/dst.bin"); err != nil {
|
||||||
|
t.Fatalf("Copy: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
got, err := os.ReadFile(fs.path("deep/nested/dst.bin"))
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("reading copied file: %v", err)
|
||||||
|
}
|
||||||
|
if !bytes.Equal(got, data) {
|
||||||
|
t.Error("copied content mismatch")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Source should still exist.
|
||||||
|
if _, err := os.Stat(fs.path("src.bin")); err != nil {
|
||||||
|
t.Error("source file missing after copy")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPresignPut(t *testing.T) {
|
||||||
|
fs := newTestStore(t)
|
||||||
|
_, err := fs.PresignPut(context.Background(), "key", 5*60)
|
||||||
|
if err != ErrPresignNotSupported {
|
||||||
|
t.Errorf("PresignPut error = %v, want ErrPresignNotSupported", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPing(t *testing.T) {
|
||||||
|
fs := newTestStore(t)
|
||||||
|
if err := fs.Ping(context.Background()); err != nil {
|
||||||
|
t.Fatalf("Ping: %v", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPingBadRoot(t *testing.T) {
|
||||||
|
fs := &FilesystemStore{root: "/nonexistent/path/that/should/not/exist"}
|
||||||
|
if err := fs.Ping(context.Background()); err == nil {
|
||||||
|
t.Fatal("expected Ping to fail with invalid root")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPutOverwrite(t *testing.T) {
|
||||||
|
fs := newTestStore(t)
|
||||||
|
ctx := context.Background()
|
||||||
|
|
||||||
|
if _, err := fs.Put(ctx, "ow.txt", strings.NewReader("first"), 5, ""); err != nil {
|
||||||
|
t.Fatalf("Put: %v", err)
|
||||||
|
}
|
||||||
|
if _, err := fs.Put(ctx, "ow.txt", strings.NewReader("second"), 6, ""); err != nil {
|
||||||
|
t.Fatalf("Put overwrite: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
got, _ := os.ReadFile(fs.path("ow.txt"))
|
||||||
|
if string(got) != "second" {
|
||||||
|
t.Errorf("content = %q, want %q", got, "second")
|
||||||
|
}
|
||||||
|
}
|
||||||
21
internal/storage/interface.go
Normal file
21
internal/storage/interface.go
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
// Package storage defines the FileStore interface and backend implementations.
|
||||||
|
package storage
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"io"
|
||||||
|
"net/url"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// FileStore is the interface for file storage backends.
// Keys are slash-separated paths; implementations map them onto their own
// native layout.
type FileStore interface {
	// Put stores reader's contents at key and returns key/size/checksum
	// metadata in a PutResult.
	Put(ctx context.Context, key string, reader io.Reader, size int64, contentType string) (*PutResult, error)
	// Get opens the object at key for reading; the caller must close it.
	Get(ctx context.Context, key string) (io.ReadCloser, error)
	// GetVersion opens a specific version of the object at key. Backends
	// without versioning (e.g. the filesystem store) ignore versionID.
	GetVersion(ctx context.Context, key string, versionID string) (io.ReadCloser, error)
	// Delete removes the object at key.
	Delete(ctx context.Context, key string) error
	// Exists reports whether an object is stored at key.
	Exists(ctx context.Context, key string) (bool, error)
	// Copy duplicates the object at srcKey to dstKey.
	Copy(ctx context.Context, srcKey, dstKey string) error
	// PresignPut returns a URL for a direct upload to key, valid for expiry.
	// Backends without presign support return ErrPresignNotSupported.
	PresignPut(ctx context.Context, key string, expiry time.Duration) (*url.URL, error)
	// Ping verifies the backend is reachable and usable.
	Ping(ctx context.Context) error
}
|
||||||
@@ -1,4 +1,3 @@
|
|||||||
// Package storage provides MinIO file storage operations.
|
|
||||||
package storage
|
package storage
|
||||||
|
|
||||||
import (
|
import (
|
||||||
@@ -22,6 +21,9 @@ type Config struct {
|
|||||||
Region string
|
Region string
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Compile-time check: *Storage implements FileStore.
|
||||||
|
var _ FileStore = (*Storage)(nil)
|
||||||
|
|
||||||
// Storage wraps MinIO client operations.
|
// Storage wraps MinIO client operations.
|
||||||
type Storage struct {
|
type Storage struct {
|
||||||
client *minio.Client
|
client *minio.Client
|
||||||
@@ -112,6 +114,19 @@ func (s *Storage) Delete(ctx context.Context, key string) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Exists checks if an object exists in storage.
// A "NoSuchKey" error response from MinIO is mapped to (false, nil); any
// other StatObject failure is returned as an error.
func (s *Storage) Exists(ctx context.Context, key string) (bool, error) {
	_, err := s.client.StatObject(ctx, s.bucket, key, minio.StatObjectOptions{})
	if err != nil {
		resp := minio.ToErrorResponse(err)
		if resp.Code == "NoSuchKey" {
			return false, nil
		}
		return false, fmt.Errorf("checking object existence: %w", err)
	}
	return true, nil
}
|
||||||
|
|
||||||
// Ping checks if the storage backend is reachable by verifying the bucket exists.
|
// Ping checks if the storage backend is reachable by verifying the bucket exists.
|
||||||
func (s *Storage) Ping(ctx context.Context) error {
|
func (s *Storage) Ping(ctx context.Context) error {
|
||||||
_, err := s.client.BucketExists(ctx, s.bucket)
|
_, err := s.client.BucketExists(ctx, s.bucket)
|
||||||
|
|||||||
@@ -3,7 +3,6 @@ package testutil
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"sort"
|
"sort"
|
||||||
@@ -80,11 +79,13 @@ func TruncateAll(t *testing.T, pool *pgxpool.Pool) {
|
|||||||
|
|
||||||
_, err := pool.Exec(context.Background(), `
|
_, err := pool.Exec(context.Background(), `
|
||||||
TRUNCATE
|
TRUNCATE
|
||||||
|
item_metadata, item_dependencies, approval_signatures, item_approvals, item_macros,
|
||||||
|
settings_overrides, module_state,
|
||||||
job_log, jobs, job_definitions, runners,
|
job_log, jobs, job_definitions, runners,
|
||||||
dag_cross_edges, dag_edges, dag_nodes,
|
dag_cross_edges, dag_edges, dag_nodes,
|
||||||
audit_log, sync_log, api_tokens, sessions, item_files,
|
audit_log, sync_log, api_tokens, sessions, item_files,
|
||||||
item_projects, relationships, revisions, inventory, items,
|
item_projects, relationships, revisions, inventory, items,
|
||||||
projects, sequences_by_name, users, property_migrations
|
locations, projects, sequences_by_name, users, property_migrations
|
||||||
CASCADE
|
CASCADE
|
||||||
`)
|
`)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -111,6 +112,4 @@ func findProjectRoot(t *testing.T) string {
|
|||||||
}
|
}
|
||||||
dir = parent
|
dir = parent
|
||||||
}
|
}
|
||||||
|
|
||||||
panic(fmt.Sprintf("unreachable"))
|
|
||||||
}
|
}
|
||||||
|
|||||||
15
migrations/016_module_system.sql
Normal file
15
migrations/016_module_system.sql
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
-- 016_module_system.sql — settings overrides and module state persistence

-- Admin-set configuration values that override defaults.
-- `value` is JSONB so any setting type can be represented.
CREATE TABLE IF NOT EXISTS settings_overrides (
    key TEXT PRIMARY KEY,
    value JSONB NOT NULL,
    updated_by TEXT NOT NULL,
    updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

-- Persisted enabled/disabled flag per application module, keyed by module ID.
CREATE TABLE IF NOT EXISTS module_state (
    module_id TEXT PRIMARY KEY,
    enabled BOOLEAN NOT NULL,
    updated_by TEXT NOT NULL,
    updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
|
||||||
7
migrations/017_file_storage_metadata.sql
Normal file
7
migrations/017_file_storage_metadata.sql
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
-- Track which storage backend holds each attached file.
|
||||||
|
ALTER TABLE item_files
|
||||||
|
ADD COLUMN IF NOT EXISTS storage_backend TEXT NOT NULL DEFAULT 'minio';
|
||||||
|
|
||||||
|
-- Track which storage backend holds each revision file.
|
||||||
|
ALTER TABLE revisions
|
||||||
|
ADD COLUMN IF NOT EXISTS file_storage_backend TEXT NOT NULL DEFAULT 'minio';
|
||||||
110
migrations/018_kc_metadata.sql
Normal file
110
migrations/018_kc_metadata.sql
Normal file
@@ -0,0 +1,110 @@
|
|||||||
|
-- Migration 018: .kc Server-Side Metadata Tables
|
||||||
|
--
|
||||||
|
-- Adds tables for indexing the silo/ directory contents from .kc files.
|
||||||
|
-- See docs/KC_SERVER.md for the full specification.
|
||||||
|
--
|
||||||
|
-- Tables:
|
||||||
|
-- item_metadata - indexed manifest + metadata fields (Section 3.1)
|
||||||
|
-- item_dependencies - CAD-extracted assembly dependencies (Section 3.2)
|
||||||
|
-- item_approvals - ECO workflow state (Section 3.3)
|
||||||
|
-- approval_signatures - individual approval/rejection records (Section 3.3)
|
||||||
|
-- item_macros - registered macros from silo/macros/ (Section 3.4)
|
||||||
|
|
||||||
|
BEGIN;
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
-- item_metadata: indexed silo/manifest.json + silo/metadata.json
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
CREATE TABLE item_metadata (
|
||||||
|
item_id UUID PRIMARY KEY REFERENCES items(id) ON DELETE CASCADE,
|
||||||
|
schema_name TEXT,
|
||||||
|
tags TEXT[] NOT NULL DEFAULT '{}',
|
||||||
|
lifecycle_state TEXT NOT NULL DEFAULT 'draft',
|
||||||
|
fields JSONB NOT NULL DEFAULT '{}',
|
||||||
|
kc_version TEXT,
|
||||||
|
manifest_uuid UUID,
|
||||||
|
silo_instance TEXT,
|
||||||
|
revision_hash TEXT,
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT now(),
|
||||||
|
updated_by TEXT
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_item_metadata_tags ON item_metadata USING GIN (tags);
|
||||||
|
CREATE INDEX idx_item_metadata_lifecycle ON item_metadata (lifecycle_state);
|
||||||
|
CREATE INDEX idx_item_metadata_fields ON item_metadata USING GIN (fields);
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
-- item_dependencies: indexed silo/dependencies.json
|
||||||
|
--
|
||||||
|
-- Complements the existing `relationships` table.
|
||||||
|
-- relationships = server-authoritative BOM (web UI / API editable)
|
||||||
|
-- item_dependencies = CAD-authoritative record (extracted from .kc)
|
||||||
|
-- BOM merge reconciles the two (see docs/BOM_MERGE.md).
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
CREATE TABLE item_dependencies (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
parent_item_id UUID NOT NULL REFERENCES items(id) ON DELETE CASCADE,
|
||||||
|
child_uuid UUID NOT NULL,
|
||||||
|
child_part_number TEXT,
|
||||||
|
child_revision INTEGER,
|
||||||
|
quantity DECIMAL,
|
||||||
|
label TEXT,
|
||||||
|
relationship TEXT NOT NULL DEFAULT 'component',
|
||||||
|
revision_number INTEGER NOT NULL,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT now()
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_item_deps_parent ON item_dependencies (parent_item_id);
|
||||||
|
CREATE INDEX idx_item_deps_child ON item_dependencies (child_uuid);
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
-- item_approvals + approval_signatures: ECO workflow
|
||||||
|
--
|
||||||
|
-- Server-authoritative. The .kc silo/approvals.json is a read cache
|
||||||
|
-- packed on checkout for offline display in Create.
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
CREATE TABLE item_approvals (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
item_id UUID NOT NULL REFERENCES items(id) ON DELETE CASCADE,
|
||||||
|
eco_number TEXT,
|
||||||
|
state TEXT NOT NULL DEFAULT 'draft',
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT now(),
|
||||||
|
updated_by TEXT
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_item_approvals_item ON item_approvals (item_id);
|
||||||
|
CREATE INDEX idx_item_approvals_state ON item_approvals (state);
|
||||||
|
|
||||||
|
CREATE TABLE approval_signatures (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
approval_id UUID NOT NULL REFERENCES item_approvals(id) ON DELETE CASCADE,
|
||||||
|
username TEXT NOT NULL,
|
||||||
|
role TEXT NOT NULL,
|
||||||
|
status TEXT NOT NULL DEFAULT 'pending',
|
||||||
|
signed_at TIMESTAMPTZ,
|
||||||
|
comment TEXT
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_approval_sigs_approval ON approval_signatures (approval_id);
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
-- item_macros: registered macros from silo/macros/
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
CREATE TABLE item_macros (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
item_id UUID NOT NULL REFERENCES items(id) ON DELETE CASCADE,
|
||||||
|
filename TEXT NOT NULL,
|
||||||
|
trigger TEXT NOT NULL DEFAULT 'manual',
|
||||||
|
content TEXT NOT NULL,
|
||||||
|
revision_number INTEGER NOT NULL,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
|
||||||
|
UNIQUE(item_id, filename)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_item_macros_item ON item_macros (item_id);
|
||||||
|
|
||||||
|
COMMIT;
|
||||||
@@ -77,6 +77,9 @@ if systemctl is-active --quiet silod 2>/dev/null; then
|
|||||||
sudo systemctl stop silod
|
sudo systemctl stop silod
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# Clean old frontend assets before extracting
|
||||||
|
sudo rm -rf "$DEPLOY_DIR/web/dist/assets"
|
||||||
|
|
||||||
# Extract
|
# Extract
|
||||||
echo " Extracting..."
|
echo " Extracting..."
|
||||||
sudo tar -xzf /tmp/silo-deploy.tar.gz -C "$DEPLOY_DIR"
|
sudo tar -xzf /tmp/silo-deploy.tar.gz -C "$DEPLOY_DIR"
|
||||||
|
|||||||
108
scripts/migrate-storage.sh
Executable file
108
scripts/migrate-storage.sh
Executable file
@@ -0,0 +1,108 @@
|
|||||||
|
#!/bin/bash
# Migrate storage from MinIO to filesystem on a remote Silo host.
#
# Builds the migrate-storage binary locally, uploads it to the target host,
# then runs it over SSH using credentials from /etc/silo/silod.env.
#
# Usage: ./scripts/migrate-storage.sh <silo-host> <psql-host> <minio-host> [flags...]
#
# Examples:
#   ./scripts/migrate-storage.sh silo.kindred.internal psql.kindred.internal minio.kindred.internal -dry-run -verbose
#   ./scripts/migrate-storage.sh silo.kindred.internal psql.kindred.internal minio.kindred.internal

set -euo pipefail

if [ $# -lt 3 ]; then
    echo "Usage: $0 <silo-host> <psql-host> <minio-host> [flags...]"
    echo "  flags are passed to migrate-storage (e.g. -dry-run -verbose)"
    exit 1
fi

TARGET="$1"
DB_HOST="$2"
MINIO_HOST="$3"
shift 3
# Remaining arguments are forwarded verbatim to the migrate-storage binary.
EXTRA_FLAGS="$*"

DEST_DIR="/opt/silo/data"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_DIR="${SCRIPT_DIR}/.."

echo "=== Migrate Storage: MinIO -> Filesystem ==="
echo "  Target:  ${TARGET}"
echo "  DB host: ${DB_HOST}"
echo "  MinIO:   ${MINIO_HOST}"
echo "  Dest:    ${DEST_DIR}"
[ -n "$EXTRA_FLAGS" ] && echo "  Flags:   ${EXTRA_FLAGS}"
echo ""

# --- Build locally ---
# Cross-compile for the target's architecture (linux/amd64).
echo "[1/3] Building migrate-storage binary..."
cd "$PROJECT_DIR"
GOOS=linux GOARCH=amd64 go build -o migrate-storage ./cmd/migrate-storage
echo "  Built: $(du -h migrate-storage | cut -f1)"

# --- Upload ---
echo "[2/3] Uploading to ${TARGET}..."
scp migrate-storage "${TARGET}:/tmp/migrate-storage"
rm -f migrate-storage

# --- Run remotely ---
# The heredoc delimiter is quoted ('REMOTE') so nothing expands locally;
# DB_HOST/MINIO_HOST/DEST_DIR/EXTRA_FLAGS are passed as env vars on the
# ssh command line and expanded on the remote side.
echo "[3/3] Running migration on ${TARGET}..."
ssh "$TARGET" DB_HOST="$DB_HOST" MINIO_HOST="$MINIO_HOST" DEST_DIR="$DEST_DIR" EXTRA_FLAGS="$EXTRA_FLAGS" bash -s <<'REMOTE'
set -euo pipefail

CONFIG_DIR="/etc/silo"

# Source credentials (SILO_DB_PASSWORD, SILO_MINIO_ACCESS_KEY, ...).
if [ ! -f "$CONFIG_DIR/silod.env" ]; then
    echo "ERROR: $CONFIG_DIR/silod.env not found on $(hostname)"
    exit 1
fi
# set -a exports everything sourced so the config heredoc below sees it.
set -a
source "$CONFIG_DIR/silod.env"
set +a

# Ensure destination directory exists
sudo mkdir -p "$DEST_DIR"
sudo chown silo:silo "$DEST_DIR" 2>/dev/null || true

chmod +x /tmp/migrate-storage

# Write temporary config with the provided hosts
# (unquoted EOF: remote env vars are expanded into the file).
cat > /tmp/silo-migrate.yaml <<EOF
database:
  host: "${DB_HOST}"
  port: 5432
  name: "silo"
  user: "silo"
  password: "${SILO_DB_PASSWORD}"
  sslmode: "require"
  max_connections: 5

storage:
  endpoint: "${MINIO_HOST}:9000"
  access_key: "${SILO_MINIO_ACCESS_KEY}"
  secret_key: "${SILO_MINIO_SECRET_KEY}"
  bucket: "silo"
  use_ssl: false
  region: "us-east-1"
EOF
# Restrict permissions: the config contains credentials.
chmod 600 /tmp/silo-migrate.yaml

echo "  Config written to /tmp/silo-migrate.yaml"
echo "  Starting migration..."
echo ""

# Run the migration (EXTRA_FLAGS intentionally unquoted: word-split into flags).
/tmp/migrate-storage -config /tmp/silo-migrate.yaml -dest "$DEST_DIR" $EXTRA_FLAGS

# Clean up
rm -f /tmp/silo-migrate.yaml /tmp/migrate-storage
echo ""
echo "  Cleaned up temp files."
REMOTE

echo ""
echo "=== Migration complete ==="
echo "  Files written to ${TARGET}:${DEST_DIR}"
||||||
@@ -1,12 +1,13 @@
|
|||||||
<!doctype html>
|
<!doctype html>
|
||||||
<html lang="en">
|
<html lang="en">
|
||||||
<head>
|
<head>
|
||||||
<meta charset="UTF-8" />
|
<meta charset="UTF-8" />
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||||
<title>Silo</title>
|
<link rel="icon" type="image/svg+xml" href="/favicon.svg" />
|
||||||
</head>
|
<title>Silo</title>
|
||||||
<body>
|
</head>
|
||||||
<div id="root"></div>
|
<body>
|
||||||
<script type="module" src="/src/main.tsx"></script>
|
<div id="root"></div>
|
||||||
</body>
|
<script type="module" src="/src/main.tsx"></script>
|
||||||
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
|||||||
106
web/public/favicon.svg
Normal file
106
web/public/favicon.svg
Normal file
@@ -0,0 +1,106 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||||
|
<!-- Created with Inkscape (http://www.inkscape.org/) -->
|
||||||
|
|
||||||
|
<svg
|
||||||
|
width="1028"
|
||||||
|
height="1028"
|
||||||
|
viewBox="0 0 271.99167 271.99167"
|
||||||
|
version="1.1"
|
||||||
|
id="svg1"
|
||||||
|
inkscape:version="1.4.2 (2aeb623e1d, 2025-05-12)"
|
||||||
|
sodipodi:docname="kindred-logo.svg"
|
||||||
|
inkscape:export-filename="../3290ed6b/kindred-logo-blue-baack.png"
|
||||||
|
inkscape:export-xdpi="96"
|
||||||
|
inkscape:export-ydpi="96"
|
||||||
|
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||||
|
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||||
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
|
xmlns:svg="http://www.w3.org/2000/svg">
|
||||||
|
<sodipodi:namedview
|
||||||
|
id="namedview1"
|
||||||
|
pagecolor="#ffffff"
|
||||||
|
bordercolor="#000000"
|
||||||
|
borderopacity="0.25"
|
||||||
|
inkscape:showpageshadow="2"
|
||||||
|
inkscape:pageopacity="0.0"
|
||||||
|
inkscape:pagecheckerboard="0"
|
||||||
|
inkscape:deskcolor="#d1d1d1"
|
||||||
|
inkscape:document-units="mm"
|
||||||
|
showgrid="true"
|
||||||
|
inkscape:zoom="1.036062"
|
||||||
|
inkscape:cx="397.6596"
|
||||||
|
inkscape:cy="478.25323"
|
||||||
|
inkscape:window-width="2494"
|
||||||
|
inkscape:window-height="1371"
|
||||||
|
inkscape:window-x="1146"
|
||||||
|
inkscape:window-y="1112"
|
||||||
|
inkscape:window-maximized="1"
|
||||||
|
inkscape:current-layer="layer1"
|
||||||
|
inkscape:export-bgcolor="#79c0c500">
|
||||||
|
<inkscape:grid
|
||||||
|
type="axonomgrid"
|
||||||
|
id="grid6"
|
||||||
|
units="mm"
|
||||||
|
originx="0"
|
||||||
|
originy="0"
|
||||||
|
spacingx="0.99999998"
|
||||||
|
spacingy="1"
|
||||||
|
empcolor="#0099e5"
|
||||||
|
empopacity="0.30196078"
|
||||||
|
color="#0099e5"
|
||||||
|
opacity="0.14901961"
|
||||||
|
empspacing="5"
|
||||||
|
dotted="false"
|
||||||
|
gridanglex="30"
|
||||||
|
gridanglez="30"
|
||||||
|
enabled="true"
|
||||||
|
visible="true" />
|
||||||
|
</sodipodi:namedview>
|
||||||
|
<defs
|
||||||
|
id="defs1">
|
||||||
|
<inkscape:perspective
|
||||||
|
sodipodi:type="inkscape:persp3d"
|
||||||
|
inkscape:vp_x="0 : 123.49166 : 1"
|
||||||
|
inkscape:vp_y="0 : 999.99998 : 0"
|
||||||
|
inkscape:vp_z="210.00001 : 123.49166 : 1"
|
||||||
|
inkscape:persp3d-origin="105 : 73.991665 : 1"
|
||||||
|
id="perspective1" />
|
||||||
|
</defs>
|
||||||
|
<g
|
||||||
|
inkscape:label="Layer 1"
|
||||||
|
inkscape:groupmode="layer"
|
||||||
|
id="layer1">
|
||||||
|
<path
|
||||||
|
sodipodi:type="star"
|
||||||
|
style="fill:#7c4a82;fill-opacity:1;stroke:#12101c;stroke-width:5;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
|
||||||
|
id="path6-81-5"
|
||||||
|
inkscape:flatsided="true"
|
||||||
|
sodipodi:sides="6"
|
||||||
|
sodipodi:cx="61.574867"
|
||||||
|
sodipodi:cy="103.99491"
|
||||||
|
sodipodi:r1="25.000006"
|
||||||
|
sodipodi:r2="22.404818"
|
||||||
|
sodipodi:arg1="-1.5707963"
|
||||||
|
sodipodi:arg2="-1.0471974"
|
||||||
|
inkscape:rounded="0.77946499"
|
||||||
|
inkscape:randomized="0"
|
||||||
|
d="m 61.574868,78.994905 c 19.486629,10e-7 11.907325,-4.375912 21.65064,12.500004 9.743314,16.875911 9.743314,8.12409 -1e-6,25.000001 -9.743315,16.87592 -2.164011,12.50001 -21.65064,12.50001 -19.486629,0 -11.907326,4.37591 -21.65064,-12.50001 -9.743314,-16.875912 -9.743314,-8.12409 0,-25.000002 9.743315,-16.875916 2.164012,-12.500003 21.650641,-12.500003 z"
|
||||||
|
transform="matrix(1.9704344,0,0,1.8525167,-28.510585,-40.025402)" />
|
||||||
|
<path
|
||||||
|
sodipodi:type="star"
|
||||||
|
style="fill:#ff9701;fill-opacity:1;stroke:#12101c;stroke-width:5;stroke-linejoin:miter;stroke-dasharray:none;stroke-opacity:1"
|
||||||
|
id="path6-81-5-6"
|
||||||
|
inkscape:flatsided="true"
|
||||||
|
sodipodi:sides="6"
|
||||||
|
sodipodi:cx="61.574867"
|
||||||
|
sodipodi:cy="103.99491"
|
||||||
|
sodipodi:r1="25.000006"
|
||||||
|
sodipodi:r2="22.404818"
|
||||||
|
sodipodi:arg1="-1.5707963"
|
||||||
|
sodipodi:arg2="-1.0471974"
|
||||||
|
inkscape:rounded="0.77946499"
|
||||||
|
inkscape:randomized="0"
|
||||||
|
d="m 61.574868,78.994905 c 19.486629,10e-7 11.907325,-4.375912 21.65064,12.500004 9.743314,16.875921 9.743314,8.12409 -1e-6,25.000001 -9.743315,16.87592 -2.164011,12.50001 -21.65064,12.50001 -19.48663,0 -11.907326,4.37591 -21.65064,-12.50001 -9.743314,-16.875913 -9.743315,-8.12409 10e-7,-25.000002 9.743315,-16.875916 2.164011,-12.500003 21.65064,-12.500003 z"
|
||||||
|
transform="matrix(1.9704344,0,0,1.8525167,56.811738,-86.338327)" />
|
||||||
|
</g>
|
||||||
|
</svg>
|
||||||
|
After Width: | Height: | Size: 4.0 KiB |
@@ -352,6 +352,35 @@ export interface UpdateSchemaValueRequest {
|
|||||||
description: string;
|
description: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Admin settings — module discovery
|
||||||
|
export interface ModuleInfo {
|
||||||
|
enabled: boolean;
|
||||||
|
required: boolean;
|
||||||
|
name: string;
|
||||||
|
version?: string;
|
||||||
|
depends_on?: string[];
|
||||||
|
config?: Record<string, unknown>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ModulesResponse {
|
||||||
|
modules: Record<string, ModuleInfo>;
|
||||||
|
server: { version: string; read_only: boolean };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Admin settings — config management
|
||||||
|
export type AdminSettingsResponse = Record<string, Record<string, unknown>>;
|
||||||
|
|
||||||
|
export interface UpdateSettingsResponse {
|
||||||
|
updated: string[];
|
||||||
|
restart_required: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TestConnectivityResponse {
|
||||||
|
success: boolean;
|
||||||
|
message: string;
|
||||||
|
latency_ms: number;
|
||||||
|
}
|
||||||
|
|
||||||
// Revision comparison
|
// Revision comparison
|
||||||
export interface RevisionComparison {
|
export interface RevisionComparison {
|
||||||
from: number;
|
from: number;
|
||||||
|
|||||||
@@ -1,24 +1,67 @@
|
|||||||
import { NavLink, Outlet } from "react-router-dom";
|
import { useCallback, useEffect, useState } from "react";
|
||||||
|
import { Outlet } from "react-router-dom";
|
||||||
import { useAuth } from "../hooks/useAuth";
|
import { useAuth } from "../hooks/useAuth";
|
||||||
import { useDensity } from "../hooks/useDensity";
|
import { useDensity } from "../hooks/useDensity";
|
||||||
|
import { useModules } from "../hooks/useModules";
|
||||||
const navLinks = [
|
import { useSSE } from "../hooks/useSSE";
|
||||||
{ to: "/", label: "Items" },
|
import { Sidebar } from "./Sidebar";
|
||||||
{ to: "/projects", label: "Projects" },
|
|
||||||
{ to: "/schemas", label: "Schemas" },
|
|
||||||
{ to: "/audit", label: "Audit" },
|
|
||||||
{ to: "/settings", label: "Settings" },
|
|
||||||
];
|
|
||||||
|
|
||||||
const roleBadgeStyle: Record<string, React.CSSProperties> = {
|
|
||||||
admin: { background: "rgba(203,166,247,0.2)", color: "var(--ctp-mauve)" },
|
|
||||||
editor: { background: "rgba(137,180,250,0.2)", color: "var(--ctp-blue)" },
|
|
||||||
viewer: { background: "rgba(148,226,213,0.2)", color: "var(--ctp-teal)" },
|
|
||||||
};
|
|
||||||
|
|
||||||
export function AppShell() {
|
export function AppShell() {
|
||||||
const { user, loading, logout } = useAuth();
|
const { user, loading, logout } = useAuth();
|
||||||
const [density, toggleDensity] = useDensity();
|
const [density, toggleDensity] = useDensity();
|
||||||
|
const { modules, refresh: refreshModules } = useModules();
|
||||||
|
const { on } = useSSE();
|
||||||
|
const [toast, setToast] = useState<string | null>(null);
|
||||||
|
|
||||||
|
// Listen for settings.changed SSE events
|
||||||
|
useEffect(() => {
|
||||||
|
return on("settings.changed", (raw) => {
|
||||||
|
try {
|
||||||
|
const data = JSON.parse(raw) as {
|
||||||
|
module: string;
|
||||||
|
changed_keys: string[];
|
||||||
|
updated_by: string;
|
||||||
|
};
|
||||||
|
refreshModules();
|
||||||
|
if (data.updated_by !== user?.username) {
|
||||||
|
setToast(`Settings updated by ${data.updated_by}`);
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// ignore malformed events
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}, [on, refreshModules, user?.username]);
|
||||||
|
|
||||||
|
// Auto-dismiss toast
|
||||||
|
useEffect(() => {
|
||||||
|
if (!toast) return;
|
||||||
|
const timer = setTimeout(() => setToast(null), 5000);
|
||||||
|
return () => clearTimeout(timer);
|
||||||
|
}, [toast]);
|
||||||
|
|
||||||
|
const [sidebarOpen, setSidebarOpen] = useState(() => {
|
||||||
|
return localStorage.getItem("silo-sidebar") !== "closed";
|
||||||
|
});
|
||||||
|
|
||||||
|
const toggleSidebar = useCallback(() => {
|
||||||
|
setSidebarOpen((prev) => {
|
||||||
|
const next = !prev;
|
||||||
|
localStorage.setItem("silo-sidebar", next ? "open" : "closed");
|
||||||
|
return next;
|
||||||
|
});
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
// Ctrl+J to toggle sidebar
|
||||||
|
useEffect(() => {
|
||||||
|
const handler = (e: KeyboardEvent) => {
|
||||||
|
if (e.ctrlKey && e.key === "j") {
|
||||||
|
e.preventDefault();
|
||||||
|
toggleSidebar();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
window.addEventListener("keydown", handler);
|
||||||
|
return () => window.removeEventListener("keydown", handler);
|
||||||
|
}, [toggleSidebar]);
|
||||||
|
|
||||||
if (loading) {
|
if (loading) {
|
||||||
return (
|
return (
|
||||||
@@ -36,119 +79,40 @@ export function AppShell() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div style={{ display: "flex", flexDirection: "column", height: "100vh" }}>
|
<div style={{ display: "flex", height: "100vh" }}>
|
||||||
<header
|
<Sidebar
|
||||||
style={{
|
open={sidebarOpen}
|
||||||
backgroundColor: "var(--ctp-mantle)",
|
onToggle={toggleSidebar}
|
||||||
borderBottom: "1px solid var(--ctp-surface0)",
|
modules={modules}
|
||||||
padding: "var(--d-header-py) var(--d-header-px)",
|
user={user}
|
||||||
display: "flex",
|
density={density}
|
||||||
alignItems: "center",
|
onToggleDensity={toggleDensity}
|
||||||
justifyContent: "space-between",
|
onLogout={logout}
|
||||||
flexShrink: 0,
|
/>
|
||||||
}}
|
<main style={{ flex: 1, overflow: "auto", padding: "1rem" }}>
|
||||||
>
|
|
||||||
<h1
|
|
||||||
style={{
|
|
||||||
fontSize: "var(--d-header-logo)",
|
|
||||||
fontWeight: 600,
|
|
||||||
color: "var(--ctp-mauve)",
|
|
||||||
}}
|
|
||||||
>
|
|
||||||
Silo
|
|
||||||
</h1>
|
|
||||||
|
|
||||||
<nav style={{ display: "flex", gap: "var(--d-nav-gap)" }}>
|
|
||||||
{navLinks.map((link) => (
|
|
||||||
<NavLink
|
|
||||||
key={link.to}
|
|
||||||
to={link.to}
|
|
||||||
end={link.to === "/"}
|
|
||||||
style={({ isActive }) => ({
|
|
||||||
color: isActive ? "var(--ctp-mauve)" : "var(--ctp-subtext1)",
|
|
||||||
backgroundColor: isActive
|
|
||||||
? "var(--ctp-surface1)"
|
|
||||||
: "transparent",
|
|
||||||
fontWeight: 500,
|
|
||||||
padding: "var(--d-nav-py) var(--d-nav-px)",
|
|
||||||
borderRadius: "var(--d-nav-radius)",
|
|
||||||
textDecoration: "none",
|
|
||||||
transition: "all 0.15s ease",
|
|
||||||
})}
|
|
||||||
>
|
|
||||||
{link.label}
|
|
||||||
</NavLink>
|
|
||||||
))}
|
|
||||||
</nav>
|
|
||||||
|
|
||||||
{user && (
|
|
||||||
<div
|
|
||||||
style={{
|
|
||||||
display: "flex",
|
|
||||||
alignItems: "center",
|
|
||||||
gap: "var(--d-user-gap)",
|
|
||||||
}}
|
|
||||||
>
|
|
||||||
<span
|
|
||||||
style={{
|
|
||||||
color: "var(--ctp-subtext1)",
|
|
||||||
fontSize: "var(--d-user-font)",
|
|
||||||
}}
|
|
||||||
>
|
|
||||||
{user.display_name}
|
|
||||||
</span>
|
|
||||||
<span
|
|
||||||
style={{
|
|
||||||
display: "inline-block",
|
|
||||||
padding: "0.15rem 0.5rem",
|
|
||||||
borderRadius: "1rem",
|
|
||||||
fontSize: "0.75rem",
|
|
||||||
fontWeight: 600,
|
|
||||||
...roleBadgeStyle[user.role],
|
|
||||||
}}
|
|
||||||
>
|
|
||||||
{user.role}
|
|
||||||
</span>
|
|
||||||
<button
|
|
||||||
onClick={toggleDensity}
|
|
||||||
title={`Switch to ${density === "comfortable" ? "compact" : "comfortable"} view`}
|
|
||||||
style={{
|
|
||||||
padding: "0.25rem 0.5rem",
|
|
||||||
fontSize: "var(--font-sm)",
|
|
||||||
borderRadius: "0.375rem",
|
|
||||||
cursor: "pointer",
|
|
||||||
border: "1px solid var(--ctp-surface1)",
|
|
||||||
background: "var(--ctp-surface0)",
|
|
||||||
color: "var(--ctp-subtext1)",
|
|
||||||
fontFamily: "'JetBrains Mono', monospace",
|
|
||||||
letterSpacing: "0.05em",
|
|
||||||
}}
|
|
||||||
>
|
|
||||||
{density === "comfortable" ? "COM" : "CMP"}
|
|
||||||
</button>
|
|
||||||
<button
|
|
||||||
onClick={logout}
|
|
||||||
style={{
|
|
||||||
padding: "0.35rem 0.75rem",
|
|
||||||
fontSize: "var(--font-table)",
|
|
||||||
borderRadius: "0.4rem",
|
|
||||||
cursor: "pointer",
|
|
||||||
border: "none",
|
|
||||||
background: "var(--ctp-surface1)",
|
|
||||||
color: "var(--ctp-subtext1)",
|
|
||||||
}}
|
|
||||||
>
|
|
||||||
Logout
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</header>
|
|
||||||
|
|
||||||
<main
|
|
||||||
style={{ flex: 1, padding: "1rem 1rem 0 1rem", overflow: "hidden" }}
|
|
||||||
>
|
|
||||||
<Outlet />
|
<Outlet />
|
||||||
</main>
|
</main>
|
||||||
|
{toast && (
|
||||||
|
<div
|
||||||
|
style={{
|
||||||
|
position: "fixed",
|
||||||
|
bottom: "1rem",
|
||||||
|
right: "1rem",
|
||||||
|
padding: "0.5rem 1rem",
|
||||||
|
backgroundColor: "var(--ctp-surface1)",
|
||||||
|
color: "var(--ctp-text)",
|
||||||
|
borderRadius: "0.5rem",
|
||||||
|
fontSize: "var(--font-body)",
|
||||||
|
border: "1px solid var(--ctp-surface2)",
|
||||||
|
boxShadow: "0 2px 8px rgba(0,0,0,0.3)",
|
||||||
|
zIndex: 1000,
|
||||||
|
cursor: "pointer",
|
||||||
|
}}
|
||||||
|
onClick={() => setToast(null)}
|
||||||
|
>
|
||||||
|
{toast}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -80,7 +80,7 @@ export function ContextMenu({ x, y, items, onClose }: ContextMenuProps) {
|
|||||||
alignItems: "center",
|
alignItems: "center",
|
||||||
gap: "0.5rem",
|
gap: "0.5rem",
|
||||||
width: "100%",
|
width: "100%",
|
||||||
padding: "0.35rem 0.75rem",
|
padding: "0.25rem 0.75rem",
|
||||||
background: "none",
|
background: "none",
|
||||||
border: "none",
|
border: "none",
|
||||||
color: item.disabled ? "var(--ctp-overlay0)" : "var(--ctp-text)",
|
color: item.disabled ? "var(--ctp-overlay0)" : "var(--ctp-text)",
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import type { ReactNode } from 'react';
|
import type { ReactNode } from "react";
|
||||||
|
|
||||||
interface PageFooterProps {
|
interface PageFooterProps {
|
||||||
stats?: ReactNode;
|
stats?: ReactNode;
|
||||||
@@ -8,32 +8,40 @@ interface PageFooterProps {
|
|||||||
onPageChange?: (page: number) => void;
|
onPageChange?: (page: number) => void;
|
||||||
}
|
}
|
||||||
|
|
||||||
export function PageFooter({ stats, page, pageSize, itemCount, onPageChange }: PageFooterProps) {
|
export function PageFooter({
|
||||||
|
stats,
|
||||||
|
page,
|
||||||
|
pageSize,
|
||||||
|
itemCount,
|
||||||
|
onPageChange,
|
||||||
|
}: PageFooterProps) {
|
||||||
const hasPagination = page !== undefined && onPageChange !== undefined;
|
const hasPagination = page !== undefined && onPageChange !== undefined;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div style={{
|
<div
|
||||||
position: 'fixed',
|
style={{
|
||||||
bottom: 0,
|
position: "fixed",
|
||||||
left: 0,
|
bottom: 0,
|
||||||
right: 0,
|
left: 0,
|
||||||
height: 'var(--d-footer-h)',
|
right: 0,
|
||||||
backgroundColor: 'var(--ctp-surface0)',
|
height: "var(--d-footer-h)",
|
||||||
borderTop: '1px solid var(--ctp-surface1)',
|
backgroundColor: "var(--ctp-surface0)",
|
||||||
display: 'flex',
|
borderTop: "1px solid var(--ctp-surface1)",
|
||||||
alignItems: 'center',
|
display: "flex",
|
||||||
justifyContent: 'space-between',
|
alignItems: "center",
|
||||||
padding: '0 var(--d-footer-px)',
|
justifyContent: "space-between",
|
||||||
fontSize: 'var(--d-footer-font)',
|
padding: "0 var(--d-footer-px)",
|
||||||
color: 'var(--ctp-subtext0)',
|
fontSize: "var(--d-footer-font)",
|
||||||
zIndex: 100,
|
color: "var(--ctp-subtext0)",
|
||||||
}}>
|
zIndex: 100,
|
||||||
<div style={{ display: 'flex', gap: '1.5rem', alignItems: 'center' }}>
|
}}
|
||||||
|
>
|
||||||
|
<div style={{ display: "flex", gap: "1.5rem", alignItems: "center" }}>
|
||||||
{stats}
|
{stats}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{hasPagination && (
|
{hasPagination && (
|
||||||
<div style={{ display: 'flex', gap: '0.5rem', alignItems: 'center' }}>
|
<div style={{ display: "flex", gap: "0.5rem", alignItems: "center" }}>
|
||||||
<button
|
<button
|
||||||
onClick={() => onPageChange(Math.max(1, page - 1))}
|
onClick={() => onPageChange(Math.max(1, page - 1))}
|
||||||
disabled={page <= 1}
|
disabled={page <= 1}
|
||||||
@@ -47,7 +55,11 @@ export function PageFooter({ stats, page, pageSize, itemCount, onPageChange }: P
|
|||||||
</span>
|
</span>
|
||||||
<button
|
<button
|
||||||
onClick={() => onPageChange(page + 1)}
|
onClick={() => onPageChange(page + 1)}
|
||||||
disabled={pageSize !== undefined && itemCount !== undefined && itemCount < pageSize}
|
disabled={
|
||||||
|
pageSize !== undefined &&
|
||||||
|
itemCount !== undefined &&
|
||||||
|
itemCount < pageSize
|
||||||
|
}
|
||||||
style={pageBtnStyle}
|
style={pageBtnStyle}
|
||||||
>
|
>
|
||||||
Next
|
Next
|
||||||
@@ -59,11 +71,11 @@ export function PageFooter({ stats, page, pageSize, itemCount, onPageChange }: P
|
|||||||
}
|
}
|
||||||
|
|
||||||
const pageBtnStyle: React.CSSProperties = {
|
const pageBtnStyle: React.CSSProperties = {
|
||||||
padding: '0.15rem 0.4rem',
|
padding: "0.25rem 0.5rem",
|
||||||
fontSize: 'inherit',
|
fontSize: "inherit",
|
||||||
border: 'none',
|
border: "none",
|
||||||
borderRadius: '0.25rem',
|
borderRadius: "0.25rem",
|
||||||
backgroundColor: 'var(--ctp-surface1)',
|
backgroundColor: "var(--ctp-surface1)",
|
||||||
color: 'var(--ctp-text)',
|
color: "var(--ctp-text)",
|
||||||
cursor: 'pointer',
|
cursor: "pointer",
|
||||||
};
|
};
|
||||||
|
|||||||
335
web/src/components/Sidebar.tsx
Normal file
335
web/src/components/Sidebar.tsx
Normal file
@@ -0,0 +1,335 @@
|
|||||||
|
import { useEffect, useRef, useState, useCallback } from "react";
|
||||||
|
import { NavLink, useNavigate } from "react-router-dom";
|
||||||
|
import {
|
||||||
|
Package,
|
||||||
|
FolderKanban,
|
||||||
|
FileCode2,
|
||||||
|
ClipboardCheck,
|
||||||
|
Settings2,
|
||||||
|
ChevronLeft,
|
||||||
|
ChevronRight,
|
||||||
|
LogOut,
|
||||||
|
} from "lucide-react";
|
||||||
|
import type { ModuleInfo } from "../api/types";
|
||||||
|
|
||||||
|
interface NavItem {
|
||||||
|
moduleId: string | null;
|
||||||
|
path: string;
|
||||||
|
label: string;
|
||||||
|
icon: React.ComponentType<{ size?: number }>;
|
||||||
|
}
|
||||||
|
|
||||||
|
const allNavItems: NavItem[] = [
|
||||||
|
{ moduleId: "core", path: "/", label: "Items", icon: Package },
|
||||||
|
{
|
||||||
|
moduleId: "projects",
|
||||||
|
path: "/projects",
|
||||||
|
label: "Projects",
|
||||||
|
icon: FolderKanban,
|
||||||
|
},
|
||||||
|
{ moduleId: "schemas", path: "/schemas", label: "Schemas", icon: FileCode2 },
|
||||||
|
{ moduleId: "audit", path: "/audit", label: "Audit", icon: ClipboardCheck },
|
||||||
|
{ moduleId: null, path: "/settings", label: "Settings", icon: Settings2 },
|
||||||
|
];
|
||||||
|
|
||||||
|
interface SidebarProps {
|
||||||
|
open: boolean;
|
||||||
|
onToggle: () => void;
|
||||||
|
modules: Record<string, ModuleInfo>;
|
||||||
|
user: { display_name: string; role: string } | null;
|
||||||
|
density: string;
|
||||||
|
onToggleDensity: () => void;
|
||||||
|
onLogout: () => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
const roleBadgeStyle: Record<string, React.CSSProperties> = {
|
||||||
|
admin: { background: "rgba(203,166,247,0.2)", color: "var(--ctp-mauve)" },
|
||||||
|
editor: { background: "rgba(137,180,250,0.2)", color: "var(--ctp-blue)" },
|
||||||
|
viewer: { background: "rgba(148,226,213,0.2)", color: "var(--ctp-teal)" },
|
||||||
|
};
|
||||||
|
|
||||||
|
export function Sidebar({
|
||||||
|
open,
|
||||||
|
onToggle,
|
||||||
|
modules,
|
||||||
|
user,
|
||||||
|
density,
|
||||||
|
onToggleDensity,
|
||||||
|
onLogout,
|
||||||
|
}: SidebarProps) {
|
||||||
|
const navigate = useNavigate();
|
||||||
|
const [focusIndex, setFocusIndex] = useState(-1);
|
||||||
|
const navRefs = useRef<(HTMLAnchorElement | null)[]>([]);
|
||||||
|
|
||||||
|
const visibleItems = allNavItems.filter(
|
||||||
|
(item) => item.moduleId === null || modules[item.moduleId]?.enabled,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Focus the item at focusIndex when it changes
|
||||||
|
useEffect(() => {
|
||||||
|
if (focusIndex >= 0 && focusIndex < navRefs.current.length) {
|
||||||
|
navRefs.current[focusIndex]?.focus();
|
||||||
|
}
|
||||||
|
}, [focusIndex]);
|
||||||
|
|
||||||
|
// Reset focus when sidebar closes
|
||||||
|
useEffect(() => {
|
||||||
|
if (!open) setFocusIndex(-1);
|
||||||
|
}, [open]);
|
||||||
|
|
||||||
|
const handleKeyDown = useCallback(
|
||||||
|
(e: React.KeyboardEvent) => {
|
||||||
|
if (!open) return;
|
||||||
|
|
||||||
|
switch (e.key) {
|
||||||
|
case "ArrowDown":
|
||||||
|
e.preventDefault();
|
||||||
|
setFocusIndex((i) => (i + 1) % visibleItems.length);
|
||||||
|
break;
|
||||||
|
case "ArrowUp":
|
||||||
|
e.preventDefault();
|
||||||
|
setFocusIndex(
|
||||||
|
(i) => (i - 1 + visibleItems.length) % visibleItems.length,
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
case "Enter": {
|
||||||
|
const target = visibleItems[focusIndex];
|
||||||
|
if (focusIndex >= 0 && target) {
|
||||||
|
e.preventDefault();
|
||||||
|
navigate(target.path);
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
case "Escape":
|
||||||
|
e.preventDefault();
|
||||||
|
onToggle();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
[open, focusIndex, visibleItems, navigate, onToggle],
|
||||||
|
);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<nav
|
||||||
|
onKeyDown={handleKeyDown}
|
||||||
|
style={{
|
||||||
|
width: open ? "var(--d-sidebar-w)" : "var(--d-sidebar-collapsed)",
|
||||||
|
minWidth: open ? "var(--d-sidebar-w)" : "var(--d-sidebar-collapsed)",
|
||||||
|
height: "100vh",
|
||||||
|
backgroundColor: "var(--ctp-mantle)",
|
||||||
|
borderRight: "1px solid var(--ctp-surface0)",
|
||||||
|
display: "flex",
|
||||||
|
flexDirection: "column",
|
||||||
|
transition: "width 0.2s ease, min-width 0.2s ease",
|
||||||
|
overflow: "hidden",
|
||||||
|
flexShrink: 0,
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{/* Logo */}
|
||||||
|
<div
|
||||||
|
style={{
|
||||||
|
padding: open ? "0.75rem 1rem" : "0.75rem 0",
|
||||||
|
display: "flex",
|
||||||
|
alignItems: "center",
|
||||||
|
justifyContent: open ? "flex-start" : "center",
|
||||||
|
borderBottom: "1px solid var(--ctp-surface0)",
|
||||||
|
minHeight: 44,
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<span
|
||||||
|
style={{
|
||||||
|
fontSize: "1.25rem",
|
||||||
|
fontWeight: 700,
|
||||||
|
color: "var(--ctp-mauve)",
|
||||||
|
whiteSpace: "nowrap",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{open ? "Silo" : "S"}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Nav items */}
|
||||||
|
<div
|
||||||
|
style={{
|
||||||
|
flex: 1,
|
||||||
|
padding: "0.5rem 0.5rem",
|
||||||
|
display: "flex",
|
||||||
|
flexDirection: "column",
|
||||||
|
gap: "2px",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{visibleItems.map((item, i) => (
|
||||||
|
<NavLink
|
||||||
|
key={item.path}
|
||||||
|
to={item.path}
|
||||||
|
end={item.path === "/"}
|
||||||
|
ref={(el) => {
|
||||||
|
navRefs.current[i] = el;
|
||||||
|
}}
|
||||||
|
title={open ? undefined : item.label}
|
||||||
|
style={({ isActive }) => ({
|
||||||
|
display: "flex",
|
||||||
|
alignItems: "center",
|
||||||
|
gap: "0.75rem",
|
||||||
|
padding: "var(--d-nav-py) var(--d-nav-px)",
|
||||||
|
borderRadius: "var(--d-nav-radius)",
|
||||||
|
textDecoration: "none",
|
||||||
|
color: isActive ? "var(--ctp-mauve)" : "var(--ctp-subtext1)",
|
||||||
|
backgroundColor: isActive ? "var(--ctp-surface1)" : "transparent",
|
||||||
|
fontWeight: 500,
|
||||||
|
fontSize: "var(--font-body)",
|
||||||
|
whiteSpace: "nowrap",
|
||||||
|
transition: "background-color 0.15s ease, color 0.15s ease",
|
||||||
|
outline: focusIndex === i ? "1px solid var(--ctp-mauve)" : "none",
|
||||||
|
outlineOffset: -1,
|
||||||
|
justifyContent: open ? "flex-start" : "center",
|
||||||
|
})}
|
||||||
|
onMouseEnter={(e) => {
|
||||||
|
const target = e.currentTarget;
|
||||||
|
if (
|
||||||
|
!target.style.backgroundColor ||
|
||||||
|
target.style.backgroundColor === "transparent"
|
||||||
|
) {
|
||||||
|
target.style.backgroundColor = "var(--ctp-surface0)";
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
onMouseLeave={(e) => {
|
||||||
|
const target = e.currentTarget;
|
||||||
|
// Let NavLink's isActive styling handle active items
|
||||||
|
const isActive = target.getAttribute("aria-current") === "page";
|
||||||
|
if (!isActive) {
|
||||||
|
target.style.backgroundColor = "transparent";
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<item.icon size={16} />
|
||||||
|
{open && <span>{item.label}</span>}
|
||||||
|
</NavLink>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Bottom section */}
|
||||||
|
<div
|
||||||
|
style={{
|
||||||
|
borderTop: "1px solid var(--ctp-surface0)",
|
||||||
|
padding: "0.5rem",
|
||||||
|
display: "flex",
|
||||||
|
flexDirection: "column",
|
||||||
|
gap: "4px",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{/* Toggle sidebar */}
|
||||||
|
<button
|
||||||
|
onClick={onToggle}
|
||||||
|
title={open ? "Collapse sidebar (Ctrl+J)" : "Expand sidebar (Ctrl+J)"}
|
||||||
|
style={{
|
||||||
|
...btnStyle,
|
||||||
|
justifyContent: open ? "flex-start" : "center",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{open ? <ChevronLeft size={16} /> : <ChevronRight size={16} />}
|
||||||
|
{open && <span>Collapse</span>}
|
||||||
|
</button>
|
||||||
|
|
||||||
|
{/* Density toggle */}
|
||||||
|
<button
|
||||||
|
onClick={onToggleDensity}
|
||||||
|
title={`Switch to ${density === "comfortable" ? "compact" : "comfortable"} view`}
|
||||||
|
style={{
|
||||||
|
...btnStyle,
|
||||||
|
justifyContent: open ? "flex-start" : "center",
|
||||||
|
fontFamily: "'JetBrains Mono', monospace",
|
||||||
|
letterSpacing: "0.05em",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<span
|
||||||
|
style={{
|
||||||
|
width: 16,
|
||||||
|
textAlign: "center",
|
||||||
|
fontSize: "var(--font-sm)",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{density === "comfortable" ? "CO" : "CP"}
|
||||||
|
</span>
|
||||||
|
{open && (
|
||||||
|
<span>{density === "comfortable" ? "Comfortable" : "Compact"}</span>
|
||||||
|
)}
|
||||||
|
</button>
|
||||||
|
|
||||||
|
{/* User */}
|
||||||
|
{user && (
|
||||||
|
<div
|
||||||
|
style={{
|
||||||
|
display: "flex",
|
||||||
|
alignItems: "center",
|
||||||
|
gap: "0.5rem",
|
||||||
|
padding: "0.375rem var(--d-nav-px)",
|
||||||
|
justifyContent: open ? "flex-start" : "center",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<span
|
||||||
|
style={{
|
||||||
|
display: "inline-flex",
|
||||||
|
alignItems: "center",
|
||||||
|
justifyContent: "center",
|
||||||
|
width: 20,
|
||||||
|
height: 20,
|
||||||
|
borderRadius: "50%",
|
||||||
|
fontSize: "var(--font-xs)",
|
||||||
|
fontWeight: 600,
|
||||||
|
flexShrink: 0,
|
||||||
|
...roleBadgeStyle[user.role],
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{user.role.charAt(0).toUpperCase()}
|
||||||
|
</span>
|
||||||
|
{open && (
|
||||||
|
<span
|
||||||
|
style={{
|
||||||
|
color: "var(--ctp-subtext1)",
|
||||||
|
fontSize: "var(--font-body)",
|
||||||
|
overflow: "hidden",
|
||||||
|
textOverflow: "ellipsis",
|
||||||
|
whiteSpace: "nowrap",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{user.display_name}
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Logout */}
|
||||||
|
<button
|
||||||
|
onClick={onLogout}
|
||||||
|
title="Logout"
|
||||||
|
style={{
|
||||||
|
...btnStyle,
|
||||||
|
justifyContent: open ? "flex-start" : "center",
|
||||||
|
color: "var(--ctp-overlay1)",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<LogOut size={16} />
|
||||||
|
{open && <span>Logout</span>}
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</nav>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const btnStyle: React.CSSProperties = {
|
||||||
|
display: "flex",
|
||||||
|
alignItems: "center",
|
||||||
|
gap: "0.75rem",
|
||||||
|
padding: "var(--d-nav-py) var(--d-nav-px)",
|
||||||
|
borderRadius: "var(--d-nav-radius)",
|
||||||
|
border: "none",
|
||||||
|
background: "transparent",
|
||||||
|
color: "var(--ctp-subtext1)",
|
||||||
|
fontSize: "var(--font-body)",
|
||||||
|
fontWeight: 500,
|
||||||
|
cursor: "pointer",
|
||||||
|
whiteSpace: "nowrap",
|
||||||
|
width: "100%",
|
||||||
|
textAlign: "left",
|
||||||
|
};
|
||||||
@@ -124,7 +124,7 @@ export function TagInput({
|
|||||||
padding: "0.25rem 0.5rem",
|
padding: "0.25rem 0.5rem",
|
||||||
backgroundColor: "var(--ctp-base)",
|
backgroundColor: "var(--ctp-base)",
|
||||||
border: "1px solid var(--ctp-surface1)",
|
border: "1px solid var(--ctp-surface1)",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
cursor: "text",
|
cursor: "text",
|
||||||
minHeight: "1.8rem",
|
minHeight: "1.8rem",
|
||||||
}}
|
}}
|
||||||
@@ -137,7 +137,7 @@ export function TagInput({
|
|||||||
display: "inline-flex",
|
display: "inline-flex",
|
||||||
alignItems: "center",
|
alignItems: "center",
|
||||||
gap: "0.25rem",
|
gap: "0.25rem",
|
||||||
padding: "0.15rem 0.5rem",
|
padding: "0.25rem 0.5rem",
|
||||||
borderRadius: "1rem",
|
borderRadius: "1rem",
|
||||||
backgroundColor: "rgba(203,166,247,0.15)",
|
backgroundColor: "rgba(203,166,247,0.15)",
|
||||||
color: "var(--ctp-mauve)",
|
color: "var(--ctp-mauve)",
|
||||||
@@ -187,7 +187,7 @@ export function TagInput({
|
|||||||
background: "transparent",
|
background: "transparent",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
fontSize: "var(--font-body)",
|
fontSize: "var(--font-body)",
|
||||||
padding: "0.15rem 0",
|
padding: "0.25rem 0",
|
||||||
}}
|
}}
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
@@ -202,7 +202,7 @@ export function TagInput({
|
|||||||
marginTop: "0.25rem",
|
marginTop: "0.25rem",
|
||||||
backgroundColor: "var(--ctp-surface0)",
|
backgroundColor: "var(--ctp-surface0)",
|
||||||
border: "1px solid var(--ctp-surface1)",
|
border: "1px solid var(--ctp-surface1)",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
maxHeight: "160px",
|
maxHeight: "160px",
|
||||||
overflowY: "auto",
|
overflowY: "auto",
|
||||||
}}
|
}}
|
||||||
|
|||||||
@@ -218,7 +218,7 @@ export function AuditDetailPanel({
|
|||||||
<span
|
<span
|
||||||
style={{
|
style={{
|
||||||
display: "inline-block",
|
display: "inline-block",
|
||||||
padding: "0.15rem 0.5rem",
|
padding: "0.25rem 0.5rem",
|
||||||
borderRadius: "1rem",
|
borderRadius: "1rem",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 600,
|
fontWeight: 600,
|
||||||
@@ -477,10 +477,10 @@ function FieldRow({
|
|||||||
placeholder="---"
|
placeholder="---"
|
||||||
style={{
|
style={{
|
||||||
flex: 1,
|
flex: 1,
|
||||||
padding: "0.25rem 0.4rem",
|
padding: "0.25rem 0.5rem",
|
||||||
fontSize: "var(--font-table)",
|
fontSize: "var(--font-table)",
|
||||||
border: "1px solid var(--ctp-surface1)",
|
border: "1px solid var(--ctp-surface1)",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
backgroundColor: "var(--ctp-surface0)",
|
backgroundColor: "var(--ctp-surface0)",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
outline: "none",
|
outline: "none",
|
||||||
@@ -495,7 +495,7 @@ const closeBtnStyle: React.CSSProperties = {
|
|||||||
padding: "0.25rem 0.5rem",
|
padding: "0.25rem 0.5rem",
|
||||||
fontSize: "var(--font-table)",
|
fontSize: "var(--font-table)",
|
||||||
border: "none",
|
border: "none",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
backgroundColor: "var(--ctp-surface1)",
|
backgroundColor: "var(--ctp-surface1)",
|
||||||
color: "var(--ctp-subtext1)",
|
color: "var(--ctp-subtext1)",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
|
|||||||
@@ -70,7 +70,7 @@ export function AuditSummaryBar({
|
|||||||
style={{
|
style={{
|
||||||
display: "flex",
|
display: "flex",
|
||||||
gap: "1.5rem",
|
gap: "1.5rem",
|
||||||
marginTop: "0.4rem",
|
marginTop: "0.5rem",
|
||||||
fontSize: "var(--font-table)",
|
fontSize: "var(--font-table)",
|
||||||
color: "var(--ctp-subtext0)",
|
color: "var(--ctp-subtext0)",
|
||||||
}}
|
}}
|
||||||
|
|||||||
@@ -103,7 +103,7 @@ export function AuditTable({
|
|||||||
<span
|
<span
|
||||||
style={{
|
style={{
|
||||||
display: "inline-block",
|
display: "inline-block",
|
||||||
padding: "0.15rem 0.5rem",
|
padding: "0.25rem 0.5rem",
|
||||||
borderRadius: "1rem",
|
borderRadius: "1rem",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 600,
|
fontWeight: 600,
|
||||||
|
|||||||
@@ -97,7 +97,7 @@ export function AuditToolbar({
|
|||||||
const selectStyle: React.CSSProperties = {
|
const selectStyle: React.CSSProperties = {
|
||||||
padding: "var(--d-input-py) var(--d-input-px)",
|
padding: "var(--d-input-py) var(--d-input-px)",
|
||||||
fontSize: "var(--d-input-font)",
|
fontSize: "var(--d-input-font)",
|
||||||
borderRadius: "0.4rem",
|
borderRadius: "0.5rem",
|
||||||
border: "1px solid var(--ctp-surface1)",
|
border: "1px solid var(--ctp-surface1)",
|
||||||
backgroundColor: "var(--ctp-surface0)",
|
backgroundColor: "var(--ctp-surface0)",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
@@ -106,7 +106,7 @@ const selectStyle: React.CSSProperties = {
|
|||||||
const btnStyle: React.CSSProperties = {
|
const btnStyle: React.CSSProperties = {
|
||||||
padding: "var(--d-input-py) var(--d-input-px)",
|
padding: "var(--d-input-py) var(--d-input-px)",
|
||||||
fontSize: "var(--d-input-font)",
|
fontSize: "var(--d-input-font)",
|
||||||
borderRadius: "0.4rem",
|
borderRadius: "0.5rem",
|
||||||
border: "none",
|
border: "none",
|
||||||
backgroundColor: "var(--ctp-surface1)",
|
backgroundColor: "var(--ctp-surface1)",
|
||||||
color: "var(--ctp-subtext1)",
|
color: "var(--ctp-subtext1)",
|
||||||
|
|||||||
@@ -118,11 +118,11 @@ export function BOMTab({ partNumber, isEditor }: BOMTabProps) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
const inputStyle: React.CSSProperties = {
|
const inputStyle: React.CSSProperties = {
|
||||||
padding: "0.25rem 0.4rem",
|
padding: "0.25rem 0.5rem",
|
||||||
fontSize: "var(--font-table)",
|
fontSize: "var(--font-table)",
|
||||||
backgroundColor: "var(--ctp-base)",
|
backgroundColor: "var(--ctp-base)",
|
||||||
border: "1px solid var(--ctp-surface1)",
|
border: "1px solid var(--ctp-surface1)",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
width: "100%",
|
width: "100%",
|
||||||
};
|
};
|
||||||
@@ -240,7 +240,7 @@ export function BOMTab({ partNumber, isEditor }: BOMTabProps) {
|
|||||||
...toolBtnStyle,
|
...toolBtnStyle,
|
||||||
display: "inline-flex",
|
display: "inline-flex",
|
||||||
alignItems: "center",
|
alignItems: "center",
|
||||||
gap: "0.35rem",
|
gap: "0.25rem",
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
<Download size={14} /> Export CSV
|
<Download size={14} /> Export CSV
|
||||||
@@ -256,7 +256,7 @@ export function BOMTab({ partNumber, isEditor }: BOMTabProps) {
|
|||||||
...toolBtnStyle,
|
...toolBtnStyle,
|
||||||
display: "inline-flex",
|
display: "inline-flex",
|
||||||
alignItems: "center",
|
alignItems: "center",
|
||||||
gap: "0.35rem",
|
gap: "0.25rem",
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
<Plus size={14} /> Add
|
<Plus size={14} /> Add
|
||||||
@@ -267,9 +267,9 @@ export function BOMTab({ partNumber, isEditor }: BOMTabProps) {
|
|||||||
{isEditor && assemblyCount > 0 && (
|
{isEditor && assemblyCount > 0 && (
|
||||||
<div
|
<div
|
||||||
style={{
|
style={{
|
||||||
padding: "0.35rem 0.5rem",
|
padding: "0.25rem 0.5rem",
|
||||||
marginBottom: "0.5rem",
|
marginBottom: "0.5rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
backgroundColor: "rgba(148,226,213,0.1)",
|
backgroundColor: "rgba(148,226,213,0.1)",
|
||||||
border: "1px solid rgba(148,226,213,0.3)",
|
border: "1px solid rgba(148,226,213,0.3)",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
@@ -438,7 +438,7 @@ const toolBtnStyle: React.CSSProperties = {
|
|||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
border: "none",
|
border: "none",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
backgroundColor: "var(--ctp-surface1)",
|
backgroundColor: "var(--ctp-surface1)",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
@@ -451,16 +451,16 @@ const actionBtnStyle: React.CSSProperties = {
|
|||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
padding: "0.15rem 0.25rem",
|
padding: "0.25rem 0.25rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
};
|
};
|
||||||
|
|
||||||
const saveBtnStyle: React.CSSProperties = {
|
const saveBtnStyle: React.CSSProperties = {
|
||||||
padding: "0.25rem 0.4rem",
|
padding: "0.25rem 0.5rem",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
border: "none",
|
border: "none",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
backgroundColor: "var(--ctp-green)",
|
backgroundColor: "var(--ctp-green)",
|
||||||
color: "var(--ctp-crust)",
|
color: "var(--ctp-crust)",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
@@ -468,7 +468,7 @@ const saveBtnStyle: React.CSSProperties = {
|
|||||||
};
|
};
|
||||||
|
|
||||||
const sourceBadgeBase: React.CSSProperties = {
|
const sourceBadgeBase: React.CSSProperties = {
|
||||||
padding: "0.15rem 0.4rem",
|
padding: "0.25rem 0.5rem",
|
||||||
borderRadius: "1rem",
|
borderRadius: "1rem",
|
||||||
fontSize: "var(--font-sm)",
|
fontSize: "var(--font-sm)",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
@@ -487,11 +487,11 @@ const manualBadge: React.CSSProperties = {
|
|||||||
};
|
};
|
||||||
|
|
||||||
const cancelBtnStyle: React.CSSProperties = {
|
const cancelBtnStyle: React.CSSProperties = {
|
||||||
padding: "0.25rem 0.4rem",
|
padding: "0.25rem 0.5rem",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
border: "none",
|
border: "none",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
backgroundColor: "var(--ctp-surface1)",
|
backgroundColor: "var(--ctp-surface1)",
|
||||||
color: "var(--ctp-subtext1)",
|
color: "var(--ctp-subtext1)",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
|
|||||||
@@ -62,7 +62,7 @@ export function CategoryPicker({
|
|||||||
<div
|
<div
|
||||||
style={{
|
style={{
|
||||||
border: "1px solid var(--ctp-surface1)",
|
border: "1px solid var(--ctp-surface1)",
|
||||||
borderRadius: "0.4rem",
|
borderRadius: "0.5rem",
|
||||||
backgroundColor: "var(--ctp-base)",
|
backgroundColor: "var(--ctp-base)",
|
||||||
overflow: "hidden",
|
overflow: "hidden",
|
||||||
}}
|
}}
|
||||||
@@ -74,7 +74,7 @@ export function CategoryPicker({
|
|||||||
display: "flex",
|
display: "flex",
|
||||||
flexWrap: "wrap",
|
flexWrap: "wrap",
|
||||||
gap: "0.25rem",
|
gap: "0.25rem",
|
||||||
padding: "0.4rem 0.5rem",
|
padding: "0.5rem 0.5rem",
|
||||||
borderBottom: "1px solid var(--ctp-surface1)",
|
borderBottom: "1px solid var(--ctp-surface1)",
|
||||||
backgroundColor: "var(--ctp-mantle)",
|
backgroundColor: "var(--ctp-mantle)",
|
||||||
}}
|
}}
|
||||||
@@ -99,7 +99,7 @@ export function CategoryPicker({
|
|||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
border: "none",
|
border: "none",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
backgroundColor: isActive
|
backgroundColor: isActive
|
||||||
? "rgba(203,166,247,0.2)"
|
? "rgba(203,166,247,0.2)"
|
||||||
@@ -133,7 +133,7 @@ export function CategoryPicker({
|
|||||||
disabled={isMultiStage && !selectedDomain}
|
disabled={isMultiStage && !selectedDomain}
|
||||||
style={{
|
style={{
|
||||||
width: "100%",
|
width: "100%",
|
||||||
padding: "0.4rem 0.5rem",
|
padding: "0.5rem 0.5rem",
|
||||||
fontSize: "var(--font-table)",
|
fontSize: "var(--font-table)",
|
||||||
border: "none",
|
border: "none",
|
||||||
borderBottom: "1px solid var(--ctp-surface1)",
|
borderBottom: "1px solid var(--ctp-surface1)",
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
import { useState, useCallback } from "react";
|
import { useState, useCallback } from "react";
|
||||||
import { get, post, put } from "../../api/client";
|
import { get, post } from "../../api/client";
|
||||||
import type {
|
import type {
|
||||||
Project,
|
Project,
|
||||||
FormFieldDescriptor,
|
FormFieldDescriptor,
|
||||||
@@ -95,34 +95,9 @@ export function CreateItemPane({ onCreated, onCancel }: CreateItemPaneProps) {
|
|||||||
[],
|
[],
|
||||||
);
|
);
|
||||||
|
|
||||||
const handleFilesAdded = useCallback(
|
const handleFilesAdded = useCallback((files: PendingAttachment[]) => {
|
||||||
(files: PendingAttachment[]) => {
|
setAttachments((prev) => [...prev, ...files]);
|
||||||
const startIdx = attachments.length;
|
}, []);
|
||||||
setAttachments((prev) => [...prev, ...files]);
|
|
||||||
|
|
||||||
files.forEach((f, i) => {
|
|
||||||
const idx = startIdx + i;
|
|
||||||
setAttachments((prev) =>
|
|
||||||
prev.map((a, j) =>
|
|
||||||
j === idx ? { ...a, uploadStatus: "uploading" } : a,
|
|
||||||
),
|
|
||||||
);
|
|
||||||
|
|
||||||
upload(f.file, (progress) => {
|
|
||||||
setAttachments((prev) =>
|
|
||||||
prev.map((a, j) =>
|
|
||||||
j === idx ? { ...a, uploadProgress: progress } : a,
|
|
||||||
),
|
|
||||||
);
|
|
||||||
}).then((result) => {
|
|
||||||
setAttachments((prev) =>
|
|
||||||
prev.map((a, j) => (j === idx ? result : a)),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
},
|
|
||||||
[attachments.length, upload],
|
|
||||||
);
|
|
||||||
|
|
||||||
const handleFileRemoved = useCallback((index: number) => {
|
const handleFileRemoved = useCallback((index: number) => {
|
||||||
setAttachments((prev) => prev.filter((_, i) => i !== index));
|
setAttachments((prev) => prev.filter((_, i) => i !== index));
|
||||||
@@ -136,24 +111,15 @@ export function CreateItemPane({ onCreated, onCancel }: CreateItemPaneProps) {
|
|||||||
const file = input.files?.[0];
|
const file = input.files?.[0];
|
||||||
if (!file) return;
|
if (!file) return;
|
||||||
|
|
||||||
const pending: PendingAttachment = {
|
setThumbnailFile({
|
||||||
file,
|
file,
|
||||||
objectKey: "",
|
objectKey: "",
|
||||||
uploadProgress: 0,
|
uploadProgress: 0,
|
||||||
uploadStatus: "uploading",
|
uploadStatus: "pending",
|
||||||
};
|
|
||||||
setThumbnailFile(pending);
|
|
||||||
|
|
||||||
upload(file, (progress) => {
|
|
||||||
setThumbnailFile((prev) =>
|
|
||||||
prev ? { ...prev, uploadProgress: progress } : null,
|
|
||||||
);
|
|
||||||
}).then((result) => {
|
|
||||||
setThumbnailFile(result);
|
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
input.click();
|
input.click();
|
||||||
}, [upload]);
|
}, []);
|
||||||
|
|
||||||
const handleSubmit = async () => {
|
const handleSubmit = async () => {
|
||||||
if (!category) {
|
if (!category) {
|
||||||
@@ -188,33 +154,24 @@ export function CreateItemPane({ onCreated, onCancel }: CreateItemPaneProps) {
|
|||||||
});
|
});
|
||||||
|
|
||||||
const pn = result.part_number;
|
const pn = result.part_number;
|
||||||
|
const encodedPN = encodeURIComponent(pn);
|
||||||
|
|
||||||
// Associate uploaded attachments.
|
// Upload attachments via direct multipart POST.
|
||||||
const completed = attachments.filter(
|
for (const att of attachments) {
|
||||||
(a) => a.uploadStatus === "complete" && a.objectKey,
|
|
||||||
);
|
|
||||||
for (const att of completed) {
|
|
||||||
try {
|
try {
|
||||||
await post(`/api/items/${encodeURIComponent(pn)}/files`, {
|
await upload(att.file, `/api/items/${encodedPN}/files/upload`);
|
||||||
object_key: att.objectKey,
|
|
||||||
filename: att.file.name,
|
|
||||||
content_type: att.file.type || "application/octet-stream",
|
|
||||||
size: att.file.size,
|
|
||||||
});
|
|
||||||
} catch {
|
} catch {
|
||||||
// File association failure is non-blocking.
|
// File upload failure is non-blocking.
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Set thumbnail.
|
// Upload thumbnail via direct multipart POST.
|
||||||
if (
|
if (thumbnailFile) {
|
||||||
thumbnailFile?.uploadStatus === "complete" &&
|
|
||||||
thumbnailFile.objectKey
|
|
||||||
) {
|
|
||||||
try {
|
try {
|
||||||
await put(`/api/items/${encodeURIComponent(pn)}/thumbnail`, {
|
await upload(
|
||||||
object_key: thumbnailFile.objectKey,
|
thumbnailFile.file,
|
||||||
});
|
`/api/items/${encodedPN}/thumbnail/upload`,
|
||||||
|
);
|
||||||
} catch {
|
} catch {
|
||||||
// Thumbnail failure is non-blocking.
|
// Thumbnail failure is non-blocking.
|
||||||
}
|
}
|
||||||
@@ -382,7 +339,7 @@ export function CreateItemPane({ onCreated, onCancel }: CreateItemPaneProps) {
|
|||||||
onClick={handleThumbnailSelect}
|
onClick={handleThumbnailSelect}
|
||||||
style={{
|
style={{
|
||||||
aspectRatio: "4/3",
|
aspectRatio: "4/3",
|
||||||
borderRadius: "0.4rem",
|
borderRadius: "0.5rem",
|
||||||
border: "1px dashed var(--ctp-surface1)",
|
border: "1px dashed var(--ctp-surface1)",
|
||||||
display: "flex",
|
display: "flex",
|
||||||
alignItems: "center",
|
alignItems: "center",
|
||||||
@@ -392,21 +349,12 @@ export function CreateItemPane({ onCreated, onCancel }: CreateItemPaneProps) {
|
|||||||
backgroundColor: "var(--ctp-mantle)",
|
backgroundColor: "var(--ctp-mantle)",
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
{thumbnailFile?.uploadStatus === "complete" ? (
|
{thumbnailFile ? (
|
||||||
<img
|
<img
|
||||||
src={URL.createObjectURL(thumbnailFile.file)}
|
src={URL.createObjectURL(thumbnailFile.file)}
|
||||||
alt="Thumbnail preview"
|
alt="Thumbnail preview"
|
||||||
style={{ width: "100%", height: "100%", objectFit: "cover" }}
|
style={{ width: "100%", height: "100%", objectFit: "cover" }}
|
||||||
/>
|
/>
|
||||||
) : thumbnailFile?.uploadStatus === "uploading" ? (
|
|
||||||
<span
|
|
||||||
style={{
|
|
||||||
fontSize: "var(--font-table)",
|
|
||||||
color: "var(--ctp-subtext0)",
|
|
||||||
}}
|
|
||||||
>
|
|
||||||
Uploading... {thumbnailFile.uploadProgress}%
|
|
||||||
</span>
|
|
||||||
) : (
|
) : (
|
||||||
<span
|
<span
|
||||||
style={{
|
style={{
|
||||||
@@ -414,7 +362,7 @@ export function CreateItemPane({ onCreated, onCancel }: CreateItemPaneProps) {
|
|||||||
color: "var(--ctp-subtext0)",
|
color: "var(--ctp-subtext0)",
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
Click to upload
|
Click to select
|
||||||
</span>
|
</span>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
@@ -619,7 +567,7 @@ function SidebarSection({
|
|||||||
textTransform: "uppercase",
|
textTransform: "uppercase",
|
||||||
letterSpacing: "0.05em",
|
letterSpacing: "0.05em",
|
||||||
color: "var(--ctp-subtext0)",
|
color: "var(--ctp-subtext0)",
|
||||||
marginBottom: "0.4rem",
|
marginBottom: "0.5rem",
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
{title}
|
{title}
|
||||||
@@ -636,7 +584,7 @@ function MetaRow({ label, value }: { label: string; value: string }) {
|
|||||||
display: "flex",
|
display: "flex",
|
||||||
justifyContent: "space-between",
|
justifyContent: "space-between",
|
||||||
fontSize: "var(--font-table)",
|
fontSize: "var(--font-table)",
|
||||||
padding: "0.15rem 0",
|
padding: "0.25rem 0",
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
<span style={{ color: "var(--ctp-subtext0)" }}>{label}</span>
|
<span style={{ color: "var(--ctp-subtext0)" }}>{label}</span>
|
||||||
@@ -686,7 +634,7 @@ const actionBtnStyle: React.CSSProperties = {
|
|||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
border: "none",
|
border: "none",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
color: "var(--ctp-crust)",
|
color: "var(--ctp-crust)",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
};
|
};
|
||||||
@@ -698,17 +646,17 @@ const cancelBtnStyle: React.CSSProperties = {
|
|||||||
color: "var(--ctp-subtext1)",
|
color: "var(--ctp-subtext1)",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
padding: "0.25rem 0.4rem",
|
padding: "0.25rem 0.5rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
};
|
};
|
||||||
|
|
||||||
const inputStyle: React.CSSProperties = {
|
const inputStyle: React.CSSProperties = {
|
||||||
width: "100%",
|
width: "100%",
|
||||||
padding: "0.35rem 0.5rem",
|
padding: "0.25rem 0.5rem",
|
||||||
fontSize: "var(--font-body)",
|
fontSize: "var(--font-body)",
|
||||||
backgroundColor: "var(--ctp-base)",
|
backgroundColor: "var(--ctp-base)",
|
||||||
border: "1px solid var(--ctp-surface1)",
|
border: "1px solid var(--ctp-surface1)",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
boxSizing: "border-box",
|
boxSizing: "border-box",
|
||||||
};
|
};
|
||||||
@@ -723,7 +671,7 @@ const errorStyle: React.CSSProperties = {
|
|||||||
color: "var(--ctp-red)",
|
color: "var(--ctp-red)",
|
||||||
backgroundColor: "rgba(243,139,168,0.1)",
|
backgroundColor: "rgba(243,139,168,0.1)",
|
||||||
padding: "0.5rem",
|
padding: "0.5rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
marginBottom: "0.5rem",
|
marginBottom: "0.5rem",
|
||||||
fontSize: "var(--font-body)",
|
fontSize: "var(--font-body)",
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -73,7 +73,7 @@ export function DeleteItemPane({
|
|||||||
color: "var(--ctp-red)",
|
color: "var(--ctp-red)",
|
||||||
backgroundColor: "rgba(243,139,168,0.1)",
|
backgroundColor: "rgba(243,139,168,0.1)",
|
||||||
padding: "0.5rem 1rem",
|
padding: "0.5rem 1rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
fontSize: "var(--font-body)",
|
fontSize: "var(--font-body)",
|
||||||
width: "100%",
|
width: "100%",
|
||||||
textAlign: "center",
|
textAlign: "center",
|
||||||
@@ -125,7 +125,7 @@ export function DeleteItemPane({
|
|||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
border: "none",
|
border: "none",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
backgroundColor: "var(--ctp-surface1)",
|
backgroundColor: "var(--ctp-surface1)",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
@@ -141,7 +141,7 @@ export function DeleteItemPane({
|
|||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
border: "none",
|
border: "none",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
backgroundColor: "var(--ctp-red)",
|
backgroundColor: "var(--ctp-red)",
|
||||||
color: "var(--ctp-crust)",
|
color: "var(--ctp-crust)",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
@@ -163,6 +163,6 @@ const headerBtnStyle: React.CSSProperties = {
|
|||||||
color: "var(--ctp-subtext1)",
|
color: "var(--ctp-subtext1)",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
padding: "0.25rem 0.4rem",
|
padding: "0.25rem 0.5rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -93,7 +93,7 @@ export function EditItemPane({
|
|||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
border: "none",
|
border: "none",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
backgroundColor: "var(--ctp-blue)",
|
backgroundColor: "var(--ctp-blue)",
|
||||||
color: "var(--ctp-crust)",
|
color: "var(--ctp-crust)",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
@@ -114,7 +114,7 @@ export function EditItemPane({
|
|||||||
color: "var(--ctp-red)",
|
color: "var(--ctp-red)",
|
||||||
backgroundColor: "rgba(243,139,168,0.1)",
|
backgroundColor: "rgba(243,139,168,0.1)",
|
||||||
padding: "0.5rem",
|
padding: "0.5rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
marginBottom: "0.5rem",
|
marginBottom: "0.5rem",
|
||||||
fontSize: "var(--font-body)",
|
fontSize: "var(--font-body)",
|
||||||
}}
|
}}
|
||||||
@@ -208,11 +208,11 @@ function FormGroup({
|
|||||||
|
|
||||||
const inputStyle: React.CSSProperties = {
|
const inputStyle: React.CSSProperties = {
|
||||||
width: "100%",
|
width: "100%",
|
||||||
padding: "0.35rem 0.5rem",
|
padding: "0.25rem 0.5rem",
|
||||||
fontSize: "var(--font-body)",
|
fontSize: "var(--font-body)",
|
||||||
backgroundColor: "var(--ctp-base)",
|
backgroundColor: "var(--ctp-base)",
|
||||||
border: "1px solid var(--ctp-surface1)",
|
border: "1px solid var(--ctp-surface1)",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -223,6 +223,6 @@ const headerBtnStyle: React.CSSProperties = {
|
|||||||
color: "var(--ctp-subtext1)",
|
color: "var(--ctp-subtext1)",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
padding: "0.25rem 0.4rem",
|
padding: "0.25rem 0.5rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -143,8 +143,8 @@ function FileRow({
|
|||||||
display: "flex",
|
display: "flex",
|
||||||
alignItems: "center",
|
alignItems: "center",
|
||||||
gap: "0.5rem",
|
gap: "0.5rem",
|
||||||
padding: "0.25rem 0.4rem",
|
padding: "0.25rem 0.5rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
position: "relative",
|
position: "relative",
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
@@ -153,14 +153,14 @@ function FileRow({
|
|||||||
style={{
|
style={{
|
||||||
width: 28,
|
width: 28,
|
||||||
height: 28,
|
height: 28,
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
backgroundColor: color,
|
backgroundColor: color,
|
||||||
opacity: 0.8,
|
opacity: 0.8,
|
||||||
display: "flex",
|
display: "flex",
|
||||||
alignItems: "center",
|
alignItems: "center",
|
||||||
justifyContent: "center",
|
justifyContent: "center",
|
||||||
fontSize: "var(--font-xs)",
|
fontSize: "var(--font-xs)",
|
||||||
fontWeight: 700,
|
fontWeight: 600,
|
||||||
color: "var(--ctp-crust)",
|
color: "var(--ctp-crust)",
|
||||||
flexShrink: 0,
|
flexShrink: 0,
|
||||||
}}
|
}}
|
||||||
@@ -239,7 +239,7 @@ function FileRow({
|
|||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
fontSize: "var(--font-table)",
|
fontSize: "var(--font-table)",
|
||||||
color: hovered ? "var(--ctp-red)" : "var(--ctp-overlay0)",
|
color: hovered ? "var(--ctp-red)" : "var(--ctp-overlay0)",
|
||||||
padding: "0 0.2rem",
|
padding: "0 0.25rem",
|
||||||
flexShrink: 0,
|
flexShrink: 0,
|
||||||
transition: "all 0.15s ease",
|
transition: "all 0.15s ease",
|
||||||
}}
|
}}
|
||||||
|
|||||||
@@ -90,7 +90,7 @@ export function ImportItemsPane({
|
|||||||
color: "var(--ctp-red)",
|
color: "var(--ctp-red)",
|
||||||
backgroundColor: "rgba(243,139,168,0.1)",
|
backgroundColor: "rgba(243,139,168,0.1)",
|
||||||
padding: "0.5rem",
|
padding: "0.5rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
marginBottom: "0.5rem",
|
marginBottom: "0.5rem",
|
||||||
fontSize: "var(--font-body)",
|
fontSize: "var(--font-body)",
|
||||||
}}
|
}}
|
||||||
@@ -164,7 +164,7 @@ export function ImportItemsPane({
|
|||||||
style={{
|
style={{
|
||||||
display: "flex",
|
display: "flex",
|
||||||
alignItems: "center",
|
alignItems: "center",
|
||||||
gap: "0.4rem",
|
gap: "0.5rem",
|
||||||
fontSize: "var(--font-body)",
|
fontSize: "var(--font-body)",
|
||||||
color: "var(--ctp-subtext1)",
|
color: "var(--ctp-subtext1)",
|
||||||
marginBottom: "0.75rem",
|
marginBottom: "0.75rem",
|
||||||
@@ -187,11 +187,11 @@ export function ImportItemsPane({
|
|||||||
onClick={() => void doImport(true)}
|
onClick={() => void doImport(true)}
|
||||||
disabled={!file || importing}
|
disabled={!file || importing}
|
||||||
style={{
|
style={{
|
||||||
padding: "0.4rem 0.75rem",
|
padding: "0.5rem 0.75rem",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
border: "none",
|
border: "none",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
backgroundColor: "var(--ctp-yellow)",
|
backgroundColor: "var(--ctp-yellow)",
|
||||||
color: "var(--ctp-crust)",
|
color: "var(--ctp-crust)",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
@@ -205,11 +205,11 @@ export function ImportItemsPane({
|
|||||||
onClick={() => void doImport(false)}
|
onClick={() => void doImport(false)}
|
||||||
disabled={importing || (result?.error_count ?? 0) > 0}
|
disabled={importing || (result?.error_count ?? 0) > 0}
|
||||||
style={{
|
style={{
|
||||||
padding: "0.4rem 0.75rem",
|
padding: "0.5rem 0.75rem",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
border: "none",
|
border: "none",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
backgroundColor: "var(--ctp-green)",
|
backgroundColor: "var(--ctp-green)",
|
||||||
color: "var(--ctp-crust)",
|
color: "var(--ctp-crust)",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
@@ -227,7 +227,7 @@ export function ImportItemsPane({
|
|||||||
style={{
|
style={{
|
||||||
padding: "0.5rem",
|
padding: "0.5rem",
|
||||||
backgroundColor: "var(--ctp-surface0)",
|
backgroundColor: "var(--ctp-surface0)",
|
||||||
borderRadius: "0.4rem",
|
borderRadius: "0.5rem",
|
||||||
fontSize: "var(--font-table)",
|
fontSize: "var(--font-table)",
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
@@ -262,7 +262,7 @@ export function ImportItemsPane({
|
|||||||
style={{
|
style={{
|
||||||
color: "var(--ctp-red)",
|
color: "var(--ctp-red)",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
padding: "0.15rem 0",
|
padding: "0.25rem 0",
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
Row {err.row}
|
Row {err.row}
|
||||||
@@ -296,6 +296,6 @@ const headerBtnStyle: React.CSSProperties = {
|
|||||||
color: "var(--ctp-subtext1)",
|
color: "var(--ctp-subtext1)",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
padding: "0.25rem 0.4rem",
|
padding: "0.25rem 0.5rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
import { useState, useEffect } from "react";
|
import { useState, useEffect } from "react";
|
||||||
import { X } from "lucide-react";
|
import { X, Pencil, Trash2 } from "lucide-react";
|
||||||
import { get } from "../../api/client";
|
import { get } from "../../api/client";
|
||||||
import type { Item } from "../../api/types";
|
import type { Item } from "../../api/types";
|
||||||
import { MainTab } from "./MainTab";
|
import { MainTab } from "./MainTab";
|
||||||
@@ -103,7 +103,7 @@ export function ItemDetail({
|
|||||||
</span>
|
</span>
|
||||||
<span
|
<span
|
||||||
style={{
|
style={{
|
||||||
padding: "0.15rem 0.5rem",
|
padding: "0.25rem 0.5rem",
|
||||||
borderRadius: "1rem",
|
borderRadius: "1rem",
|
||||||
fontSize: "var(--font-sm)",
|
fontSize: "var(--font-sm)",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
@@ -114,22 +114,6 @@ export function ItemDetail({
|
|||||||
{item.item_type}
|
{item.item_type}
|
||||||
</span>
|
</span>
|
||||||
<span style={{ flex: 1 }} />
|
<span style={{ flex: 1 }} />
|
||||||
{isEditor && (
|
|
||||||
<>
|
|
||||||
<button
|
|
||||||
onClick={() => onEdit(item.part_number)}
|
|
||||||
style={headerBtnStyle}
|
|
||||||
>
|
|
||||||
Edit
|
|
||||||
</button>
|
|
||||||
<button
|
|
||||||
onClick={() => onDelete(item.part_number)}
|
|
||||||
style={{ ...headerBtnStyle, color: "var(--ctp-red)" }}
|
|
||||||
>
|
|
||||||
Delete
|
|
||||||
</button>
|
|
||||||
</>
|
|
||||||
)}
|
|
||||||
<button
|
<button
|
||||||
onClick={onClose}
|
onClick={onClose}
|
||||||
style={{
|
style={{
|
||||||
@@ -142,11 +126,11 @@ export function ItemDetail({
|
|||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* Tabs */}
|
{/* Tabs + actions */}
|
||||||
<div
|
<div
|
||||||
style={{
|
style={{
|
||||||
display: "flex",
|
display: "flex",
|
||||||
gap: "0",
|
alignItems: "center",
|
||||||
borderBottom: "1px solid var(--ctp-surface1)",
|
borderBottom: "1px solid var(--ctp-surface1)",
|
||||||
backgroundColor: "var(--ctp-mantle)",
|
backgroundColor: "var(--ctp-mantle)",
|
||||||
flexShrink: 0,
|
flexShrink: 0,
|
||||||
@@ -157,7 +141,7 @@ export function ItemDetail({
|
|||||||
key={tab.key}
|
key={tab.key}
|
||||||
onClick={() => setActiveTab(tab.key)}
|
onClick={() => setActiveTab(tab.key)}
|
||||||
style={{
|
style={{
|
||||||
padding: "0.4rem 0.75rem",
|
padding: "0.5rem 0.75rem",
|
||||||
fontSize: "var(--font-table)",
|
fontSize: "var(--font-table)",
|
||||||
border: "none",
|
border: "none",
|
||||||
borderBottom:
|
borderBottom:
|
||||||
@@ -175,6 +159,33 @@ export function ItemDetail({
|
|||||||
{tab.label}
|
{tab.label}
|
||||||
</button>
|
</button>
|
||||||
))}
|
))}
|
||||||
|
<span style={{ flex: 1 }} />
|
||||||
|
{isEditor && (
|
||||||
|
<div
|
||||||
|
style={{ display: "flex", gap: "0.25rem", paddingRight: "0.5rem" }}
|
||||||
|
>
|
||||||
|
<button
|
||||||
|
onClick={() => onEdit(item.part_number)}
|
||||||
|
style={{
|
||||||
|
...tabActionBtnStyle,
|
||||||
|
color: "var(--ctp-subtext1)",
|
||||||
|
}}
|
||||||
|
title="Edit item"
|
||||||
|
>
|
||||||
|
<Pencil size={13} /> Edit
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
onClick={() => onDelete(item.part_number)}
|
||||||
|
style={{
|
||||||
|
...tabActionBtnStyle,
|
||||||
|
color: "var(--ctp-red)",
|
||||||
|
}}
|
||||||
|
title="Delete item"
|
||||||
|
>
|
||||||
|
<Trash2 size={13} /> Delete
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* Tab Content */}
|
{/* Tab Content */}
|
||||||
@@ -205,5 +216,17 @@ const headerBtnStyle: React.CSSProperties = {
|
|||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
color: "var(--ctp-subtext1)",
|
color: "var(--ctp-subtext1)",
|
||||||
fontSize: "var(--font-table)",
|
fontSize: "var(--font-table)",
|
||||||
padding: "0.25rem 0.4rem",
|
padding: "0.25rem 0.5rem",
|
||||||
|
};
|
||||||
|
|
||||||
|
const tabActionBtnStyle: React.CSSProperties = {
|
||||||
|
display: "inline-flex",
|
||||||
|
alignItems: "center",
|
||||||
|
gap: "0.25rem",
|
||||||
|
background: "none",
|
||||||
|
border: "none",
|
||||||
|
cursor: "pointer",
|
||||||
|
fontSize: "var(--font-table)",
|
||||||
|
padding: "0.25rem 0.5rem",
|
||||||
|
borderRadius: "0.25rem",
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -268,7 +268,7 @@ export function ItemTable({
|
|||||||
<td key={col.key} style={tdStyle}>
|
<td key={col.key} style={tdStyle}>
|
||||||
<span
|
<span
|
||||||
style={{
|
style={{
|
||||||
padding: "0.15rem 0.5rem",
|
padding: "0.25rem 0.5rem",
|
||||||
borderRadius: "1rem",
|
borderRadius: "1rem",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
@@ -398,6 +398,6 @@ const actionBtnStyle: React.CSSProperties = {
|
|||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
padding: "0.15rem 0.4rem",
|
padding: "0.25rem 0.5rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -41,7 +41,7 @@ export function ItemsToolbar({
|
|||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
border: "none",
|
border: "none",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
backgroundColor:
|
backgroundColor:
|
||||||
filters.searchScope === scope
|
filters.searchScope === scope
|
||||||
@@ -81,7 +81,7 @@ export function ItemsToolbar({
|
|||||||
padding: "var(--d-input-py) var(--d-input-px)",
|
padding: "var(--d-input-py) var(--d-input-px)",
|
||||||
backgroundColor: "var(--ctp-surface0)",
|
backgroundColor: "var(--ctp-surface0)",
|
||||||
border: "1px solid var(--ctp-surface1)",
|
border: "1px solid var(--ctp-surface1)",
|
||||||
borderRadius: "0.4rem",
|
borderRadius: "0.5rem",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
fontSize: "var(--d-input-font)",
|
fontSize: "var(--d-input-font)",
|
||||||
}}
|
}}
|
||||||
@@ -144,7 +144,7 @@ export function ItemsToolbar({
|
|||||||
...toolBtnStyle,
|
...toolBtnStyle,
|
||||||
display: "inline-flex",
|
display: "inline-flex",
|
||||||
alignItems: "center",
|
alignItems: "center",
|
||||||
gap: "0.35rem",
|
gap: "0.25rem",
|
||||||
}}
|
}}
|
||||||
title="Export CSV"
|
title="Export CSV"
|
||||||
>
|
>
|
||||||
@@ -159,7 +159,7 @@ export function ItemsToolbar({
|
|||||||
...toolBtnStyle,
|
...toolBtnStyle,
|
||||||
display: "inline-flex",
|
display: "inline-flex",
|
||||||
alignItems: "center",
|
alignItems: "center",
|
||||||
gap: "0.35rem",
|
gap: "0.25rem",
|
||||||
}}
|
}}
|
||||||
title="Import CSV"
|
title="Import CSV"
|
||||||
>
|
>
|
||||||
@@ -177,7 +177,7 @@ export function ItemsToolbar({
|
|||||||
color: "var(--ctp-crust)",
|
color: "var(--ctp-crust)",
|
||||||
display: "inline-flex",
|
display: "inline-flex",
|
||||||
alignItems: "center",
|
alignItems: "center",
|
||||||
gap: "0.35rem",
|
gap: "0.25rem",
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
<Plus size={14} /> New
|
<Plus size={14} /> New
|
||||||
@@ -191,7 +191,7 @@ const selectStyle: React.CSSProperties = {
|
|||||||
padding: "var(--d-input-py) var(--d-input-px)",
|
padding: "var(--d-input-py) var(--d-input-px)",
|
||||||
backgroundColor: "var(--ctp-surface0)",
|
backgroundColor: "var(--ctp-surface0)",
|
||||||
border: "1px solid var(--ctp-surface1)",
|
border: "1px solid var(--ctp-surface1)",
|
||||||
borderRadius: "0.4rem",
|
borderRadius: "0.5rem",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
fontSize: "var(--d-input-font)",
|
fontSize: "var(--d-input-font)",
|
||||||
};
|
};
|
||||||
@@ -200,7 +200,7 @@ const toolBtnStyle: React.CSSProperties = {
|
|||||||
padding: "var(--d-input-py) var(--d-input-px)",
|
padding: "var(--d-input-py) var(--d-input-px)",
|
||||||
backgroundColor: "var(--ctp-surface1)",
|
backgroundColor: "var(--ctp-surface1)",
|
||||||
border: "none",
|
border: "none",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
|
|||||||
@@ -134,7 +134,7 @@ export function MainTab({ item, onReload, isEditor }: MainTabProps) {
|
|||||||
marginTop: "0.75rem",
|
marginTop: "0.75rem",
|
||||||
padding: "0.5rem",
|
padding: "0.5rem",
|
||||||
backgroundColor: "var(--ctp-surface0)",
|
backgroundColor: "var(--ctp-surface0)",
|
||||||
borderRadius: "0.4rem",
|
borderRadius: "0.5rem",
|
||||||
fontSize: "var(--font-body)",
|
fontSize: "var(--font-body)",
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
@@ -177,7 +177,7 @@ export function MainTab({ item, onReload, isEditor }: MainTabProps) {
|
|||||||
display: "inline-flex",
|
display: "inline-flex",
|
||||||
alignItems: "center",
|
alignItems: "center",
|
||||||
gap: "0.25rem",
|
gap: "0.25rem",
|
||||||
padding: "0.15rem 0.5rem",
|
padding: "0.25rem 0.5rem",
|
||||||
borderRadius: "1rem",
|
borderRadius: "1rem",
|
||||||
backgroundColor: "rgba(203,166,247,0.15)",
|
backgroundColor: "rgba(203,166,247,0.15)",
|
||||||
color: "var(--ctp-mauve)",
|
color: "var(--ctp-mauve)",
|
||||||
@@ -208,11 +208,11 @@ export function MainTab({ item, onReload, isEditor }: MainTabProps) {
|
|||||||
value={addProject}
|
value={addProject}
|
||||||
onChange={(e) => setAddProject(e.target.value)}
|
onChange={(e) => setAddProject(e.target.value)}
|
||||||
style={{
|
style={{
|
||||||
padding: "0.15rem 0.25rem",
|
padding: "0.25rem 0.25rem",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
backgroundColor: "var(--ctp-surface0)",
|
backgroundColor: "var(--ctp-surface0)",
|
||||||
border: "1px solid var(--ctp-surface1)",
|
border: "1px solid var(--ctp-surface1)",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
@@ -229,12 +229,12 @@ export function MainTab({ item, onReload, isEditor }: MainTabProps) {
|
|||||||
<button
|
<button
|
||||||
onClick={() => void handleAddProject()}
|
onClick={() => void handleAddProject()}
|
||||||
style={{
|
style={{
|
||||||
padding: "0.15rem 0.4rem",
|
padding: "0.25rem 0.5rem",
|
||||||
fontSize: "var(--font-sm)",
|
fontSize: "var(--font-sm)",
|
||||||
border: "none",
|
border: "none",
|
||||||
backgroundColor: "var(--ctp-mauve)",
|
backgroundColor: "var(--ctp-mauve)",
|
||||||
color: "var(--ctp-crust)",
|
color: "var(--ctp-crust)",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
@@ -253,7 +253,7 @@ export function MainTab({ item, onReload, isEditor }: MainTabProps) {
|
|||||||
marginTop: "0.75rem",
|
marginTop: "0.75rem",
|
||||||
padding: "0.5rem",
|
padding: "0.5rem",
|
||||||
backgroundColor: "var(--ctp-surface0)",
|
backgroundColor: "var(--ctp-surface0)",
|
||||||
borderRadius: "0.4rem",
|
borderRadius: "0.5rem",
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
<div
|
<div
|
||||||
@@ -298,7 +298,7 @@ export function MainTab({ item, onReload, isEditor }: MainTabProps) {
|
|||||||
border: "none",
|
border: "none",
|
||||||
backgroundColor: "var(--ctp-surface1)",
|
backgroundColor: "var(--ctp-surface1)",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
|
|||||||
@@ -125,11 +125,11 @@ export function PropertiesTab({
|
|||||||
};
|
};
|
||||||
|
|
||||||
const inputStyle: React.CSSProperties = {
|
const inputStyle: React.CSSProperties = {
|
||||||
padding: "0.25rem 0.4rem",
|
padding: "0.25rem 0.5rem",
|
||||||
fontSize: "var(--font-table)",
|
fontSize: "var(--font-table)",
|
||||||
backgroundColor: "var(--ctp-base)",
|
backgroundColor: "var(--ctp-base)",
|
||||||
border: "1px solid var(--ctp-surface1)",
|
border: "1px solid var(--ctp-surface1)",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -165,7 +165,7 @@ export function PropertiesTab({
|
|||||||
padding: "0.25rem 0.75rem",
|
padding: "0.25rem 0.75rem",
|
||||||
fontSize: "var(--font-table)",
|
fontSize: "var(--font-table)",
|
||||||
border: "none",
|
border: "none",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
backgroundColor: "var(--ctp-mauve)",
|
backgroundColor: "var(--ctp-mauve)",
|
||||||
color: "var(--ctp-crust)",
|
color: "var(--ctp-crust)",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
@@ -250,7 +250,7 @@ export function PropertiesTab({
|
|||||||
marginTop: "0.25rem",
|
marginTop: "0.25rem",
|
||||||
display: "inline-flex",
|
display: "inline-flex",
|
||||||
alignItems: "center",
|
alignItems: "center",
|
||||||
gap: "0.35rem",
|
gap: "0.25rem",
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
<Plus size={14} /> Add Property
|
<Plus size={14} /> Add Property
|
||||||
@@ -274,7 +274,7 @@ export function PropertiesTab({
|
|||||||
fontSize: "var(--font-table)",
|
fontSize: "var(--font-table)",
|
||||||
backgroundColor: "var(--ctp-base)",
|
backgroundColor: "var(--ctp-base)",
|
||||||
border: "1px solid var(--ctp-surface1)",
|
border: "1px solid var(--ctp-surface1)",
|
||||||
borderRadius: "0.4rem",
|
borderRadius: "0.5rem",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
resize: "vertical",
|
resize: "vertical",
|
||||||
}}
|
}}
|
||||||
@@ -300,7 +300,7 @@ const tabBtn: React.CSSProperties = {
|
|||||||
padding: "0.25rem 0.5rem",
|
padding: "0.25rem 0.5rem",
|
||||||
fontSize: "var(--font-table)",
|
fontSize: "var(--font-table)",
|
||||||
border: "none",
|
border: "none",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
backgroundColor: "var(--ctp-surface0)",
|
backgroundColor: "var(--ctp-surface0)",
|
||||||
color: "var(--ctp-subtext1)",
|
color: "var(--ctp-subtext1)",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
|
|||||||
@@ -97,11 +97,11 @@ export function RevisionsTab({ partNumber, isEditor }: RevisionsTabProps) {
|
|||||||
);
|
);
|
||||||
|
|
||||||
const selectStyle: React.CSSProperties = {
|
const selectStyle: React.CSSProperties = {
|
||||||
padding: "0.25rem 0.4rem",
|
padding: "0.25rem 0.5rem",
|
||||||
fontSize: "var(--font-table)",
|
fontSize: "var(--font-table)",
|
||||||
backgroundColor: "var(--ctp-surface0)",
|
backgroundColor: "var(--ctp-surface0)",
|
||||||
border: "1px solid var(--ctp-surface1)",
|
border: "1px solid var(--ctp-surface1)",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -147,7 +147,7 @@ export function RevisionsTab({ partNumber, isEditor }: RevisionsTabProps) {
|
|||||||
padding: "0.25rem 0.5rem",
|
padding: "0.25rem 0.5rem",
|
||||||
fontSize: "var(--font-table)",
|
fontSize: "var(--font-table)",
|
||||||
border: "none",
|
border: "none",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
backgroundColor: "var(--ctp-mauve)",
|
backgroundColor: "var(--ctp-mauve)",
|
||||||
color: "var(--ctp-crust)",
|
color: "var(--ctp-crust)",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
@@ -164,7 +164,7 @@ export function RevisionsTab({ partNumber, isEditor }: RevisionsTabProps) {
|
|||||||
style={{
|
style={{
|
||||||
padding: "0.5rem",
|
padding: "0.5rem",
|
||||||
backgroundColor: "var(--ctp-surface0)",
|
backgroundColor: "var(--ctp-surface0)",
|
||||||
borderRadius: "0.4rem",
|
borderRadius: "0.5rem",
|
||||||
fontSize: "var(--font-table)",
|
fontSize: "var(--font-table)",
|
||||||
marginBottom: "0.75rem",
|
marginBottom: "0.75rem",
|
||||||
fontFamily: "'JetBrains Mono', monospace",
|
fontFamily: "'JetBrains Mono', monospace",
|
||||||
@@ -250,10 +250,10 @@ export function RevisionsTab({ partNumber, isEditor }: RevisionsTabProps) {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
style={{
|
style={{
|
||||||
padding: "0.15rem 0.25rem",
|
padding: "0.25rem 0.25rem",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
border: "none",
|
border: "none",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
backgroundColor: "transparent",
|
backgroundColor: "transparent",
|
||||||
color: statusColors[rev.status] ?? "var(--ctp-text)",
|
color: statusColors[rev.status] ?? "var(--ctp-text)",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
|
|||||||
180
web/src/components/settings/AdminModules.tsx
Normal file
180
web/src/components/settings/AdminModules.tsx
Normal file
@@ -0,0 +1,180 @@
|
|||||||
|
import { useEffect, useState } from "react";
|
||||||
|
import { get } from "../../api/client";
|
||||||
|
import type {
|
||||||
|
ModuleInfo,
|
||||||
|
ModulesResponse,
|
||||||
|
AdminSettingsResponse,
|
||||||
|
UpdateSettingsResponse,
|
||||||
|
} from "../../api/types";
|
||||||
|
import { ModuleCard } from "./ModuleCard";
|
||||||
|
|
||||||
|
const infraModules = ["core", "schemas", "database", "storage"];
|
||||||
|
const featureModules = [
|
||||||
|
"auth",
|
||||||
|
"projects",
|
||||||
|
"audit",
|
||||||
|
"freecad",
|
||||||
|
"odoo",
|
||||||
|
"jobs",
|
||||||
|
"dag",
|
||||||
|
];
|
||||||
|
|
||||||
|
export function AdminModules() {
|
||||||
|
const [modules, setModules] = useState<Record<string, ModuleInfo> | null>(
|
||||||
|
null,
|
||||||
|
);
|
||||||
|
const [settings, setSettings] = useState<AdminSettingsResponse | null>(null);
|
||||||
|
const [loading, setLoading] = useState(true);
|
||||||
|
const [error, setError] = useState<string | null>(null);
|
||||||
|
const [restartRequired, setRestartRequired] = useState(false);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
Promise.all([
|
||||||
|
get<ModulesResponse>("/api/modules"),
|
||||||
|
get<AdminSettingsResponse>("/api/admin/settings"),
|
||||||
|
])
|
||||||
|
.then(([modsResp, settingsResp]) => {
|
||||||
|
setModules(modsResp.modules);
|
||||||
|
setSettings(settingsResp);
|
||||||
|
})
|
||||||
|
.catch((e) =>
|
||||||
|
setError(e instanceof Error ? e.message : "Failed to load settings"),
|
||||||
|
)
|
||||||
|
.finally(() => setLoading(false));
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
const handleSaved = (moduleId: string, result: UpdateSettingsResponse) => {
|
||||||
|
if (result.restart_required) setRestartRequired(true);
|
||||||
|
// Refresh the single module's settings
|
||||||
|
get<Record<string, unknown>>(`/api/admin/settings/${moduleId}`)
|
||||||
|
.then((updated) =>
|
||||||
|
setSettings((prev) => (prev ? { ...prev, [moduleId]: updated } : prev)),
|
||||||
|
)
|
||||||
|
.catch(() => {});
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleToggled = (moduleId: string, enabled: boolean) => {
|
||||||
|
setModules((prev) => {
|
||||||
|
if (!prev || !prev[moduleId]) return prev;
|
||||||
|
const updated: Record<string, ModuleInfo> = {
|
||||||
|
...prev,
|
||||||
|
[moduleId]: { ...prev[moduleId], enabled },
|
||||||
|
};
|
||||||
|
return updated;
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
if (loading) {
|
||||||
|
return (
|
||||||
|
<div style={sectionStyle}>
|
||||||
|
<h3 style={sectionTitleStyle}>Module Configuration</h3>
|
||||||
|
<p style={{ color: "var(--ctp-overlay0)" }}>Loading modules...</p>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error) {
|
||||||
|
return (
|
||||||
|
<div style={sectionStyle}>
|
||||||
|
<h3 style={sectionTitleStyle}>Module Configuration</h3>
|
||||||
|
<p style={{ color: "var(--ctp-red)", fontSize: "var(--font-body)" }}>
|
||||||
|
{error}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!modules || !settings) return null;
|
||||||
|
|
||||||
|
const renderGroup = (title: string, ids: string[]) => {
|
||||||
|
const available = ids.filter((id) => modules[id]);
|
||||||
|
if (available.length === 0) return null;
|
||||||
|
return (
|
||||||
|
<div style={{ marginBottom: "1.25rem" }}>
|
||||||
|
<div style={groupTitleStyle}>{title}</div>
|
||||||
|
{available.map((id) => {
|
||||||
|
const meta = modules[id];
|
||||||
|
if (!meta) return null;
|
||||||
|
return (
|
||||||
|
<ModuleCard
|
||||||
|
key={id}
|
||||||
|
moduleId={id}
|
||||||
|
meta={meta}
|
||||||
|
settings={settings[id] ?? {}}
|
||||||
|
allModules={modules}
|
||||||
|
onSaved={handleSaved}
|
||||||
|
onToggled={handleToggled}
|
||||||
|
/>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div style={sectionStyle}>
|
||||||
|
<h3 style={sectionTitleStyle}>Module Configuration</h3>
|
||||||
|
|
||||||
|
{restartRequired && (
|
||||||
|
<div style={restartBannerStyle}>
|
||||||
|
<span style={{ fontWeight: 600 }}>Restart required</span>
|
||||||
|
<span>Some changes require a server restart to take effect.</span>
|
||||||
|
<button
|
||||||
|
onClick={() => setRestartRequired(false)}
|
||||||
|
style={dismissBtnStyle}
|
||||||
|
>
|
||||||
|
Dismiss
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{renderGroup("Infrastructure", infraModules)}
|
||||||
|
{renderGroup("Features", featureModules)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Styles ---
|
||||||
|
|
||||||
|
const sectionStyle: React.CSSProperties = {
|
||||||
|
marginTop: "0.5rem",
|
||||||
|
};
|
||||||
|
|
||||||
|
const sectionTitleStyle: React.CSSProperties = {
|
||||||
|
marginBottom: "1rem",
|
||||||
|
fontSize: "var(--font-title)",
|
||||||
|
};
|
||||||
|
|
||||||
|
const groupTitleStyle: React.CSSProperties = {
|
||||||
|
fontSize: "0.7rem",
|
||||||
|
fontWeight: 600,
|
||||||
|
textTransform: "uppercase",
|
||||||
|
letterSpacing: "0.08em",
|
||||||
|
color: "var(--ctp-overlay1)",
|
||||||
|
marginBottom: "0.5rem",
|
||||||
|
};
|
||||||
|
|
||||||
|
const restartBannerStyle: React.CSSProperties = {
|
||||||
|
display: "flex",
|
||||||
|
gap: "0.75rem",
|
||||||
|
alignItems: "center",
|
||||||
|
padding: "0.75rem 1rem",
|
||||||
|
marginBottom: "1rem",
|
||||||
|
borderRadius: "0.75rem",
|
||||||
|
background: "rgba(249, 226, 175, 0.1)",
|
||||||
|
border: "1px solid rgba(249, 226, 175, 0.3)",
|
||||||
|
color: "var(--ctp-yellow)",
|
||||||
|
fontSize: "var(--font-body)",
|
||||||
|
};
|
||||||
|
|
||||||
|
const dismissBtnStyle: React.CSSProperties = {
|
||||||
|
marginLeft: "auto",
|
||||||
|
padding: "0.25rem 0.5rem",
|
||||||
|
borderRadius: "0.25rem",
|
||||||
|
border: "none",
|
||||||
|
background: "rgba(249, 226, 175, 0.15)",
|
||||||
|
color: "var(--ctp-yellow)",
|
||||||
|
cursor: "pointer",
|
||||||
|
fontSize: "0.7rem",
|
||||||
|
fontWeight: 500,
|
||||||
|
};
|
||||||
878
web/src/components/settings/ModuleCard.tsx
Normal file
878
web/src/components/settings/ModuleCard.tsx
Normal file
@@ -0,0 +1,878 @@
|
|||||||
|
import { useState } from "react";
|
||||||
|
import { put, post } from "../../api/client";
|
||||||
|
import type {
|
||||||
|
ModuleInfo,
|
||||||
|
UpdateSettingsResponse,
|
||||||
|
TestConnectivityResponse,
|
||||||
|
} from "../../api/types";
|
||||||
|
|
||||||
|
interface ModuleCardProps {
|
||||||
|
moduleId: string;
|
||||||
|
meta: ModuleInfo;
|
||||||
|
settings: Record<string, unknown>;
|
||||||
|
allModules: Record<string, ModuleInfo>;
|
||||||
|
onSaved: (moduleId: string, result: UpdateSettingsResponse) => void;
|
||||||
|
onToggled: (moduleId: string, enabled: boolean) => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
const testableModules = new Set(["database", "storage"]);
|
||||||
|
|
||||||
|
export function ModuleCard({
|
||||||
|
moduleId,
|
||||||
|
meta,
|
||||||
|
settings,
|
||||||
|
allModules,
|
||||||
|
onSaved,
|
||||||
|
onToggled,
|
||||||
|
}: ModuleCardProps) {
|
||||||
|
const [expanded, setExpanded] = useState(false);
|
||||||
|
const [enabled, setEnabled] = useState(meta.enabled);
|
||||||
|
const [toggling, setToggling] = useState(false);
|
||||||
|
const [toggleError, setToggleError] = useState<string | null>(null);
|
||||||
|
const [saving, setSaving] = useState(false);
|
||||||
|
const [saveError, setSaveError] = useState<string | null>(null);
|
||||||
|
const [saveSuccess, setSaveSuccess] = useState(false);
|
||||||
|
const [testing, setTesting] = useState(false);
|
||||||
|
const [testResult, setTestResult] = useState<TestConnectivityResponse | null>(
|
||||||
|
null,
|
||||||
|
);
|
||||||
|
const [edits, setEdits] = useState<Record<string, unknown>>({});
|
||||||
|
|
||||||
|
const hasEdits = Object.keys(edits).length > 0;
|
||||||
|
const isTestable = testableModules.has(moduleId);
|
||||||
|
const hasFields = !["projects", "audit", "dag"].includes(moduleId);
|
||||||
|
const deps = meta.depends_on ?? [];
|
||||||
|
const status = settings.status as string | undefined;
|
||||||
|
|
||||||
|
const handleToggle = async () => {
|
||||||
|
const next = !enabled;
|
||||||
|
setToggling(true);
|
||||||
|
setToggleError(null);
|
||||||
|
try {
|
||||||
|
const result = await put<UpdateSettingsResponse>(
|
||||||
|
`/api/admin/settings/${moduleId}`,
|
||||||
|
{ enabled: next },
|
||||||
|
);
|
||||||
|
setEnabled(next);
|
||||||
|
onToggled(moduleId, next);
|
||||||
|
onSaved(moduleId, result);
|
||||||
|
} catch (e) {
|
||||||
|
setToggleError(e instanceof Error ? e.message : "Toggle failed");
|
||||||
|
} finally {
|
||||||
|
setToggling(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleSave = async () => {
|
||||||
|
setSaving(true);
|
||||||
|
setSaveError(null);
|
||||||
|
setSaveSuccess(false);
|
||||||
|
try {
|
||||||
|
const result = await put<UpdateSettingsResponse>(
|
||||||
|
`/api/admin/settings/${moduleId}`,
|
||||||
|
edits,
|
||||||
|
);
|
||||||
|
setEdits({});
|
||||||
|
setSaveSuccess(true);
|
||||||
|
onSaved(moduleId, result);
|
||||||
|
setTimeout(() => setSaveSuccess(false), 3000);
|
||||||
|
} catch (e) {
|
||||||
|
setSaveError(e instanceof Error ? e.message : "Save failed");
|
||||||
|
} finally {
|
||||||
|
setSaving(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleTest = async () => {
|
||||||
|
setTesting(true);
|
||||||
|
setTestResult(null);
|
||||||
|
try {
|
||||||
|
const result = await post<TestConnectivityResponse>(
|
||||||
|
`/api/admin/settings/${moduleId}/test`,
|
||||||
|
);
|
||||||
|
setTestResult(result);
|
||||||
|
} catch (e) {
|
||||||
|
setTestResult({
|
||||||
|
success: false,
|
||||||
|
message: e instanceof Error ? e.message : "Test failed",
|
||||||
|
latency_ms: 0,
|
||||||
|
});
|
||||||
|
} finally {
|
||||||
|
setTesting(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const setField = (key: string, value: unknown) => {
|
||||||
|
setEdits((prev) => ({ ...prev, [key]: value }));
|
||||||
|
setSaveSuccess(false);
|
||||||
|
};
|
||||||
|
|
||||||
|
const getFieldValue = (key: string): unknown => {
|
||||||
|
if (key in edits) return edits[key];
|
||||||
|
return settings[key];
|
||||||
|
};
|
||||||
|
|
||||||
|
const statusBadge = () => {
|
||||||
|
if (!enabled && !meta.required)
|
||||||
|
return <span style={badgeStyles.disabled}>Disabled</span>;
|
||||||
|
if (status === "unavailable")
|
||||||
|
return <span style={badgeStyles.error}>Error</span>;
|
||||||
|
return <span style={badgeStyles.active}>Active</span>;
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div style={cardStyle}>
|
||||||
|
{/* Header */}
|
||||||
|
<div
|
||||||
|
style={headerStyle}
|
||||||
|
onClick={() => hasFields && setExpanded(!expanded)}
|
||||||
|
>
|
||||||
|
<div style={{ display: "flex", alignItems: "center", gap: "0.75rem" }}>
|
||||||
|
{!meta.required && (
|
||||||
|
<button
|
||||||
|
onClick={(e) => {
|
||||||
|
e.stopPropagation();
|
||||||
|
handleToggle();
|
||||||
|
}}
|
||||||
|
disabled={toggling}
|
||||||
|
style={{
|
||||||
|
...toggleBtnStyle,
|
||||||
|
backgroundColor: enabled
|
||||||
|
? "var(--ctp-green)"
|
||||||
|
: "var(--ctp-surface2)",
|
||||||
|
}}
|
||||||
|
title={enabled ? "Disable module" : "Enable module"}
|
||||||
|
>
|
||||||
|
<span
|
||||||
|
style={{
|
||||||
|
...toggleKnobStyle,
|
||||||
|
transform: enabled ? "translateX(14px)" : "translateX(0)",
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
</button>
|
||||||
|
)}
|
||||||
|
<span style={{ fontWeight: 600, fontSize: "var(--font-title)" }}>
|
||||||
|
{meta.name}
|
||||||
|
</span>
|
||||||
|
{statusBadge()}
|
||||||
|
</div>
|
||||||
|
{hasFields && (
|
||||||
|
<span
|
||||||
|
style={{
|
||||||
|
color: "var(--ctp-overlay1)",
|
||||||
|
fontSize: "0.75rem",
|
||||||
|
transition: "transform 0.15s ease",
|
||||||
|
transform: expanded ? "rotate(180deg)" : "rotate(0)",
|
||||||
|
cursor: "pointer",
|
||||||
|
userSelect: "none",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
▼
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Toggle error */}
|
||||||
|
{toggleError && (
|
||||||
|
<div style={{ ...errorStyle, margin: "0.5rem 1.5rem 0" }}>
|
||||||
|
{toggleError}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Dependencies note */}
|
||||||
|
{deps.length > 0 && expanded && (
|
||||||
|
<div style={depNoteStyle}>
|
||||||
|
Requires: {deps.map((d) => allModules[d]?.name ?? d).join(", ")}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Body */}
|
||||||
|
{expanded && hasFields && (
|
||||||
|
<div style={bodyStyle}>
|
||||||
|
{renderModuleFields(moduleId, settings, getFieldValue, setField)}
|
||||||
|
|
||||||
|
{/* Footer */}
|
||||||
|
<div style={footerStyle}>
|
||||||
|
<div
|
||||||
|
style={{ display: "flex", gap: "0.5rem", alignItems: "center" }}
|
||||||
|
>
|
||||||
|
{hasEdits && (
|
||||||
|
<button
|
||||||
|
onClick={handleSave}
|
||||||
|
disabled={saving}
|
||||||
|
style={btnPrimaryStyle}
|
||||||
|
>
|
||||||
|
{saving ? "Saving..." : "Save"}
|
||||||
|
</button>
|
||||||
|
)}
|
||||||
|
{isTestable && (
|
||||||
|
<button
|
||||||
|
onClick={handleTest}
|
||||||
|
disabled={testing}
|
||||||
|
style={btnSecondaryStyle}
|
||||||
|
>
|
||||||
|
{testing ? "Testing..." : "Test Connection"}
|
||||||
|
</button>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
<div
|
||||||
|
style={{ display: "flex", alignItems: "center", gap: "0.5rem" }}
|
||||||
|
>
|
||||||
|
{saveSuccess && (
|
||||||
|
<span
|
||||||
|
style={{
|
||||||
|
color: "var(--ctp-green)",
|
||||||
|
fontSize: "var(--font-body)",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
Saved
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
{saveError && (
|
||||||
|
<span
|
||||||
|
style={{
|
||||||
|
color: "var(--ctp-red)",
|
||||||
|
fontSize: "var(--font-body)",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{saveError}
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Test result */}
|
||||||
|
{testResult && (
|
||||||
|
<div
|
||||||
|
style={{
|
||||||
|
...testResultStyle,
|
||||||
|
borderColor: testResult.success
|
||||||
|
? "rgba(166, 227, 161, 0.3)"
|
||||||
|
: "rgba(243, 139, 168, 0.3)",
|
||||||
|
background: testResult.success
|
||||||
|
? "rgba(166, 227, 161, 0.08)"
|
||||||
|
: "rgba(243, 139, 168, 0.08)",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<span
|
||||||
|
style={{
|
||||||
|
color: testResult.success
|
||||||
|
? "var(--ctp-green)"
|
||||||
|
: "var(--ctp-red)",
|
||||||
|
fontWeight: 600,
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{testResult.success ? "OK" : "Failed"}
|
||||||
|
</span>
|
||||||
|
<span
|
||||||
|
style={{
|
||||||
|
color: "var(--ctp-subtext0)",
|
||||||
|
fontSize: "var(--font-body)",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{testResult.message}
|
||||||
|
</span>
|
||||||
|
{testResult.latency_ms > 0 && (
|
||||||
|
<span
|
||||||
|
style={{
|
||||||
|
color: "var(--ctp-overlay1)",
|
||||||
|
fontSize: "var(--font-body)",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{testResult.latency_ms}ms
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Field renderers per module ---
|
||||||
|
|
||||||
|
function renderModuleFields(
|
||||||
|
moduleId: string,
|
||||||
|
settings: Record<string, unknown>,
|
||||||
|
getValue: (key: string) => unknown,
|
||||||
|
setValue: (key: string, value: unknown) => void,
|
||||||
|
) {
|
||||||
|
switch (moduleId) {
|
||||||
|
case "core":
|
||||||
|
return (
|
||||||
|
<FieldGrid>
|
||||||
|
<EditableField
|
||||||
|
label="Host"
|
||||||
|
value={getValue("host")}
|
||||||
|
onChange={(v) => setValue("host", v)}
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Port"
|
||||||
|
value={getValue("port")}
|
||||||
|
onChange={(v) => setValue("port", Number(v))}
|
||||||
|
type="number"
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Base URL"
|
||||||
|
value={getValue("base_url")}
|
||||||
|
onChange={(v) => setValue("base_url", v)}
|
||||||
|
/>
|
||||||
|
<ReadOnlyField
|
||||||
|
label="Read Only"
|
||||||
|
value={settings.readonly ? "Yes" : "No"}
|
||||||
|
/>
|
||||||
|
</FieldGrid>
|
||||||
|
);
|
||||||
|
case "schemas":
|
||||||
|
return (
|
||||||
|
<FieldGrid>
|
||||||
|
<EditableField
|
||||||
|
label="Directory"
|
||||||
|
value={getValue("directory")}
|
||||||
|
onChange={(v) => setValue("directory", v)}
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Default"
|
||||||
|
value={getValue("default")}
|
||||||
|
onChange={(v) => setValue("default", v)}
|
||||||
|
/>
|
||||||
|
<ReadOnlyField label="Schema Count" value={settings.count} />
|
||||||
|
</FieldGrid>
|
||||||
|
);
|
||||||
|
case "database":
|
||||||
|
return (
|
||||||
|
<FieldGrid>
|
||||||
|
<EditableField
|
||||||
|
label="Host"
|
||||||
|
value={getValue("host")}
|
||||||
|
onChange={(v) => setValue("host", v)}
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Port"
|
||||||
|
value={getValue("port")}
|
||||||
|
onChange={(v) => setValue("port", Number(v))}
|
||||||
|
type="number"
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Database"
|
||||||
|
value={getValue("name")}
|
||||||
|
onChange={(v) => setValue("name", v)}
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="User"
|
||||||
|
value={getValue("user")}
|
||||||
|
onChange={(v) => setValue("user", v)}
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Password"
|
||||||
|
value={getValue("password")}
|
||||||
|
onChange={(v) => setValue("password", v)}
|
||||||
|
/>
|
||||||
|
<SelectField
|
||||||
|
label="SSL Mode"
|
||||||
|
value={getValue("sslmode")}
|
||||||
|
options={[
|
||||||
|
"disable",
|
||||||
|
"allow",
|
||||||
|
"prefer",
|
||||||
|
"require",
|
||||||
|
"verify-ca",
|
||||||
|
"verify-full",
|
||||||
|
]}
|
||||||
|
onChange={(v) => setValue("sslmode", v)}
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Max Connections"
|
||||||
|
value={getValue("max_connections")}
|
||||||
|
onChange={(v) => setValue("max_connections", Number(v))}
|
||||||
|
type="number"
|
||||||
|
/>
|
||||||
|
</FieldGrid>
|
||||||
|
);
|
||||||
|
case "storage":
|
||||||
|
return (
|
||||||
|
<FieldGrid>
|
||||||
|
<EditableField
|
||||||
|
label="Endpoint"
|
||||||
|
value={getValue("endpoint")}
|
||||||
|
onChange={(v) => setValue("endpoint", v)}
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Bucket"
|
||||||
|
value={getValue("bucket")}
|
||||||
|
onChange={(v) => setValue("bucket", v)}
|
||||||
|
/>
|
||||||
|
<CheckboxField
|
||||||
|
label="Use SSL"
|
||||||
|
value={getValue("use_ssl")}
|
||||||
|
onChange={(v) => setValue("use_ssl", v)}
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Region"
|
||||||
|
value={getValue("region")}
|
||||||
|
onChange={(v) => setValue("region", v)}
|
||||||
|
/>
|
||||||
|
</FieldGrid>
|
||||||
|
);
|
||||||
|
case "auth":
|
||||||
|
return renderAuthFields(settings, getValue, setValue);
|
||||||
|
case "freecad":
|
||||||
|
return (
|
||||||
|
<FieldGrid>
|
||||||
|
<EditableField
|
||||||
|
label="URI Scheme"
|
||||||
|
value={getValue("uri_scheme")}
|
||||||
|
onChange={(v) => setValue("uri_scheme", v)}
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Executable"
|
||||||
|
value={getValue("executable")}
|
||||||
|
onChange={(v) => setValue("executable", v)}
|
||||||
|
/>
|
||||||
|
</FieldGrid>
|
||||||
|
);
|
||||||
|
case "odoo":
|
||||||
|
return (
|
||||||
|
<FieldGrid>
|
||||||
|
<EditableField
|
||||||
|
label="URL"
|
||||||
|
value={getValue("url")}
|
||||||
|
onChange={(v) => setValue("url", v)}
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Database"
|
||||||
|
value={getValue("database")}
|
||||||
|
onChange={(v) => setValue("database", v)}
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Username"
|
||||||
|
value={getValue("username")}
|
||||||
|
onChange={(v) => setValue("username", v)}
|
||||||
|
/>
|
||||||
|
</FieldGrid>
|
||||||
|
);
|
||||||
|
case "jobs":
|
||||||
|
return (
|
||||||
|
<FieldGrid>
|
||||||
|
<EditableField
|
||||||
|
label="Definitions Directory"
|
||||||
|
value={getValue("directory")}
|
||||||
|
onChange={(v) => setValue("directory", v)}
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Runner Timeout (s)"
|
||||||
|
value={getValue("runner_timeout")}
|
||||||
|
onChange={(v) => setValue("runner_timeout", Number(v))}
|
||||||
|
type="number"
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Timeout Check (s)"
|
||||||
|
value={getValue("job_timeout_check")}
|
||||||
|
onChange={(v) => setValue("job_timeout_check", Number(v))}
|
||||||
|
type="number"
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Default Priority"
|
||||||
|
value={getValue("default_priority")}
|
||||||
|
onChange={(v) => setValue("default_priority", Number(v))}
|
||||||
|
type="number"
|
||||||
|
/>
|
||||||
|
</FieldGrid>
|
||||||
|
);
|
||||||
|
default:
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function renderAuthFields(
|
||||||
|
settings: Record<string, unknown>,
|
||||||
|
getValue: (key: string) => unknown,
|
||||||
|
setValue: (key: string, value: unknown) => void,
|
||||||
|
) {
|
||||||
|
const local = (getValue("local") ?? settings.local ?? {}) as Record<
|
||||||
|
string,
|
||||||
|
unknown
|
||||||
|
>;
|
||||||
|
const ldap = (getValue("ldap") ?? settings.ldap ?? {}) as Record<
|
||||||
|
string,
|
||||||
|
unknown
|
||||||
|
>;
|
||||||
|
const oidc = (getValue("oidc") ?? settings.oidc ?? {}) as Record<
|
||||||
|
string,
|
||||||
|
unknown
|
||||||
|
>;
|
||||||
|
|
||||||
|
const setNested = (
|
||||||
|
section: string,
|
||||||
|
current: Record<string, unknown>,
|
||||||
|
field: string,
|
||||||
|
v: unknown,
|
||||||
|
) => {
|
||||||
|
setValue(section, { ...current, [field]: v });
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div style={{ display: "flex", flexDirection: "column", gap: "1rem" }}>
|
||||||
|
<SubSection title="Local Auth">
|
||||||
|
<FieldGrid>
|
||||||
|
<CheckboxField
|
||||||
|
label="Enabled"
|
||||||
|
value={local.enabled}
|
||||||
|
onChange={(v) => setNested("local", local, "enabled", v)}
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Default Admin"
|
||||||
|
value={local.default_admin_username}
|
||||||
|
onChange={(v) =>
|
||||||
|
setNested("local", local, "default_admin_username", v)
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Default Admin Password"
|
||||||
|
value={local.default_admin_password}
|
||||||
|
onChange={(v) =>
|
||||||
|
setNested("local", local, "default_admin_password", v)
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
</FieldGrid>
|
||||||
|
</SubSection>
|
||||||
|
<SubSection title="LDAP / FreeIPA">
|
||||||
|
<FieldGrid>
|
||||||
|
<CheckboxField
|
||||||
|
label="Enabled"
|
||||||
|
value={ldap.enabled}
|
||||||
|
onChange={(v) => setNested("ldap", ldap, "enabled", v)}
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="URL"
|
||||||
|
value={ldap.url}
|
||||||
|
onChange={(v) => setNested("ldap", ldap, "url", v)}
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Base DN"
|
||||||
|
value={ldap.base_dn}
|
||||||
|
onChange={(v) => setNested("ldap", ldap, "base_dn", v)}
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Bind DN"
|
||||||
|
value={ldap.bind_dn}
|
||||||
|
onChange={(v) => setNested("ldap", ldap, "bind_dn", v)}
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Bind Password"
|
||||||
|
value={ldap.bind_password}
|
||||||
|
onChange={(v) => setNested("ldap", ldap, "bind_password", v)}
|
||||||
|
/>
|
||||||
|
</FieldGrid>
|
||||||
|
</SubSection>
|
||||||
|
<SubSection title="OIDC / Keycloak">
|
||||||
|
<FieldGrid>
|
||||||
|
<CheckboxField
|
||||||
|
label="Enabled"
|
||||||
|
value={oidc.enabled}
|
||||||
|
onChange={(v) => setNested("oidc", oidc, "enabled", v)}
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Issuer URL"
|
||||||
|
value={oidc.issuer_url}
|
||||||
|
onChange={(v) => setNested("oidc", oidc, "issuer_url", v)}
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Client ID"
|
||||||
|
value={oidc.client_id}
|
||||||
|
onChange={(v) => setNested("oidc", oidc, "client_id", v)}
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Client Secret"
|
||||||
|
value={oidc.client_secret}
|
||||||
|
onChange={(v) => setNested("oidc", oidc, "client_secret", v)}
|
||||||
|
/>
|
||||||
|
<EditableField
|
||||||
|
label="Redirect URL"
|
||||||
|
value={oidc.redirect_url}
|
||||||
|
onChange={(v) => setNested("oidc", oidc, "redirect_url", v)}
|
||||||
|
/>
|
||||||
|
</FieldGrid>
|
||||||
|
</SubSection>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Shared field components ---
|
||||||
|
|
||||||
|
function FieldGrid({ children }: { children: React.ReactNode }) {
|
||||||
|
return <div style={fieldGridStyle}>{children}</div>;
|
||||||
|
}
|
||||||
|
|
||||||
|
function SubSection({
|
||||||
|
title,
|
||||||
|
children,
|
||||||
|
}: {
|
||||||
|
title: string;
|
||||||
|
children: React.ReactNode;
|
||||||
|
}) {
|
||||||
|
return (
|
||||||
|
<div>
|
||||||
|
<div style={subSectionTitleStyle}>{title}</div>
|
||||||
|
{children}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function ReadOnlyField({ label, value }: { label: string; value: unknown }) {
|
||||||
|
const display =
|
||||||
|
value === undefined || value === null || value === "" ? "—" : String(value);
|
||||||
|
return (
|
||||||
|
<div>
|
||||||
|
<div style={fieldLabelStyle}>{label}</div>
|
||||||
|
<div style={fieldValueStyle}>{display}</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function EditableField({
|
||||||
|
label,
|
||||||
|
value,
|
||||||
|
onChange,
|
||||||
|
type = "text",
|
||||||
|
}: {
|
||||||
|
label: string;
|
||||||
|
value: unknown;
|
||||||
|
onChange: (v: string) => void;
|
||||||
|
type?: string;
|
||||||
|
}) {
|
||||||
|
const strVal = value === undefined || value === null ? "" : String(value);
|
||||||
|
const isRedacted = strVal === "****";
|
||||||
|
return (
|
||||||
|
<div>
|
||||||
|
<div style={fieldLabelStyle}>{label}</div>
|
||||||
|
<input
|
||||||
|
type={isRedacted ? "password" : type}
|
||||||
|
value={isRedacted ? "" : strVal}
|
||||||
|
onChange={(e) => onChange(e.target.value)}
|
||||||
|
placeholder={isRedacted ? "••••••••" : undefined}
|
||||||
|
className="silo-input"
|
||||||
|
style={fieldInputStyle}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function SelectField({
|
||||||
|
label,
|
||||||
|
value,
|
||||||
|
options,
|
||||||
|
onChange,
|
||||||
|
}: {
|
||||||
|
label: string;
|
||||||
|
value: unknown;
|
||||||
|
options: string[];
|
||||||
|
onChange: (v: string) => void;
|
||||||
|
}) {
|
||||||
|
return (
|
||||||
|
<div>
|
||||||
|
<div style={fieldLabelStyle}>{label}</div>
|
||||||
|
<select
|
||||||
|
value={String(value ?? "")}
|
||||||
|
onChange={(e) => onChange(e.target.value)}
|
||||||
|
style={fieldInputStyle}
|
||||||
|
>
|
||||||
|
{options.map((opt) => (
|
||||||
|
<option key={opt} value={opt}>
|
||||||
|
{opt}
|
||||||
|
</option>
|
||||||
|
))}
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function CheckboxField({
|
||||||
|
label,
|
||||||
|
value,
|
||||||
|
onChange,
|
||||||
|
}: {
|
||||||
|
label: string;
|
||||||
|
value: unknown;
|
||||||
|
onChange: (v: boolean) => void;
|
||||||
|
}) {
|
||||||
|
return (
|
||||||
|
<div style={{ display: "flex", alignItems: "center", gap: "0.5rem" }}>
|
||||||
|
<input
|
||||||
|
type="checkbox"
|
||||||
|
checked={Boolean(value)}
|
||||||
|
onChange={(e) => onChange(e.target.checked)}
|
||||||
|
style={{ accentColor: "var(--ctp-mauve)" }}
|
||||||
|
/>
|
||||||
|
<div style={fieldLabelStyle}>{label}</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Styles ---
|
||||||
|
|
||||||
|
const cardStyle: React.CSSProperties = {
|
||||||
|
backgroundColor: "var(--ctp-surface0)",
|
||||||
|
borderRadius: "0.75rem",
|
||||||
|
marginBottom: "0.75rem",
|
||||||
|
overflow: "hidden",
|
||||||
|
};
|
||||||
|
|
||||||
|
const headerStyle: React.CSSProperties = {
|
||||||
|
display: "flex",
|
||||||
|
alignItems: "center",
|
||||||
|
justifyContent: "space-between",
|
||||||
|
padding: "1rem 1.5rem",
|
||||||
|
cursor: "pointer",
|
||||||
|
userSelect: "none",
|
||||||
|
};
|
||||||
|
|
||||||
|
const bodyStyle: React.CSSProperties = {
|
||||||
|
padding: "0 1.5rem 1.25rem",
|
||||||
|
};
|
||||||
|
|
||||||
|
const footerStyle: React.CSSProperties = {
|
||||||
|
display: "flex",
|
||||||
|
justifyContent: "space-between",
|
||||||
|
alignItems: "center",
|
||||||
|
marginTop: "1rem",
|
||||||
|
paddingTop: "0.75rem",
|
||||||
|
borderTop: "1px solid var(--ctp-surface1)",
|
||||||
|
};
|
||||||
|
|
||||||
|
const toggleBtnStyle: React.CSSProperties = {
|
||||||
|
position: "relative",
|
||||||
|
width: 34,
|
||||||
|
height: 20,
|
||||||
|
borderRadius: 10,
|
||||||
|
border: "none",
|
||||||
|
cursor: "pointer",
|
||||||
|
padding: 0,
|
||||||
|
flexShrink: 0,
|
||||||
|
transition: "background-color 0.15s ease",
|
||||||
|
};
|
||||||
|
|
||||||
|
const toggleKnobStyle: React.CSSProperties = {
|
||||||
|
position: "absolute",
|
||||||
|
top: 3,
|
||||||
|
left: 3,
|
||||||
|
width: 14,
|
||||||
|
height: 14,
|
||||||
|
borderRadius: "50%",
|
||||||
|
backgroundColor: "var(--ctp-crust)",
|
||||||
|
transition: "transform 0.15s ease",
|
||||||
|
};
|
||||||
|
|
||||||
|
const badgeBase: React.CSSProperties = {
|
||||||
|
display: "inline-block",
|
||||||
|
padding: "0.15rem 0.5rem",
|
||||||
|
borderRadius: "1rem",
|
||||||
|
fontSize: "0.65rem",
|
||||||
|
fontWeight: 600,
|
||||||
|
textTransform: "uppercase",
|
||||||
|
letterSpacing: "0.05em",
|
||||||
|
};
|
||||||
|
|
||||||
|
const badgeStyles = {
|
||||||
|
active: {
|
||||||
|
...badgeBase,
|
||||||
|
background: "rgba(166, 227, 161, 0.2)",
|
||||||
|
color: "var(--ctp-green)",
|
||||||
|
} as React.CSSProperties,
|
||||||
|
disabled: {
|
||||||
|
...badgeBase,
|
||||||
|
background: "rgba(147, 153, 178, 0.15)",
|
||||||
|
color: "var(--ctp-overlay1)",
|
||||||
|
} as React.CSSProperties,
|
||||||
|
error: {
|
||||||
|
...badgeBase,
|
||||||
|
background: "rgba(243, 139, 168, 0.2)",
|
||||||
|
color: "var(--ctp-red)",
|
||||||
|
} as React.CSSProperties,
|
||||||
|
};
|
||||||
|
|
||||||
|
const errorStyle: React.CSSProperties = {
|
||||||
|
color: "var(--ctp-red)",
|
||||||
|
fontSize: "var(--font-body)",
|
||||||
|
};
|
||||||
|
|
||||||
|
const depNoteStyle: React.CSSProperties = {
|
||||||
|
padding: "0 1.5rem",
|
||||||
|
color: "var(--ctp-overlay1)",
|
||||||
|
fontSize: "var(--font-body)",
|
||||||
|
fontStyle: "italic",
|
||||||
|
};
|
||||||
|
|
||||||
|
const fieldGridStyle: React.CSSProperties = {
|
||||||
|
display: "grid",
|
||||||
|
gridTemplateColumns: "1fr 1fr",
|
||||||
|
gap: "0.75rem 1.5rem",
|
||||||
|
};
|
||||||
|
|
||||||
|
const subSectionTitleStyle: React.CSSProperties = {
|
||||||
|
fontSize: "0.7rem",
|
||||||
|
fontWeight: 600,
|
||||||
|
textTransform: "uppercase",
|
||||||
|
letterSpacing: "0.05em",
|
||||||
|
color: "var(--ctp-overlay1)",
|
||||||
|
marginBottom: "0.5rem",
|
||||||
|
paddingBottom: "0.25rem",
|
||||||
|
borderBottom: "1px solid var(--ctp-surface1)",
|
||||||
|
};
|
||||||
|
|
||||||
|
const fieldLabelStyle: React.CSSProperties = {
|
||||||
|
color: "var(--ctp-subtext0)",
|
||||||
|
fontSize: "var(--font-body)",
|
||||||
|
fontWeight: 500,
|
||||||
|
marginBottom: "0.2rem",
|
||||||
|
};
|
||||||
|
|
||||||
|
const fieldValueStyle: React.CSSProperties = {
|
||||||
|
fontSize: "var(--font-body)",
|
||||||
|
color: "var(--ctp-text)",
|
||||||
|
fontFamily: "'JetBrains Mono', monospace",
|
||||||
|
};
|
||||||
|
|
||||||
|
const fieldInputStyle: React.CSSProperties = {
|
||||||
|
width: "100%",
|
||||||
|
padding: "0.4rem 0.6rem",
|
||||||
|
backgroundColor: "var(--ctp-base)",
|
||||||
|
border: "1px solid var(--ctp-surface1)",
|
||||||
|
borderRadius: "0.375rem",
|
||||||
|
color: "var(--ctp-text)",
|
||||||
|
fontSize: "var(--font-body)",
|
||||||
|
boxSizing: "border-box",
|
||||||
|
};
|
||||||
|
|
||||||
|
const btnPrimaryStyle: React.CSSProperties = {
|
||||||
|
padding: "0.4rem 0.75rem",
|
||||||
|
borderRadius: "0.25rem",
|
||||||
|
border: "none",
|
||||||
|
backgroundColor: "var(--ctp-mauve)",
|
||||||
|
color: "var(--ctp-crust)",
|
||||||
|
fontWeight: 500,
|
||||||
|
fontSize: "0.75rem",
|
||||||
|
cursor: "pointer",
|
||||||
|
};
|
||||||
|
|
||||||
|
const btnSecondaryStyle: React.CSSProperties = {
|
||||||
|
padding: "0.4rem 0.75rem",
|
||||||
|
borderRadius: "0.25rem",
|
||||||
|
border: "1px solid var(--ctp-surface2)",
|
||||||
|
backgroundColor: "transparent",
|
||||||
|
color: "var(--ctp-subtext1)",
|
||||||
|
fontWeight: 500,
|
||||||
|
fontSize: "0.75rem",
|
||||||
|
cursor: "pointer",
|
||||||
|
};
|
||||||
|
|
||||||
|
const testResultStyle: React.CSSProperties = {
|
||||||
|
display: "flex",
|
||||||
|
gap: "0.75rem",
|
||||||
|
alignItems: "center",
|
||||||
|
marginTop: "0.75rem",
|
||||||
|
padding: "0.5rem 0.75rem",
|
||||||
|
borderRadius: "0.5rem",
|
||||||
|
border: "1px solid",
|
||||||
|
};
|
||||||
@@ -1,5 +1,4 @@
|
|||||||
import { useState, useCallback } from "react";
|
import { useState, useCallback } from "react";
|
||||||
import { post } from "../api/client";
|
|
||||||
|
|
||||||
export interface PendingAttachment {
|
export interface PendingAttachment {
|
||||||
file: File;
|
file: File;
|
||||||
@@ -9,61 +8,65 @@ export interface PendingAttachment {
|
|||||||
error?: string;
|
error?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface PresignResponse {
|
interface UploadResponse {
|
||||||
object_key: string;
|
id?: number;
|
||||||
upload_url: string;
|
object_key?: string;
|
||||||
expires_at: string;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Hook for uploading files via direct multipart POST.
|
||||||
|
*
|
||||||
|
* Callers provide the target URL; the hook builds a FormData body and uses
|
||||||
|
* XMLHttpRequest so that upload progress events are available.
|
||||||
|
*/
|
||||||
export function useFileUpload() {
|
export function useFileUpload() {
|
||||||
const [uploading, setUploading] = useState(false);
|
const [uploading, setUploading] = useState(false);
|
||||||
|
|
||||||
const upload = useCallback(
|
const upload = useCallback(
|
||||||
(
|
(
|
||||||
file: File,
|
file: File,
|
||||||
|
url: string,
|
||||||
onProgress?: (progress: number) => void,
|
onProgress?: (progress: number) => void,
|
||||||
): Promise<PendingAttachment> => {
|
): Promise<PendingAttachment> => {
|
||||||
setUploading(true);
|
setUploading(true);
|
||||||
|
|
||||||
return (async () => {
|
return (async () => {
|
||||||
try {
|
try {
|
||||||
// Get presigned URL.
|
const form = new FormData();
|
||||||
const presign = await post<PresignResponse>(
|
form.append("file", file);
|
||||||
"/api/uploads/presign",
|
|
||||||
{
|
const result = await new Promise<UploadResponse>(
|
||||||
filename: file.name,
|
(resolve, reject) => {
|
||||||
content_type: file.type || "application/octet-stream",
|
const xhr = new XMLHttpRequest();
|
||||||
size: file.size,
|
xhr.open("POST", url);
|
||||||
|
xhr.withCredentials = true;
|
||||||
|
|
||||||
|
xhr.upload.onprogress = (e) => {
|
||||||
|
if (e.lengthComputable) {
|
||||||
|
onProgress?.(Math.round((e.loaded / e.total) * 100));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
xhr.onload = () => {
|
||||||
|
if (xhr.status >= 200 && xhr.status < 300) {
|
||||||
|
try {
|
||||||
|
resolve(JSON.parse(xhr.responseText) as UploadResponse);
|
||||||
|
} catch {
|
||||||
|
resolve({});
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
reject(new Error(`Upload failed: HTTP ${xhr.status}`));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
xhr.onerror = () => reject(new Error("Upload failed"));
|
||||||
|
xhr.send(form);
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
// Upload via XMLHttpRequest for progress events.
|
|
||||||
await new Promise<void>((resolve, reject) => {
|
|
||||||
const xhr = new XMLHttpRequest();
|
|
||||||
xhr.open("PUT", presign.upload_url);
|
|
||||||
xhr.setRequestHeader(
|
|
||||||
"Content-Type",
|
|
||||||
file.type || "application/octet-stream",
|
|
||||||
);
|
|
||||||
|
|
||||||
xhr.upload.onprogress = (e) => {
|
|
||||||
if (e.lengthComputable) {
|
|
||||||
onProgress?.(Math.round((e.loaded / e.total) * 100));
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
xhr.onload = () => {
|
|
||||||
if (xhr.status >= 200 && xhr.status < 300) resolve();
|
|
||||||
else reject(new Error(`Upload failed: HTTP ${xhr.status}`));
|
|
||||||
};
|
|
||||||
|
|
||||||
xhr.onerror = () => reject(new Error("Upload failed"));
|
|
||||||
xhr.send(file);
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
file,
|
file,
|
||||||
objectKey: presign.object_key,
|
objectKey: result.object_key ?? "",
|
||||||
uploadProgress: 100,
|
uploadProgress: 100,
|
||||||
uploadStatus: "complete" as const,
|
uploadStatus: "complete" as const,
|
||||||
};
|
};
|
||||||
|
|||||||
23
web/src/hooks/useModules.ts
Normal file
23
web/src/hooks/useModules.ts
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
import { useEffect, useState, useCallback } from "react";
|
||||||
|
import { get } from "../api/client";
|
||||||
|
import type { ModuleInfo, ModulesResponse } from "../api/types";
|
||||||
|
|
||||||
|
export function useModules() {
|
||||||
|
const [modules, setModules] = useState<Record<string, ModuleInfo>>({});
|
||||||
|
const [loading, setLoading] = useState(true);
|
||||||
|
|
||||||
|
const refresh = useCallback(() => {
|
||||||
|
get<ModulesResponse>("/api/modules")
|
||||||
|
.then((res) => setModules(res.modules))
|
||||||
|
.catch(() => {});
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
get<ModulesResponse>("/api/modules")
|
||||||
|
.then((res) => setModules(res.modules))
|
||||||
|
.catch(() => {})
|
||||||
|
.finally(() => setLoading(false));
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
return { modules, loading, refresh };
|
||||||
|
}
|
||||||
76
web/src/hooks/useSSE.ts
Normal file
76
web/src/hooks/useSSE.ts
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
import { useEffect, useRef, useCallback } from "react";
|
||||||
|
|
||||||
|
type SSEHandler = (data: string) => void;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Subscribe to the server-sent event stream at /api/events.
|
||||||
|
* Returns a stable `on` function to register typed event handlers.
|
||||||
|
* Reconnects automatically with exponential backoff on connection loss.
|
||||||
|
*/
|
||||||
|
export function useSSE() {
|
||||||
|
const handlersRef = useRef(new Map<string, Set<SSEHandler>>());
|
||||||
|
|
||||||
|
// Register a handler for a given event type. Returns an unsubscribe function.
|
||||||
|
const on = useCallback((eventType: string, handler: SSEHandler) => {
|
||||||
|
if (!handlersRef.current.has(eventType)) {
|
||||||
|
handlersRef.current.set(eventType, new Set());
|
||||||
|
}
|
||||||
|
handlersRef.current.get(eventType)!.add(handler);
|
||||||
|
return () => {
|
||||||
|
handlersRef.current.get(eventType)?.delete(handler);
|
||||||
|
};
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
let retryDelay = 1000;
|
||||||
|
let timer: ReturnType<typeof setTimeout>;
|
||||||
|
let cancelled = false;
|
||||||
|
let es: EventSource | null = null;
|
||||||
|
|
||||||
|
function dispatch(type: string, data: string) {
|
||||||
|
const handlers = handlersRef.current.get(type);
|
||||||
|
if (handlers) {
|
||||||
|
for (const h of handlers) h(data);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function connect() {
|
||||||
|
if (cancelled) return;
|
||||||
|
|
||||||
|
es = new EventSource("/api/events", { withCredentials: true });
|
||||||
|
|
||||||
|
es.onopen = () => {
|
||||||
|
retryDelay = 1000;
|
||||||
|
};
|
||||||
|
|
||||||
|
// The backend sends named events (event: settings.changed\ndata: ...),
|
||||||
|
// so we register listeners for all event types we care about.
|
||||||
|
// We use a generic message handler plus named event listeners.
|
||||||
|
const knownEvents = ["settings.changed", "server.state", "heartbeat"];
|
||||||
|
for (const eventType of knownEvents) {
|
||||||
|
es.addEventListener(eventType, ((e: MessageEvent) => {
|
||||||
|
dispatch(eventType, e.data);
|
||||||
|
}) as EventListener);
|
||||||
|
}
|
||||||
|
|
||||||
|
es.onerror = () => {
|
||||||
|
es?.close();
|
||||||
|
es = null;
|
||||||
|
if (!cancelled) {
|
||||||
|
timer = setTimeout(connect, retryDelay);
|
||||||
|
retryDelay = Math.min(retryDelay * 2, 30000);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
connect();
|
||||||
|
|
||||||
|
return () => {
|
||||||
|
cancelled = true;
|
||||||
|
clearTimeout(timer);
|
||||||
|
es?.close();
|
||||||
|
};
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
return { on };
|
||||||
|
}
|
||||||
@@ -116,7 +116,7 @@ const titleStyle: React.CSSProperties = {
|
|||||||
color: "var(--ctp-mauve)",
|
color: "var(--ctp-mauve)",
|
||||||
textAlign: "center",
|
textAlign: "center",
|
||||||
fontSize: "2rem",
|
fontSize: "2rem",
|
||||||
fontWeight: 700,
|
fontWeight: 600,
|
||||||
marginBottom: "0.25rem",
|
marginBottom: "0.25rem",
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -164,7 +164,7 @@ const btnPrimaryStyle: React.CSSProperties = {
|
|||||||
display: "block",
|
display: "block",
|
||||||
width: "100%",
|
width: "100%",
|
||||||
padding: "0.75rem 1.5rem",
|
padding: "0.75rem 1.5rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
@@ -189,7 +189,7 @@ const btnOidcStyle: React.CSSProperties = {
|
|||||||
display: "block",
|
display: "block",
|
||||||
width: "100%",
|
width: "100%",
|
||||||
padding: "0.75rem 1.5rem",
|
padding: "0.75rem 1.5rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
|
|||||||
@@ -216,7 +216,7 @@ export function ProjectsPage() {
|
|||||||
...btnPrimaryStyle,
|
...btnPrimaryStyle,
|
||||||
display: "inline-flex",
|
display: "inline-flex",
|
||||||
alignItems: "center",
|
alignItems: "center",
|
||||||
gap: "0.35rem",
|
gap: "0.25rem",
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
<Plus size={14} /> New Project
|
<Plus size={14} /> New Project
|
||||||
@@ -465,7 +465,7 @@ export function ProjectsPage() {
|
|||||||
// Styles
|
// Styles
|
||||||
const btnPrimaryStyle: React.CSSProperties = {
|
const btnPrimaryStyle: React.CSSProperties = {
|
||||||
padding: "0.5rem 1rem",
|
padding: "0.5rem 1rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
border: "none",
|
border: "none",
|
||||||
backgroundColor: "var(--ctp-mauve)",
|
backgroundColor: "var(--ctp-mauve)",
|
||||||
color: "var(--ctp-crust)",
|
color: "var(--ctp-crust)",
|
||||||
@@ -476,7 +476,7 @@ const btnPrimaryStyle: React.CSSProperties = {
|
|||||||
|
|
||||||
const btnSecondaryStyle: React.CSSProperties = {
|
const btnSecondaryStyle: React.CSSProperties = {
|
||||||
padding: "0.5rem 1rem",
|
padding: "0.5rem 1rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
border: "none",
|
border: "none",
|
||||||
backgroundColor: "var(--ctp-surface1)",
|
backgroundColor: "var(--ctp-surface1)",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
@@ -487,7 +487,7 @@ const btnSecondaryStyle: React.CSSProperties = {
|
|||||||
|
|
||||||
const btnDangerStyle: React.CSSProperties = {
|
const btnDangerStyle: React.CSSProperties = {
|
||||||
padding: "0.5rem 1rem",
|
padding: "0.5rem 1rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
border: "none",
|
border: "none",
|
||||||
backgroundColor: "var(--ctp-red)",
|
backgroundColor: "var(--ctp-red)",
|
||||||
color: "var(--ctp-crust)",
|
color: "var(--ctp-crust)",
|
||||||
@@ -498,7 +498,7 @@ const btnDangerStyle: React.CSSProperties = {
|
|||||||
|
|
||||||
const btnSmallStyle: React.CSSProperties = {
|
const btnSmallStyle: React.CSSProperties = {
|
||||||
padding: "0.25rem 0.5rem",
|
padding: "0.25rem 0.5rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
border: "none",
|
border: "none",
|
||||||
backgroundColor: "var(--ctp-surface1)",
|
backgroundColor: "var(--ctp-surface1)",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
@@ -530,7 +530,7 @@ const formCloseStyle: React.CSSProperties = {
|
|||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
};
|
};
|
||||||
|
|
||||||
const errorBannerStyle: React.CSSProperties = {
|
const errorBannerStyle: React.CSSProperties = {
|
||||||
@@ -538,7 +538,7 @@ const errorBannerStyle: React.CSSProperties = {
|
|||||||
background: "rgba(243, 139, 168, 0.1)",
|
background: "rgba(243, 139, 168, 0.1)",
|
||||||
border: "1px solid rgba(243, 139, 168, 0.2)",
|
border: "1px solid rgba(243, 139, 168, 0.2)",
|
||||||
padding: "0.5rem 0.75rem",
|
padding: "0.5rem 0.75rem",
|
||||||
borderRadius: "0.4rem",
|
borderRadius: "0.5rem",
|
||||||
marginBottom: "0.75rem",
|
marginBottom: "0.75rem",
|
||||||
fontSize: "var(--font-body)",
|
fontSize: "var(--font-body)",
|
||||||
};
|
};
|
||||||
@@ -549,7 +549,7 @@ const fieldStyle: React.CSSProperties = {
|
|||||||
|
|
||||||
const labelStyle: React.CSSProperties = {
|
const labelStyle: React.CSSProperties = {
|
||||||
display: "block",
|
display: "block",
|
||||||
marginBottom: "0.35rem",
|
marginBottom: "0.25rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
color: "var(--ctp-subtext1)",
|
color: "var(--ctp-subtext1)",
|
||||||
fontSize: "var(--font-body)",
|
fontSize: "var(--font-body)",
|
||||||
@@ -560,7 +560,7 @@ const inputStyle: React.CSSProperties = {
|
|||||||
padding: "0.5rem 0.75rem",
|
padding: "0.5rem 0.75rem",
|
||||||
backgroundColor: "var(--ctp-base)",
|
backgroundColor: "var(--ctp-base)",
|
||||||
border: "1px solid var(--ctp-surface1)",
|
border: "1px solid var(--ctp-surface1)",
|
||||||
borderRadius: "0.4rem",
|
borderRadius: "0.5rem",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
fontSize: "var(--font-body)",
|
fontSize: "var(--font-body)",
|
||||||
boxSizing: "border-box",
|
boxSizing: "border-box",
|
||||||
@@ -587,7 +587,7 @@ const thStyle: React.CSSProperties = {
|
|||||||
};
|
};
|
||||||
|
|
||||||
const tdStyle: React.CSSProperties = {
|
const tdStyle: React.CSSProperties = {
|
||||||
padding: "0.35rem 0.75rem",
|
padding: "0.25rem 0.75rem",
|
||||||
borderBottom: "1px solid var(--ctp-surface1)",
|
borderBottom: "1px solid var(--ctp-surface1)",
|
||||||
fontSize: "var(--font-body)",
|
fontSize: "var(--font-body)",
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -657,7 +657,7 @@ function SegmentBlock({
|
|||||||
marginTop: "0.5rem",
|
marginTop: "0.5rem",
|
||||||
display: "inline-flex",
|
display: "inline-flex",
|
||||||
alignItems: "center",
|
alignItems: "center",
|
||||||
gap: "0.35rem",
|
gap: "0.25rem",
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
<Plus size={14} /> Add Value
|
<Plus size={14} /> Add Value
|
||||||
@@ -692,7 +692,7 @@ const segmentStyle: React.CSSProperties = {
|
|||||||
|
|
||||||
const typeBadgeStyle: React.CSSProperties = {
|
const typeBadgeStyle: React.CSSProperties = {
|
||||||
display: "inline-block",
|
display: "inline-block",
|
||||||
padding: "0.15rem 0.5rem",
|
padding: "0.25rem 0.5rem",
|
||||||
borderRadius: "0.25rem",
|
borderRadius: "0.25rem",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 600,
|
fontWeight: 600,
|
||||||
@@ -707,7 +707,7 @@ const emptyStyle: React.CSSProperties = {
|
|||||||
};
|
};
|
||||||
|
|
||||||
const thStyle: React.CSSProperties = {
|
const thStyle: React.CSSProperties = {
|
||||||
padding: "0.4rem 0.75rem",
|
padding: "0.5rem 0.75rem",
|
||||||
textAlign: "left",
|
textAlign: "left",
|
||||||
borderBottom: "1px solid var(--ctp-surface1)",
|
borderBottom: "1px solid var(--ctp-surface1)",
|
||||||
color: "var(--ctp-overlay1)",
|
color: "var(--ctp-overlay1)",
|
||||||
@@ -725,7 +725,7 @@ const tdStyle: React.CSSProperties = {
|
|||||||
|
|
||||||
const btnTinyStyle: React.CSSProperties = {
|
const btnTinyStyle: React.CSSProperties = {
|
||||||
padding: "0.25rem 0.5rem",
|
padding: "0.25rem 0.5rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
border: "none",
|
border: "none",
|
||||||
backgroundColor: "var(--ctp-surface1)",
|
backgroundColor: "var(--ctp-surface1)",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
@@ -736,7 +736,7 @@ const btnTinyStyle: React.CSSProperties = {
|
|||||||
|
|
||||||
const btnTinyPrimaryStyle: React.CSSProperties = {
|
const btnTinyPrimaryStyle: React.CSSProperties = {
|
||||||
padding: "0.25rem 0.5rem",
|
padding: "0.25rem 0.5rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
border: "none",
|
border: "none",
|
||||||
backgroundColor: "var(--ctp-mauve)",
|
backgroundColor: "var(--ctp-mauve)",
|
||||||
color: "var(--ctp-crust)",
|
color: "var(--ctp-crust)",
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ import { useEffect, useState, type FormEvent } from "react";
|
|||||||
import { get, post, del } from "../api/client";
|
import { get, post, del } from "../api/client";
|
||||||
import { useAuth } from "../hooks/useAuth";
|
import { useAuth } from "../hooks/useAuth";
|
||||||
import type { ApiToken, ApiTokenCreated } from "../api/types";
|
import type { ApiToken, ApiTokenCreated } from "../api/types";
|
||||||
|
import { AdminModules } from "../components/settings/AdminModules";
|
||||||
|
|
||||||
export function SettingsPage() {
|
export function SettingsPage() {
|
||||||
const { user } = useAuth();
|
const { user } = useAuth();
|
||||||
@@ -89,7 +90,7 @@ export function SettingsPage() {
|
|||||||
};
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div>
|
<div style={{ maxWidth: "66%", margin: "0 auto" }}>
|
||||||
<h2 style={{ marginBottom: "1rem" }}>Settings</h2>
|
<h2 style={{ marginBottom: "1rem" }}>Settings</h2>
|
||||||
|
|
||||||
{/* Account Card */}
|
{/* Account Card */}
|
||||||
@@ -114,7 +115,7 @@ export function SettingsPage() {
|
|||||||
<span
|
<span
|
||||||
style={{
|
style={{
|
||||||
display: "inline-block",
|
display: "inline-block",
|
||||||
padding: "0.15rem 0.5rem",
|
padding: "0.25rem 0.5rem",
|
||||||
borderRadius: "1rem",
|
borderRadius: "1rem",
|
||||||
fontSize: "var(--font-table)",
|
fontSize: "var(--font-table)",
|
||||||
fontWeight: 600,
|
fontWeight: 600,
|
||||||
@@ -216,7 +217,14 @@ export function SettingsPage() {
|
|||||||
{tokensError}
|
{tokensError}
|
||||||
</p>
|
</p>
|
||||||
) : (
|
) : (
|
||||||
<div style={{ overflowX: "auto", marginTop: "1rem" }}>
|
<div
|
||||||
|
style={{
|
||||||
|
overflowX: "auto",
|
||||||
|
overflowY: "auto",
|
||||||
|
maxHeight: "28rem",
|
||||||
|
marginTop: "1rem",
|
||||||
|
}}
|
||||||
|
>
|
||||||
<table style={{ width: "100%", borderCollapse: "collapse" }}>
|
<table style={{ width: "100%", borderCollapse: "collapse" }}>
|
||||||
<thead>
|
<thead>
|
||||||
<tr>
|
<tr>
|
||||||
@@ -311,6 +319,9 @@ export function SettingsPage() {
|
|||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
{/* Admin: Module Configuration */}
|
||||||
|
{user?.role === "admin" && <AdminModules />}
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -386,7 +397,7 @@ const createFormStyle: React.CSSProperties = {
|
|||||||
|
|
||||||
const labelStyle: React.CSSProperties = {
|
const labelStyle: React.CSSProperties = {
|
||||||
display: "block",
|
display: "block",
|
||||||
marginBottom: "0.35rem",
|
marginBottom: "0.25rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
color: "var(--ctp-subtext1)",
|
color: "var(--ctp-subtext1)",
|
||||||
fontSize: "var(--font-body)",
|
fontSize: "var(--font-body)",
|
||||||
@@ -397,7 +408,7 @@ const inputStyle: React.CSSProperties = {
|
|||||||
padding: "0.5rem 0.75rem",
|
padding: "0.5rem 0.75rem",
|
||||||
backgroundColor: "var(--ctp-base)",
|
backgroundColor: "var(--ctp-base)",
|
||||||
border: "1px solid var(--ctp-surface1)",
|
border: "1px solid var(--ctp-surface1)",
|
||||||
borderRadius: "0.4rem",
|
borderRadius: "0.5rem",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
fontSize: "var(--font-body)",
|
fontSize: "var(--font-body)",
|
||||||
boxSizing: "border-box",
|
boxSizing: "border-box",
|
||||||
@@ -405,7 +416,7 @@ const inputStyle: React.CSSProperties = {
|
|||||||
|
|
||||||
const btnPrimaryStyle: React.CSSProperties = {
|
const btnPrimaryStyle: React.CSSProperties = {
|
||||||
padding: "0.5rem 1rem",
|
padding: "0.5rem 1rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
border: "none",
|
border: "none",
|
||||||
backgroundColor: "var(--ctp-mauve)",
|
backgroundColor: "var(--ctp-mauve)",
|
||||||
color: "var(--ctp-crust)",
|
color: "var(--ctp-crust)",
|
||||||
@@ -416,10 +427,10 @@ const btnPrimaryStyle: React.CSSProperties = {
|
|||||||
};
|
};
|
||||||
|
|
||||||
const btnCopyStyle: React.CSSProperties = {
|
const btnCopyStyle: React.CSSProperties = {
|
||||||
padding: "0.4rem 0.75rem",
|
padding: "0.5rem 0.75rem",
|
||||||
background: "var(--ctp-surface1)",
|
background: "var(--ctp-surface1)",
|
||||||
border: "none",
|
border: "none",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
@@ -427,10 +438,10 @@ const btnCopyStyle: React.CSSProperties = {
|
|||||||
};
|
};
|
||||||
|
|
||||||
const btnDismissStyle: React.CSSProperties = {
|
const btnDismissStyle: React.CSSProperties = {
|
||||||
padding: "0.4rem 0.75rem",
|
padding: "0.5rem 0.75rem",
|
||||||
background: "none",
|
background: "none",
|
||||||
border: "none",
|
border: "none",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
color: "var(--ctp-subtext0)",
|
color: "var(--ctp-subtext0)",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
@@ -442,7 +453,7 @@ const btnDangerStyle: React.CSSProperties = {
|
|||||||
color: "var(--ctp-red)",
|
color: "var(--ctp-red)",
|
||||||
border: "none",
|
border: "none",
|
||||||
padding: "0.25rem 0.5rem",
|
padding: "0.25rem 0.5rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
@@ -453,7 +464,7 @@ const btnRevokeConfirmStyle: React.CSSProperties = {
|
|||||||
color: "var(--ctp-crust)",
|
color: "var(--ctp-crust)",
|
||||||
border: "none",
|
border: "none",
|
||||||
padding: "0.25rem 0.5rem",
|
padding: "0.25rem 0.5rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
cursor: "pointer",
|
cursor: "pointer",
|
||||||
fontSize: "0.75rem",
|
fontSize: "0.75rem",
|
||||||
fontWeight: 500,
|
fontWeight: 500,
|
||||||
@@ -461,7 +472,7 @@ const btnRevokeConfirmStyle: React.CSSProperties = {
|
|||||||
|
|
||||||
const btnTinyStyle: React.CSSProperties = {
|
const btnTinyStyle: React.CSSProperties = {
|
||||||
padding: "0.25rem 0.5rem",
|
padding: "0.25rem 0.5rem",
|
||||||
borderRadius: "0.375rem",
|
borderRadius: "0.25rem",
|
||||||
border: "none",
|
border: "none",
|
||||||
backgroundColor: "var(--ctp-surface1)",
|
backgroundColor: "var(--ctp-surface1)",
|
||||||
color: "var(--ctp-text)",
|
color: "var(--ctp-text)",
|
||||||
@@ -488,7 +499,7 @@ const thStyle: React.CSSProperties = {
|
|||||||
};
|
};
|
||||||
|
|
||||||
const tdStyle: React.CSSProperties = {
|
const tdStyle: React.CSSProperties = {
|
||||||
padding: "0.4rem 0.75rem",
|
padding: "0.5rem 0.75rem",
|
||||||
borderBottom: "1px solid var(--ctp-surface1)",
|
borderBottom: "1px solid var(--ctp-surface1)",
|
||||||
fontSize: "var(--font-body)",
|
fontSize: "var(--font-body)",
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -1,6 +1,8 @@
|
|||||||
/* Focus and hover states for form inputs */
|
/* Focus and hover states for form inputs */
|
||||||
.silo-input {
|
.silo-input {
|
||||||
transition: border-color 0.15s ease, box-shadow 0.15s ease;
|
transition:
|
||||||
|
border-color 0.15s ease,
|
||||||
|
box-shadow 0.15s ease;
|
||||||
}
|
}
|
||||||
|
|
||||||
.silo-input:hover {
|
.silo-input:hover {
|
||||||
@@ -9,6 +11,6 @@
|
|||||||
|
|
||||||
.silo-input:focus {
|
.silo-input:focus {
|
||||||
border-color: var(--ctp-mauve);
|
border-color: var(--ctp-mauve);
|
||||||
box-shadow: 0 0 0 0.2rem rgba(203, 166, 247, 0.25);
|
box-shadow: 0 0 0 0.25rem rgba(203, 166, 247, 0.25);
|
||||||
outline: none;
|
outline: none;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -44,13 +44,13 @@
|
|||||||
--d-header-px: 2rem;
|
--d-header-px: 2rem;
|
||||||
--d-header-logo: 1.25rem;
|
--d-header-logo: 1.25rem;
|
||||||
--d-nav-gap: 1rem;
|
--d-nav-gap: 1rem;
|
||||||
--d-nav-py: 0.35rem;
|
--d-nav-py: 0.25rem;
|
||||||
--d-nav-px: 0.75rem;
|
--d-nav-px: 0.75rem;
|
||||||
--d-nav-radius: 0.4rem;
|
--d-nav-radius: 0.5rem;
|
||||||
--d-user-gap: 0.6rem;
|
--d-user-gap: 0.5rem;
|
||||||
--d-user-font: var(--font-body);
|
--d-user-font: var(--font-body);
|
||||||
|
|
||||||
--d-th-py: 0.35rem;
|
--d-th-py: 0.25rem;
|
||||||
--d-th-px: 0.75rem;
|
--d-th-px: 0.75rem;
|
||||||
--d-th-font: var(--font-table);
|
--d-th-font: var(--font-table);
|
||||||
--d-td-py: 0.25rem;
|
--d-td-py: 0.25rem;
|
||||||
@@ -59,43 +59,49 @@
|
|||||||
|
|
||||||
--d-toolbar-gap: 0.5rem;
|
--d-toolbar-gap: 0.5rem;
|
||||||
--d-toolbar-py: 0.5rem;
|
--d-toolbar-py: 0.5rem;
|
||||||
--d-toolbar-mb: 0.35rem;
|
--d-toolbar-mb: 0.25rem;
|
||||||
--d-input-py: 0.35rem;
|
--d-input-py: 0.25rem;
|
||||||
--d-input-px: 0.6rem;
|
--d-input-px: 0.5rem;
|
||||||
--d-input-font: var(--font-body);
|
--d-input-font: var(--font-body);
|
||||||
|
|
||||||
--d-footer-h: 28px;
|
--d-footer-h: 28px;
|
||||||
--d-footer-font: var(--font-table);
|
--d-footer-font: var(--font-table);
|
||||||
--d-footer-px: 2rem;
|
--d-footer-px: 2rem;
|
||||||
|
|
||||||
|
--d-sidebar-w: 220px;
|
||||||
|
--d-sidebar-collapsed: 48px;
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ── Density: compact ── */
|
/* ── Density: compact ── */
|
||||||
[data-density="compact"] {
|
[data-density="compact"] {
|
||||||
--d-header-py: 0.35rem;
|
--d-header-py: 0.25rem;
|
||||||
--d-header-px: 1.25rem;
|
--d-header-px: 1.25rem;
|
||||||
--d-header-logo: 1.1rem;
|
--d-header-logo: 1.1rem;
|
||||||
--d-nav-gap: 0.5rem;
|
--d-nav-gap: 0.5rem;
|
||||||
--d-nav-py: 0.2rem;
|
--d-nav-py: 0.25rem;
|
||||||
--d-nav-px: 0.5rem;
|
--d-nav-px: 0.5rem;
|
||||||
--d-nav-radius: 0.3rem;
|
--d-nav-radius: 0.25rem;
|
||||||
--d-user-gap: 0.35rem;
|
--d-user-gap: 0.25rem;
|
||||||
--d-user-font: var(--font-table);
|
--d-user-font: var(--font-table);
|
||||||
|
|
||||||
--d-th-py: 0.2rem;
|
--d-th-py: 0.25rem;
|
||||||
--d-th-px: 0.5rem;
|
--d-th-px: 0.5rem;
|
||||||
--d-th-font: var(--font-sm);
|
--d-th-font: var(--font-sm);
|
||||||
--d-td-py: 0.125rem;
|
--d-td-py: 0.25rem;
|
||||||
--d-td-px: 0.5rem;
|
--d-td-px: 0.5rem;
|
||||||
--d-td-font: var(--font-table);
|
--d-td-font: var(--font-table);
|
||||||
|
|
||||||
--d-toolbar-gap: 0.35rem;
|
--d-toolbar-gap: 0.25rem;
|
||||||
--d-toolbar-py: 0.25rem;
|
--d-toolbar-py: 0.25rem;
|
||||||
--d-toolbar-mb: 0.15rem;
|
--d-toolbar-mb: 0.25rem;
|
||||||
--d-input-py: 0.2rem;
|
--d-input-py: 0.25rem;
|
||||||
--d-input-px: 0.4rem;
|
--d-input-px: 0.5rem;
|
||||||
--d-input-font: var(--font-table);
|
--d-input-font: var(--font-table);
|
||||||
|
|
||||||
--d-footer-h: 24px;
|
--d-footer-h: 24px;
|
||||||
--d-footer-font: var(--font-sm);
|
--d-footer-font: var(--font-sm);
|
||||||
--d-footer-px: 1.25rem;
|
--d-footer-px: 1.25rem;
|
||||||
|
|
||||||
|
--d-sidebar-w: 180px;
|
||||||
|
--d-sidebar-collapsed: 40px;
|
||||||
}
|
}
|
||||||
|
|||||||
Reference in New Issue
Block a user