Merge pull request 'main' (#63) from main into production
Reviewed-on: #63
This commit was merged in pull request #63.
This commit is contained in:
24
.env.example
24
.env.example
@@ -1,12 +1,26 @@
|
||||
# Silo Environment Configuration
|
||||
# Copy this file to .env and update values as needed
|
||||
# Copy to .env (or deployments/.env) and update values as needed.
|
||||
# For automated setup, run: ./scripts/setup-docker.sh
|
||||
|
||||
# PostgreSQL
|
||||
POSTGRES_PASSWORD=silodev
|
||||
|
||||
# MinIO
|
||||
MINIO_ACCESS_KEY=minioadmin
|
||||
MINIO_SECRET_KEY=minioadmin
|
||||
MINIO_ACCESS_KEY=silominio
|
||||
MINIO_SECRET_KEY=silominiosecret
|
||||
|
||||
# Silo API (optional overrides)
|
||||
# SILO_SERVER_PORT=8080
|
||||
# OpenLDAP
|
||||
LDAP_ADMIN_PASSWORD=ldapadmin
|
||||
LDAP_USERS=siloadmin
|
||||
LDAP_PASSWORDS=siloadmin
|
||||
|
||||
# Silo Authentication
|
||||
SILO_SESSION_SECRET=change-me-in-production
|
||||
SILO_ADMIN_USERNAME=admin
|
||||
SILO_ADMIN_PASSWORD=admin
|
||||
|
||||
# Optional: OIDC (Keycloak)
|
||||
# SILO_OIDC_CLIENT_SECRET=
|
||||
|
||||
# Optional: LDAP service account
|
||||
# SILO_LDAP_BIND_PASSWORD=
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -29,6 +29,7 @@ Thumbs.db
|
||||
# Config with secrets
|
||||
config.yaml
|
||||
*.env
|
||||
deployments/config.docker.yaml
|
||||
|
||||
# Python
|
||||
__pycache__/
|
||||
|
||||
26
README.md
26
README.md
@@ -25,7 +25,7 @@ silo/
|
||||
│ ├── silo/ # CLI tool
|
||||
│ └── silod/ # API server
|
||||
├── internal/
|
||||
│ ├── api/ # HTTP handlers and routes (75 endpoints)
|
||||
│ ├── api/ # HTTP handlers and routes (78 endpoints)
|
||||
│ ├── auth/ # Authentication (local, LDAP, OIDC)
|
||||
│ ├── config/ # Configuration loading
|
||||
│ ├── db/ # PostgreSQL repositories
|
||||
@@ -53,15 +53,20 @@ silo/
|
||||
|
||||
## Quick Start
|
||||
|
||||
```bash
|
||||
# Docker Compose (quickest)
|
||||
cp config.example.yaml config.yaml
|
||||
# Edit config.yaml with your database, MinIO, and auth settings
|
||||
make docker-up
|
||||
See the **[Installation Guide](docs/INSTALL.md)** for complete setup instructions.
|
||||
|
||||
# Or manual setup
|
||||
psql -h localhost -U silo -d silo -f migrations/*.sql
|
||||
go run ./cmd/silod -config config.yaml
|
||||
**Docker Compose (quickest — includes PostgreSQL, MinIO, OpenLDAP, and Silo):**
|
||||
|
||||
```bash
|
||||
./scripts/setup-docker.sh
|
||||
docker compose -f deployments/docker-compose.allinone.yaml up -d
|
||||
```
|
||||
|
||||
**Development (local Go + Docker services):**
|
||||
|
||||
```bash
|
||||
make docker-up # Start PostgreSQL + MinIO in Docker
|
||||
make run # Run silo locally with Go
|
||||
```
|
||||
|
||||
When auth is enabled, a default admin account is created on first startup using the credentials in `config.yaml` under `auth.local.default_admin_username` and `auth.local.default_admin_password`.
|
||||
@@ -104,9 +109,10 @@ The server provides the REST API and ODS endpoints consumed by these clients.
|
||||
|
||||
| Document | Description |
|
||||
|----------|-------------|
|
||||
| [docs/INSTALL.md](docs/INSTALL.md) | Installation guide (Docker Compose and daemon) |
|
||||
| [docs/SPECIFICATION.md](docs/SPECIFICATION.md) | Full design specification and API reference |
|
||||
| [docs/STATUS.md](docs/STATUS.md) | Implementation status |
|
||||
| [docs/DEPLOYMENT.md](docs/DEPLOYMENT.md) | Production deployment guide |
|
||||
| [docs/DEPLOYMENT.md](docs/DEPLOYMENT.md) | Production deployment and operations guide |
|
||||
| [docs/CONFIGURATION.md](docs/CONFIGURATION.md) | Configuration reference (all `config.yaml` options) |
|
||||
| [docs/AUTH.md](docs/AUTH.md) | Authentication system design |
|
||||
| [docs/AUTH_USER_GUIDE.md](docs/AUTH_USER_GUIDE.md) | User guide for login, tokens, and roles |
|
||||
|
||||
10
ROADMAP.md
10
ROADMAP.md
@@ -39,7 +39,7 @@ This document compares Silo's current capabilities against SOLIDWORKS PDM—the
|
||||
### Implemented Features (MVP Complete)
|
||||
|
||||
#### Core Database System
|
||||
- PostgreSQL schema with 11 migrations
|
||||
- PostgreSQL schema with 13 migrations
|
||||
- UUID-based identifiers throughout
|
||||
- Soft delete support via `archived_at` timestamps
|
||||
- Atomic sequence generation for part numbers
|
||||
@@ -92,7 +92,7 @@ This document compares Silo's current capabilities against SOLIDWORKS PDM—the
|
||||
- Template generation for import formatting
|
||||
|
||||
#### API & Web Interface
|
||||
- REST API with 75 endpoints
|
||||
- REST API with 78 endpoints
|
||||
- Authentication: local (bcrypt), LDAP/FreeIPA, OIDC/Keycloak
|
||||
- Role-based access control (admin > editor > viewer)
|
||||
- API token management (SHA-256 hashed)
|
||||
@@ -129,7 +129,7 @@ This document compares Silo's current capabilities against SOLIDWORKS PDM—the
|
||||
|
||||
| Component | Status |
|
||||
|-----------|--------|
|
||||
| PostgreSQL | Running (psql.kindred.internal) |
|
||||
| PostgreSQL | Running (psql.example.internal) |
|
||||
| MinIO | Configured in Docker Compose |
|
||||
| Silo API Server | Builds successfully |
|
||||
| Docker Compose | Complete (dev and production) |
|
||||
@@ -255,14 +255,14 @@ CAD integration is maintained in separate repositories ([silo-mod](https://git.k
|
||||
| Feature | SOLIDWORKS PDM | Silo Status | Priority | Complexity |
|
||||
|---------|---------------|-------------|----------|------------|
|
||||
| ERP integration | SAP, Dynamics, etc. | Partial (Odoo stubs) | Medium | Complex |
|
||||
| API access | Full COM/REST API | Full REST API (75 endpoints) | - | - |
|
||||
| API access | Full COM/REST API | Full REST API (78 endpoints) | - | - |
|
||||
| Dispatch scripts | Automation without coding | None | Medium | Moderate |
|
||||
| Task scheduler | Background processing | None | Medium | Moderate |
|
||||
| Email system | SMTP integration | None | High | Simple |
|
||||
| Web portal | Browser access | Full (React SPA + auth) | - | - |
|
||||
|
||||
**Gap Analysis:**
|
||||
Silo has a comprehensive REST API (75 endpoints) and a full web UI with authentication. Odoo ERP integration has config/sync-log scaffolding but push/pull operations are stubs. Remaining gaps: email notifications, task scheduler, dispatch automation.
|
||||
Silo has a comprehensive REST API (78 endpoints) and a full web UI with authentication. Odoo ERP integration has config/sync-log scaffolding but push/pull operations are stubs. Remaining gaps: email notifications, task scheduler, dispatch automation.
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -66,7 +66,7 @@ Token subcommands:
|
||||
silo token revoke <id> Revoke a token
|
||||
|
||||
Environment variables for API access:
|
||||
SILO_API_URL Base URL of the Silo server (e.g., https://silo.kindred.internal)
|
||||
SILO_API_URL Base URL of the Silo server (e.g., https://silo.example.internal)
|
||||
SILO_API_TOKEN API token for authentication
|
||||
|
||||
Examples:
|
||||
|
||||
@@ -8,20 +8,20 @@ server:
|
||||
# read_only: false # Reject all write operations; toggle at runtime with SIGUSR1
|
||||
|
||||
database:
|
||||
host: "psql.kindred.internal"
|
||||
host: "localhost" # Use "postgres" for Docker Compose
|
||||
port: 5432
|
||||
name: "silo"
|
||||
user: "silo"
|
||||
password: "" # Use SILO_DB_PASSWORD env var
|
||||
sslmode: "require"
|
||||
sslmode: "require" # Use "disable" for Docker Compose (internal network)
|
||||
max_connections: 10
|
||||
|
||||
storage:
|
||||
endpoint: "minio.kindred.internal:9000"
|
||||
endpoint: "localhost:9000" # Use "minio:9000" for Docker Compose
|
||||
access_key: "" # Use SILO_MINIO_ACCESS_KEY env var
|
||||
secret_key: "" # Use SILO_MINIO_SECRET_KEY env var
|
||||
bucket: "silo-files"
|
||||
use_ssl: true
|
||||
use_ssl: true # Use false for Docker Compose (internal network)
|
||||
region: "us-east-1"
|
||||
|
||||
schemas:
|
||||
@@ -53,7 +53,7 @@ auth:
|
||||
# LDAP / FreeIPA
|
||||
ldap:
|
||||
enabled: false
|
||||
url: "ldaps://ipa.kindred.internal"
|
||||
url: "ldaps://ipa.example.internal"
|
||||
base_dn: "dc=kindred,dc=internal"
|
||||
user_search_dn: "cn=users,cn=accounts,dc=kindred,dc=internal"
|
||||
# Optional service account for user search (omit for direct user bind)
|
||||
@@ -77,10 +77,10 @@ auth:
|
||||
# OIDC / Keycloak
|
||||
oidc:
|
||||
enabled: false
|
||||
issuer_url: "https://keycloak.kindred.internal/realms/silo"
|
||||
issuer_url: "https://keycloak.example.internal/realms/silo"
|
||||
client_id: "silo"
|
||||
client_secret: "" # Use SILO_OIDC_CLIENT_SECRET env var
|
||||
redirect_url: "https://silo.kindred.internal/auth/callback"
|
||||
redirect_url: "https://silo.example.internal/auth/callback"
|
||||
scopes: ["openid", "profile", "email"]
|
||||
# Map Keycloak realm roles to Silo roles
|
||||
admin_role: "silo-admin"
|
||||
@@ -90,4 +90,4 @@ auth:
|
||||
# CORS origins (locked down when auth is enabled)
|
||||
cors:
|
||||
allowed_origins:
|
||||
- "https://silo.kindred.internal"
|
||||
- "https://silo.example.internal"
|
||||
|
||||
35
deployments/config.dev.yaml
Normal file
35
deployments/config.dev.yaml
Normal file
@@ -0,0 +1,35 @@
|
||||
# Silo Development Configuration
|
||||
# Used by deployments/docker-compose.yaml — works with zero setup via `make docker-up`.
|
||||
# For production Docker installs, run scripts/setup-docker.sh instead.
|
||||
|
||||
server:
|
||||
host: "0.0.0.0"
|
||||
port: 8080
|
||||
base_url: "http://localhost:8080"
|
||||
|
||||
database:
|
||||
host: "postgres"
|
||||
port: 5432
|
||||
name: "silo"
|
||||
user: "silo"
|
||||
password: "${POSTGRES_PASSWORD:-silodev}"
|
||||
sslmode: "disable"
|
||||
max_connections: 10
|
||||
|
||||
storage:
|
||||
endpoint: "minio:9000"
|
||||
access_key: "${MINIO_ACCESS_KEY:-silominio}"
|
||||
secret_key: "${MINIO_SECRET_KEY:-silominiosecret}"
|
||||
bucket: "silo-files"
|
||||
use_ssl: false
|
||||
region: "us-east-1"
|
||||
|
||||
schemas:
|
||||
directory: "/etc/silo/schemas"
|
||||
default: "kindred-rd"
|
||||
|
||||
freecad:
|
||||
uri_scheme: "silo"
|
||||
|
||||
auth:
|
||||
enabled: false
|
||||
@@ -1,7 +1,7 @@
|
||||
# Silo Production Configuration
|
||||
# Single-binary deployment: silod serves API + React SPA
|
||||
#
|
||||
# Layout on silo.kindred.internal:
|
||||
# Layout on silo.example.internal:
|
||||
# /opt/silo/bin/silod - server binary
|
||||
# /opt/silo/web/dist/ - built React frontend (served automatically)
|
||||
# /opt/silo/schemas/ - part number schemas
|
||||
@@ -18,10 +18,10 @@
|
||||
server:
|
||||
host: "0.0.0.0"
|
||||
port: 8080
|
||||
base_url: "https://silo.kindred.internal"
|
||||
base_url: "https://silo.example.internal"
|
||||
|
||||
database:
|
||||
host: "psql.kindred.internal"
|
||||
host: "psql.example.internal"
|
||||
port: 5432
|
||||
name: "silo"
|
||||
user: "silo"
|
||||
@@ -30,7 +30,7 @@ database:
|
||||
max_connections: 20
|
||||
|
||||
storage:
|
||||
endpoint: "minio.kindred.internal:9000"
|
||||
endpoint: "minio.example.internal:9000"
|
||||
access_key: "" # Set via SILO_MINIO_ACCESS_KEY
|
||||
secret_key: "" # Set via SILO_MINIO_SECRET_KEY
|
||||
bucket: "silo-files"
|
||||
@@ -53,7 +53,7 @@ auth:
|
||||
default_admin_password: "" # Set via SILO_ADMIN_PASSWORD
|
||||
ldap:
|
||||
enabled: true
|
||||
url: "ldaps://ipa.kindred.internal"
|
||||
url: "ldaps://ipa.example.internal"
|
||||
base_dn: "dc=kindred,dc=internal"
|
||||
user_search_dn: "cn=users,cn=accounts,dc=kindred,dc=internal"
|
||||
user_attr: "uid"
|
||||
@@ -73,4 +73,4 @@ auth:
|
||||
enabled: false
|
||||
cors:
|
||||
allowed_origins:
|
||||
- "https://silo.kindred.internal"
|
||||
- "https://silo.example.internal"
|
||||
|
||||
172
deployments/docker-compose.allinone.yaml
Normal file
172
deployments/docker-compose.allinone.yaml
Normal file
@@ -0,0 +1,172 @@
|
||||
# Silo All-in-One Stack
|
||||
# PostgreSQL + MinIO + OpenLDAP + Silo API + Nginx (optional)
|
||||
#
|
||||
# Quick start:
|
||||
# ./scripts/setup-docker.sh
|
||||
# docker compose -f deployments/docker-compose.allinone.yaml up -d
|
||||
#
|
||||
# With nginx reverse proxy:
|
||||
# docker compose -f deployments/docker-compose.allinone.yaml --profile nginx up -d
|
||||
#
|
||||
# View logs:
|
||||
# docker compose -f deployments/docker-compose.allinone.yaml logs -f
|
||||
#
|
||||
# Stop:
|
||||
# docker compose -f deployments/docker-compose.allinone.yaml down
|
||||
#
|
||||
# Stop and delete data:
|
||||
# docker compose -f deployments/docker-compose.allinone.yaml down -v
|
||||
|
||||
services:
|
||||
# ---------------------------------------------------------------------------
|
||||
# PostgreSQL 16
|
||||
# ---------------------------------------------------------------------------
|
||||
postgres:
|
||||
image: postgres:16-alpine
|
||||
container_name: silo-postgres
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
POSTGRES_DB: silo
|
||||
POSTGRES_USER: silo
|
||||
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:?Run ./scripts/setup-docker.sh first}
|
||||
volumes:
|
||||
- postgres_data:/var/lib/postgresql/data
|
||||
- ../migrations:/docker-entrypoint-initdb.d:ro
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "pg_isready -U silo -d silo"]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
networks:
|
||||
- silo-net
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# MinIO (S3-compatible object storage)
|
||||
# ---------------------------------------------------------------------------
|
||||
minio:
|
||||
image: minio/minio:latest
|
||||
container_name: silo-minio
|
||||
restart: unless-stopped
|
||||
command: server /data --console-address ":9001"
|
||||
environment:
|
||||
MINIO_ROOT_USER: ${MINIO_ACCESS_KEY:?Run ./scripts/setup-docker.sh first}
|
||||
MINIO_ROOT_PASSWORD: ${MINIO_SECRET_KEY:?Run ./scripts/setup-docker.sh first}
|
||||
volumes:
|
||||
- minio_data:/data
|
||||
ports:
|
||||
- "9001:9001" # MinIO console (remove in hardened setups)
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
networks:
|
||||
- silo-net
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# OpenLDAP (user directory for LDAP authentication)
|
||||
# ---------------------------------------------------------------------------
|
||||
openldap:
|
||||
image: bitnami/openldap:2.6
|
||||
container_name: silo-openldap
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
LDAP_ROOT: "dc=silo,dc=local"
|
||||
LDAP_ADMIN_USERNAME: "admin"
|
||||
LDAP_ADMIN_PASSWORD: ${LDAP_ADMIN_PASSWORD:?Run ./scripts/setup-docker.sh first}
|
||||
LDAP_USERS: ${LDAP_USERS:-siloadmin}
|
||||
LDAP_PASSWORDS: ${LDAP_PASSWORDS:?Run ./scripts/setup-docker.sh first}
|
||||
LDAP_GROUP: "silo-users"
|
||||
LDAP_USER_OU: "users"
|
||||
LDAP_GROUP_OU: "groups"
|
||||
volumes:
|
||||
- openldap_data:/bitnami/openldap
|
||||
- ./ldap:/docker-entrypoint-initdb.d:ro
|
||||
ports:
|
||||
- "1389:1389" # LDAP access for debugging (remove in hardened setups)
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "ldapsearch -x -H ldap://localhost:1389 -b dc=silo,dc=local -D cn=admin,dc=silo,dc=local -w $${LDAP_ADMIN_PASSWORD} '(objectClass=organization)' >/dev/null 2>&1"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
networks:
|
||||
- silo-net
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Silo API Server
|
||||
# ---------------------------------------------------------------------------
|
||||
silo:
|
||||
build:
|
||||
context: ..
|
||||
dockerfile: build/package/Dockerfile
|
||||
container_name: silo-api
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
minio:
|
||||
condition: service_healthy
|
||||
openldap:
|
||||
condition: service_healthy
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
# These override values in config.docker.yaml via the Go config loader's
|
||||
# direct env var support (see internal/config/config.go).
|
||||
SILO_DB_HOST: postgres
|
||||
SILO_DB_NAME: silo
|
||||
SILO_DB_USER: silo
|
||||
SILO_DB_PASSWORD: ${POSTGRES_PASSWORD}
|
||||
SILO_MINIO_ENDPOINT: minio:9000
|
||||
SILO_MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
|
||||
SILO_MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}
|
||||
ports:
|
||||
- "${SILO_PORT:-8080}:8080"
|
||||
volumes:
|
||||
- ../schemas:/etc/silo/schemas:ro
|
||||
- ./config.docker.yaml:/etc/silo/config.yaml:ro
|
||||
healthcheck:
|
||||
test: ["CMD", "wget", "-qO-", "http://localhost:8080/health"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
start_period: 15s
|
||||
networks:
|
||||
- silo-net
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "3"
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Nginx reverse proxy (optional — enable with --profile nginx)
|
||||
# ---------------------------------------------------------------------------
|
||||
nginx:
|
||||
image: nginx:alpine
|
||||
container_name: silo-nginx
|
||||
restart: unless-stopped
|
||||
profiles:
|
||||
- nginx
|
||||
depends_on:
|
||||
silo:
|
||||
condition: service_healthy
|
||||
ports:
|
||||
- "80:80"
|
||||
- "443:443"
|
||||
volumes:
|
||||
- ./nginx/nginx.conf:/etc/nginx/conf.d/default.conf:ro
|
||||
# Uncomment to mount TLS certificates:
|
||||
# - /path/to/cert.pem:/etc/nginx/ssl/cert.pem:ro
|
||||
# - /path/to/key.pem:/etc/nginx/ssl/key.pem:ro
|
||||
networks:
|
||||
- silo-net
|
||||
|
||||
volumes:
|
||||
postgres_data:
|
||||
minio_data:
|
||||
openldap_data:
|
||||
|
||||
networks:
|
||||
silo-net:
|
||||
driver: bridge
|
||||
@@ -1,5 +1,5 @@
|
||||
# Production Docker Compose for Silo
|
||||
# Uses external PostgreSQL (psql.kindred.internal) and MinIO (minio.kindred.internal)
|
||||
# Uses external PostgreSQL (psql.example.internal) and MinIO (minio.example.internal)
|
||||
#
|
||||
# Usage:
|
||||
# export SILO_DB_PASSWORD=<your-password>
|
||||
@@ -15,23 +15,23 @@ services:
|
||||
container_name: silod
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
# Database connection (psql.kindred.internal)
|
||||
SILO_DB_HOST: psql.kindred.internal
|
||||
SILO_DB_PORT: 5432
|
||||
# Database connection (psql.example.internal)
|
||||
# Supported as direct env var overrides in the Go config loader:
|
||||
SILO_DB_HOST: psql.example.internal
|
||||
SILO_DB_NAME: silo
|
||||
SILO_DB_USER: silo
|
||||
SILO_DB_PASSWORD: ${SILO_DB_PASSWORD:?Database password required}
|
||||
SILO_DB_SSLMODE: require
|
||||
# Note: SILO_DB_PORT and SILO_DB_SSLMODE are NOT supported as direct
|
||||
# env var overrides. Set these in config.yaml instead, or use ${VAR}
|
||||
# syntax in the YAML file. See docs/CONFIGURATION.md for details.
|
||||
|
||||
# MinIO storage (minio.kindred.internal)
|
||||
SILO_MINIO_ENDPOINT: minio.kindred.internal:9000
|
||||
# MinIO storage (minio.example.internal)
|
||||
# Supported as direct env var overrides:
|
||||
SILO_MINIO_ENDPOINT: minio.example.internal:9000
|
||||
SILO_MINIO_ACCESS_KEY: ${SILO_MINIO_ACCESS_KEY:?MinIO access key required}
|
||||
SILO_MINIO_SECRET_KEY: ${SILO_MINIO_SECRET_KEY:?MinIO secret key required}
|
||||
SILO_MINIO_BUCKET: silo-files
|
||||
SILO_MINIO_USE_SSL: "true"
|
||||
|
||||
# Server settings
|
||||
SILO_SERVER_BASE_URL: ${SILO_BASE_URL:-http://silo.kindred.internal:8080}
|
||||
# Note: SILO_MINIO_BUCKET and SILO_MINIO_USE_SSL are NOT supported as
|
||||
# direct env var overrides. Set these in config.yaml instead.
|
||||
ports:
|
||||
- "8080:8080"
|
||||
volumes:
|
||||
|
||||
@@ -69,7 +69,7 @@ services:
|
||||
- "8080:8080"
|
||||
volumes:
|
||||
- ../schemas:/etc/silo/schemas:ro
|
||||
- ../configs/config.yaml:/etc/silo/config.yaml:ro
|
||||
- ./config.dev.yaml:/etc/silo/config.yaml:ro
|
||||
healthcheck:
|
||||
test: ["CMD", "wget", "-qO-", "http://localhost:8080/health"]
|
||||
interval: 10s
|
||||
|
||||
36
deployments/ldap/memberof.ldif
Normal file
36
deployments/ldap/memberof.ldif
Normal file
@@ -0,0 +1,36 @@
|
||||
# Enable the memberOf overlay for OpenLDAP.
|
||||
# When a user is added to a groupOfNames, their entry automatically
|
||||
# gets a memberOf attribute pointing to the group DN.
|
||||
# This is required for Silo's LDAP role mapping.
|
||||
#
|
||||
# Loaded automatically by bitnami/openldap from /docker-entrypoint-initdb.d/
|
||||
|
||||
dn: cn=module{0},cn=config
|
||||
changetype: modify
|
||||
add: olcModuleLoad
|
||||
olcModuleLoad: memberof
|
||||
|
||||
dn: olcOverlay=memberof,olcDatabase={2}mdb,cn=config
|
||||
changetype: add
|
||||
objectClass: olcOverlayConfig
|
||||
objectClass: olcMemberOf
|
||||
olcOverlay: memberof
|
||||
olcMemberOfRefInt: TRUE
|
||||
olcMemberOfDangling: ignore
|
||||
olcMemberOfGroupOC: groupOfNames
|
||||
olcMemberOfMemberAD: member
|
||||
olcMemberOfMemberOfAD: memberOf
|
||||
|
||||
# Enable refint overlay to maintain referential integrity
|
||||
# (removes memberOf when a user is removed from a group)
|
||||
dn: cn=module{0},cn=config
|
||||
changetype: modify
|
||||
add: olcModuleLoad
|
||||
olcModuleLoad: refint
|
||||
|
||||
dn: olcOverlay=refint,olcDatabase={2}mdb,cn=config
|
||||
changetype: add
|
||||
objectClass: olcOverlayConfig
|
||||
objectClass: olcRefintConfig
|
||||
olcOverlay: refint
|
||||
olcRefintAttribute: memberOf member
|
||||
34
deployments/ldap/silo-groups.ldif
Normal file
34
deployments/ldap/silo-groups.ldif
Normal file
@@ -0,0 +1,34 @@
|
||||
# Create Silo role groups for LDAP-based access control.
|
||||
# These groups map to Silo roles via auth.ldap.role_mapping in config.yaml.
|
||||
#
|
||||
# Group hierarchy:
|
||||
# silo-admins -> admin role (full access)
|
||||
# silo-users -> editor role (create/modify items)
|
||||
# silo-viewers -> viewer role (read-only)
|
||||
#
|
||||
# The initial LDAP user (set via LDAP_USERS env var) is added to silo-admins.
|
||||
# Additional users can be added with ldapadd or ldapmodify.
|
||||
#
|
||||
# Loaded automatically by bitnami/openldap from /docker-entrypoint-initdb.d/
|
||||
# Note: This runs after the default tree is created (users/groups OUs exist).
|
||||
|
||||
# Admin group — initial user is a member
|
||||
dn: cn=silo-admins,ou=groups,dc=silo,dc=local
|
||||
objectClass: groupOfNames
|
||||
cn: silo-admins
|
||||
description: Silo administrators (full access)
|
||||
member: cn=siloadmin,ou=users,dc=silo,dc=local
|
||||
|
||||
# Editor group
|
||||
dn: cn=silo-users,ou=groups,dc=silo,dc=local
|
||||
objectClass: groupOfNames
|
||||
cn: silo-users
|
||||
description: Silo editors (create and modify items)
|
||||
member: cn=placeholder,ou=users,dc=silo,dc=local
|
||||
|
||||
# Viewer group
|
||||
dn: cn=silo-viewers,ou=groups,dc=silo,dc=local
|
||||
objectClass: groupOfNames
|
||||
cn: silo-viewers
|
||||
description: Silo viewers (read-only access)
|
||||
member: cn=placeholder,ou=users,dc=silo,dc=local
|
||||
44
deployments/nginx/nginx-nossl.conf
Normal file
44
deployments/nginx/nginx-nossl.conf
Normal file
@@ -0,0 +1,44 @@
|
||||
# Silo Nginx Reverse Proxy — HTTP only (no TLS)
|
||||
#
|
||||
# Use this when TLS is terminated by an external load balancer or when
|
||||
# running on a trusted internal network without HTTPS.
|
||||
|
||||
upstream silo_backend {
|
||||
server silo:8080;
|
||||
}
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
listen [::]:80;
|
||||
server_name _;
|
||||
|
||||
location / {
|
||||
proxy_pass http://silo_backend;
|
||||
proxy_http_version 1.1;
|
||||
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header X-Forwarded-Host $host;
|
||||
proxy_set_header X-Forwarded-Port $server_port;
|
||||
|
||||
# SSE support
|
||||
proxy_set_header Connection "";
|
||||
proxy_buffering off;
|
||||
|
||||
# Timeouts
|
||||
proxy_connect_timeout 60s;
|
||||
proxy_send_timeout 60s;
|
||||
proxy_read_timeout 300s;
|
||||
|
||||
# File uploads (CAD files can be large)
|
||||
client_max_body_size 100M;
|
||||
}
|
||||
|
||||
location /nginx-health {
|
||||
access_log off;
|
||||
return 200 "OK\n";
|
||||
add_header Content-Type text/plain;
|
||||
}
|
||||
}
|
||||
103
deployments/nginx/nginx.conf
Normal file
103
deployments/nginx/nginx.conf
Normal file
@@ -0,0 +1,103 @@
|
||||
# Silo Nginx Reverse Proxy (Docker)
|
||||
#
|
||||
# HTTP reverse proxy with optional HTTPS. To enable TLS:
|
||||
# 1. Uncomment the ssl server block below
|
||||
# 2. Mount your certificate and key in docker-compose:
|
||||
# volumes:
|
||||
# - /path/to/cert.pem:/etc/nginx/ssl/cert.pem:ro
|
||||
# - /path/to/key.pem:/etc/nginx/ssl/key.pem:ro
|
||||
# 3. Uncomment the HTTP-to-HTTPS redirect in the port 80 block
|
||||
|
||||
upstream silo_backend {
|
||||
server silo:8080;
|
||||
}
|
||||
|
||||
# HTTP server
|
||||
server {
|
||||
listen 80;
|
||||
listen [::]:80;
|
||||
server_name _;
|
||||
|
||||
# Uncomment the next line to redirect all HTTP traffic to HTTPS
|
||||
# return 301 https://$host$request_uri;
|
||||
|
||||
location / {
|
||||
proxy_pass http://silo_backend;
|
||||
proxy_http_version 1.1;
|
||||
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header X-Forwarded-Host $host;
|
||||
proxy_set_header X-Forwarded-Port $server_port;
|
||||
|
||||
# SSE support
|
||||
proxy_set_header Connection "";
|
||||
proxy_buffering off;
|
||||
|
||||
# Timeouts
|
||||
proxy_connect_timeout 60s;
|
||||
proxy_send_timeout 60s;
|
||||
proxy_read_timeout 300s;
|
||||
|
||||
# File uploads (CAD files can be large)
|
||||
client_max_body_size 100M;
|
||||
}
|
||||
|
||||
# Health check endpoint for monitoring
|
||||
location /nginx-health {
|
||||
access_log off;
|
||||
return 200 "OK\n";
|
||||
add_header Content-Type text/plain;
|
||||
}
|
||||
}
|
||||
|
||||
# Uncomment for HTTPS (mount certs in docker-compose volumes)
|
||||
# server {
|
||||
# listen 443 ssl http2;
|
||||
# listen [::]:443 ssl http2;
|
||||
# server_name _;
|
||||
#
|
||||
# ssl_certificate /etc/nginx/ssl/cert.pem;
|
||||
# ssl_certificate_key /etc/nginx/ssl/key.pem;
|
||||
#
|
||||
# ssl_protocols TLSv1.2 TLSv1.3;
|
||||
# ssl_ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305;
|
||||
# ssl_prefer_server_ciphers off;
|
||||
# ssl_session_timeout 1d;
|
||||
# ssl_session_cache shared:SSL:10m;
|
||||
# ssl_session_tickets off;
|
||||
#
|
||||
# # Security headers
|
||||
# add_header X-Frame-Options "SAMEORIGIN" always;
|
||||
# add_header X-Content-Type-Options "nosniff" always;
|
||||
# add_header Referrer-Policy "strict-origin-when-cross-origin" always;
|
||||
#
|
||||
# location / {
|
||||
# proxy_pass http://silo_backend;
|
||||
# proxy_http_version 1.1;
|
||||
#
|
||||
# proxy_set_header Host $host;
|
||||
# proxy_set_header X-Real-IP $remote_addr;
|
||||
# proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
# proxy_set_header X-Forwarded-Proto $scheme;
|
||||
# proxy_set_header X-Forwarded-Host $host;
|
||||
# proxy_set_header X-Forwarded-Port $server_port;
|
||||
#
|
||||
# proxy_set_header Connection "";
|
||||
# proxy_buffering off;
|
||||
#
|
||||
# proxy_connect_timeout 60s;
|
||||
# proxy_send_timeout 60s;
|
||||
# proxy_read_timeout 300s;
|
||||
#
|
||||
# client_max_body_size 100M;
|
||||
# }
|
||||
#
|
||||
# location /nginx-health {
|
||||
# access_log off;
|
||||
# return 200 "OK\n";
|
||||
# add_header Content-Type text/plain;
|
||||
# }
|
||||
# }
|
||||
@@ -2,11 +2,11 @@
|
||||
# Copy to /etc/silo/silod.env and fill in values
|
||||
# Permissions: chmod 600 /etc/silo/silod.env
|
||||
|
||||
# Database credentials (psql.kindred.internal)
|
||||
# Database credentials (psql.example.internal)
|
||||
# Database: silo, User: silo
|
||||
SILO_DB_PASSWORD=
|
||||
|
||||
# MinIO credentials (minio.kindred.internal)
|
||||
# MinIO credentials (minio.example.internal)
|
||||
# User: silouser
|
||||
SILO_MINIO_ACCESS_KEY=silouser
|
||||
SILO_MINIO_SECRET_KEY=
|
||||
@@ -23,4 +23,4 @@ SILO_ADMIN_PASSWORD=
|
||||
# SILO_LDAP_BIND_PASSWORD=
|
||||
|
||||
# Optional: Override server base URL
|
||||
# SILO_SERVER_BASE_URL=http://silo.kindred.internal:8080
|
||||
# SILO_SERVER_BASE_URL=http://silo.example.internal:8080
|
||||
|
||||
@@ -38,7 +38,7 @@ API tokens allow the FreeCAD plugin, scripts, and CI pipelines to authenticate w
|
||||
### Creating a Token (CLI)
|
||||
|
||||
```sh
|
||||
export SILO_API_URL=https://silo.kindred.internal
|
||||
export SILO_API_URL=https://silo.example.internal
|
||||
export SILO_API_TOKEN=silo_<your-existing-token>
|
||||
|
||||
silo token create --name "CI pipeline"
|
||||
@@ -140,7 +140,7 @@ auth:
|
||||
|
||||
ldap:
|
||||
enabled: true
|
||||
url: "ldaps://ipa.kindred.internal"
|
||||
url: "ldaps://ipa.example.internal"
|
||||
base_dn: "dc=kindred,dc=internal"
|
||||
user_search_dn: "cn=users,cn=accounts,dc=kindred,dc=internal"
|
||||
user_attr: "uid"
|
||||
@@ -170,10 +170,10 @@ auth:
|
||||
|
||||
oidc:
|
||||
enabled: true
|
||||
issuer_url: "https://keycloak.kindred.internal/realms/silo"
|
||||
issuer_url: "https://keycloak.example.internal/realms/silo"
|
||||
client_id: "silo"
|
||||
client_secret: "" # Set via SILO_OIDC_CLIENT_SECRET
|
||||
redirect_url: "https://silo.kindred.internal/auth/callback"
|
||||
redirect_url: "https://silo.example.internal/auth/callback"
|
||||
scopes: ["openid", "profile", "email"]
|
||||
admin_role: "silo-admin"
|
||||
editor_role: "silo-editor"
|
||||
@@ -186,7 +186,7 @@ auth:
|
||||
auth:
|
||||
cors:
|
||||
allowed_origins:
|
||||
- "https://silo.kindred.internal"
|
||||
- "https://silo.example.internal"
|
||||
```
|
||||
|
||||
## Environment Variables
|
||||
@@ -254,4 +254,4 @@ UPDATE users SET password_hash = '<bcrypt-hash>', is_active = true WHERE usernam
|
||||
|
||||
- Verify the token is set in FreeCAD preferences or `SILO_API_TOKEN`
|
||||
- Check the API URL points to the correct server
|
||||
- Test with curl: `curl -H "Authorization: Bearer silo_..." https://silo.kindred.internal/api/items`
|
||||
- Test with curl: `curl -H "Authorization: Bearer silo_..." https://silo.example.internal/api/items`
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
# Silo Production Deployment Guide
|
||||
|
||||
> **First-time setup?** See the [Installation Guide](INSTALL.md) for step-by-step
|
||||
> instructions. This document covers ongoing maintenance and operations for an
|
||||
> existing deployment.
|
||||
|
||||
This guide covers deploying Silo to a dedicated VM using external PostgreSQL and MinIO services.
|
||||
|
||||
## Table of Contents
|
||||
@@ -17,7 +21,7 @@ This guide covers deploying Silo to a dedicated VM using external PostgreSQL and
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────────────────────────────┐
|
||||
│ silo.kindred.internal │
|
||||
│ silo.example.internal │
|
||||
│ ┌───────────────────────────────────────────────────────────┐ │
|
||||
│ │ silod │ │
|
||||
│ │ (Silo API Server) │ │
|
||||
@@ -27,7 +31,7 @@ This guide covers deploying Silo to a dedicated VM using external PostgreSQL and
|
||||
│ │
|
||||
▼ ▼
|
||||
┌─────────────────────────┐ ┌─────────────────────────────────┐
|
||||
│ psql.kindred.internal │ │ minio.kindred.internal │
|
||||
│ psql.example.internal │ │ minio.example.internal │
|
||||
│ PostgreSQL 16 │ │ MinIO S3 │
|
||||
│ :5432 │ │ :9000 (API) │
|
||||
│ │ │ :9001 (Console) │
|
||||
@@ -40,8 +44,8 @@ The following external services are already configured:
|
||||
|
||||
| Service | Host | Database/Bucket | User |
|
||||
|---------|------|-----------------|------|
|
||||
| PostgreSQL | psql.kindred.internal:5432 | silo | silo |
|
||||
| MinIO | minio.kindred.internal:9000 | silo-files | silouser |
|
||||
| PostgreSQL | psql.example.internal:5432 | silo | silo |
|
||||
| MinIO | minio.example.internal:9000 | silo-files | silouser |
|
||||
|
||||
Migrations have been applied to the database.
|
||||
|
||||
@@ -53,10 +57,10 @@ For a fresh VM, run these commands:
|
||||
|
||||
```bash
|
||||
# 1. SSH to the target host
|
||||
ssh root@silo.kindred.internal
|
||||
ssh root@silo.example.internal
|
||||
|
||||
# 2. Download and run setup script
|
||||
curl -fsSL https://gitea.kindred.internal/kindred/silo-0062/raw/branch/main/scripts/setup-host.sh | bash
|
||||
curl -fsSL https://git.kindred-systems.com/kindred/silo/raw/branch/main/scripts/setup-host.sh | bash
|
||||
|
||||
# 3. Configure credentials
|
||||
nano /etc/silo/silod.env
|
||||
@@ -69,16 +73,16 @@ nano /etc/silo/silod.env
|
||||
|
||||
## Initial Setup
|
||||
|
||||
Run the setup script once on `silo.kindred.internal` to prepare the host:
|
||||
Run the setup script once on `silo.example.internal` to prepare the host:
|
||||
|
||||
```bash
|
||||
# Option 1: If you have the repo locally
|
||||
scp scripts/setup-host.sh root@silo.kindred.internal:/tmp/
|
||||
ssh root@silo.kindred.internal 'bash /tmp/setup-host.sh'
|
||||
scp scripts/setup-host.sh root@silo.example.internal:/tmp/
|
||||
ssh root@silo.example.internal 'bash /tmp/setup-host.sh'
|
||||
|
||||
# Option 2: Direct on the host
|
||||
ssh root@silo.kindred.internal
|
||||
curl -fsSL https://git.kindred.internal/kindred/silo/raw/branch/main/scripts/setup-host.sh -o /tmp/setup-host.sh
|
||||
ssh root@silo.example.internal
|
||||
curl -fsSL https://git.kindred-systems.com/kindred/silo/raw/branch/main/scripts/setup-host.sh -o /tmp/setup-host.sh
|
||||
bash /tmp/setup-host.sh
|
||||
```
|
||||
|
||||
@@ -100,10 +104,10 @@ sudo nano /etc/silo/silod.env
|
||||
Fill in the values:
|
||||
|
||||
```bash
|
||||
# Database credentials (psql.kindred.internal)
|
||||
# Database credentials (psql.example.internal)
|
||||
SILO_DB_PASSWORD=your-database-password
|
||||
|
||||
# MinIO credentials (minio.kindred.internal)
|
||||
# MinIO credentials (minio.example.internal)
|
||||
SILO_MINIO_ACCESS_KEY=silouser
|
||||
SILO_MINIO_SECRET_KEY=your-minio-secret-key
|
||||
```
|
||||
@@ -114,10 +118,10 @@ Before deploying, verify connectivity to external services:
|
||||
|
||||
```bash
|
||||
# Test PostgreSQL
|
||||
psql -h psql.kindred.internal -U silo -d silo -c 'SELECT 1'
|
||||
psql -h psql.example.internal -U silo -d silo -c 'SELECT 1'
|
||||
|
||||
# Test MinIO
|
||||
curl -I http://minio.kindred.internal:9000/minio/health/live
|
||||
curl -I http://minio.example.internal:9000/minio/health/live
|
||||
```
|
||||
|
||||
---
|
||||
@@ -129,7 +133,7 @@ curl -I http://minio.kindred.internal:9000/minio/health/live
|
||||
To deploy or update Silo, run the deploy script on the target host:
|
||||
|
||||
```bash
|
||||
ssh root@silo.kindred.internal
|
||||
ssh root@silo.example.internal
|
||||
/opt/silo/src/scripts/deploy.sh
|
||||
```
|
||||
|
||||
@@ -165,7 +169,7 @@ sudo /opt/silo/src/scripts/deploy.sh --status
|
||||
You can override the git repository URL and branch:
|
||||
|
||||
```bash
|
||||
export SILO_REPO_URL=https://git.kindred.internal/kindred/silo.git
|
||||
export SILO_REPO_URL=https://git.kindred-systems.com/kindred/silo.git
|
||||
export SILO_BRANCH=main
|
||||
sudo -E /opt/silo/src/scripts/deploy.sh
|
||||
```
|
||||
@@ -247,7 +251,7 @@ curl http://localhost:8080/ready
|
||||
To update to the latest version:
|
||||
|
||||
```bash
|
||||
ssh root@silo.kindred.internal
|
||||
ssh root@silo.example.internal
|
||||
/opt/silo/src/scripts/deploy.sh
|
||||
```
|
||||
|
||||
@@ -269,7 +273,7 @@ When new migrations are added, run them manually:
|
||||
ls -la /opt/silo/src/migrations/
|
||||
|
||||
# Run a specific migration
|
||||
psql -h psql.kindred.internal -U silo -d silo -f /opt/silo/src/migrations/008_new_feature.sql
|
||||
psql -h psql.example.internal -U silo -d silo -f /opt/silo/src/migrations/008_new_feature.sql
|
||||
```
|
||||
|
||||
---
|
||||
@@ -303,13 +307,13 @@ psql -h psql.kindred.internal -U silo -d silo -f /opt/silo/src/migrations/008_ne
|
||||
|
||||
1. Test network connectivity:
|
||||
```bash
|
||||
nc -zv psql.kindred.internal 5432
|
||||
nc -zv psql.example.internal 5432
|
||||
```
|
||||
|
||||
2. Test credentials:
|
||||
```bash
|
||||
source /etc/silo/silod.env
|
||||
PGPASSWORD=$SILO_DB_PASSWORD psql -h psql.kindred.internal -U silo -d silo -c 'SELECT 1'
|
||||
PGPASSWORD=$SILO_DB_PASSWORD psql -h psql.example.internal -U silo -d silo -c 'SELECT 1'
|
||||
```
|
||||
|
||||
3. Check `pg_hba.conf` on PostgreSQL server allows connections from this host.
|
||||
@@ -318,12 +322,12 @@ psql -h psql.kindred.internal -U silo -d silo -f /opt/silo/src/migrations/008_ne
|
||||
|
||||
1. Test network connectivity:
|
||||
```bash
|
||||
nc -zv minio.kindred.internal 9000
|
||||
nc -zv minio.example.internal 9000
|
||||
```
|
||||
|
||||
2. Test with curl:
|
||||
```bash
|
||||
curl -I http://minio.kindred.internal:9000/minio/health/live
|
||||
curl -I http://minio.example.internal:9000/minio/health/live
|
||||
```
|
||||
|
||||
3. Check SSL settings in config match MinIO setup:
|
||||
@@ -340,8 +344,8 @@ curl -v http://localhost:8080/health
|
||||
curl -v http://localhost:8080/ready
|
||||
|
||||
# If ready fails but health passes, check external services
|
||||
psql -h psql.kindred.internal -U silo -d silo -c 'SELECT 1'
|
||||
curl http://minio.kindred.internal:9000/minio/health/live
|
||||
psql -h psql.example.internal -U silo -d silo -c 'SELECT 1'
|
||||
curl http://minio.example.internal:9000/minio/health/live
|
||||
```
|
||||
|
||||
### Build Fails
|
||||
@@ -391,14 +395,14 @@ This script:
|
||||
getcert list
|
||||
```
|
||||
|
||||
2. The silo config is already updated to use `https://silo.kindred.internal` as base URL. Restart silo:
|
||||
2. The silo config is already updated to use `https://silo.example.internal` as base URL. Restart silo:
|
||||
```bash
|
||||
sudo systemctl restart silod
|
||||
```
|
||||
|
||||
3. Test the setup:
|
||||
```bash
|
||||
curl https://silo.kindred.internal/health
|
||||
curl https://silo.example.internal/health
|
||||
```
|
||||
|
||||
### Certificate Management
|
||||
@@ -422,7 +426,7 @@ For clients to trust the Silo HTTPS certificate, they need the IPA CA:
|
||||
|
||||
```bash
|
||||
# Download CA cert
|
||||
curl -o /tmp/ipa-ca.crt https://ipa.kindred.internal/ipa/config/ca.crt
|
||||
curl -o /tmp/ipa-ca.crt https://ipa.example.internal/ipa/config/ca.crt
|
||||
|
||||
# Ubuntu/Debian
|
||||
sudo cp /tmp/ipa-ca.crt /usr/local/share/ca-certificates/ipa-ca.crt
|
||||
|
||||
@@ -365,7 +365,7 @@ internal/
|
||||
handlers.go # Items, schemas, projects, revisions
|
||||
middleware.go # Auth middleware
|
||||
odoo_handlers.go # Odoo integration endpoints
|
||||
routes.go # Route registration (75 endpoints)
|
||||
routes.go # Route registration (78 endpoints)
|
||||
search.go # Fuzzy search
|
||||
auth/
|
||||
auth.go # Auth service: local, LDAP, OIDC
|
||||
|
||||
518
docs/INSTALL.md
Normal file
518
docs/INSTALL.md
Normal file
@@ -0,0 +1,518 @@
|
||||
# Installing Silo
|
||||
|
||||
This guide covers two installation methods:
|
||||
|
||||
- **[Option A: Docker Compose](#option-a-docker-compose)** — self-contained stack with all services. Recommended for evaluation, small teams, and environments where Docker is the standard.
|
||||
- **[Option B: Daemon Install](#option-b-daemon-install-systemd--external-services)** — systemd service with external PostgreSQL, MinIO, and optional LDAP/nginx. Recommended for production deployments integrated with existing infrastructure.
|
||||
|
||||
Both methods produce the same result: a running Silo server with a web UI, REST API, and authentication.
|
||||
|
||||
---
|
||||
|
||||
## Table of Contents
|
||||
|
||||
- [Prerequisites](#prerequisites)
|
||||
- [Option A: Docker Compose](#option-a-docker-compose)
|
||||
- [A.1 Prerequisites](#a1-prerequisites)
|
||||
- [A.2 Clone the Repository](#a2-clone-the-repository)
|
||||
- [A.3 Run the Setup Script](#a3-run-the-setup-script)
|
||||
- [A.4 Start the Stack](#a4-start-the-stack)
|
||||
- [A.5 Verify the Installation](#a5-verify-the-installation)
|
||||
- [A.6 LDAP Users and Groups](#a6-ldap-users-and-groups)
|
||||
- [A.7 Optional: Enable Nginx Reverse Proxy](#a7-optional-enable-nginx-reverse-proxy)
|
||||
- [A.8 Stopping, Starting, and Upgrading](#a8-stopping-starting-and-upgrading)
|
||||
- [Option B: Daemon Install (systemd + External Services)](#option-b-daemon-install-systemd--external-services)
|
||||
- [B.1 Architecture Overview](#b1-architecture-overview)
|
||||
- [B.2 Prerequisites](#b2-prerequisites)
|
||||
- [B.3 Set Up External Services](#b3-set-up-external-services)
|
||||
- [B.4 Prepare the Host](#b4-prepare-the-host)
|
||||
- [B.5 Configure Credentials](#b5-configure-credentials)
|
||||
- [B.6 Deploy](#b6-deploy)
|
||||
- [B.7 Set Up Nginx and TLS](#b7-set-up-nginx-and-tls)
|
||||
- [B.8 Verify the Installation](#b8-verify-the-installation)
|
||||
- [B.9 Upgrading](#b9-upgrading)
|
||||
- [Post-Install Configuration](#post-install-configuration)
|
||||
- [Further Reading](#further-reading)
|
||||
|
||||
---
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Regardless of which method you choose:
|
||||
|
||||
- **Git** to clone the repository
|
||||
- A machine with at least **2 GB RAM** and **10 GB of free disk space**
|
||||
- Network access to pull container images or download Go/Node toolchains
|
||||
|
||||
---
|
||||
|
||||
## Option A: Docker Compose
|
||||
|
||||
A single Docker Compose file runs everything: PostgreSQL, MinIO, OpenLDAP, and Silo. An optional nginx container can be enabled for reverse proxying.
|
||||
|
||||
### A.1 Prerequisites
|
||||
|
||||
- [Docker Engine](https://docs.docker.com/engine/install/) 24+ with the [Compose plugin](https://docs.docker.com/compose/install/) (v2)
|
||||
- `openssl` (used by the setup script to generate secrets)
|
||||
|
||||
Verify your installation:
|
||||
|
||||
```bash
|
||||
docker --version # Docker Engine 24+
|
||||
docker compose version # Docker Compose v2+
|
||||
```
|
||||
|
||||
### A.2 Clone the Repository
|
||||
|
||||
```bash
|
||||
git clone https://git.kindred-systems.com/kindred/silo.git
|
||||
cd silo
|
||||
```
|
||||
|
||||
### A.3 Run the Setup Script
|
||||
|
||||
The setup script generates credentials and configuration files:
|
||||
|
||||
```bash
|
||||
./scripts/setup-docker.sh
|
||||
```
|
||||
|
||||
It prompts for:
|
||||
- Server domain (default: `localhost`)
|
||||
- PostgreSQL password (auto-generated if you press Enter)
|
||||
- MinIO credentials (auto-generated)
|
||||
- OpenLDAP admin password and initial user (auto-generated)
|
||||
- Silo local admin account (fallback when LDAP is unavailable)
|
||||
|
||||
For automated/CI environments, use non-interactive mode:
|
||||
|
||||
```bash
|
||||
./scripts/setup-docker.sh --non-interactive
|
||||
```
|
||||
|
||||
The script writes two files:
|
||||
- `deployments/.env` — secrets for Docker Compose
|
||||
- `deployments/config.docker.yaml` — Silo server configuration
|
||||
|
||||
### A.4 Start the Stack
|
||||
|
||||
```bash
|
||||
docker compose -f deployments/docker-compose.allinone.yaml up -d
|
||||
```
|
||||
|
||||
Wait for all services to become healthy:
|
||||
|
||||
```bash
|
||||
docker compose -f deployments/docker-compose.allinone.yaml ps
|
||||
```
|
||||
|
||||
You should see `silo-postgres`, `silo-minio`, `silo-openldap`, and `silo-api` all in a healthy state.
|
||||
|
||||
View logs:
|
||||
|
||||
```bash
|
||||
# All services
|
||||
docker compose -f deployments/docker-compose.allinone.yaml logs -f
|
||||
|
||||
# Silo only
|
||||
docker compose -f deployments/docker-compose.allinone.yaml logs -f silo
|
||||
```
|
||||
|
||||
### A.5 Verify the Installation
|
||||
|
||||
```bash
|
||||
# Health check
|
||||
curl http://localhost:8080/health
|
||||
|
||||
# Readiness check (includes database and storage connectivity)
|
||||
curl http://localhost:8080/ready
|
||||
```
|
||||
|
||||
Open http://localhost:8080 in your browser. Log in with either:
|
||||
|
||||
- **LDAP account**: the username and password shown by the setup script (default: `siloadmin`)
|
||||
- **Local admin**: the local admin credentials shown by the setup script (default: `admin`)
|
||||
|
||||
The credentials were printed at the end of the setup script output and are stored in `deployments/.env`.
|
||||
|
||||
### A.6 LDAP Users and Groups
|
||||
|
||||
The Docker stack includes an OpenLDAP server with three preconfigured groups that map to Silo roles:
|
||||
|
||||
| LDAP Group | Silo Role | Access Level |
|
||||
|------------|-----------|-------------|
|
||||
| `cn=silo-admins,ou=groups,dc=silo,dc=local` | admin | Full access |
|
||||
| `cn=silo-users,ou=groups,dc=silo,dc=local` | editor | Create and modify items |
|
||||
| `cn=silo-viewers,ou=groups,dc=silo,dc=local` | viewer | Read-only |
|
||||
|
||||
The initial LDAP user (default: `siloadmin`) is added to `silo-admins`.
|
||||
|
||||
**Add a new LDAP user:**
|
||||
|
||||
```bash
|
||||
# From the host (using the exposed port)
|
||||
ldapadd -x -H ldap://localhost:1389 \
|
||||
-D "cn=admin,dc=silo,dc=local" \
|
||||
-w "YOUR_LDAP_ADMIN_PASSWORD" << EOF
|
||||
dn: cn=jdoe,ou=users,dc=silo,dc=local
|
||||
objectClass: inetOrgPerson
|
||||
cn: jdoe
|
||||
sn: Doe
|
||||
userPassword: changeme
|
||||
mail: jdoe@example.com
|
||||
EOF
|
||||
```
|
||||
|
||||
**Add a user to a group:**
|
||||
|
||||
```bash
|
||||
ldapmodify -x -H ldap://localhost:1389 \
|
||||
-D "cn=admin,dc=silo,dc=local" \
|
||||
-w "YOUR_LDAP_ADMIN_PASSWORD" << EOF
|
||||
dn: cn=silo-users,ou=groups,dc=silo,dc=local
|
||||
changetype: modify
|
||||
add: member
|
||||
member: cn=jdoe,ou=users,dc=silo,dc=local
|
||||
EOF
|
||||
```
|
||||
|
||||
**List all users:**
|
||||
|
||||
```bash
|
||||
ldapsearch -x -H ldap://localhost:1389 \
|
||||
-b "ou=users,dc=silo,dc=local" \
|
||||
-D "cn=admin,dc=silo,dc=local" \
|
||||
-w "YOUR_LDAP_ADMIN_PASSWORD" "(objectClass=inetOrgPerson)" cn mail memberOf
|
||||
```
|
||||
|
||||
### A.7 Optional: Enable Nginx Reverse Proxy
|
||||
|
||||
To place nginx in front of Silo (for TLS termination or to serve on port 80):
|
||||
|
||||
```bash
|
||||
docker compose -f deployments/docker-compose.allinone.yaml --profile nginx up -d
|
||||
```
|
||||
|
||||
By default nginx listens on ports 80 and 443 and proxies to the Silo container. The configuration is at `deployments/nginx/nginx.conf`.
|
||||
|
||||
**To enable HTTPS**, edit `deployments/docker-compose.allinone.yaml` and uncomment the TLS certificate volume mounts in the `nginx` service, then uncomment the HTTPS server block in `deployments/nginx/nginx.conf`. See the comments in those files for details.
|
||||
|
||||
If you already have your own reverse proxy or load balancer, skip the nginx profile and point your proxy at port 8080.
|
||||
|
||||
### A.8 Stopping, Starting, and Upgrading
|
||||
|
||||
```bash
|
||||
# Stop the stack (data is preserved in Docker volumes)
|
||||
docker compose -f deployments/docker-compose.allinone.yaml down
|
||||
|
||||
# Start again
|
||||
docker compose -f deployments/docker-compose.allinone.yaml up -d
|
||||
|
||||
# Stop and delete all data (WARNING: destroys database, files, and LDAP data)
|
||||
docker compose -f deployments/docker-compose.allinone.yaml down -v
|
||||
```
|
||||
|
||||
**To upgrade to a newer version:**
|
||||
|
||||
```bash
|
||||
cd silo
|
||||
git pull
|
||||
docker compose -f deployments/docker-compose.allinone.yaml up -d --build
|
||||
```
|
||||
|
||||
The Silo container is rebuilt from the updated source. Database migrations in `migrations/` are mounted into the PostgreSQL init mechanism, which runs only when the data volume is first created — on an upgrade with an existing volume, new migrations are not re-applied this way. Verify whether the Silo container applies pending migrations on startup; otherwise apply new migration files manually (see [DEPLOYMENT.md](DEPLOYMENT.md)).
|
||||
|
||||
---
|
||||
|
||||
## Option B: Daemon Install (systemd + External Services)
|
||||
|
||||
This method runs Silo as a systemd service on a dedicated host, connecting to externally managed PostgreSQL and MinIO services and, optionally, an LDAP server.
|
||||
|
||||
### B.1 Architecture Overview
|
||||
|
||||
```
|
||||
┌──────────────────────┐
|
||||
│ Silo Host │
|
||||
│ ┌────────────────┐ │
|
||||
HTTPS (443) ──►│ │ nginx │ │
|
||||
│ └───────┬────────┘ │
|
||||
│ │ :8080 │
|
||||
│ ┌───────▼────────┐ │
|
||||
│ │ silod │ │
|
||||
│ │ (API server) │ │
|
||||
│ └──┬─────────┬───┘ │
|
||||
└─────┼─────────┼──────┘
|
||||
│ │
|
||||
┌───────────▼──┐ ┌───▼──────────────┐
|
||||
│ PostgreSQL 16│ │ MinIO (S3) │
|
||||
│ :5432 │ │ :9000 API │
|
||||
└──────────────┘ │ :9001 Console │
|
||||
└──────────────────┘
|
||||
```
|
||||
|
||||
### B.2 Prerequisites
|
||||
|
||||
- Linux host (Debian/Ubuntu or RHEL/Fedora/AlmaLinux)
|
||||
- Root or sudo access
|
||||
- Network access to your PostgreSQL and MinIO servers
|
||||
|
||||
The setup script installs Go and other build dependencies automatically.
|
||||
|
||||
### B.3 Set Up External Services
|
||||
|
||||
#### PostgreSQL 16
|
||||
|
||||
Install PostgreSQL and create the Silo database:
|
||||
|
||||
- [PostgreSQL downloads](https://www.postgresql.org/download/)
|
||||
|
||||
```bash
|
||||
# After installing PostgreSQL, create the database and user:
|
||||
sudo -u postgres createuser silo
|
||||
sudo -u postgres createdb -O silo silo
|
||||
sudo -u postgres psql -c "ALTER USER silo WITH PASSWORD 'your-password';"
|
||||
```
|
||||
|
||||
Ensure the Silo host can connect (check `pg_hba.conf` on the PostgreSQL server).
|
||||
|
||||
Verify:
|
||||
|
||||
```bash
|
||||
psql -h YOUR_PG_HOST -U silo -d silo -c 'SELECT 1'
|
||||
```
|
||||
|
||||
#### MinIO
|
||||
|
||||
Install MinIO and create a bucket and service account:
|
||||
|
||||
- [MinIO quickstart](https://min.io/docs/minio/linux/index.html)
|
||||
|
||||
```bash
|
||||
# Using the MinIO client (mc):
|
||||
mc alias set local http://YOUR_MINIO_HOST:9000 minioadmin minioadmin
|
||||
mc mb local/silo-files
|
||||
mc admin user add local silouser YOUR_MINIO_SECRET
|
||||
mc admin policy attach local readwrite --user silouser
|
||||
```
|
||||
|
||||
Verify:
|
||||
|
||||
```bash
|
||||
curl -I http://YOUR_MINIO_HOST:9000/minio/health/live
|
||||
```
|
||||
|
||||
#### LDAP / FreeIPA (Optional)
|
||||
|
||||
For LDAP authentication, you need an LDAP server with user and group entries. Options:
|
||||
|
||||
- [FreeIPA](https://www.freeipa.org/page/Quick_Start_Guide) — full identity management (recommended for organizations already using it)
|
||||
- [OpenLDAP](https://www.openldap.org/doc/admin26/) — lightweight LDAP server
|
||||
|
||||
Silo needs:
|
||||
- A base DN (e.g., `dc=example,dc=com`)
|
||||
- Users under a known OU (e.g., `cn=users,cn=accounts,dc=example,dc=com`)
|
||||
- Groups that map to Silo roles (`admin`, `editor`, `viewer`)
|
||||
- The `memberOf` overlay enabled (so user entries have `memberOf` attributes)
|
||||
|
||||
See [CONFIGURATION.md — LDAP](CONFIGURATION.md#ldap--freeipa) for the full LDAP configuration reference.
|
||||
|
||||
### B.4 Prepare the Host
|
||||
|
||||
Run the setup script on the target host:
|
||||
|
||||
```bash
|
||||
# Copy and run the script
|
||||
scp scripts/setup-host.sh root@YOUR_HOST:/tmp/
|
||||
ssh root@YOUR_HOST 'bash /tmp/setup-host.sh'
|
||||
```
|
||||
|
||||
Or directly on the host:
|
||||
|
||||
```bash
|
||||
sudo bash scripts/setup-host.sh
|
||||
```
|
||||
|
||||
The script:
|
||||
1. Installs dependencies (git, Go 1.24)
|
||||
2. Creates the `silo` system user
|
||||
3. Creates directories (`/opt/silo`, `/etc/silo`)
|
||||
4. Clones the repository
|
||||
5. Creates the environment file template
|
||||
|
||||
To override the default service hostnames:
|
||||
|
||||
```bash
|
||||
SILO_DB_HOST=db.example.com SILO_MINIO_HOST=s3.example.com sudo -E bash scripts/setup-host.sh
|
||||
```
|
||||
|
||||
### B.5 Configure Credentials
|
||||
|
||||
Edit the environment file with your service credentials:
|
||||
|
||||
```bash
|
||||
sudo nano /etc/silo/silod.env
|
||||
```
|
||||
|
||||
```bash
|
||||
# Database
|
||||
SILO_DB_PASSWORD=your-database-password
|
||||
|
||||
# MinIO
|
||||
SILO_MINIO_ACCESS_KEY=silouser
|
||||
SILO_MINIO_SECRET_KEY=your-minio-secret
|
||||
|
||||
# Authentication
|
||||
SILO_SESSION_SECRET=generate-a-long-random-string
|
||||
SILO_ADMIN_USERNAME=admin
|
||||
SILO_ADMIN_PASSWORD=your-admin-password
|
||||
```
|
||||
|
||||
Generate a session secret:
|
||||
|
||||
```bash
|
||||
openssl rand -hex 32
|
||||
```
|
||||
|
||||
Review the server configuration:
|
||||
|
||||
```bash
|
||||
sudo nano /etc/silo/config.yaml
|
||||
```
|
||||
|
||||
Update `database.host`, `storage.endpoint`, `server.base_url`, and authentication settings for your environment. See [CONFIGURATION.md](CONFIGURATION.md) for all options.
|
||||
|
||||
### B.6 Deploy
|
||||
|
||||
Run the deploy script:
|
||||
|
||||
```bash
|
||||
sudo /opt/silo/src/scripts/deploy.sh
|
||||
```
|
||||
|
||||
The script:
|
||||
1. Pulls latest code from git
|
||||
2. Builds the `silod` binary and React frontend
|
||||
3. Installs files to `/opt/silo` and `/etc/silo`
|
||||
4. Runs database migrations
|
||||
5. Installs and starts the systemd service
|
||||
|
||||
Deploy options:
|
||||
|
||||
```bash
|
||||
# Skip git pull (use current checkout)
|
||||
sudo /opt/silo/src/scripts/deploy.sh --no-pull
|
||||
|
||||
# Skip build (use existing binary)
|
||||
sudo /opt/silo/src/scripts/deploy.sh --no-build
|
||||
|
||||
# Just restart the service
|
||||
sudo /opt/silo/src/scripts/deploy.sh --restart-only
|
||||
|
||||
# Check service status
|
||||
sudo /opt/silo/src/scripts/deploy.sh --status
|
||||
```
|
||||
|
||||
To override the target host or database host:
|
||||
|
||||
```bash
|
||||
SILO_DEPLOY_TARGET=silo.example.com SILO_DB_HOST=db.example.com sudo -E /opt/silo/src/scripts/deploy.sh
|
||||
```
|
||||
|
||||
### B.7 Set Up Nginx and TLS
|
||||
|
||||
#### With FreeIPA (automated)
|
||||
|
||||
If your organization uses FreeIPA, the included script handles nginx setup, IPA enrollment, and certificate issuance:
|
||||
|
||||
```bash
|
||||
sudo /opt/silo/src/scripts/setup-ipa-nginx.sh
|
||||
```
|
||||
|
||||
Override the hostname if needed:
|
||||
|
||||
```bash
|
||||
SILO_HOSTNAME=silo.example.com sudo -E /opt/silo/src/scripts/setup-ipa-nginx.sh
|
||||
```
|
||||
|
||||
The script installs nginx, enrolls the host in FreeIPA, requests a TLS certificate from the IPA CA (auto-renewed by certmonger), and configures nginx as an HTTPS reverse proxy.
|
||||
|
||||
#### Manual nginx setup
|
||||
|
||||
Install nginx and create a config:
|
||||
|
||||
```bash
|
||||
sudo apt install nginx # or: sudo dnf install nginx
|
||||
```
|
||||
|
||||
Use the template at `deployments/nginx/nginx.conf` as a starting point. Copy it to `/etc/nginx/sites-available/silo`, update the `server_name` and certificate paths, then enable it:
|
||||
|
||||
```bash
|
||||
sudo ln -sf /etc/nginx/sites-available/silo /etc/nginx/sites-enabled/silo
|
||||
sudo nginx -t
|
||||
sudo systemctl reload nginx
|
||||
```
|
||||
|
||||
After enabling HTTPS, update `server.base_url` in `/etc/silo/config.yaml` to use `https://` and restart Silo:
|
||||
|
||||
```bash
|
||||
sudo systemctl restart silod
|
||||
```
|
||||
|
||||
### B.8 Verify the Installation
|
||||
|
||||
```bash
|
||||
# Service status
|
||||
sudo systemctl status silod
|
||||
|
||||
# Health check
|
||||
curl http://localhost:8080/health
|
||||
|
||||
# Readiness check
|
||||
curl http://localhost:8080/ready
|
||||
|
||||
# Follow logs
|
||||
sudo journalctl -u silod -f
|
||||
```
|
||||
|
||||
Open your configured base URL in a browser and log in.
|
||||
|
||||
### B.9 Upgrading
|
||||
|
||||
```bash
|
||||
# Pull latest code and redeploy
|
||||
sudo /opt/silo/src/scripts/deploy.sh
|
||||
|
||||
# Or deploy a specific version
|
||||
cd /opt/silo/src
|
||||
git fetch --all --tags
|
||||
git checkout v1.2.3
|
||||
sudo /opt/silo/src/scripts/deploy.sh --no-pull
|
||||
```
|
||||
|
||||
New database migrations are applied automatically during deployment.
|
||||
|
||||
---
|
||||
|
||||
## Post-Install Configuration
|
||||
|
||||
After a successful installation:
|
||||
|
||||
- **Authentication**: Configure LDAP, OIDC, or local auth backends. See [CONFIGURATION.md — Authentication](CONFIGURATION.md#authentication).
|
||||
- **Schemas**: Part numbering schemas are loaded from YAML files. See the `schemas/` directory and [CONFIGURATION.md — Schemas](CONFIGURATION.md#schemas).
|
||||
- **Read-only mode**: Toggle write protection at runtime with `kill -USR1 $(pidof silod)` or by setting `server.read_only: true` in the config.
|
||||
- **Ongoing maintenance**: See [DEPLOYMENT.md](DEPLOYMENT.md) for service management, log viewing, troubleshooting, and the security checklist.
|
||||
|
||||
---
|
||||
|
||||
## Further Reading
|
||||
|
||||
| Document | Description |
|
||||
|----------|-------------|
|
||||
| [CONFIGURATION.md](CONFIGURATION.md) | Complete `config.yaml` reference |
|
||||
| [DEPLOYMENT.md](DEPLOYMENT.md) | Operations guide: maintenance, troubleshooting, security |
|
||||
| [AUTH.md](AUTH.md) | Authentication system design |
|
||||
| [AUTH_USER_GUIDE.md](AUTH_USER_GUIDE.md) | User guide for login, tokens, and roles |
|
||||
| [SPECIFICATION.md](SPECIFICATION.md) | Full design specification and API reference |
|
||||
| [STATUS.md](STATUS.md) | Implementation status |
|
||||
| [GAP_ANALYSIS.md](GAP_ANALYSIS.md) | Gap analysis and revision control roadmap |
|
||||
| [COMPONENT_AUDIT.md](COMPONENT_AUDIT.md) | Component audit tool design |
|
||||
@@ -37,7 +37,7 @@ Silo treats **part numbering schemas as configuration, not code**. Multiple numb
|
||||
▼
|
||||
┌─────────────────────────────────────────────────────────────┐
|
||||
│ Silo Server (silod) │
|
||||
│ - REST API (75 endpoints) │
|
||||
│ - REST API (78 endpoints) │
|
||||
│ - Authentication (local, LDAP, OIDC) │
|
||||
│ - Schema parsing and validation │
|
||||
│ - Part number generation engine │
|
||||
@@ -50,7 +50,7 @@ Silo treats **part numbering schemas as configuration, not code**. Multiple numb
|
||||
▼ ▼
|
||||
┌─────────────────────────┐ ┌─────────────────────────────┐
|
||||
│ PostgreSQL │ │ MinIO │
|
||||
│ (psql.kindred.internal)│ │ - File storage │
|
||||
│ (psql.example.internal)│ │ - File storage │
|
||||
│ - Item metadata │ │ - Versioned objects │
|
||||
│ - Relationships │ │ - Thumbnails │
|
||||
│ - Revision history │ │ │
|
||||
@@ -63,7 +63,7 @@ Silo treats **part numbering schemas as configuration, not code**. Multiple numb
|
||||
|
||||
| Component | Technology | Notes |
|
||||
|-----------|------------|-------|
|
||||
| Database | PostgreSQL 16 | Existing instance at psql.kindred.internal |
|
||||
| Database | PostgreSQL 16 | Existing instance at psql.example.internal |
|
||||
| File Storage | MinIO | S3-compatible, versioning enabled |
|
||||
| CLI & API Server | Go (1.24) | chi/v5 router, pgx/v5 driver, zerolog |
|
||||
| Authentication | Multi-backend | Local (bcrypt), LDAP/FreeIPA, OIDC/Keycloak |
|
||||
@@ -598,7 +598,7 @@ See [AUTH.md](AUTH.md) for full architecture details and [AUTH_USER_GUIDE.md](AU
|
||||
|
||||
## 11. API Design
|
||||
|
||||
### 11.1 REST Endpoints (75 Implemented)
|
||||
### 11.1 REST Endpoints (78 Implemented)
|
||||
|
||||
```
|
||||
# Health (no auth)
|
||||
@@ -615,6 +615,9 @@ GET /auth/callback # OIDC callback
|
||||
# Public API (no auth required)
|
||||
GET /api/auth/config # Auth backend configuration (for login UI)
|
||||
|
||||
# Server-Sent Events (require auth)
|
||||
GET /api/events # SSE stream for real-time updates
|
||||
|
||||
# Auth API (require auth)
|
||||
GET /api/auth/me # Current authenticated user
|
||||
GET /api/auth/tokens # List user's API tokens
|
||||
@@ -627,7 +630,7 @@ POST /api/uploads/presign # Get presigned MinI
|
||||
# Schemas (read: viewer, write: editor)
|
||||
GET /api/schemas # List all schemas
|
||||
GET /api/schemas/{name} # Get schema details
|
||||
GET /api/schemas/{name}/properties # Get property schema for category
|
||||
GET /api/schemas/{name}/form # Get form descriptor (field groups, widgets, category picker)
|
||||
POST /api/schemas/{name}/segments/{segment}/values # Add enum value [editor]
|
||||
PUT /api/schemas/{name}/segments/{segment}/values/{code} # Update enum value [editor]
|
||||
DELETE /api/schemas/{name}/segments/{segment}/values/{code} # Delete enum value [editor]
|
||||
@@ -644,6 +647,7 @@ DELETE /api/projects/{code} # Delete project [ed
|
||||
# Items (read: viewer, write: editor)
|
||||
GET /api/items # List/filter items
|
||||
GET /api/items/search # Fuzzy search
|
||||
GET /api/items/by-uuid/{uuid} # Get item by UUID
|
||||
GET /api/items/export.csv # Export items to CSV
|
||||
GET /api/items/template.csv # CSV import template
|
||||
GET /api/items/export.ods # Export items to ODS
|
||||
@@ -689,6 +693,7 @@ GET /api/items/{partNumber}/bom/export.csv # Export BOM as CSV
|
||||
GET /api/items/{partNumber}/bom/export.ods # Export BOM as ODS
|
||||
POST /api/items/{partNumber}/bom # Add BOM entry [editor]
|
||||
POST /api/items/{partNumber}/bom/import # Import BOM from CSV [editor]
|
||||
POST /api/items/{partNumber}/bom/merge # Merge BOM from ODS with conflict resolution [editor]
|
||||
PUT /api/items/{partNumber}/bom/{childPartNumber} # Update BOM entry [editor]
|
||||
DELETE /api/items/{partNumber}/bom/{childPartNumber} # Remove BOM entry [editor]
|
||||
|
||||
@@ -734,11 +739,11 @@ POST /api/inventory/{partNumber}/move
|
||||
|
||||
### 12.1 Implemented
|
||||
|
||||
- [x] PostgreSQL database schema (11 migrations)
|
||||
- [x] PostgreSQL database schema (13 migrations)
|
||||
- [x] YAML schema parser for part numbering
|
||||
- [x] Part number generation engine
|
||||
- [x] CLI tool (`cmd/silo`)
|
||||
- [x] API server (`cmd/silod`) with 75 endpoints
|
||||
- [x] API server (`cmd/silod`) with 78 endpoints
|
||||
- [x] MinIO integration for file storage with versioning
|
||||
- [x] BOM relationships (component, alternate, reference)
|
||||
- [x] Multi-level BOM (recursive expansion with configurable depth)
|
||||
|
||||
@@ -10,10 +10,10 @@
|
||||
|
||||
| Component | Status | Notes |
|
||||
|-----------|--------|-------|
|
||||
| PostgreSQL schema | Complete | 11 migrations applied |
|
||||
| PostgreSQL schema | Complete | 13 migrations applied |
|
||||
| YAML schema parser | Complete | Supports enum, serial, constant, string segments |
|
||||
| Part number generator | Complete | Scoped sequences, category-based format |
|
||||
| API server (`silod`) | Complete | 75 REST endpoints via chi/v5 |
|
||||
| API server (`silod`) | Complete | 78 REST endpoints via chi/v5 |
|
||||
| CLI tool (`silo`) | Complete | Item registration and management |
|
||||
| MinIO file storage | Complete | Upload, download, versioning, checksums |
|
||||
| Revision control | Complete | Append-only history, rollback, comparison, status/labels |
|
||||
@@ -55,7 +55,7 @@ FreeCAD workbench and LibreOffice Calc extension are maintained in separate repo
|
||||
|
||||
| Service | Host | Status |
|
||||
|---------|------|--------|
|
||||
| PostgreSQL | psql.kindred.internal:5432 | Running |
|
||||
| PostgreSQL | psql.example.internal:5432 | Running |
|
||||
| MinIO | localhost:9000 (API) / :9001 (console) | Configured |
|
||||
| Silo API | localhost:8080 | Builds successfully |
|
||||
|
||||
@@ -92,5 +92,7 @@ The schema defines 170 category codes across 10 groups:
|
||||
| 007_revision_status.sql | Revision status and labels |
|
||||
| 008_odoo_integration.sql | Odoo ERP integration tables (integrations, sync_log) |
|
||||
| 009_auth.sql | Authentication system (users, api_tokens, sessions, audit_log, user tracking columns) |
|
||||
| 010_item_extended_fields.sql | Extended item fields (sourcing_type, sourcing_link, standard_cost, long_description) |
|
||||
| 010_item_extended_fields.sql | Extended item fields (sourcing_type, long_description) |
|
||||
| 011_item_files.sql | Item file attachments (item_files table, thumbnail_key column) |
|
||||
| 012_bom_source.sql | BOM entry source tracking |
|
||||
| 013_move_cost_sourcing_to_props.sql | Move sourcing_link and standard_cost from item columns to revision properties |
|
||||
|
||||
339
frontend-spec.md
339
frontend-spec.md
@@ -1,6 +1,6 @@
|
||||
# Silo Frontend Specification
|
||||
|
||||
Current as of 2026-02-08. Documents the React + Vite + TypeScript frontend (migration from Go templates is complete).
|
||||
Current as of 2026-02-11. Documents the React + Vite + TypeScript frontend (migration from Go templates is complete).
|
||||
|
||||
## Overview
|
||||
|
||||
@@ -68,6 +68,7 @@ web/
|
||||
│ └── AuthContext.tsx AuthProvider with login/logout/refresh methods
|
||||
├── hooks/
|
||||
│ ├── useAuth.ts Context consumer hook
|
||||
│ ├── useFormDescriptor.ts Fetches form descriptor from /api/schemas/{name}/form (replaces useCategories)
|
||||
│ ├── useItems.ts Items fetching with search, filters, pagination, debounce
|
||||
│ └── useLocalStorage.ts Typed localStorage persistence hook
|
||||
├── styles/
|
||||
@@ -271,63 +272,81 @@ Vite dev server runs on port 5173 with proxy config in `vite.config.ts` forwardi
|
||||
|
||||
## New Frontend Tasks
|
||||
|
||||
# CreateItemPane Redesign Specification
|
||||
# CreateItemPane — Schema-Driven Dynamic Form
|
||||
|
||||
**Date**: 2026-02-06
|
||||
**Scope**: Replace existing `CreateItemPane.tsx` with a two-column layout, multi-stage category picker, file attachment via MinIO, and full use of screen real estate.
|
||||
**Date**: 2026-02-10
|
||||
**Scope**: `CreateItemPane.tsx` renders a dynamic form driven entirely by the form descriptor API (`GET /api/schemas/{name}/form`). All field groups, field types, widgets, and category-specific fields are defined in YAML and resolved server-side.
|
||||
**Parent**: Items page (`ItemsPage.tsx`) — renders in the detail pane area per existing in-pane CRUD pattern.
|
||||
|
||||
---
|
||||
|
||||
## Layout
|
||||
|
||||
The pane uses a CSS Grid two-column layout instead of the current single-column form:
|
||||
Single-column scrollable form with a green header bar. Field groups are rendered dynamically from the form descriptor. Category-specific field groups appear after global groups when a category is selected.
|
||||
|
||||
```
|
||||
┌──────────────────────────────────────────────────────┬──────────────┐
|
||||
│ Header: "New Item" [green bar] Cancel │ Create │ │
|
||||
├──────────────────────────────────────────────────────┤ │
|
||||
│ │ Auto- │
|
||||
│ ── Identity ────────────────────────────────────── │ assigned │
|
||||
│ [Part Number *] [Type * v] │ metadata │
|
||||
│ [Description ] │ │
|
||||
│ Category * [Domain │ Group │ Subtype ] │──────────────│
|
||||
│ Mechanical│ Structural│ Bracket │ │ │
|
||||
│ Electrical│ Bearings │ Plate │ │ Attachments │
|
||||
│ ... │ ... │ ... │ │ ┌─ ─ ─ ─ ┐ │
|
||||
│ ── Sourcing ────────────────────────────────────── │ │ Drop │ │
|
||||
│ [Sourcing Type v] [Standard Cost $ ] │ │ zone │ │
|
||||
│ [Unit of Measure v] [Sourcing Link ] │ └─ ─ ─ ─ ┘ │
|
||||
│ │ file.FCStd │
|
||||
│ ── Details ─────────────────────────────────────── │ drawing.pdf │
|
||||
│ [Long Description ] │ │
|
||||
│ [Projects: [tag][tag] type to search... ] │──────────────│
|
||||
│ │ Thumbnail │
|
||||
│ │ [preview] │
|
||||
└──────────────────────────────────────────────────────┴──────────────┘
|
||||
┌──────────────────────────────────────────────────────────────────────┐
|
||||
│ Header: "New Item" [green bar] Cancel │ Create │
|
||||
├──────────────────────────────────────────────────────────────────────┤
|
||||
│ │
|
||||
│ Category * [Domain buttons: F C R S E M T A P X] │
|
||||
│ [Subcategory search + filtered list] │
|
||||
│ │
|
||||
│ ── Identity ────────────────────────────────────────────────────── │
|
||||
│ [Type * (auto-derived from category)] [Description ] │
|
||||
│ │
|
||||
│ ── Sourcing ────────────────────────────────────────────────────── │
|
||||
│ [Sourcing Type v] [Manufacturer] [MPN] [Supplier] [SPN] │
|
||||
│ [Sourcing Link] │
|
||||
│ │
|
||||
│ ── Cost & Lead Time ────────────────────────────────────────────── │
|
||||
│ [Standard Cost $] [Lead Time Days] [Min Order Qty] │
|
||||
│ │
|
||||
│ ── Status ──────────────────────────────────────────────────────── │
|
||||
│ [Lifecycle Status v] [RoHS Compliant ☐] [Country of Origin] │
|
||||
│ │
|
||||
│ ── Details ─────────────────────────────────────────────────────── │
|
||||
│ [Long Description ] │
|
||||
│ [Projects: [tag][tag] type to search... ] │
|
||||
│ [Notes ] │
|
||||
│ │
|
||||
│ ── Fastener Specifications (category-specific) ─────────────────── │
|
||||
│ [Material] [Finish] [Thread Size] [Head Type] [Drive Type] ... │
|
||||
│ │
|
||||
└──────────────────────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
Grid definition: `grid-template-columns: 1fr 320px`. The left column scrolls independently if content overflows. The right sidebar is a flex column with sections separated by `--ctp-surface1` borders.
|
||||
## Data Source — Form Descriptor API
|
||||
|
||||
All form structure is fetched from `GET /api/schemas/kindred-rd/form`, which returns:
|
||||
|
||||
- `category_picker`: Multi-stage picker config (domain → subcategory)
|
||||
- `item_fields`: Definitions for item-level fields (description, item_type, sourcing_type, etc.)
|
||||
- `field_groups`: Ordered groups with resolved field metadata (Identity, Sourcing, Cost, Status, Details)
|
||||
- `category_field_groups`: Per-category-prefix groups (e.g., Fastener Specifications for `F` prefix)
|
||||
- `field_overrides`: Widget hints (currency, url, select, checkbox)
|
||||
|
||||
The YAML schema (`schemas/kindred-rd.yaml`) is the single source of truth. Adding a new field or category in YAML propagates to all clients with no code changes.
|
||||
|
||||
## File Location
|
||||
|
||||
`web/src/components/items/CreateItemPane.tsx` (replaces existing file)
|
||||
`web/src/components/items/CreateItemPane.tsx`
|
||||
|
||||
New supporting files:
|
||||
Supporting files:
|
||||
|
||||
| File | Purpose |
|
||||
|------|---------|
|
||||
| `web/src/components/items/CategoryPicker.tsx` | Multi-stage category selector |
|
||||
| `web/src/components/items/CategoryPicker.tsx` | Multi-stage domain/subcategory selector |
|
||||
| `web/src/components/items/FileDropZone.tsx` | Drag-and-drop file upload with MinIO presigned URLs |
|
||||
| `web/src/components/items/TagInput.tsx` | Multi-select tag input for projects |
|
||||
| `web/src/hooks/useCategories.ts` | Fetches category tree from schema data |
|
||||
| `web/src/hooks/useFormDescriptor.ts` | Fetches and caches form descriptor from `/api/schemas/{name}/form` |
|
||||
| `web/src/hooks/useFileUpload.ts` | Manages presigned URL upload flow |
|
||||
|
||||
## Component Breakdown
|
||||
|
||||
### CreateItemPane
|
||||
|
||||
Top-level orchestrator. Manages form state, submission, and layout.
|
||||
Top-level orchestrator. Renders dynamic form from the form descriptor.
|
||||
|
||||
**Props** (unchanged interface):
|
||||
|
||||
@@ -341,68 +360,64 @@ interface CreateItemPaneProps {
|
||||
**State**:
|
||||
|
||||
```typescript
|
||||
const [form, setForm] = useState<CreateItemForm>({
|
||||
part_number: '',
|
||||
item_type: 'part',
|
||||
description: '',
|
||||
category_path: [], // e.g. ['Mechanical', 'Structural', 'Bracket']
|
||||
sourcing_type: 'manufactured',
|
||||
standard_cost: '',
|
||||
unit_of_measure: 'ea',
|
||||
sourcing_link: '',
|
||||
long_description: '',
|
||||
project_ids: [],
|
||||
});
|
||||
const [attachments, setAttachments] = useState<PendingAttachment[]>([]);
|
||||
const [thumbnail, setThumbnail] = useState<PendingAttachment | null>(null);
|
||||
const { descriptor, categories, loading } = useFormDescriptor();
|
||||
const [category, setCategory] = useState(''); // selected category code, e.g. "F01"
|
||||
const [fields, setFields] = useState<Record<string, string>>({}); // all field values keyed by name
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [submitting, setSubmitting] = useState(false);
|
||||
```
|
||||
|
||||
A single `fields` record holds all form values (both item-level and property fields). The `ITEM_LEVEL_FIELDS` set (`description`, `item_type`, `sourcing_type`, `long_description`) determines which fields go into the top-level request vs. the `properties` map on submission.
|
||||
|
||||
**Auto-derivation**: When a category is selected, `item_type` is automatically set based on the `derived_from_category` mapping in the form descriptor (e.g., category prefix `A` → `assembly`, `T` → `tooling`, default → `part`).
|
||||
|
||||
**Dynamic rendering**: A `renderField()` function maps each field's `widget` type to the appropriate input:
|
||||
|
||||
| Widget | Rendered As |
|
||||
|--------|-------------|
|
||||
| `text` | `<input type="text">` |
|
||||
| `number` | `<input type="number">` |
|
||||
| `textarea` | `<textarea>` |
|
||||
| `select` | `<select>` with `<option>` elements from `field.options` |
|
||||
| `checkbox` | `<input type="checkbox">` |
|
||||
| `currency` | `<input type="number">` with currency prefix (e.g., "$") |
|
||||
| `url` | `<input type="url">` |
|
||||
| `tag_input` | `TagInput` component with search endpoint |
|
||||
|
||||
**Submission flow**:
|
||||
|
||||
1. Validate required fields (part_number, item_type, category_path length === 3).
|
||||
2. `POST /api/items` with form data → returns created `Item` with UUID.
|
||||
3. For each attachment in `attachments[]`, call the file association endpoint: `POST /api/items/{id}/files` with the MinIO object key returned from upload.
|
||||
4. If thumbnail exists, `PUT /api/items/{id}/thumbnail` with the object key.
|
||||
5. Call `onCreated(item)`.
|
||||
1. Validate required fields (category must be selected).
|
||||
2. Split `fields` into item-level fields and properties using `ITEM_LEVEL_FIELDS`.
|
||||
3. `POST /api/items` with `{ part_number: '', item_type, description, sourcing_type, long_description, category, properties: {...} }`.
|
||||
4. Call `onCreated(item)`.
|
||||
|
||||
If step 3 (`POST /api/items`) fails, show an error banner. If a subsequent file-association call fails, show a warning but still navigate (the item was created; files can be re-attached).
|
||||
|
||||
**Header bar**: Green (`--ctp-green` background, `--ctp-crust` text) per existing create-pane convention. "New Item" title on left, Cancel (ghost button) and Create Item (primary button, `--ctp-green` bg) on right.
|
||||
**Header bar**: Green (`--ctp-green` background, `--ctp-crust` text). "New Item" title on left, Cancel and Create Item buttons on right.
|
||||
|
||||
### CategoryPicker
|
||||
|
||||
Three-column scrollable list for hierarchical category selection.
|
||||
Multi-stage category selector driven by the form descriptor's `category_picker.stages` config.
|
||||
|
||||
**Props**:
|
||||
|
||||
```typescript
|
||||
interface CategoryPickerProps {
|
||||
value: string[]; // current selection path, e.g. ['Mechanical', 'Structural']
|
||||
onChange: (path: string[]) => void;
|
||||
categories: CategoryNode[]; // top-level nodes
|
||||
}
|
||||
|
||||
interface CategoryNode {
|
||||
name: string;
|
||||
children?: CategoryNode[];
|
||||
value: string; // selected category code, e.g. "F01"
|
||||
onChange: (code: string) => void;
|
||||
categories: Record<string, string>; // flat code → description map
|
||||
stages?: CategoryPickerStage[]; // from form descriptor
|
||||
}
|
||||
```
|
||||
|
||||
**Rendering**: Three side-by-side `<div>` columns inside a container with `border: 1px solid var(--ctp-surface1)` and `border-radius: 0.4rem`. Each column has:
|
||||
**Rendering**: Two-stage selection:
|
||||
|
||||
- A sticky header row (10px uppercase, `--ctp-overlay0` text, `--ctp-mantle` background) labeling the tier. Labels come from the schema definition if available, otherwise "Level 1", "Level 2", "Level 3".
|
||||
- A scrollable list of options. Each option is a `<div>` row, 28px height, `0.85rem` font. Hover: `--ctp-surface0` background. Selected: translucent mauve background (`rgba(203, 166, 247, 0.12)`), `--ctp-mauve` text, weight 600.
|
||||
- If a node has children, show a `›` chevron on the right side of the row.
|
||||
1. **Domain row**: Horizontal row of buttons, one per domain from `stages[0].values` (F=Fasteners, C=Fluid Fittings, etc.). Selected domain has mauve highlight.
|
||||
2. **Subcategory list**: Filtered list of categories matching the selected domain prefix. Includes a search input for filtering. Each row shows code and description.
|
||||
|
||||
Column 1 always shows all top-level nodes. Column 2 shows children of the selected Column 1 node. Column 3 shows children of the selected Column 2 node. If nothing is selected in a column, the next column shows an empty state with muted text: "Select a [tier name]".
|
||||
If no `stages` prop is provided, falls back to a flat searchable list of all categories.
|
||||
|
||||
Below the picker, render a breadcrumb trail: `Mechanical › Structural › Bracket` in `--ctp-mauve` with `›` separators in `--ctp-overlay0`. Only show segments that are selected.
|
||||
Below the picker, the selected category is shown as a breadcrumb: `Fasteners › F01 — Hex Cap Screw` in `--ctp-mauve`.
|
||||
|
||||
**Data source**: Categories are derived from schemas. The `useCategories` hook calls `GET /api/schemas` and transforms the response into a `CategoryNode[]` tree. The exact mapping depends on how schemas define category hierarchies — if schemas don't currently support hierarchical categories, this requires a backend addition (see Backend Changes section).
|
||||
|
||||
**Max height**: 180px per column with `overflow-y: auto`.
|
||||
**Data source**: Categories come from `useFormDescriptor()` which derives them from the `category_picker` stages and `values_by_domain` in the form descriptor response.
|
||||
|
||||
### FileDropZone
|
||||
|
||||
@@ -478,17 +493,17 @@ The dropdown is an absolutely-positioned `<div>` below the input container, `--c
|
||||
|
||||
**For projects**: `searchFn` calls `GET /api/projects?q={query}` and maps to `{ id: project.id, label: project.code + ' — ' + project.name }`.
|
||||
|
||||
### useCategories Hook
|
||||
### useFormDescriptor Hook
|
||||
|
||||
```typescript
|
||||
function useCategories(): {
|
||||
categories: CategoryNode[];
|
||||
function useFormDescriptor(schemaName = "kindred-rd"): {
|
||||
descriptor: FormDescriptor | null;
|
||||
categories: Record<string, string>; // flat code → description map derived from descriptor
|
||||
loading: boolean;
|
||||
error: string | null;
|
||||
}
|
||||
```
|
||||
|
||||
Fetches `GET /api/schemas` on mount and transforms into a category tree. Caches in a module-level variable so repeated renders don't refetch. If the API doesn't currently support hierarchical categories, this returns a flat list as a single-tier picker until the backend is extended.
|
||||
Fetches `GET /api/schemas/{name}/form` on mount. Caches the result in a module-level variable so repeated renders/mounts don't refetch. Derives a flat `categories` map from the `category_picker` stages and `values_by_domain` in the response. Replaces the old `useCategories` hook (deleted).
|
||||
|
||||
### useFileUpload Hook
|
||||
|
||||
@@ -542,30 +557,32 @@ const styles = {
|
||||
|
||||
## Form Sections
|
||||
|
||||
The form is visually divided by section headers. Each header is a flex row containing a label (11px uppercase, `--ctp-overlay0`) and a `flex: 1` horizontal line (`1px solid --ctp-surface0`). Sections span `grid-column: 1 / -1`.
|
||||
Form sections are rendered dynamically from the `field_groups` array in the form descriptor. Each section header is a flex row containing a label (11px uppercase, `--ctp-overlay0`) and a `flex: 1` horizontal line (`1px solid --ctp-surface0`).
|
||||
|
||||
| Section | Fields |
|
||||
|---------|--------|
|
||||
| Identity | Part Number*, Type*, Description, Category* |
|
||||
| Sourcing | Sourcing Type, Standard Cost, Unit of Measure, Sourcing Link |
|
||||
| Details | Long Description, Projects |
|
||||
**Global field groups** (from `ui.field_groups` in YAML):
|
||||
|
||||
## Sidebar Sections
|
||||
| Group Key | Label | Fields |
|
||||
|-----------|-------|--------|
|
||||
| identity | Identity | item_type, description |
|
||||
| sourcing | Sourcing | sourcing_type, manufacturer, manufacturer_pn, supplier, supplier_pn, sourcing_link |
|
||||
| cost | Cost & Lead Time | standard_cost, lead_time_days, minimum_order_qty |
|
||||
| status | Status | lifecycle_status, rohs_compliant, country_of_origin |
|
||||
| details | Details | long_description, projects, notes |
|
||||
|
||||
The right sidebar is divided into three sections with `borderBottom: 1px solid var(--ctp-surface0)`:
|
||||
**Category-specific field groups** (from `ui.category_field_groups` in YAML, shown when a category is selected):
|
||||
|
||||
**Auto-assigned metadata**: Read-only key-value rows showing:
|
||||
- UUID: "On create" in `--ctp-teal` italic
|
||||
- Revision: "A" (hardcoded initial)
|
||||
- Created By: current user's display name from `useAuth()`
|
||||
| Prefix | Group | Example Fields |
|
||||
|--------|-------|----------------|
|
||||
| F | Fastener Specifications | material, finish, thread_size, head_type, drive_type, ... |
|
||||
| C | Fitting Specifications | material, connection_type, size_1, pressure_rating, ... |
|
||||
| R | Motion Specifications | bearing_type, bore_diameter, load_rating, ... |
|
||||
| ... | ... | (one group per category prefix, defined in YAML) |
|
||||
|
||||
**Attachments**: `FileDropZone` component. Takes `flex: 1` to fill available space.
|
||||
|
||||
**Thumbnail**: A 4:3 aspect ratio placeholder box (`--ctp-crust` bg, `--ctp-surface0` border) with centered text "Generated from CAD file or upload manually". Clicking opens file picker filtered to images. If a thumbnail is uploaded, show it as an `<img>` with `object-fit: cover`.
|
||||
Note: `sourcing_link` and `standard_cost` are revision properties (stored in the `properties` JSONB), not item-level DB columns. They were migrated from item-level fields in PR #1 (migration 013).
|
||||
|
||||
## Backend Changes
|
||||
|
||||
Items 1-3 and 5 below are implemented (migration `011_item_files.sql`, `internal/api/file_handlers.go`). Item 4 (hierarchical categories) remains open.
|
||||
Items 1-5 below are implemented. Item 4 (hierarchical categories) is resolved by the form descriptor's multi-stage category picker.
|
||||
|
||||
### 1. Presigned Upload URL -- IMPLEMENTED
|
||||
|
||||
@@ -597,33 +614,14 @@ Response: 204
|
||||
|
||||
Stores the thumbnail at `items/{item_id}/thumbnail.png` in MinIO. Updates `item.thumbnail_key` column.
|
||||
|
||||
### 4. Hierarchical Categories -- NOT IMPLEMENTED
|
||||
### 4. Hierarchical Categories -- IMPLEMENTED (via Form Descriptor)
|
||||
|
||||
If schemas don't currently support a hierarchical category tree, one of these approaches:
|
||||
Resolved by the schema-driven form descriptor (`GET /api/schemas/{name}/form`). The YAML schema's `ui.category_picker` section defines multi-stage selection:
|
||||
|
||||
**Option A — Schema-driven**: Add a `category_tree` JSON column to the `schemas` table that defines the hierarchy. The `GET /api/schemas` response already returns schemas; the frontend transforms this into the picker tree.
|
||||
- **Stage 1 (domain)**: Groups categories by first character of category code (F=Fasteners, C=Fluid Fittings, etc.). Values defined in `ui.category_picker.stages[0].values`.
|
||||
- **Stage 2 (subcategory)**: Auto-derived by the Go backend's `ValuesByDomain()` method, which groups the category enum values by their first character.
|
||||
|
||||
**Option B — Dedicated table**:
|
||||
|
||||
```sql
|
||||
CREATE TABLE categories (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
name TEXT NOT NULL,
|
||||
parent_id UUID REFERENCES categories(id),
|
||||
sort_order INT NOT NULL DEFAULT 0,
|
||||
UNIQUE(parent_id, name)
|
||||
);
|
||||
```
|
||||
|
||||
With endpoints:
|
||||
```
|
||||
GET /api/categories → flat list with parent_id, frontend builds tree
|
||||
POST /api/categories → { name, parent_id? }
|
||||
PUT /api/categories/{id} → { name, sort_order }
|
||||
DELETE /api/categories/{id} → cascade check
|
||||
```
|
||||
|
||||
**Recommendation**: Option B is more flexible and keeps categories as a first-class entity. The three-tier picker doesn't need to be limited to exactly three levels — it can render as many columns as the deepest category path, but three is the practical default (Domain → Group → Subtype).
|
||||
No separate `categories` table is needed — the existing schema enum values are the single source of truth. Adding a new category code to the YAML propagates to the picker automatically.
|
||||
|
||||
### 5. Database Schema Addition -- IMPLEMENTED
|
||||
|
||||
@@ -641,46 +639,89 @@ CREATE TABLE item_files (
|
||||
CREATE INDEX idx_item_files_item ON item_files(item_id);
|
||||
|
||||
ALTER TABLE items ADD COLUMN thumbnail_key TEXT;
|
||||
ALTER TABLE items ADD COLUMN category_id UUID REFERENCES categories(id);
|
||||
ALTER TABLE items ADD COLUMN sourcing_type TEXT NOT NULL DEFAULT 'manufactured';
|
||||
ALTER TABLE items ADD COLUMN sourcing_link TEXT;
|
||||
ALTER TABLE items ADD COLUMN standard_cost NUMERIC(12,2);
|
||||
ALTER TABLE items ADD COLUMN unit_of_measure TEXT NOT NULL DEFAULT 'ea';
|
||||
ALTER TABLE items ADD COLUMN long_description TEXT;
|
||||
```
|
||||
|
||||
## Implementation Order
|
||||
|
||||
1. **TagInput component** — reusable, no backend changes needed, uses existing projects API.
|
||||
2. **CategoryPicker component** — start with flat/mock data, wire to real API after backend adds categories.
|
||||
3. **FileDropZone + useFileUpload** — requires presigned URL backend endpoint first.
|
||||
4. **CreateItemPane rewrite** — compose the above into the two-column layout.
|
||||
5. **Backend: categories table + endpoints** — unblocks real category data.
|
||||
6. **Backend: presigned uploads + item_files** — unblocks file attachments.
|
||||
7. **Backend: items table migration** — adds new columns (sourcing_type, standard_cost, etc.).
|
||||
1. **[DONE] Deduplicate sourcing_link/standard_cost** — Migrated from item-level DB columns to revision properties (migration 013). Removed from Go structs, API types, frontend types.
|
||||
2. **[DONE] Form descriptor API** — Added `ui` section to YAML, Go structs + validation, `GET /api/schemas/{name}/form` endpoint.
|
||||
3. **[DONE] useFormDescriptor hook** — Replaces `useCategories`, fetches and caches form descriptor.
|
||||
4. **[DONE] CategoryPicker rewrite** — Multi-stage domain/subcategory picker driven by form descriptor.
|
||||
5. **[DONE] CreateItemPane rewrite** — Dynamic form rendering from field groups, widget-based field rendering.
|
||||
6. **TagInput component** — reusable, no backend changes needed, uses existing projects API.
|
||||
7. **FileDropZone + useFileUpload** — requires presigned URL backend endpoint (already implemented).
|
||||
|
||||
Steps 1-5 are complete. Steps 6 and 7 (TagInput; FileDropZone + useFileUpload) remain and can proceed in parallel, since the presigned-upload backend they depend on is already implemented.
|
||||
## Types Added
|
||||
|
||||
## Types to Add
|
||||
|
||||
Add to `web/src/api/types.ts`:
|
||||
The following types were added to `web/src/api/types.ts` for the form descriptor system:
|
||||
|
||||
```typescript
|
||||
// Categories
|
||||
interface Category {
|
||||
id: string;
|
||||
// Form descriptor types (from GET /api/schemas/{name}/form)
|
||||
interface FormFieldDescriptor {
|
||||
name: string;
|
||||
parent_id: string | null;
|
||||
sort_order: number;
|
||||
type: string;
|
||||
widget: string;
|
||||
label: string;
|
||||
required?: boolean;
|
||||
default?: string;
|
||||
unit?: string;
|
||||
description?: string;
|
||||
options?: string[];
|
||||
currency?: string;
|
||||
derived_from_category?: Record<string, string>;
|
||||
search_endpoint?: string;
|
||||
}
|
||||
|
||||
interface CategoryNode {
|
||||
name: string;
|
||||
id: string;
|
||||
children?: CategoryNode[];
|
||||
interface FormFieldGroup {
|
||||
key: string;
|
||||
label: string;
|
||||
order: number;
|
||||
fields: FormFieldDescriptor[];
|
||||
}
|
||||
|
||||
// File uploads
|
||||
interface CategoryPickerStage {
|
||||
name: string;
|
||||
label: string;
|
||||
values?: Record<string, string>;
|
||||
values_by_domain?: Record<string, Record<string, string>>;
|
||||
}
|
||||
|
||||
interface CategoryPickerDescriptor {
|
||||
style: string;
|
||||
stages: CategoryPickerStage[];
|
||||
}
|
||||
|
||||
interface ItemFieldDef {
|
||||
type: string;
|
||||
widget: string;
|
||||
label: string;
|
||||
required?: boolean;
|
||||
default?: string;
|
||||
options?: string[];
|
||||
derived_from_category?: Record<string, string>;
|
||||
search_endpoint?: string;
|
||||
}
|
||||
|
||||
interface FieldOverride {
|
||||
widget?: string;
|
||||
currency?: string;
|
||||
options?: string[];
|
||||
}
|
||||
|
||||
interface FormDescriptor {
|
||||
schema_name: string;
|
||||
format: string;
|
||||
category_picker: CategoryPickerDescriptor;
|
||||
item_fields: Record<string, ItemFieldDef>;
|
||||
field_groups: FormFieldGroup[];
|
||||
category_field_groups: Record<string, FormFieldGroup[]>;
|
||||
field_overrides: Record<string, FieldOverride>;
|
||||
}
|
||||
|
||||
// File uploads (unchanged)
|
||||
interface PresignRequest {
|
||||
filename: string;
|
||||
content_type: string;
|
||||
@@ -703,20 +744,6 @@ interface ItemFile {
|
||||
created_at: string;
|
||||
}
|
||||
|
||||
// Extended create request
|
||||
interface CreateItemRequest {
|
||||
part_number: string;
|
||||
item_type: 'part' | 'assembly' | 'document';
|
||||
description?: string;
|
||||
category_id?: string;
|
||||
sourcing_type?: 'manufactured' | 'purchased' | 'phantom';
|
||||
standard_cost?: number;
|
||||
unit_of_measure?: string;
|
||||
sourcing_link?: string;
|
||||
long_description?: string;
|
||||
project_ids?: string[];
|
||||
}
|
||||
|
||||
// Pending upload (frontend only, not an API type)
|
||||
interface PendingAttachment {
|
||||
file: File;
|
||||
@@ -726,3 +753,5 @@ interface PendingAttachment {
|
||||
error?: string;
|
||||
}
|
||||
```
|
||||
|
||||
Note: `sourcing_link` and `standard_cost` have been removed from the `Item`, `CreateItemRequest`, and `UpdateItemRequest` interfaces — they are now stored as revision properties and rendered dynamically from the form descriptor.
|
||||
|
||||
@@ -114,8 +114,6 @@ var manufacturedWeights = map[string]float64{
|
||||
var itemLevelFields = map[string]bool{
|
||||
"description": true,
|
||||
"sourcing_type": true,
|
||||
"sourcing_link": true,
|
||||
"standard_cost": true,
|
||||
"long_description": true,
|
||||
}
|
||||
|
||||
@@ -258,18 +256,6 @@ func scoreItem(
|
||||
processField("description", "item", "string", item.Description)
|
||||
processField("sourcing_type", "item", "string", item.SourcingType)
|
||||
|
||||
var sourcingLinkVal any
|
||||
if item.SourcingLink != nil {
|
||||
sourcingLinkVal = *item.SourcingLink
|
||||
}
|
||||
processField("sourcing_link", "item", "string", sourcingLinkVal)
|
||||
|
||||
var stdCostVal any
|
||||
if item.StandardCost != nil {
|
||||
stdCostVal = *item.StandardCost
|
||||
}
|
||||
processField("standard_cost", "item", "number", stdCostVal)
|
||||
|
||||
var longDescVal any
|
||||
if item.LongDescription != nil {
|
||||
longDescVal = *item.LongDescription
|
||||
@@ -287,10 +273,6 @@ func scoreItem(
|
||||
if skipFields[key] || itemLevelFields[key] {
|
||||
continue
|
||||
}
|
||||
// sourcing_link and standard_cost are already handled at item level.
|
||||
if key == "sourcing_link" || key == "standard_cost" {
|
||||
continue
|
||||
}
|
||||
value := item.Properties[key]
|
||||
processField(key, "property", def.Type, value)
|
||||
}
|
||||
|
||||
@@ -573,8 +573,20 @@ func (s *Server) HandleGetBOMCost(w http.ResponseWriter, r *http.Request) {
|
||||
for i, e := range entries {
|
||||
unitCost := 0.0
|
||||
leaf, err := s.items.GetByID(ctx, e.ItemID)
|
||||
if err == nil && leaf != nil && leaf.StandardCost != nil {
|
||||
unitCost = *leaf.StandardCost
|
||||
if err == nil && leaf != nil {
|
||||
// Get standard_cost from revision properties
|
||||
if revs, rerr := s.items.GetRevisions(ctx, leaf.ID); rerr == nil {
|
||||
for _, rev := range revs {
|
||||
if rev.RevisionNumber == leaf.CurrentRevision && rev.Properties != nil {
|
||||
if sc, ok := rev.Properties["standard_cost"]; ok {
|
||||
if cost, cok := sc.(float64); cok {
|
||||
unitCost = cost
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
extCost := e.TotalQuantity * unitCost
|
||||
totalCost += extCost
|
||||
|
||||
@@ -51,9 +51,7 @@ var csvColumns = []string{
|
||||
"category",
|
||||
"projects", // comma-separated project codes
|
||||
"sourcing_type",
|
||||
"sourcing_link",
|
||||
"long_description",
|
||||
"standard_cost",
|
||||
}
|
||||
|
||||
// HandleExportCSV exports items to CSV format.
|
||||
@@ -158,14 +156,8 @@ func (s *Server) HandleExportCSV(w http.ResponseWriter, r *http.Request) {
|
||||
row[6] = category
|
||||
row[7] = projectCodes
|
||||
row[8] = item.SourcingType
|
||||
if item.SourcingLink != nil {
|
||||
row[9] = *item.SourcingLink
|
||||
}
|
||||
if item.LongDescription != nil {
|
||||
row[10] = *item.LongDescription
|
||||
}
|
||||
if item.StandardCost != nil {
|
||||
row[11] = strconv.FormatFloat(*item.StandardCost, 'f', -1, 64)
|
||||
row[9] = *item.LongDescription
|
||||
}
|
||||
|
||||
// Property columns
|
||||
@@ -366,9 +358,17 @@ func (s *Server) HandleImportCSV(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
// Parse extended fields
|
||||
sourcingType := getCSVValue(record, colIndex, "sourcing_type")
|
||||
sourcingLink := getCSVValue(record, colIndex, "sourcing_link")
|
||||
longDesc := getCSVValue(record, colIndex, "long_description")
|
||||
stdCostStr := getCSVValue(record, colIndex, "standard_cost")
|
||||
|
||||
// sourcing_link and standard_cost are now properties — add to properties map
|
||||
if sl := getCSVValue(record, colIndex, "sourcing_link"); sl != "" {
|
||||
properties["sourcing_link"] = sl
|
||||
}
|
||||
if sc := getCSVValue(record, colIndex, "standard_cost"); sc != "" {
|
||||
if cost, err := strconv.ParseFloat(sc, 64); err == nil {
|
||||
properties["standard_cost"] = cost
|
||||
}
|
||||
}
|
||||
|
||||
// Create item
|
||||
item := &db.Item{
|
||||
@@ -382,17 +382,9 @@ func (s *Server) HandleImportCSV(w http.ResponseWriter, r *http.Request) {
|
||||
if sourcingType != "" {
|
||||
item.SourcingType = sourcingType
|
||||
}
|
||||
if sourcingLink != "" {
|
||||
item.SourcingLink = &sourcingLink
|
||||
}
|
||||
if longDesc != "" {
|
||||
item.LongDescription = &longDesc
|
||||
}
|
||||
if stdCostStr != "" {
|
||||
if cost, err := strconv.ParseFloat(stdCostStr, 64); err == nil {
|
||||
item.StandardCost = &cost
|
||||
}
|
||||
}
|
||||
|
||||
if err := s.items.Create(ctx, item, properties); err != nil {
|
||||
result.Errors = append(result.Errors, CSVImportErr{
|
||||
@@ -585,9 +577,7 @@ func isStandardColumn(col string) bool {
|
||||
"objects": true, // FreeCAD objects data - skip on import
|
||||
"archived_at": true,
|
||||
"sourcing_type": true,
|
||||
"sourcing_link": true,
|
||||
"long_description": true,
|
||||
"standard_cost": true,
|
||||
}
|
||||
return standardCols[col]
|
||||
}
|
||||
|
||||
@@ -8,6 +8,7 @@ import (
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
@@ -195,10 +196,35 @@ func (s *Server) HandleGetSchema(w http.ResponseWriter, r *http.Request) {
|
||||
writeJSON(w, http.StatusOK, schemaToResponse(sch))
|
||||
}
|
||||
|
||||
// HandleGetPropertySchema returns the property schema for a category.
|
||||
func (s *Server) HandleGetPropertySchema(w http.ResponseWriter, r *http.Request) {
|
||||
// FormFieldDescriptor describes a single field in the form descriptor response.
|
||||
type FormFieldDescriptor struct {
|
||||
Name string `json:"name"`
|
||||
Type string `json:"type"`
|
||||
Widget string `json:"widget,omitempty"`
|
||||
Label string `json:"label"`
|
||||
Required bool `json:"required,omitempty"`
|
||||
Default any `json:"default,omitempty"`
|
||||
Unit string `json:"unit,omitempty"`
|
||||
Description string `json:"description,omitempty"`
|
||||
Options []string `json:"options,omitempty"`
|
||||
Currency string `json:"currency,omitempty"`
|
||||
|
||||
// Item-field specific
|
||||
DerivedFromCategory map[string]string `json:"derived_from_category,omitempty"`
|
||||
SearchEndpoint string `json:"search_endpoint,omitempty"`
|
||||
}
|
||||
|
||||
// FormFieldGroupDescriptor describes an ordered group of resolved fields.
|
||||
type FormFieldGroupDescriptor struct {
|
||||
Key string `json:"key"`
|
||||
Label string `json:"label"`
|
||||
Order int `json:"order"`
|
||||
Fields []FormFieldDescriptor `json:"fields"`
|
||||
}
|
||||
|
||||
// HandleGetFormDescriptor returns the full form descriptor for a schema.
|
||||
func (s *Server) HandleGetFormDescriptor(w http.ResponseWriter, r *http.Request) {
|
||||
schemaName := chi.URLParam(r, "name")
|
||||
category := r.URL.Query().Get("category")
|
||||
|
||||
sch, ok := s.schemas[schemaName]
|
||||
if !ok {
|
||||
@@ -206,19 +232,194 @@ func (s *Server) HandleGetPropertySchema(w http.ResponseWriter, r *http.Request)
|
||||
return
|
||||
}
|
||||
|
||||
if sch.PropertySchemas == nil {
|
||||
writeJSON(w, http.StatusOK, map[string]any{
|
||||
"version": 0,
|
||||
"properties": map[string]any{},
|
||||
})
|
||||
return
|
||||
result := map[string]any{
|
||||
"schema_name": sch.Name,
|
||||
"format": sch.Format,
|
||||
}
|
||||
|
||||
props := sch.PropertySchemas.GetPropertiesForCategory(category)
|
||||
writeJSON(w, http.StatusOK, map[string]any{
|
||||
"version": sch.PropertySchemas.Version,
|
||||
"properties": props,
|
||||
// Category picker with auto-derived values_by_domain
|
||||
if sch.UI != nil && sch.UI.CategoryPicker != nil {
|
||||
picker := map[string]any{
|
||||
"style": sch.UI.CategoryPicker.Style,
|
||||
}
|
||||
|
||||
vbd := sch.ValuesByDomain()
|
||||
|
||||
stages := make([]map[string]any, 0, len(sch.UI.CategoryPicker.Stages)+1)
|
||||
for _, stage := range sch.UI.CategoryPicker.Stages {
|
||||
stg := map[string]any{
|
||||
"name": stage.Name,
|
||||
"label": stage.Label,
|
||||
}
|
||||
if stage.Values != nil {
|
||||
stg["values"] = stage.Values
|
||||
}
|
||||
stages = append(stages, stg)
|
||||
}
|
||||
|
||||
// Auto-add subcategory stage from values_by_domain
|
||||
if vbd != nil {
|
||||
stages = append(stages, map[string]any{
|
||||
"name": "subcategory",
|
||||
"label": "Type",
|
||||
"values_by_domain": vbd,
|
||||
})
|
||||
}
|
||||
|
||||
picker["stages"] = stages
|
||||
result["category_picker"] = picker
|
||||
}
|
||||
|
||||
// Item fields
|
||||
if sch.UI != nil && sch.UI.ItemFields != nil {
|
||||
result["item_fields"] = sch.UI.ItemFields
|
||||
}
|
||||
|
||||
// Resolve field groups into ordered list with full field metadata
|
||||
if sch.UI != nil && sch.UI.FieldGroups != nil {
|
||||
groups := s.resolveFieldGroups(sch, sch.UI.FieldGroups)
|
||||
result["field_groups"] = groups
|
||||
}
|
||||
|
||||
// Category field groups
|
||||
if sch.UI != nil && sch.UI.CategoryFieldGroups != nil {
|
||||
catGroups := make(map[string][]FormFieldGroupDescriptor)
|
||||
for prefix, groups := range sch.UI.CategoryFieldGroups {
|
||||
catGroups[prefix] = s.resolveCategoryFieldGroups(sch, prefix, groups)
|
||||
}
|
||||
result["category_field_groups"] = catGroups
|
||||
}
|
||||
|
||||
// Field overrides (pass through)
|
||||
if sch.UI != nil && sch.UI.FieldOverrides != nil {
|
||||
result["field_overrides"] = sch.UI.FieldOverrides
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, result)
|
||||
}
|
||||
|
||||
// resolveFieldGroups converts field group definitions into fully resolved descriptors.
|
||||
func (s *Server) resolveFieldGroups(sch *schema.Schema, groups map[string]schema.FieldGroup) []FormFieldGroupDescriptor {
|
||||
result := make([]FormFieldGroupDescriptor, 0, len(groups))
|
||||
for key, group := range groups {
|
||||
desc := FormFieldGroupDescriptor{
|
||||
Key: key,
|
||||
Label: group.Label,
|
||||
Order: group.Order,
|
||||
}
|
||||
for _, fieldName := range group.Fields {
|
||||
fd := s.resolveField(sch, fieldName)
|
||||
desc.Fields = append(desc.Fields, fd)
|
||||
}
|
||||
result = append(result, desc)
|
||||
}
|
||||
// Sort by order
|
||||
sort.Slice(result, func(i, j int) bool {
|
||||
return result[i].Order < result[j].Order
|
||||
})
|
||||
return result
|
||||
}
|
||||
|
||||
// resolveCategoryFieldGroups resolves category-specific field groups.
|
||||
func (s *Server) resolveCategoryFieldGroups(sch *schema.Schema, prefix string, groups map[string]schema.FieldGroup) []FormFieldGroupDescriptor {
|
||||
result := make([]FormFieldGroupDescriptor, 0, len(groups))
|
||||
for key, group := range groups {
|
||||
desc := FormFieldGroupDescriptor{
|
||||
Key: key,
|
||||
Label: group.Label,
|
||||
Order: group.Order,
|
||||
}
|
||||
for _, fieldName := range group.Fields {
|
||||
fd := s.resolveCategoryField(sch, prefix, fieldName)
|
||||
desc.Fields = append(desc.Fields, fd)
|
||||
}
|
||||
result = append(result, desc)
|
||||
}
|
||||
sort.Slice(result, func(i, j int) bool {
|
||||
return result[i].Order < result[j].Order
|
||||
})
|
||||
return result
|
||||
}
|
||||
|
||||
// resolveField builds a FormFieldDescriptor from item_fields or property_schemas.defaults.
|
||||
func (s *Server) resolveField(sch *schema.Schema, name string) FormFieldDescriptor {
|
||||
fd := FormFieldDescriptor{Name: name}
|
||||
|
||||
// Check item_fields first
|
||||
if sch.UI != nil && sch.UI.ItemFields != nil {
|
||||
if def, ok := sch.UI.ItemFields[name]; ok {
|
||||
fd.Type = def.Type
|
||||
fd.Widget = def.Widget
|
||||
fd.Label = def.Label
|
||||
fd.Required = def.Required
|
||||
fd.Default = def.Default
|
||||
fd.Options = def.Options
|
||||
fd.DerivedFromCategory = def.DerivedFromCategory
|
||||
fd.SearchEndpoint = def.SearchEndpoint
|
||||
s.applyOverrides(sch, name, &fd)
|
||||
return fd
|
||||
}
|
||||
}
|
||||
|
||||
// Check property_schemas.defaults
|
||||
if sch.PropertySchemas != nil && sch.PropertySchemas.Defaults != nil {
|
||||
if def, ok := sch.PropertySchemas.Defaults[name]; ok {
|
||||
fd.Type = def.Type
|
||||
fd.Label = name // Use field name as label if not overridden
|
||||
fd.Default = def.Default
|
||||
fd.Unit = def.Unit
|
||||
fd.Description = def.Description
|
||||
fd.Required = def.Required
|
||||
s.applyOverrides(sch, name, &fd)
|
||||
return fd
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback — field name only
|
||||
fd.Label = name
|
||||
fd.Type = "string"
|
||||
s.applyOverrides(sch, name, &fd)
|
||||
return fd
|
||||
}
|
||||
|
||||
// resolveCategoryField builds a FormFieldDescriptor from category-specific property schema.
|
||||
func (s *Server) resolveCategoryField(sch *schema.Schema, prefix, name string) FormFieldDescriptor {
|
||||
fd := FormFieldDescriptor{Name: name, Label: name, Type: "string"}
|
||||
|
||||
if sch.PropertySchemas != nil {
|
||||
if catProps, ok := sch.PropertySchemas.Categories[prefix]; ok {
|
||||
if def, ok := catProps[name]; ok {
|
||||
fd.Type = def.Type
|
||||
fd.Default = def.Default
|
||||
fd.Unit = def.Unit
|
||||
fd.Description = def.Description
|
||||
fd.Required = def.Required
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
s.applyOverrides(sch, name, &fd)
|
||||
return fd
|
||||
}
|
||||
|
||||
// applyOverrides applies field_overrides to a field descriptor.
|
||||
func (s *Server) applyOverrides(sch *schema.Schema, name string, fd *FormFieldDescriptor) {
|
||||
if sch.UI == nil || sch.UI.FieldOverrides == nil {
|
||||
return
|
||||
}
|
||||
ov, ok := sch.UI.FieldOverrides[name]
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
if ov.Widget != "" {
|
||||
fd.Widget = ov.Widget
|
||||
}
|
||||
if ov.Currency != "" {
|
||||
fd.Currency = ov.Currency
|
||||
}
|
||||
if len(ov.Options) > 0 {
|
||||
fd.Options = ov.Options
|
||||
}
|
||||
}
|
||||
|
||||
func schemaToResponse(sch *schema.Schema) SchemaResponse {
|
||||
@@ -256,9 +457,7 @@ type ItemResponse struct {
|
||||
CreatedAt string `json:"created_at"`
|
||||
UpdatedAt string `json:"updated_at"`
|
||||
SourcingType string `json:"sourcing_type"`
|
||||
SourcingLink *string `json:"sourcing_link,omitempty"`
|
||||
LongDescription *string `json:"long_description,omitempty"`
|
||||
StandardCost *float64 `json:"standard_cost,omitempty"`
|
||||
ThumbnailKey *string `json:"thumbnail_key,omitempty"`
|
||||
FileCount int `json:"file_count"`
|
||||
FilesTotalSize int64 `json:"files_total_size"`
|
||||
@@ -273,9 +472,7 @@ type CreateItemRequest struct {
|
||||
Projects []string `json:"projects,omitempty"`
|
||||
Properties map[string]any `json:"properties,omitempty"`
|
||||
SourcingType string `json:"sourcing_type,omitempty"`
|
||||
SourcingLink *string `json:"sourcing_link,omitempty"`
|
||||
LongDescription *string `json:"long_description,omitempty"`
|
||||
StandardCost *float64 `json:"standard_cost,omitempty"`
|
||||
}
|
||||
|
||||
// HandleListItems lists items with optional filtering.
|
||||
@@ -429,9 +626,7 @@ func (s *Server) HandleCreateItem(w http.ResponseWriter, r *http.Request) {
|
||||
ItemType: itemType,
|
||||
Description: req.Description,
|
||||
SourcingType: req.SourcingType,
|
||||
SourcingLink: req.SourcingLink,
|
||||
LongDescription: req.LongDescription,
|
||||
StandardCost: req.StandardCost,
|
||||
}
|
||||
if user := auth.UserFromContext(ctx); user != nil {
|
||||
item.CreatedBy = &user.Username
|
||||
@@ -557,9 +752,7 @@ type UpdateItemRequest struct {
|
||||
Properties map[string]any `json:"properties,omitempty"`
|
||||
Comment string `json:"comment,omitempty"`
|
||||
SourcingType *string `json:"sourcing_type,omitempty"`
|
||||
SourcingLink *string `json:"sourcing_link,omitempty"`
|
||||
LongDescription *string `json:"long_description,omitempty"`
|
||||
StandardCost *float64 `json:"standard_cost,omitempty"`
|
||||
}
|
||||
|
||||
// HandleUpdateItem updates an item's fields and/or creates a new revision.
|
||||
@@ -590,9 +783,7 @@ func (s *Server) HandleUpdateItem(w http.ResponseWriter, r *http.Request) {
|
||||
ItemType: item.ItemType,
|
||||
Description: item.Description,
|
||||
SourcingType: req.SourcingType,
|
||||
SourcingLink: req.SourcingLink,
|
||||
LongDescription: req.LongDescription,
|
||||
StandardCost: req.StandardCost,
|
||||
}
|
||||
|
||||
if req.PartNumber != "" {
|
||||
@@ -1204,9 +1395,7 @@ func itemToResponse(item *db.Item) ItemResponse {
|
||||
CreatedAt: item.CreatedAt.Format("2006-01-02T15:04:05Z07:00"),
|
||||
UpdatedAt: item.UpdatedAt.Format("2006-01-02T15:04:05Z07:00"),
|
||||
SourcingType: item.SourcingType,
|
||||
SourcingLink: item.SourcingLink,
|
||||
LongDescription: item.LongDescription,
|
||||
StandardCost: item.StandardCost,
|
||||
ThumbnailKey: item.ThumbnailKey,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -138,21 +138,11 @@ func (s *Server) HandleExportODS(w http.ResponseWriter, r *http.Request) {
|
||||
ods.StringCell(item.SourcingType),
|
||||
}
|
||||
|
||||
if item.SourcingLink != nil {
|
||||
cells = append(cells, ods.StringCell(*item.SourcingLink))
|
||||
} else {
|
||||
cells = append(cells, ods.EmptyCell())
|
||||
}
|
||||
if item.LongDescription != nil {
|
||||
cells = append(cells, ods.StringCell(*item.LongDescription))
|
||||
} else {
|
||||
cells = append(cells, ods.EmptyCell())
|
||||
}
|
||||
if item.StandardCost != nil {
|
||||
cells = append(cells, ods.CurrencyCell(*item.StandardCost))
|
||||
} else {
|
||||
cells = append(cells, ods.EmptyCell())
|
||||
}
|
||||
|
||||
// Property columns
|
||||
if includeProps {
|
||||
@@ -419,6 +409,16 @@ func (s *Server) HandleImportODS(w http.ResponseWriter, r *http.Request) {
|
||||
longDesc := getCellValue("long_description")
|
||||
stdCostStr := getCellValue("standard_cost")
|
||||
|
||||
// Put sourcing_link and standard_cost into properties
|
||||
if sourcingLink != "" {
|
||||
properties["sourcing_link"] = sourcingLink
|
||||
}
|
||||
if stdCostStr != "" {
|
||||
if cost, err := strconv.ParseFloat(strings.TrimLeft(stdCostStr, "$"), 64); err == nil {
|
||||
properties["standard_cost"] = cost
|
||||
}
|
||||
}
|
||||
|
||||
item := &db.Item{
|
||||
PartNumber: partNumber,
|
||||
ItemType: itemType,
|
||||
@@ -430,17 +430,9 @@ func (s *Server) HandleImportODS(w http.ResponseWriter, r *http.Request) {
|
||||
if sourcingType != "" {
|
||||
item.SourcingType = sourcingType
|
||||
}
|
||||
if sourcingLink != "" {
|
||||
item.SourcingLink = &sourcingLink
|
||||
}
|
||||
if longDesc != "" {
|
||||
item.LongDescription = &longDesc
|
||||
}
|
||||
if stdCostStr != "" {
|
||||
if cost, err := strconv.ParseFloat(strings.TrimLeft(stdCostStr, "$"), 64); err == nil {
|
||||
item.StandardCost = &cost
|
||||
}
|
||||
}
|
||||
|
||||
if err := s.items.Create(ctx, item, properties); err != nil {
|
||||
result.Errors = append(result.Errors, CSVImportErr{
|
||||
@@ -580,9 +572,16 @@ func (s *Server) HandleExportBOMODS(w http.ResponseWriter, r *http.Request) {
|
||||
childItem, _ := s.items.GetByPartNumber(ctx, e.ChildPartNumber)
|
||||
|
||||
unitCost, hasUnitCost := getMetaFloat(e.Metadata, "unit_cost")
|
||||
if !hasUnitCost && childItem != nil && childItem.StandardCost != nil {
|
||||
unitCost = *childItem.StandardCost
|
||||
hasUnitCost = true
|
||||
if !hasUnitCost && childItem != nil {
|
||||
// Fall back to standard_cost from revision properties
|
||||
if childProps := itemPropsCache[e.ChildPartNumber]; childProps != nil {
|
||||
if sc, ok := childProps["standard_cost"]; ok {
|
||||
if cost, cok := sc.(float64); cok {
|
||||
unitCost = cost
|
||||
hasUnitCost = true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
qty := 0.0
|
||||
@@ -682,6 +681,21 @@ func (s *Server) HandleProjectSheetODS(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
// Build item properties cache for sourcing_link / standard_cost
|
||||
itemPropsMap := make(map[string]map[string]any)
|
||||
for _, item := range items {
|
||||
revisions, err := s.items.GetRevisions(ctx, item.ID)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
for _, rev := range revisions {
|
||||
if rev.RevisionNumber == item.CurrentRevision && rev.Properties != nil {
|
||||
itemPropsMap[item.ID] = rev.Properties
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sheet 1: Items list
|
||||
itemHeaders := []string{
|
||||
"PN", "Type", "Description", "Revision", "Category",
|
||||
@@ -696,6 +710,8 @@ func (s *Server) HandleProjectSheetODS(w http.ResponseWriter, r *http.Request) {
|
||||
itemRows = append(itemRows, ods.Row{Cells: itemHeaderCells})
|
||||
|
||||
for _, item := range items {
|
||||
props := itemPropsMap[item.ID]
|
||||
|
||||
cells := []ods.Cell{
|
||||
ods.StringCell(item.PartNumber),
|
||||
ods.StringCell(item.ItemType),
|
||||
@@ -704,13 +720,17 @@ func (s *Server) HandleProjectSheetODS(w http.ResponseWriter, r *http.Request) {
|
||||
ods.StringCell(parseCategory(item.PartNumber)),
|
||||
ods.StringCell(item.SourcingType),
|
||||
}
|
||||
if item.SourcingLink != nil {
|
||||
cells = append(cells, ods.StringCell(*item.SourcingLink))
|
||||
if sl, ok := props["sourcing_link"]; ok {
|
||||
cells = append(cells, ods.StringCell(formatPropertyValue(sl)))
|
||||
} else {
|
||||
cells = append(cells, ods.EmptyCell())
|
||||
}
|
||||
if item.StandardCost != nil {
|
||||
cells = append(cells, ods.CurrencyCell(*item.StandardCost))
|
||||
if sc, ok := props["standard_cost"]; ok {
|
||||
if cost, cok := sc.(float64); cok {
|
||||
cells = append(cells, ods.CurrencyCell(cost))
|
||||
} else {
|
||||
cells = append(cells, ods.StringCell(formatPropertyValue(sc)))
|
||||
}
|
||||
} else {
|
||||
cells = append(cells, ods.EmptyCell())
|
||||
}
|
||||
@@ -746,9 +766,27 @@ func (s *Server) HandleProjectSheetODS(w http.ResponseWriter, r *http.Request) {
|
||||
for _, e := range bomEntries {
|
||||
childItem, _ := s.items.GetByPartNumber(ctx, e.ChildPartNumber)
|
||||
unitCost, hasUnitCost := getMetaFloat(e.Metadata, "unit_cost")
|
||||
if !hasUnitCost && childItem != nil && childItem.StandardCost != nil {
|
||||
unitCost = *childItem.StandardCost
|
||||
hasUnitCost = true
|
||||
if !hasUnitCost && childItem != nil {
|
||||
// Fall back to standard_cost from revision properties
|
||||
// Ensure child item props are cached
|
||||
if _, cached := itemPropsMap[childItem.ID]; !cached {
|
||||
if revs, err := s.items.GetRevisions(ctx, childItem.ID); err == nil {
|
||||
for _, rev := range revs {
|
||||
if rev.RevisionNumber == childItem.CurrentRevision && rev.Properties != nil {
|
||||
itemPropsMap[childItem.ID] = rev.Properties
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if childRevProps := itemPropsMap[childItem.ID]; childRevProps != nil {
|
||||
if sc, ok := childRevProps["standard_cost"]; ok {
|
||||
if cost, cok := sc.(float64); cok {
|
||||
unitCost = cost
|
||||
hasUnitCost = true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
qty := 0.0
|
||||
if e.Quantity != nil {
|
||||
@@ -957,7 +995,20 @@ func (s *Server) HandleSheetDiff(w http.ResponseWriter, r *http.Request) {
|
||||
if costStr != "" {
|
||||
costStr = strings.TrimLeft(costStr, "$")
|
||||
if cost, err := strconv.ParseFloat(costStr, 64); err == nil {
|
||||
if dbItem.StandardCost == nil || *dbItem.StandardCost != cost {
|
||||
// Compare against standard_cost in revision properties
|
||||
revisions, _ := s.items.GetRevisions(ctx, dbItem.ID)
|
||||
var dbCost *float64
|
||||
for _, rev := range revisions {
|
||||
if rev.RevisionNumber == dbItem.CurrentRevision && rev.Properties != nil {
|
||||
if sc, ok := rev.Properties["standard_cost"]; ok {
|
||||
if c, cok := sc.(float64); cok {
|
||||
dbCost = &c
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
if dbCost == nil || *dbCost != cost {
|
||||
changes["standard_cost"] = cost
|
||||
}
|
||||
}
|
||||
@@ -986,8 +1037,11 @@ func buildBOMRow(itemLabel string, depth int, source, pn string, item *db.Item,
|
||||
|
||||
if item != nil {
|
||||
description = item.Description
|
||||
if sourcingLink == "" && item.SourcingLink != nil {
|
||||
sourcingLink = *item.SourcingLink
|
||||
}
|
||||
// Fall back to sourcing_link from revision properties
|
||||
if sourcingLink == "" && props != nil {
|
||||
if sl, ok := props["sourcing_link"]; ok {
|
||||
sourcingLink = formatPropertyValue(sl)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -86,7 +86,7 @@ func NewRouter(server *Server, logger zerolog.Logger) http.Handler {
|
||||
r.Route("/schemas", func(r chi.Router) {
|
||||
r.Get("/", server.HandleListSchemas)
|
||||
r.Get("/{name}", server.HandleGetSchema)
|
||||
r.Get("/{name}/properties", server.HandleGetPropertySchema)
|
||||
r.Get("/{name}/form", server.HandleGetFormDescriptor)
|
||||
|
||||
r.Group(func(r chi.Router) {
|
||||
r.Use(server.RequireWritable)
|
||||
|
||||
@@ -31,7 +31,7 @@ func (r *ItemRepository) ListItemsWithProperties(ctx context.Context, opts Audit
|
||||
query = `
|
||||
SELECT DISTINCT i.id, i.part_number, i.schema_id, i.item_type, i.description,
|
||||
i.created_at, i.updated_at, i.archived_at, i.current_revision,
|
||||
i.sourcing_type, i.sourcing_link, i.long_description, i.standard_cost,
|
||||
i.sourcing_type, i.long_description,
|
||||
COALESCE(r.properties, '{}'::jsonb) as properties
|
||||
FROM items i
|
||||
LEFT JOIN revisions r ON r.item_id = i.id AND r.revision_number = i.current_revision
|
||||
@@ -45,7 +45,7 @@ func (r *ItemRepository) ListItemsWithProperties(ctx context.Context, opts Audit
|
||||
query = `
|
||||
SELECT i.id, i.part_number, i.schema_id, i.item_type, i.description,
|
||||
i.created_at, i.updated_at, i.archived_at, i.current_revision,
|
||||
i.sourcing_type, i.sourcing_link, i.long_description, i.standard_cost,
|
||||
i.sourcing_type, i.long_description,
|
||||
COALESCE(r.properties, '{}'::jsonb) as properties
|
||||
FROM items i
|
||||
LEFT JOIN revisions r ON r.item_id = i.id AND r.revision_number = i.current_revision
|
||||
@@ -85,7 +85,7 @@ func (r *ItemRepository) ListItemsWithProperties(ctx context.Context, opts Audit
|
||||
err := rows.Scan(
|
||||
&iwp.ID, &iwp.PartNumber, &iwp.SchemaID, &iwp.ItemType, &iwp.Description,
|
||||
&iwp.CreatedAt, &iwp.UpdatedAt, &iwp.ArchivedAt, &iwp.CurrentRevision,
|
||||
&iwp.SourcingType, &iwp.SourcingLink, &iwp.LongDescription, &iwp.StandardCost,
|
||||
&iwp.SourcingType, &iwp.LongDescription,
|
||||
&propsJSON,
|
||||
)
|
||||
if err != nil {
|
||||
|
||||
@@ -24,11 +24,9 @@ type Item struct {
|
||||
CADFilePath *string
|
||||
CreatedBy *string
|
||||
UpdatedBy *string
|
||||
SourcingType string // "manufactured" or "purchased"
|
||||
SourcingLink *string // URL to supplier/datasheet
|
||||
LongDescription *string // extended description
|
||||
StandardCost *float64 // baseline unit cost
|
||||
ThumbnailKey *string // MinIO key for item thumbnail
|
||||
SourcingType string // "manufactured" or "purchased"
|
||||
LongDescription *string // extended description
|
||||
ThumbnailKey *string // MinIO key for item thumbnail
|
||||
}
|
||||
|
||||
// Revision represents a revision record.
|
||||
@@ -96,11 +94,11 @@ func (r *ItemRepository) Create(ctx context.Context, item *Item, properties map[
|
||||
}
|
||||
err := tx.QueryRow(ctx, `
|
||||
INSERT INTO items (part_number, schema_id, item_type, description, created_by,
|
||||
sourcing_type, sourcing_link, long_description, standard_cost)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
|
||||
sourcing_type, long_description)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7)
|
||||
RETURNING id, created_at, updated_at, current_revision
|
||||
`, item.PartNumber, item.SchemaID, item.ItemType, item.Description, item.CreatedBy,
|
||||
sourcingType, item.SourcingLink, item.LongDescription, item.StandardCost,
|
||||
sourcingType, item.LongDescription,
|
||||
).Scan(
|
||||
&item.ID, &item.CreatedAt, &item.UpdatedAt, &item.CurrentRevision,
|
||||
)
|
||||
@@ -133,7 +131,7 @@ func (r *ItemRepository) GetByPartNumber(ctx context.Context, partNumber string)
|
||||
SELECT id, part_number, schema_id, item_type, description,
|
||||
created_at, updated_at, archived_at, current_revision,
|
||||
cad_synced_at, cad_file_path,
|
||||
sourcing_type, sourcing_link, long_description, standard_cost,
|
||||
sourcing_type, long_description,
|
||||
thumbnail_key
|
||||
FROM items
|
||||
WHERE part_number = $1 AND archived_at IS NULL
|
||||
@@ -141,7 +139,7 @@ func (r *ItemRepository) GetByPartNumber(ctx context.Context, partNumber string)
|
||||
&item.ID, &item.PartNumber, &item.SchemaID, &item.ItemType, &item.Description,
|
||||
&item.CreatedAt, &item.UpdatedAt, &item.ArchivedAt, &item.CurrentRevision,
|
||||
&item.CADSyncedAt, &item.CADFilePath,
|
||||
&item.SourcingType, &item.SourcingLink, &item.LongDescription, &item.StandardCost,
|
||||
&item.SourcingType, &item.LongDescription,
|
||||
&item.ThumbnailKey,
|
||||
)
|
||||
if err == pgx.ErrNoRows {
|
||||
@@ -160,7 +158,7 @@ func (r *ItemRepository) GetByID(ctx context.Context, id string) (*Item, error)
|
||||
SELECT id, part_number, schema_id, item_type, description,
|
||||
created_at, updated_at, archived_at, current_revision,
|
||||
cad_synced_at, cad_file_path,
|
||||
sourcing_type, sourcing_link, long_description, standard_cost,
|
||||
sourcing_type, long_description,
|
||||
thumbnail_key
|
||||
FROM items
|
||||
WHERE id = $1
|
||||
@@ -168,7 +166,7 @@ func (r *ItemRepository) GetByID(ctx context.Context, id string) (*Item, error)
|
||||
&item.ID, &item.PartNumber, &item.SchemaID, &item.ItemType, &item.Description,
|
||||
&item.CreatedAt, &item.UpdatedAt, &item.ArchivedAt, &item.CurrentRevision,
|
||||
&item.CADSyncedAt, &item.CADFilePath,
|
||||
&item.SourcingType, &item.SourcingLink, &item.LongDescription, &item.StandardCost,
|
||||
&item.SourcingType, &item.LongDescription,
|
||||
&item.ThumbnailKey,
|
||||
)
|
||||
if err == pgx.ErrNoRows {
|
||||
@@ -192,7 +190,7 @@ func (r *ItemRepository) List(ctx context.Context, opts ListOptions) ([]*Item, e
|
||||
query = `
|
||||
SELECT DISTINCT i.id, i.part_number, i.schema_id, i.item_type, i.description,
|
||||
i.created_at, i.updated_at, i.archived_at, i.current_revision,
|
||||
i.sourcing_type, i.sourcing_link, i.long_description, i.standard_cost,
|
||||
i.sourcing_type, i.long_description,
|
||||
i.thumbnail_key
|
||||
FROM items i
|
||||
JOIN item_projects ip ON ip.item_id = i.id
|
||||
@@ -205,7 +203,7 @@ func (r *ItemRepository) List(ctx context.Context, opts ListOptions) ([]*Item, e
|
||||
query = `
|
||||
SELECT id, part_number, schema_id, item_type, description,
|
||||
created_at, updated_at, archived_at, current_revision,
|
||||
sourcing_type, sourcing_link, long_description, standard_cost,
|
||||
sourcing_type, long_description,
|
||||
thumbnail_key
|
||||
FROM items
|
||||
WHERE archived_at IS NULL
|
||||
@@ -257,7 +255,7 @@ func (r *ItemRepository) List(ctx context.Context, opts ListOptions) ([]*Item, e
|
||||
err := rows.Scan(
|
||||
&item.ID, &item.PartNumber, &item.SchemaID, &item.ItemType, &item.Description,
|
||||
&item.CreatedAt, &item.UpdatedAt, &item.ArchivedAt, &item.CurrentRevision,
|
||||
&item.SourcingType, &item.SourcingLink, &item.LongDescription, &item.StandardCost,
|
||||
&item.SourcingType, &item.LongDescription,
|
||||
&item.ThumbnailKey,
|
||||
)
|
||||
if err != nil {
|
||||
@@ -659,9 +657,7 @@ type UpdateItemFields struct {
|
||||
Description string
|
||||
UpdatedBy *string
|
||||
SourcingType *string
|
||||
SourcingLink *string
|
||||
LongDescription *string
|
||||
StandardCost *float64
|
||||
}
|
||||
|
||||
// Update modifies an item's fields. The UUID remains stable.
|
||||
@@ -670,16 +666,12 @@ func (r *ItemRepository) Update(ctx context.Context, id string, fields UpdateIte
|
||||
UPDATE items
|
||||
SET part_number = $2, item_type = $3, description = $4, updated_by = $5,
|
||||
sourcing_type = COALESCE($6, sourcing_type),
|
||||
sourcing_link = CASE WHEN $7::boolean THEN $8 ELSE sourcing_link END,
|
||||
long_description = CASE WHEN $9::boolean THEN $10 ELSE long_description END,
|
||||
standard_cost = CASE WHEN $11::boolean THEN $12 ELSE standard_cost END,
|
||||
long_description = CASE WHEN $7::boolean THEN $8 ELSE long_description END,
|
||||
updated_at = now()
|
||||
WHERE id = $1 AND archived_at IS NULL
|
||||
`, id, fields.PartNumber, fields.ItemType, fields.Description, fields.UpdatedBy,
|
||||
fields.SourcingType,
|
||||
fields.SourcingLink != nil, fields.SourcingLink,
|
||||
fields.LongDescription != nil, fields.LongDescription,
|
||||
fields.StandardCost != nil, fields.StandardCost,
|
||||
)
|
||||
if err != nil {
|
||||
return fmt.Errorf("updating item: %w", err)
|
||||
|
||||
@@ -134,12 +134,10 @@ func TestItemUpdate(t *testing.T) {
|
||||
t.Fatalf("Create: %v", err)
|
||||
}
|
||||
|
||||
cost := 42.50
|
||||
err := repo.Update(ctx, item.ID, UpdateItemFields{
|
||||
PartNumber: "UPD-001",
|
||||
ItemType: "part",
|
||||
Description: "updated",
|
||||
StandardCost: &cost,
|
||||
PartNumber: "UPD-001",
|
||||
ItemType: "part",
|
||||
Description: "updated",
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("Update: %v", err)
|
||||
@@ -149,9 +147,6 @@ func TestItemUpdate(t *testing.T) {
|
||||
if got.Description != "updated" {
|
||||
t.Errorf("description: got %q, want %q", got.Description, "updated")
|
||||
}
|
||||
if got.StandardCost == nil || *got.StandardCost != 42.50 {
|
||||
t.Errorf("standard_cost: got %v, want 42.50", got.StandardCost)
|
||||
}
|
||||
}
|
||||
|
||||
func TestItemArchiveUnarchive(t *testing.T) {
|
||||
|
||||
@@ -240,7 +240,7 @@ func (r *ProjectRepository) GetItemsForProject(ctx context.Context, projectID st
|
||||
SELECT i.id, i.part_number, i.schema_id, i.item_type, i.description,
|
||||
i.created_at, i.updated_at, i.archived_at, i.current_revision,
|
||||
i.cad_synced_at, i.cad_file_path,
|
||||
i.sourcing_type, i.sourcing_link, i.long_description, i.standard_cost,
|
||||
i.sourcing_type, i.long_description,
|
||||
i.thumbnail_key
|
||||
FROM items i
|
||||
JOIN item_projects ip ON ip.item_id = i.id
|
||||
@@ -259,7 +259,7 @@ func (r *ProjectRepository) GetItemsForProject(ctx context.Context, projectID st
|
||||
&item.ID, &item.PartNumber, &item.SchemaID, &item.ItemType, &item.Description,
|
||||
&item.CreatedAt, &item.UpdatedAt, &item.ArchivedAt, &item.CurrentRevision,
|
||||
&item.CADSyncedAt, &item.CADFilePath,
|
||||
&item.SourcingType, &item.SourcingLink, &item.LongDescription, &item.StandardCost,
|
||||
&item.SourcingType, &item.LongDescription,
|
||||
&item.ThumbnailKey,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
|
||||
@@ -22,6 +22,55 @@ type Schema struct {
|
||||
Format string `yaml:"format"`
|
||||
Examples []string `yaml:"examples"`
|
||||
PropertySchemas *PropertySchemas `yaml:"property_schemas,omitempty"`
|
||||
UI *UIConfig `yaml:"ui,omitempty" json:"ui,omitempty"`
|
||||
}
|
||||
|
||||
// UIConfig defines form layout for all clients.
|
||||
type UIConfig struct {
|
||||
CategoryPicker *CategoryPickerConfig `yaml:"category_picker,omitempty" json:"category_picker,omitempty"`
|
||||
ItemFields map[string]ItemFieldDef `yaml:"item_fields,omitempty" json:"item_fields,omitempty"`
|
||||
FieldGroups map[string]FieldGroup `yaml:"field_groups,omitempty" json:"field_groups"`
|
||||
CategoryFieldGroups map[string]map[string]FieldGroup `yaml:"category_field_groups,omitempty" json:"category_field_groups,omitempty"`
|
||||
FieldOverrides map[string]FieldOverride `yaml:"field_overrides,omitempty" json:"field_overrides,omitempty"`
|
||||
}
|
||||
|
||||
// CategoryPickerConfig defines how the category picker is rendered.
|
||||
type CategoryPickerConfig struct {
|
||||
Style string `yaml:"style" json:"style"`
|
||||
Stages []CategoryPickerStage `yaml:"stages" json:"stages"`
|
||||
}
|
||||
|
||||
// CategoryPickerStage defines one stage of a multi-stage category picker.
|
||||
type CategoryPickerStage struct {
|
||||
Name string `yaml:"name" json:"name"`
|
||||
Label string `yaml:"label" json:"label"`
|
||||
Values map[string]string `yaml:"values,omitempty" json:"values,omitempty"`
|
||||
}
|
||||
|
||||
// ItemFieldDef defines a field stored on the items table (not in revision properties).
|
||||
type ItemFieldDef struct {
|
||||
Type string `yaml:"type" json:"type"`
|
||||
Widget string `yaml:"widget" json:"widget"`
|
||||
Label string `yaml:"label" json:"label"`
|
||||
Required bool `yaml:"required,omitempty" json:"required,omitempty"`
|
||||
Default any `yaml:"default,omitempty" json:"default,omitempty"`
|
||||
Options []string `yaml:"options,omitempty" json:"options,omitempty"`
|
||||
DerivedFromCategory map[string]string `yaml:"derived_from_category,omitempty" json:"derived_from_category,omitempty"`
|
||||
SearchEndpoint string `yaml:"search_endpoint,omitempty" json:"search_endpoint,omitempty"`
|
||||
}
|
||||
|
||||
// FieldGroup defines an ordered group of fields for form layout.
type FieldGroup struct {
	// Label is the group's heading.
	Label string `yaml:"label" json:"label"`
	// Order controls the group's position relative to other groups
	// (lower renders first; per-category groups in the shipped schema use 10).
	Order int `yaml:"order" json:"order"`
	// Fields lists field names in display order; each must resolve to a
	// known field (enforced by Schema.ValidateUI).
	Fields []string `yaml:"fields" json:"fields"`
}
|
||||
|
||||
// FieldOverride overrides display hints for a field.
type FieldOverride struct {
	// Widget replaces the field's default input control (e.g. "currency", "url").
	Widget string `yaml:"widget,omitempty" json:"widget,omitempty"`
	// Currency is the currency code used by currency widgets (e.g. "USD").
	Currency string `yaml:"currency,omitempty" json:"currency,omitempty"`
	// Options replaces the field's allowed values for select widgets.
	Options []string `yaml:"options,omitempty" json:"options,omitempty"`
}
|
||||
|
||||
// PropertySchemas defines property schemas per category.
|
||||
@@ -180,6 +229,10 @@ func (s *Schema) Validate() error {
|
||||
}
|
||||
}
|
||||
|
||||
if err := s.ValidateUI(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -224,6 +277,112 @@ func (seg *Segment) Validate() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// ValuesByDomain groups category enum values by their first character (domain prefix).
|
||||
func (s *Schema) ValuesByDomain() map[string]map[string]string {
|
||||
catSeg := s.GetSegment("category")
|
||||
if catSeg == nil {
|
||||
return nil
|
||||
}
|
||||
result := make(map[string]map[string]string)
|
||||
for code, desc := range catSeg.Values {
|
||||
if len(code) == 0 {
|
||||
continue
|
||||
}
|
||||
domain := string(code[0])
|
||||
if result[domain] == nil {
|
||||
result[domain] = make(map[string]string)
|
||||
}
|
||||
result[domain][code] = desc
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// ValidateUI validates the UI configuration against property schemas and segments.
|
||||
func (s *Schema) ValidateUI() error {
|
||||
if s.UI == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Build a set of all known fields (item_fields + property defaults)
|
||||
knownGlobal := make(map[string]bool)
|
||||
if s.UI.ItemFields != nil {
|
||||
for k := range s.UI.ItemFields {
|
||||
knownGlobal[k] = true
|
||||
}
|
||||
}
|
||||
if s.PropertySchemas != nil {
|
||||
for k := range s.PropertySchemas.Defaults {
|
||||
knownGlobal[k] = true
|
||||
}
|
||||
}
|
||||
|
||||
// Validate field_groups: every field must be a known global field
|
||||
for groupKey, group := range s.UI.FieldGroups {
|
||||
for _, field := range group.Fields {
|
||||
if !knownGlobal[field] {
|
||||
return fmt.Errorf("ui.field_groups.%s: field %q not found in item_fields or property_schemas.defaults", groupKey, field)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Validate category_field_groups: every field must exist in the category's property schema
|
||||
if s.PropertySchemas != nil {
|
||||
for prefix, groups := range s.UI.CategoryFieldGroups {
|
||||
catProps := s.PropertySchemas.Categories[prefix]
|
||||
for groupKey, group := range groups {
|
||||
for _, field := range group.Fields {
|
||||
if catProps == nil {
|
||||
return fmt.Errorf("ui.category_field_groups.%s.%s: category prefix %q has no property schema", prefix, groupKey, prefix)
|
||||
}
|
||||
if _, ok := catProps[field]; !ok {
|
||||
return fmt.Errorf("ui.category_field_groups.%s.%s: field %q not found in property_schemas.categories.%s", prefix, groupKey, field, prefix)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Validate field_overrides: keys must be known fields
|
||||
for key := range s.UI.FieldOverrides {
|
||||
if !knownGlobal[key] {
|
||||
// Also check category-level properties
|
||||
found := false
|
||||
if s.PropertySchemas != nil {
|
||||
for _, catProps := range s.PropertySchemas.Categories {
|
||||
if _, ok := catProps[key]; ok {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
return fmt.Errorf("ui.field_overrides: field %q not found in any property schema", key)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Validate category_picker stages: first stage values must be valid domain prefixes
|
||||
if s.UI.CategoryPicker != nil && len(s.UI.CategoryPicker.Stages) > 0 {
|
||||
catSeg := s.GetSegment("category")
|
||||
if catSeg != nil {
|
||||
validPrefixes := make(map[string]bool)
|
||||
for code := range catSeg.Values {
|
||||
if len(code) > 0 {
|
||||
validPrefixes[string(code[0])] = true
|
||||
}
|
||||
}
|
||||
firstStage := s.UI.CategoryPicker.Stages[0]
|
||||
for key := range firstStage.Values {
|
||||
if !validPrefixes[key] {
|
||||
return fmt.Errorf("ui.category_picker.stages[0]: value %q is not a valid category prefix", key)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetSegment returns a segment by name.
|
||||
func (s *Schema) GetSegment(name string) *Segment {
|
||||
for i := range s.Segments {
|
||||
|
||||
25
migrations/013_move_cost_sourcing_to_props.sql
Normal file
25
migrations/013_move_cost_sourcing_to_props.sql
Normal file
@@ -0,0 +1,25 @@
|
||||
-- Migration 013: Move sourcing_link and standard_cost to revision properties
--
-- These fields are being deduplicated from the items table into revision
-- properties (JSONB). The YAML property_schemas.defaults already defines
-- them, so they belong in the properties system rather than as standalone
-- columns.
--
-- Idempotency: the deploy script re-runs every migration on each deploy, so
-- the data copy only executes while the source columns still exist, and the
-- drops use IF EXISTS. Re-running this file is a no-op.

-- Step 1: Copy sourcing_link and standard_cost from items into the current
-- revision's properties JSONB for every item that has non-null values.
-- Wrapped in a DO block so the UPDATE (which references the soon-to-be-
-- dropped columns) is skipped once the columns are gone; PL/pgSQL plans
-- statements lazily, so the untaken branch never resolves the columns.
DO $$
BEGIN
    IF EXISTS (
        SELECT 1
        FROM information_schema.columns
        WHERE table_name = 'items' AND column_name = 'sourcing_link'
    ) THEN
        UPDATE revisions r
        SET properties = r.properties
            || CASE WHEN i.sourcing_link IS NOT NULL
                    THEN jsonb_build_object('sourcing_link', i.sourcing_link)
                    ELSE '{}'::jsonb END
            || CASE WHEN i.standard_cost IS NOT NULL
                    THEN jsonb_build_object('standard_cost', i.standard_cost)
                    ELSE '{}'::jsonb END
        FROM items i
        WHERE r.item_id = i.id
          AND r.revision_number = i.current_revision
          AND (i.sourcing_link IS NOT NULL OR i.standard_cost IS NOT NULL);
    END IF;
END $$;

-- Step 2: Drop the columns from the items table (no-op on re-run).
ALTER TABLE items DROP COLUMN IF EXISTS sourcing_link;
ALTER TABLE items DROP COLUMN IF EXISTS standard_cost;
|
||||
@@ -846,3 +846,255 @@ schema:
|
||||
type: string
|
||||
default: ""
|
||||
description: "Inspection/QC requirements"
|
||||
|
||||
# UI configuration — drives form rendering for all clients.
|
||||
ui:
|
||||
category_picker:
|
||||
style: multi_stage
|
||||
stages:
|
||||
- name: domain
|
||||
label: "Domain"
|
||||
values:
|
||||
F: "Fasteners"
|
||||
C: "Fluid Fittings"
|
||||
R: "Motion"
|
||||
S: "Structural"
|
||||
E: "Electrical"
|
||||
M: "Mechanical"
|
||||
T: "Tooling"
|
||||
A: "Assemblies"
|
||||
P: "Purchased"
|
||||
X: "Custom Fabricated"
|
||||
|
||||
# Item-level fields (stored on items table, not in revision properties)
|
||||
item_fields:
|
||||
description:
|
||||
type: string
|
||||
widget: text
|
||||
label: "Description"
|
||||
item_type:
|
||||
type: string
|
||||
widget: select
|
||||
label: "Type"
|
||||
options: [part, assembly, consumable, tool]
|
||||
derived_from_category:
|
||||
A: assembly
|
||||
T: tool
|
||||
default: part
|
||||
sourcing_type:
|
||||
type: string
|
||||
widget: select
|
||||
label: "Sourcing Type"
|
||||
options: [manufactured, purchased]
|
||||
default: manufactured
|
||||
long_description:
|
||||
type: string
|
||||
widget: textarea
|
||||
label: "Long Description"
|
||||
projects:
|
||||
type: string_array
|
||||
widget: tag_input
|
||||
label: "Projects"
|
||||
search_endpoint: "/api/projects"
|
||||
|
||||
field_groups:
|
||||
identity:
|
||||
label: "Identity"
|
||||
order: 1
|
||||
fields: [item_type, description]
|
||||
sourcing:
|
||||
label: "Sourcing"
|
||||
order: 2
|
||||
fields:
|
||||
[
|
||||
sourcing_type,
|
||||
manufacturer,
|
||||
manufacturer_pn,
|
||||
supplier,
|
||||
supplier_pn,
|
||||
sourcing_link,
|
||||
]
|
||||
cost:
|
||||
label: "Cost & Lead Time"
|
||||
order: 3
|
||||
fields: [standard_cost, lead_time_days, minimum_order_qty]
|
||||
status:
|
||||
label: "Status"
|
||||
order: 4
|
||||
fields: [lifecycle_status, rohs_compliant, country_of_origin]
|
||||
details:
|
||||
label: "Details"
|
||||
order: 5
|
||||
fields: [long_description, projects, notes]
|
||||
|
||||
# Per-category-prefix field groups (rendered after global groups)
|
||||
category_field_groups:
|
||||
F:
|
||||
fastener_specs:
|
||||
label: "Fastener Specifications"
|
||||
order: 10
|
||||
fields:
|
||||
[
|
||||
material,
|
||||
finish,
|
||||
thread_size,
|
||||
thread_pitch,
|
||||
length,
|
||||
head_type,
|
||||
drive_type,
|
||||
strength_grade,
|
||||
torque_spec,
|
||||
]
|
||||
C:
|
||||
fitting_specs:
|
||||
label: "Fitting Specifications"
|
||||
order: 10
|
||||
fields:
|
||||
[
|
||||
material,
|
||||
connection_type,
|
||||
size_1,
|
||||
size_2,
|
||||
pressure_rating,
|
||||
temperature_min,
|
||||
temperature_max,
|
||||
media_compatibility,
|
||||
seal_material,
|
||||
]
|
||||
R:
|
||||
motion_specs:
|
||||
label: "Motion Specifications"
|
||||
order: 10
|
||||
fields:
|
||||
[
|
||||
load_capacity,
|
||||
speed_rating,
|
||||
power_rating,
|
||||
voltage_nominal,
|
||||
current_nominal,
|
||||
torque_continuous,
|
||||
bore_diameter,
|
||||
travel,
|
||||
stroke,
|
||||
operating_pressure,
|
||||
]
|
||||
S:
|
||||
structural_specs:
|
||||
label: "Structural Specifications"
|
||||
order: 10
|
||||
fields:
|
||||
[
|
||||
material,
|
||||
material_spec,
|
||||
profile,
|
||||
dimension_a,
|
||||
dimension_b,
|
||||
wall_thickness,
|
||||
length,
|
||||
weight_per_length,
|
||||
finish,
|
||||
temper,
|
||||
]
|
||||
E:
|
||||
electrical_specs:
|
||||
label: "Electrical Specifications"
|
||||
order: 10
|
||||
fields:
|
||||
[
|
||||
voltage_rating,
|
||||
current_rating,
|
||||
power_rating,
|
||||
value,
|
||||
tolerance,
|
||||
package,
|
||||
mounting,
|
||||
pin_count,
|
||||
wire_gauge,
|
||||
connector_type,
|
||||
]
|
||||
M:
|
||||
mechanical_specs:
|
||||
label: "Mechanical Specifications"
|
||||
order: 10
|
||||
fields:
|
||||
[
|
||||
material,
|
||||
spring_rate,
|
||||
free_length,
|
||||
max_load,
|
||||
travel,
|
||||
inner_diameter,
|
||||
outer_diameter,
|
||||
hardness,
|
||||
temperature_range,
|
||||
]
|
||||
T:
|
||||
tooling_specs:
|
||||
label: "Tooling Specifications"
|
||||
order: 10
|
||||
fields:
|
||||
[
|
||||
material,
|
||||
tolerance,
|
||||
surface_finish,
|
||||
hardness,
|
||||
associated_part,
|
||||
machine,
|
||||
cycle_life,
|
||||
]
|
||||
A:
|
||||
assembly_specs:
|
||||
label: "Assembly Specifications"
|
||||
order: 10
|
||||
fields:
|
||||
[
|
||||
weight,
|
||||
dimensions,
|
||||
component_count,
|
||||
assembly_time,
|
||||
test_procedure,
|
||||
voltage_rating,
|
||||
current_rating,
|
||||
ip_rating,
|
||||
]
|
||||
P:
|
||||
purchased_specs:
|
||||
label: "Purchased Item Specifications"
|
||||
order: 10
|
||||
fields:
|
||||
[
|
||||
material,
|
||||
form,
|
||||
grade,
|
||||
quantity_per_unit,
|
||||
unit_of_measure,
|
||||
shelf_life,
|
||||
]
|
||||
X:
|
||||
fabrication_specs:
|
||||
label: "Fabrication Specifications"
|
||||
order: 10
|
||||
fields:
|
||||
[
|
||||
material,
|
||||
material_spec,
|
||||
finish,
|
||||
critical_dimensions,
|
||||
weight,
|
||||
process,
|
||||
secondary_operations,
|
||||
drawing_rev,
|
||||
inspection_requirements,
|
||||
]
|
||||
|
||||
field_overrides:
|
||||
standard_cost:
|
||||
widget: currency
|
||||
currency: USD
|
||||
sourcing_link:
|
||||
widget: url
|
||||
lifecycle_status:
|
||||
widget: select
|
||||
options: [active, deprecated, obsolete, prototype]
|
||||
rohs_compliant:
|
||||
widget: checkbox
|
||||
|
||||
@@ -1,18 +1,23 @@
|
||||
#!/bin/bash
|
||||
# Deploy Silo to silo.kindred.internal
|
||||
# Deploy Silo to a target host
|
||||
#
|
||||
# Usage: ./scripts/deploy.sh [host]
|
||||
# host defaults to silo.kindred.internal
|
||||
# host defaults to SILO_DEPLOY_TARGET env var, or silo.example.internal
|
||||
#
|
||||
# Prerequisites:
|
||||
# - SSH access to the target host
|
||||
# - /etc/silo/silod.env must exist on target with credentials filled in
|
||||
# - PostgreSQL reachable from target at psql.kindred.internal
|
||||
# - MinIO reachable from target at minio.kindred.internal
|
||||
# - PostgreSQL reachable from target (set SILO_DB_HOST to override)
|
||||
# - MinIO reachable from target (set SILO_MINIO_HOST to override)
|
||||
#
|
||||
# Environment variables:
|
||||
# SILO_DEPLOY_TARGET - target host (default: silo.example.internal)
|
||||
# SILO_DB_HOST - PostgreSQL host (default: psql.example.internal)
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
TARGET="${1:-silo.kindred.internal}"
|
||||
TARGET="${1:-${SILO_DEPLOY_TARGET:-silo.example.internal}}"
|
||||
DB_HOST="${SILO_DB_HOST:-psql.example.internal}"
|
||||
DEPLOY_DIR="/opt/silo"
|
||||
CONFIG_DIR="/etc/silo"
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
@@ -104,7 +109,7 @@ echo " Files installed to $DEPLOY_DIR"
|
||||
REMOTE
|
||||
|
||||
echo "[6/6] Running migrations and starting service..."
|
||||
ssh "$TARGET" bash -s <<'REMOTE'
|
||||
ssh "$TARGET" DB_HOST="$DB_HOST" bash -s <<'REMOTE'
|
||||
set -euo pipefail
|
||||
|
||||
DEPLOY_DIR="/opt/silo"
|
||||
@@ -123,14 +128,14 @@ if command -v psql &>/dev/null && [ -n "${SILO_DB_PASSWORD:-}" ]; then
|
||||
for f in "$DEPLOY_DIR/migrations/"*.sql; do
|
||||
echo " $(basename "$f")"
|
||||
PGPASSWORD="$SILO_DB_PASSWORD" psql \
|
||||
-h psql.kindred.internal -p 5432 \
|
||||
-h "$DB_HOST" -p 5432 \
|
||||
-U silo -d silo \
|
||||
-f "$f" -q 2>&1 | grep -v "already exists" || true
|
||||
done
|
||||
echo " Migrations complete."
|
||||
else
|
||||
echo " WARNING: psql not available or SILO_DB_PASSWORD not set, skipping migrations."
|
||||
echo " Run migrations manually: PGPASSWORD=... psql -h psql.kindred.internal -U silo -d silo -f /opt/silo/migrations/NNN_name.sql"
|
||||
echo " Run migrations manually: PGPASSWORD=... psql -h $DB_HOST -U silo -d silo -f /opt/silo/migrations/NNN_name.sql"
|
||||
fi
|
||||
|
||||
# Start service
|
||||
|
||||
344
scripts/setup-docker.sh
Executable file
344
scripts/setup-docker.sh
Executable file
@@ -0,0 +1,344 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Silo Docker Setup Script
|
||||
# Generates .env and config.docker.yaml for the all-in-one Docker Compose stack.
|
||||
#
|
||||
# Usage:
|
||||
# ./scripts/setup-docker.sh # interactive
|
||||
# ./scripts/setup-docker.sh --non-interactive # use defaults / env vars
|
||||
# ./scripts/setup-docker.sh --domain silo.example.com
|
||||
# ./scripts/setup-docker.sh --with-nginx
|
||||
#
|
||||
# Output:
|
||||
# deployments/.env - Docker Compose environment variables
|
||||
# deployments/config.docker.yaml - Silo server configuration
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Colors (disabled if not a terminal, so piped/redirected output stays clean)
if [[ -t 1 ]]; then
  RED='\033[0;31m'
  GREEN='\033[0;32m'
  YELLOW='\033[1;33m'
  BLUE='\033[0;34m'
  BOLD='\033[1m'
  NC='\033[0m'
else
  RED='' GREEN='' YELLOW='' BLUE='' BOLD='' NC=''
fi

# Logging helpers: tagged, colorized output. Errors go to stderr.
log_info() { echo -e "${BLUE}[INFO]${NC} $*"; }
log_success() { echo -e "${GREEN}[OK]${NC} $*"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $*"; }
log_error() { echo -e "${RED}[ERROR]${NC} $*" >&2; }
|
||||
|
||||
# ---------------------------------------------------------------------------
# Defaults
# ---------------------------------------------------------------------------
DOMAIN="localhost"      # overridden by --domain or the interactive prompt
NON_INTERACTIVE=false   # set by --non-interactive
WITH_NGINX=false        # set by --with-nginx (affects BASE_URL and printed hints)
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_DIR="${SCRIPT_DIR}/.."
OUTPUT_DIR="${PROJECT_DIR}/deployments"   # overridden by --output-dir
|
||||
|
||||
# ---------------------------------------------------------------------------
# Parse arguments
# ---------------------------------------------------------------------------
while [[ $# -gt 0 ]]; do
  case "$1" in
    --non-interactive) NON_INTERACTIVE=true; shift ;;
    --domain) DOMAIN="$2"; shift 2 ;;
    --with-nginx) WITH_NGINX=true; shift ;;
    --output-dir) OUTPUT_DIR="$2"; shift 2 ;;
    -h|--help)
      echo "Usage: $0 [OPTIONS]"
      echo ""
      echo "Options:"
      echo "  --non-interactive   Use defaults and env vars, no prompts"
      echo "  --domain DOMAIN     Server hostname (default: localhost)"
      echo "  --with-nginx        Print instructions for the nginx profile"
      echo "  --output-dir DIR    Output directory (default: ./deployments)"
      echo "  -h, --help          Show this help"
      exit 0
      ;;
    *) log_error "Unknown option: $1"; exit 1 ;;
  esac
done
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
# generate_secret [LEN]
# Emit LEN bytes of randomness as hex (2*LEN hex characters; LEN defaults
# to 32). Prefers openssl; falls back to /dev/urandom + od when openssl is
# missing or fails.
generate_secret() {
  local len="${1:-32}"
  if ! openssl rand -hex "$len" 2>/dev/null; then
    head -c "$len" /dev/urandom | od -An -tx1 | tr -d ' \n'
  fi
}
|
||||
|
||||
# prompt VAR_NAME PROMPT_TEXT DEFAULT
# Read a value into the variable named VAR_NAME, falling back to DEFAULT on
# empty input or when running with --non-interactive.
prompt() {
  local var_name="$1" prompt_text="$2" default="$3"
  if [[ "$NON_INTERACTIVE" == "true" ]]; then
    # Fix: assign with printf -v instead of eval. eval re-parses the value,
    # so a default/input containing double quotes, backticks or $(...)
    # would break the assignment or execute arbitrary code.
    printf -v "$var_name" '%s' "$default"
    return
  fi
  local input
  read -r -p "$(echo -e "${BOLD}${prompt_text}${NC} [${default}]: ")" input
  printf -v "$var_name" '%s' "${input:-$default}"
}
|
||||
|
||||
# prompt_secret VAR_NAME PROMPT_TEXT DEFAULT
# Like prompt, but advertises "[auto-generated]" instead of echoing the
# default secret back to the terminal.
prompt_secret() {
  local var_name="$1" prompt_text="$2" default="$3"
  if [[ "$NON_INTERACTIVE" == "true" ]]; then
    # Fix: assign with printf -v instead of eval — eval re-parses the
    # value, so a secret containing quotes, backticks or $(...) would
    # break the assignment or execute arbitrary code.
    printf -v "$var_name" '%s' "$default"
    return
  fi
  local input
  read -r -p "$(echo -e "${BOLD}${prompt_text}${NC} [auto-generated]: ")" input
  printf -v "$var_name" '%s' "${input:-$default}"
}
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Banner
|
||||
# ---------------------------------------------------------------------------
|
||||
echo ""
|
||||
echo -e "${BOLD}Silo Docker Setup${NC}"
|
||||
echo "Generates configuration for the all-in-one Docker Compose stack."
|
||||
echo ""
|
||||
|
||||
# Check for existing files
|
||||
if [[ -f "${OUTPUT_DIR}/.env" ]]; then
|
||||
log_warn "deployments/.env already exists."
|
||||
if [[ "$NON_INTERACTIVE" == "false" ]]; then
|
||||
read -r -p "Overwrite? [y/N]: " overwrite
|
||||
if [[ "${overwrite,,}" != "y" ]]; then
|
||||
log_info "Aborted."
|
||||
exit 0
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Gather configuration
|
||||
# ---------------------------------------------------------------------------
|
||||
log_info "Gathering configuration..."
|
||||
echo ""
|
||||
|
||||
# Domain / base URL
|
||||
prompt DOMAIN "Server domain" "$DOMAIN"
|
||||
|
||||
if [[ "$WITH_NGINX" == "true" ]]; then
|
||||
BASE_URL="http://${DOMAIN}"
|
||||
elif [[ "$DOMAIN" == "localhost" ]]; then
|
||||
BASE_URL="http://localhost:8080"
|
||||
else
|
||||
BASE_URL="http://${DOMAIN}:8080"
|
||||
fi
|
||||
|
||||
# PostgreSQL
|
||||
PG_PASSWORD_DEFAULT="$(generate_secret 16)"
|
||||
prompt_secret POSTGRES_PASSWORD "PostgreSQL password" "$PG_PASSWORD_DEFAULT"
|
||||
|
||||
# MinIO
|
||||
MINIO_AK_DEFAULT="$(generate_secret 10)"
|
||||
MINIO_SK_DEFAULT="$(generate_secret 16)"
|
||||
prompt_secret MINIO_ACCESS_KEY "MinIO access key" "$MINIO_AK_DEFAULT"
|
||||
prompt_secret MINIO_SECRET_KEY "MinIO secret key" "$MINIO_SK_DEFAULT"
|
||||
|
||||
# OpenLDAP
|
||||
LDAP_ADMIN_PW_DEFAULT="$(generate_secret 16)"
|
||||
prompt_secret LDAP_ADMIN_PASSWORD "LDAP admin password" "$LDAP_ADMIN_PW_DEFAULT"
|
||||
prompt LDAP_USERS "LDAP initial username" "siloadmin"
|
||||
LDAP_USER_PW_DEFAULT="$(generate_secret 12)"
|
||||
prompt_secret LDAP_PASSWORDS "LDAP initial user password" "$LDAP_USER_PW_DEFAULT"
|
||||
|
||||
# Session secret
|
||||
SESSION_SECRET="$(generate_secret 32)"
|
||||
|
||||
# Silo local admin (fallback when LDAP is unavailable)
|
||||
prompt SILO_ADMIN_USERNAME "Silo local admin username" "admin"
|
||||
ADMIN_PW_DEFAULT="$(generate_secret 12)"
|
||||
prompt_secret SILO_ADMIN_PASSWORD "Silo local admin password" "$ADMIN_PW_DEFAULT"
|
||||
|
||||
echo ""
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Write .env
|
||||
# ---------------------------------------------------------------------------
|
||||
log_info "Writing ${OUTPUT_DIR}/.env ..."
|
||||
|
||||
cat > "${OUTPUT_DIR}/.env" << EOF
|
||||
# Generated by setup-docker.sh on $(date +%Y-%m-%d)
|
||||
# Used by: docker compose -f deployments/docker-compose.allinone.yaml
|
||||
|
||||
# PostgreSQL
|
||||
POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
|
||||
|
||||
# MinIO
|
||||
MINIO_ACCESS_KEY=${MINIO_ACCESS_KEY}
|
||||
MINIO_SECRET_KEY=${MINIO_SECRET_KEY}
|
||||
|
||||
# OpenLDAP
|
||||
LDAP_ADMIN_PASSWORD=${LDAP_ADMIN_PASSWORD}
|
||||
LDAP_USERS=${LDAP_USERS}
|
||||
LDAP_PASSWORDS=${LDAP_PASSWORDS}
|
||||
|
||||
# Silo
|
||||
SILO_SESSION_SECRET=${SESSION_SECRET}
|
||||
SILO_ADMIN_USERNAME=${SILO_ADMIN_USERNAME}
|
||||
SILO_ADMIN_PASSWORD=${SILO_ADMIN_PASSWORD}
|
||||
SILO_BASE_URL=${BASE_URL}
|
||||
|
||||
# Uncomment if using OIDC (Keycloak)
|
||||
# SILO_OIDC_CLIENT_SECRET=
|
||||
EOF
|
||||
|
||||
chmod 600 "${OUTPUT_DIR}/.env"
|
||||
log_success "deployments/.env written"
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Write config.docker.yaml
|
||||
# ---------------------------------------------------------------------------
|
||||
log_info "Writing ${OUTPUT_DIR}/config.docker.yaml ..."
|
||||
|
||||
# Note: Values wrapped in ${VAR} (inside the single-quoted YAMLEOF blocks)
|
||||
# are NOT expanded by bash — they are written literally into the YAML file
|
||||
# and expanded at runtime by the Go config loader via os.ExpandEnv().
|
||||
# The base_url and cors origin use the bash variable directly since
|
||||
# SILO_SERVER_BASE_URL is not a supported direct override in the Go loader.
|
||||
{
|
||||
cat << 'YAMLEOF'
|
||||
# Silo Configuration — Docker Compose (all-in-one)
|
||||
# Generated by scripts/setup-docker.sh
|
||||
#
|
||||
# Values using ${VAR} syntax are expanded from environment variables at
|
||||
# startup. Direct env var overrides (SILO_DB_PASSWORD, etc.) take precedence
|
||||
# over YAML values — see docs/CONFIGURATION.md for the full reference.
|
||||
|
||||
server:
|
||||
host: "0.0.0.0"
|
||||
port: 8080
|
||||
YAMLEOF
|
||||
|
||||
cat << EOF
|
||||
base_url: "${BASE_URL}"
|
||||
EOF
|
||||
|
||||
cat << 'YAMLEOF'
|
||||
|
||||
database:
|
||||
host: "postgres"
|
||||
port: 5432
|
||||
name: "silo"
|
||||
user: "silo"
|
||||
password: "${SILO_DB_PASSWORD}"
|
||||
sslmode: "disable"
|
||||
max_connections: 10
|
||||
|
||||
storage:
|
||||
endpoint: "minio:9000"
|
||||
access_key: "${SILO_MINIO_ACCESS_KEY}"
|
||||
secret_key: "${SILO_MINIO_SECRET_KEY}"
|
||||
bucket: "silo-files"
|
||||
use_ssl: false
|
||||
region: "us-east-1"
|
||||
|
||||
schemas:
|
||||
directory: "/etc/silo/schemas"
|
||||
default: "kindred-rd"
|
||||
|
||||
freecad:
|
||||
uri_scheme: "silo"
|
||||
|
||||
auth:
|
||||
enabled: true
|
||||
session_secret: "${SILO_SESSION_SECRET}"
|
||||
|
||||
# Local accounts (fallback when LDAP is unavailable)
|
||||
local:
|
||||
enabled: true
|
||||
default_admin_username: "${SILO_ADMIN_USERNAME}"
|
||||
default_admin_password: "${SILO_ADMIN_PASSWORD}"
|
||||
|
||||
# OpenLDAP (provided by the Docker Compose stack)
|
||||
ldap:
|
||||
enabled: true
|
||||
url: "ldap://openldap:1389"
|
||||
base_dn: "dc=silo,dc=local"
|
||||
user_search_dn: "ou=users,dc=silo,dc=local"
|
||||
user_attr: "cn"
|
||||
email_attr: "mail"
|
||||
display_attr: "cn"
|
||||
group_attr: "memberOf"
|
||||
role_mapping:
|
||||
admin:
|
||||
- "cn=silo-admins,ou=groups,dc=silo,dc=local"
|
||||
editor:
|
||||
- "cn=silo-users,ou=groups,dc=silo,dc=local"
|
||||
viewer:
|
||||
- "cn=silo-viewers,ou=groups,dc=silo,dc=local"
|
||||
tls_skip_verify: false
|
||||
|
||||
oidc:
|
||||
enabled: false
|
||||
|
||||
cors:
|
||||
allowed_origins:
|
||||
YAMLEOF
|
||||
|
||||
cat << EOF
|
||||
- "${BASE_URL}"
|
||||
EOF
|
||||
} > "${OUTPUT_DIR}/config.docker.yaml"
|
||||
|
||||
log_success "deployments/config.docker.yaml written"
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Summary
|
||||
# ---------------------------------------------------------------------------
|
||||
echo ""
|
||||
echo -e "${BOLD}============================================${NC}"
|
||||
echo -e "${BOLD}Setup complete!${NC}"
|
||||
echo -e "${BOLD}============================================${NC}"
|
||||
echo ""
|
||||
echo "Generated files:"
|
||||
echo " deployments/.env - secrets and credentials"
|
||||
echo " deployments/config.docker.yaml - server configuration"
|
||||
echo ""
|
||||
echo -e "${BOLD}Credentials:${NC}"
|
||||
echo " PostgreSQL: silo / ${POSTGRES_PASSWORD}"
|
||||
echo " MinIO: ${MINIO_ACCESS_KEY} / ${MINIO_SECRET_KEY}"
|
||||
echo " MinIO Console: http://localhost:9001"
|
||||
echo " LDAP Admin: cn=admin,dc=silo,dc=local / ${LDAP_ADMIN_PASSWORD}"
|
||||
echo " LDAP User: ${LDAP_USERS} / ${LDAP_PASSWORDS}"
|
||||
echo " Silo Admin: ${SILO_ADMIN_USERNAME} / ${SILO_ADMIN_PASSWORD} (local fallback)"
|
||||
echo " Base URL: ${BASE_URL}"
|
||||
echo ""
|
||||
echo -e "${BOLD}Next steps:${NC}"
|
||||
echo ""
|
||||
echo " # Start the stack"
|
||||
if [[ "$WITH_NGINX" == "true" ]]; then
|
||||
echo " docker compose -f deployments/docker-compose.allinone.yaml --profile nginx up -d"
|
||||
else
|
||||
echo " docker compose -f deployments/docker-compose.allinone.yaml up -d"
|
||||
fi
|
||||
echo ""
|
||||
echo " # Check status"
|
||||
echo " docker compose -f deployments/docker-compose.allinone.yaml ps"
|
||||
echo ""
|
||||
echo " # View logs"
|
||||
echo " docker compose -f deployments/docker-compose.allinone.yaml logs -f silo"
|
||||
echo ""
|
||||
echo " # Open in browser"
|
||||
echo " ${BASE_URL}"
|
||||
echo ""
|
||||
echo " # Log in with LDAP: ${LDAP_USERS} / <password above>"
|
||||
echo " # Or local admin: ${SILO_ADMIN_USERNAME} / <password above>"
|
||||
echo ""
|
||||
if [[ "$WITH_NGINX" != "true" ]]; then
|
||||
echo " To add nginx later:"
|
||||
echo " docker compose -f deployments/docker-compose.allinone.yaml --profile nginx up -d"
|
||||
echo ""
|
||||
fi
|
||||
echo "Save these credentials somewhere safe. The passwords in deployments/.env"
|
||||
echo "are the source of truth for the running stack."
|
||||
echo ""
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Silo Host Setup Script
|
||||
# Run this once on silo.kindred.internal to prepare for deployment
|
||||
# Run this once on silo.example.internal to prepare for deployment
|
||||
#
|
||||
# Usage:
|
||||
# sudo ./setup-host.sh
|
||||
@@ -24,11 +24,13 @@ BLUE='\033[0;34m'
|
||||
NC='\033[0m'
|
||||
|
||||
# Configuration
|
||||
REPO_URL="${SILO_REPO_URL:-https://gitea.kindred.internal/kindred/silo-0062.git}"
|
||||
REPO_URL="${SILO_REPO_URL:-https://git.kindred-systems.com/kindred/silo.git}"
|
||||
REPO_BRANCH="${SILO_BRANCH:-main}"
|
||||
INSTALL_DIR="/opt/silo"
|
||||
CONFIG_DIR="/etc/silo"
|
||||
GO_VERSION="1.23.0"
|
||||
GO_VERSION="1.24.0"
|
||||
DB_HOST="${SILO_DB_HOST:-psql.example.internal}"
|
||||
MINIO_HOST="${SILO_MINIO_HOST:-minio.example.internal}"
|
||||
|
||||
log_info() { echo -e "${BLUE}[INFO]${NC} $*"; }
|
||||
log_success() { echo -e "${GREEN}[OK]${NC} $*"; }
|
||||
@@ -155,21 +157,28 @@ log_success "Directories created"
|
||||
ENV_FILE="${CONFIG_DIR}/silod.env"
|
||||
if [[ ! -f "${ENV_FILE}" ]]; then
|
||||
log_info "Creating environment file..."
|
||||
cat > "${ENV_FILE}" << 'EOF'
|
||||
cat > "${ENV_FILE}" << EOF
|
||||
# Silo daemon environment variables
|
||||
# Fill in the values below
|
||||
|
||||
# Database credentials (psql.kindred.internal)
|
||||
# Database credentials (${DB_HOST})
|
||||
# Database: silo, User: silo
|
||||
SILO_DB_PASSWORD=
|
||||
|
||||
# MinIO credentials (minio.kindred.internal)
|
||||
# MinIO credentials (${MINIO_HOST})
|
||||
# User: silouser
|
||||
SILO_MINIO_ACCESS_KEY=silouser
|
||||
SILO_MINIO_SECRET_KEY=
|
||||
|
||||
# Authentication
|
||||
# Session secret (required when auth is enabled)
|
||||
SILO_SESSION_SECRET=
|
||||
# Default admin account (created on first startup if both are set)
|
||||
SILO_ADMIN_USERNAME=admin
|
||||
SILO_ADMIN_PASSWORD=
|
||||
|
||||
# Optional overrides
|
||||
# SILO_SERVER_BASE_URL=http://silo.kindred.internal:8080
|
||||
# SILO_SERVER_BASE_URL=http://\$(hostname -f):8080
|
||||
EOF
|
||||
chmod 600 "${ENV_FILE}"
|
||||
chown root:silo "${ENV_FILE}"
|
||||
@@ -214,10 +223,10 @@ echo "1. Edit ${ENV_FILE} and fill in credentials:"
|
||||
echo " sudo nano ${ENV_FILE}"
|
||||
echo ""
|
||||
echo "2. Verify database connectivity:"
|
||||
echo " psql -h psql.kindred.internal -U silo -d silo -c 'SELECT 1'"
|
||||
echo " psql -h ${DB_HOST} -U silo -d silo -c 'SELECT 1'"
|
||||
echo ""
|
||||
echo "3. Verify MinIO connectivity:"
|
||||
echo " curl -I http://minio.kindred.internal:9000/minio/health/live"
|
||||
echo " curl -I http://${MINIO_HOST}:9000/minio/health/live"
|
||||
echo ""
|
||||
echo "4. Run the deployment:"
|
||||
echo " sudo ${INSTALL_DIR}/src/scripts/deploy.sh"
|
||||
|
||||
@@ -7,8 +7,8 @@
|
||||
# sudo ./scripts/setup-ipa-nginx.sh
|
||||
#
|
||||
# Prerequisites:
|
||||
# - FreeIPA server at ipa.kindred.internal
|
||||
# - DNS configured for silo.kindred.internal
|
||||
# - FreeIPA server at ipa.example.internal
|
||||
# - DNS configured for the silo host (set SILO_HOSTNAME to override default)
|
||||
# - Admin credentials for IPA enrollment
|
||||
|
||||
set -euo pipefail
|
||||
@@ -21,12 +21,12 @@ BLUE='\033[0;34m'
|
||||
NC='\033[0m'
|
||||
|
||||
# Configuration
|
||||
IPA_SERVER="${IPA_SERVER:-ipa.kindred.internal}"
|
||||
IPA_DOMAIN="${IPA_DOMAIN:-kindred.internal}"
|
||||
IPA_SERVER="${IPA_SERVER:-ipa.example.internal}"
|
||||
IPA_DOMAIN="${IPA_DOMAIN:-example.internal}"
|
||||
IPA_REALM="${IPA_REALM:-KINDRED.INTERNAL}"
|
||||
HOSTNAME="silo.kindred.internal"
|
||||
SILO_HOSTNAME="${SILO_HOSTNAME:-silo.example.internal}"
|
||||
CERT_DIR="/etc/ssl/silo"
|
||||
SILO_PORT=8080
|
||||
SILO_PORT="${SILO_PORT:-8080}"
|
||||
|
||||
log_info() { echo -e "${BLUE}[INFO]${NC} $*"; }
|
||||
log_success() { echo -e "${GREEN}[OK]${NC} $*"; }
|
||||
@@ -77,8 +77,8 @@ log_success "Packages installed"
|
||||
#
|
||||
# Step 2: Set hostname
|
||||
#
|
||||
log_info "Setting hostname to ${HOSTNAME}..."
|
||||
hostnamectl set-hostname "${HOSTNAME}"
|
||||
log_info "Setting hostname to ${SILO_HOSTNAME}..."
|
||||
hostnamectl set-hostname "${SILO_HOSTNAME}"
|
||||
log_success "Hostname set"
|
||||
|
||||
#
|
||||
@@ -95,7 +95,7 @@ else
|
||||
--server="${IPA_SERVER}" \
|
||||
--domain="${IPA_DOMAIN}" \
|
||||
--realm="${IPA_REALM}" \
|
||||
--hostname="${HOSTNAME}" \
|
||||
--hostname="${SILO_HOSTNAME}" \
|
||||
--mkhomedir \
|
||||
--enable-dns-updates \
|
||||
--unattended \
|
||||
@@ -105,7 +105,7 @@ else
|
||||
--server="${IPA_SERVER}" \
|
||||
--domain="${IPA_DOMAIN}" \
|
||||
--realm="${IPA_REALM}" \
|
||||
--hostname="${HOSTNAME}" \
|
||||
--hostname="${SILO_HOSTNAME}" \
|
||||
--mkhomedir \
|
||||
--enable-dns-updates
|
||||
}
|
||||
@@ -135,9 +135,9 @@ else
|
||||
ipa-getcert request \
|
||||
-f "${CERT_DIR}/silo.crt" \
|
||||
-k "${CERT_DIR}/silo.key" \
|
||||
-K "HTTP/${HOSTNAME}" \
|
||||
-D "${HOSTNAME}" \
|
||||
-N "CN=${HOSTNAME}" \
|
||||
-K "HTTP/${SILO_HOSTNAME}" \
|
||||
-D "${SILO_HOSTNAME}" \
|
||||
-N "CN=${SILO_HOSTNAME}" \
|
||||
-C "systemctl reload nginx"
|
||||
|
||||
log_info "Waiting for certificate to be issued..."
|
||||
@@ -186,14 +186,14 @@ if [[ -f /etc/nginx/sites-enabled/default ]]; then
|
||||
fi
|
||||
|
||||
# Create silo nginx config
|
||||
cat > /etc/nginx/sites-available/silo << 'NGINX_EOF'
|
||||
cat > /etc/nginx/sites-available/silo << NGINX_EOF
|
||||
# Silo API Server - Nginx Reverse Proxy Configuration
|
||||
|
||||
# Redirect HTTP to HTTPS
|
||||
server {
|
||||
listen 80;
|
||||
listen [::]:80;
|
||||
server_name silo.kindred.internal;
|
||||
server_name ${SILO_HOSTNAME};
|
||||
|
||||
# Allow certmonger/ACME challenges
|
||||
location /.well-known/ {
|
||||
@@ -201,7 +201,7 @@ server {
|
||||
}
|
||||
|
||||
location / {
|
||||
return 301 https://$server_name$request_uri;
|
||||
return 301 https://\\$server_name\\$request_uri;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -209,11 +209,11 @@ server {
|
||||
server {
|
||||
listen 443 ssl http2;
|
||||
listen [::]:443 ssl http2;
|
||||
server_name silo.kindred.internal;
|
||||
server_name ${SILO_HOSTNAME};
|
||||
|
||||
# SSL certificates (managed by certmonger/IPA)
|
||||
ssl_certificate /etc/ssl/silo/silo.crt;
|
||||
ssl_certificate_key /etc/ssl/silo/silo.key;
|
||||
ssl_certificate ${CERT_DIR}/silo.crt;
|
||||
ssl_certificate_key ${CERT_DIR}/silo.key;
|
||||
|
||||
# SSL configuration
|
||||
ssl_protocols TLSv1.2 TLSv1.3;
|
||||
@@ -226,7 +226,7 @@ server {
|
||||
# OCSP stapling
|
||||
ssl_stapling on;
|
||||
ssl_stapling_verify on;
|
||||
ssl_trusted_certificate /etc/ssl/silo/ca.crt;
|
||||
ssl_trusted_certificate ${CERT_DIR}/ca.crt;
|
||||
|
||||
# Security headers
|
||||
add_header X-Frame-Options "SAMEORIGIN" always;
|
||||
@@ -240,19 +240,19 @@ server {
|
||||
|
||||
# Proxy settings
|
||||
location / {
|
||||
proxy_pass http://127.0.0.1:8080;
|
||||
proxy_pass http://127.0.0.1:${SILO_PORT};
|
||||
proxy_http_version 1.1;
|
||||
|
||||
# Headers
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header X-Forwarded-Host $host;
|
||||
proxy_set_header X-Forwarded-Port $server_port;
|
||||
proxy_set_header Host \\$host;
|
||||
proxy_set_header X-Real-IP \\$remote_addr;
|
||||
proxy_set_header X-Forwarded-For \\$proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto \\$scheme;
|
||||
proxy_set_header X-Forwarded-Host \\$host;
|
||||
proxy_set_header X-Forwarded-Port \\$server_port;
|
||||
|
||||
# WebSocket support (for future use)
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Upgrade \\$http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
|
||||
# Timeouts
|
||||
@@ -343,14 +343,14 @@ echo " getcert list"
|
||||
echo ""
|
||||
echo "2. Update silo config to use correct base URL:"
|
||||
echo " sudo nano /etc/silo/config.yaml"
|
||||
echo " # Change base_url to: https://silo.kindred.internal"
|
||||
echo " # Change base_url to: https://${SILO_HOSTNAME}"
|
||||
echo ""
|
||||
echo "3. Restart silo service:"
|
||||
echo " sudo systemctl restart silod"
|
||||
echo ""
|
||||
echo "4. Test the setup:"
|
||||
echo " curl -k https://silo.kindred.internal/health"
|
||||
echo " curl https://silo.kindred.internal/health # after trusting IPA CA"
|
||||
echo " curl -k https://${SILO_HOSTNAME}/health"
|
||||
echo " curl https://${SILO_HOSTNAME}/health # after trusting IPA CA"
|
||||
echo ""
|
||||
echo "5. Trust IPA CA on client machines:"
|
||||
echo " # The CA cert is at: ${CERT_DIR}/ca.crt"
|
||||
|
||||
@@ -16,9 +16,7 @@ export interface Item {
|
||||
created_at: string;
|
||||
updated_at: string;
|
||||
sourcing_type: string;
|
||||
sourcing_link?: string;
|
||||
long_description?: string;
|
||||
standard_cost?: number;
|
||||
file_count: number;
|
||||
files_total_size: number;
|
||||
properties?: Record<string, unknown>;
|
||||
@@ -170,9 +168,7 @@ export interface CreateItemRequest {
|
||||
projects?: string[];
|
||||
properties?: Record<string, unknown>;
|
||||
sourcing_type?: string;
|
||||
sourcing_link?: string;
|
||||
long_description?: string;
|
||||
standard_cost?: number;
|
||||
}
|
||||
|
||||
export interface UpdateItemRequest {
|
||||
@@ -182,9 +178,7 @@ export interface UpdateItemRequest {
|
||||
properties?: Record<string, unknown>;
|
||||
comment?: string;
|
||||
sourcing_type?: string;
|
||||
sourcing_link?: string;
|
||||
long_description?: string;
|
||||
standard_cost?: number;
|
||||
}
|
||||
|
||||
export interface CreateRevisionRequest {
|
||||
@@ -254,6 +248,68 @@ export interface PropertyDef {
|
||||
|
||||
export type PropertySchema = Record<string, PropertyDef>;
|
||||
|
||||
// Form Descriptor (from GET /api/schemas/{name}/form)
|
||||
export interface FormFieldDescriptor {
|
||||
name: string;
|
||||
type: string;
|
||||
widget?: string;
|
||||
label: string;
|
||||
required?: boolean;
|
||||
default?: unknown;
|
||||
unit?: string;
|
||||
description?: string;
|
||||
options?: string[];
|
||||
currency?: string;
|
||||
derived_from_category?: Record<string, string>;
|
||||
search_endpoint?: string;
|
||||
}
|
||||
|
||||
export interface FormFieldGroup {
|
||||
key: string;
|
||||
label: string;
|
||||
order: number;
|
||||
fields: FormFieldDescriptor[];
|
||||
}
|
||||
|
||||
export interface CategoryPickerStage {
|
||||
name: string;
|
||||
label: string;
|
||||
values?: Record<string, string>;
|
||||
values_by_domain?: Record<string, Record<string, string>>;
|
||||
}
|
||||
|
||||
export interface CategoryPickerDescriptor {
|
||||
style: string;
|
||||
stages: CategoryPickerStage[];
|
||||
}
|
||||
|
||||
export interface ItemFieldDef {
|
||||
type: string;
|
||||
widget: string;
|
||||
label: string;
|
||||
required?: boolean;
|
||||
default?: unknown;
|
||||
options?: string[];
|
||||
derived_from_category?: Record<string, string>;
|
||||
search_endpoint?: string;
|
||||
}
|
||||
|
||||
export interface FieldOverride {
|
||||
widget?: string;
|
||||
currency?: string;
|
||||
options?: string[];
|
||||
}
|
||||
|
||||
export interface FormDescriptor {
|
||||
schema_name: string;
|
||||
format: string;
|
||||
category_picker?: CategoryPickerDescriptor;
|
||||
item_fields?: Record<string, ItemFieldDef>;
|
||||
field_groups?: FormFieldGroup[];
|
||||
category_field_groups?: Record<string, FormFieldGroup[]>;
|
||||
field_overrides?: Record<string, FieldOverride>;
|
||||
}
|
||||
|
||||
// API Token
|
||||
export interface ApiToken {
|
||||
id: string;
|
||||
|
||||
@@ -1,10 +1,6 @@
|
||||
import { useEffect, useState, useCallback, useRef } from "react";
|
||||
import { get, put } from "../../api/client";
|
||||
import type {
|
||||
AuditItemResult,
|
||||
AuditFieldResult,
|
||||
Item,
|
||||
} from "../../api/types";
|
||||
import type { AuditItemResult, AuditFieldResult, Item } from "../../api/types";
|
||||
|
||||
const tierColors: Record<string, string> = {
|
||||
critical: "var(--ctp-red)",
|
||||
@@ -18,8 +14,6 @@ const tierColors: Record<string, string> = {
|
||||
const itemFields = new Set([
|
||||
"description",
|
||||
"sourcing_type",
|
||||
"sourcing_link",
|
||||
"standard_cost",
|
||||
"long_description",
|
||||
]);
|
||||
|
||||
@@ -83,12 +77,9 @@ export function AuditDetailPanel({
|
||||
void fetchData();
|
||||
}, [fetchData]);
|
||||
|
||||
const handleFieldChange = useCallback(
|
||||
(key: string, value: string) => {
|
||||
setEdits((prev) => ({ ...prev, [key]: value }));
|
||||
},
|
||||
[],
|
||||
);
|
||||
const handleFieldChange = useCallback((key: string, value: string) => {
|
||||
setEdits((prev) => ({ ...prev, [key]: value }));
|
||||
}, []);
|
||||
|
||||
const saveChanges = useCallback(async () => {
|
||||
if (!item || Object.keys(edits).length === 0) return;
|
||||
@@ -102,18 +93,14 @@ export function AuditDetailPanel({
|
||||
|
||||
for (const [key, value] of Object.entries(edits)) {
|
||||
if (itemFields.has(key)) {
|
||||
if (key === "standard_cost") {
|
||||
const num = parseFloat(value);
|
||||
itemUpdate[key] = isNaN(num) ? undefined : num;
|
||||
} else {
|
||||
itemUpdate[key] = value || undefined;
|
||||
}
|
||||
itemUpdate[key] = value || undefined;
|
||||
} else {
|
||||
// Attempt number coercion for property fields.
|
||||
const num = parseFloat(value);
|
||||
propUpdate[key] = !isNaN(num) && String(num) === value.trim()
|
||||
? num
|
||||
: value || undefined;
|
||||
propUpdate[key] =
|
||||
!isNaN(num) && String(num) === value.trim()
|
||||
? num
|
||||
: value || undefined;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -123,7 +110,10 @@ export function AuditDetailPanel({
|
||||
const payload: Record<string, unknown> = {
|
||||
...itemUpdate,
|
||||
...(hasProps
|
||||
? { properties: { ...currentProps, ...propUpdate }, comment: "Audit field update" }
|
||||
? {
|
||||
properties: { ...currentProps, ...propUpdate },
|
||||
comment: "Audit field update",
|
||||
}
|
||||
: {}),
|
||||
};
|
||||
|
||||
@@ -423,9 +413,7 @@ function FieldRow({
|
||||
? String(field.value)
|
||||
: "";
|
||||
|
||||
const borderColor = field.filled
|
||||
? "var(--ctp-green)"
|
||||
: "var(--ctp-red)";
|
||||
const borderColor = field.filled ? "var(--ctp-green)" : "var(--ctp-red)";
|
||||
|
||||
const label = field.key
|
||||
.replace(/_/g, " ")
|
||||
@@ -469,9 +457,7 @@ function FieldRow({
|
||||
style={{
|
||||
flex: 1,
|
||||
fontSize: "0.8rem",
|
||||
color: field.filled
|
||||
? "var(--ctp-text)"
|
||||
: "var(--ctp-subtext0)",
|
||||
color: field.filled ? "var(--ctp-text)" : "var(--ctp-subtext0)",
|
||||
fontStyle: field.filled ? "normal" : "italic",
|
||||
}}
|
||||
>
|
||||
|
||||
@@ -1,21 +1,48 @@
|
||||
import { useState, useMemo, useRef, useEffect } from "react";
|
||||
import type { CategoryPickerStage } from "../../api/types";
|
||||
|
||||
interface CategoryPickerProps {
|
||||
value: string;
|
||||
onChange: (code: string) => void;
|
||||
categories: Record<string, string>;
|
||||
stages?: CategoryPickerStage[];
|
||||
}
|
||||
|
||||
export function CategoryPicker({
|
||||
value,
|
||||
onChange,
|
||||
categories,
|
||||
stages,
|
||||
}: CategoryPickerProps) {
|
||||
const [selectedDomain, setSelectedDomain] = useState<string>("");
|
||||
const [search, setSearch] = useState("");
|
||||
const selectedRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
// Derive domain from current value
|
||||
useEffect(() => {
|
||||
if (value && value.length > 0) {
|
||||
setSelectedDomain(value[0]!);
|
||||
}
|
||||
}, [value]);
|
||||
|
||||
const isMultiStage = stages && stages.length >= 2;
|
||||
|
||||
// Domain stage (first stage)
|
||||
const domainStage = isMultiStage ? stages[0] : undefined;
|
||||
const subcatStage = isMultiStage
|
||||
? stages.find((s) => s.values_by_domain)
|
||||
: undefined;
|
||||
|
||||
// Filtered categories for current domain in multi-stage mode
|
||||
const filteredCategories = useMemo(() => {
|
||||
if (!isMultiStage || !selectedDomain || !subcatStage?.values_by_domain) {
|
||||
return categories;
|
||||
}
|
||||
return subcatStage.values_by_domain[selectedDomain] ?? {};
|
||||
}, [isMultiStage, selectedDomain, subcatStage, categories]);
|
||||
|
||||
const entries = useMemo(() => {
|
||||
const all = Object.entries(categories).sort(([a], [b]) =>
|
||||
const all = Object.entries(filteredCategories).sort(([a], [b]) =>
|
||||
a.localeCompare(b),
|
||||
);
|
||||
if (!search) return all;
|
||||
@@ -24,7 +51,7 @@ export function CategoryPicker({
|
||||
([code, desc]) =>
|
||||
code.toLowerCase().includes(q) || desc.toLowerCase().includes(q),
|
||||
);
|
||||
}, [categories, search]);
|
||||
}, [filteredCategories, search]);
|
||||
|
||||
// Scroll selected into view on mount.
|
||||
useEffect(() => {
|
||||
@@ -40,12 +67,70 @@ export function CategoryPicker({
|
||||
overflow: "hidden",
|
||||
}}
|
||||
>
|
||||
{/* Multi-stage domain picker */}
|
||||
{isMultiStage && domainStage?.values && (
|
||||
<div
|
||||
style={{
|
||||
display: "flex",
|
||||
flexWrap: "wrap",
|
||||
gap: "0.25rem",
|
||||
padding: "0.4rem 0.5rem",
|
||||
borderBottom: "1px solid var(--ctp-surface1)",
|
||||
backgroundColor: "var(--ctp-mantle)",
|
||||
}}
|
||||
>
|
||||
{Object.entries(domainStage.values)
|
||||
.sort(([a], [b]) => a.localeCompare(b))
|
||||
.map(([code, label]) => {
|
||||
const isActive = code === selectedDomain;
|
||||
return (
|
||||
<button
|
||||
key={code}
|
||||
onClick={() => {
|
||||
setSelectedDomain(code);
|
||||
setSearch("");
|
||||
// Clear selection if switching domain
|
||||
if (value && value[0] !== code) {
|
||||
onChange("");
|
||||
}
|
||||
}}
|
||||
style={{
|
||||
padding: "0.2rem 0.5rem",
|
||||
fontSize: "0.7rem",
|
||||
fontWeight: isActive ? 600 : 400,
|
||||
border: "none",
|
||||
borderRadius: "0.25rem",
|
||||
cursor: "pointer",
|
||||
backgroundColor: isActive
|
||||
? "rgba(203,166,247,0.2)"
|
||||
: "transparent",
|
||||
color: isActive
|
||||
? "var(--ctp-mauve)"
|
||||
: "var(--ctp-subtext0)",
|
||||
transition: "background-color 0.1s",
|
||||
}}
|
||||
>
|
||||
<span style={{ fontFamily: "'JetBrains Mono', monospace" }}>
|
||||
{code}
|
||||
</span>{" "}
|
||||
{label}
|
||||
</button>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Search */}
|
||||
<input
|
||||
type="text"
|
||||
value={search}
|
||||
onChange={(e) => setSearch(e.target.value)}
|
||||
placeholder="Search categories..."
|
||||
placeholder={
|
||||
isMultiStage && !selectedDomain
|
||||
? "Select a domain above..."
|
||||
: "Search categories..."
|
||||
}
|
||||
disabled={isMultiStage && !selectedDomain}
|
||||
style={{
|
||||
width: "100%",
|
||||
padding: "0.4rem 0.5rem",
|
||||
@@ -61,7 +146,18 @@ export function CategoryPicker({
|
||||
|
||||
{/* Scrollable list */}
|
||||
<div style={{ maxHeight: 200, overflowY: "auto" }}>
|
||||
{entries.length === 0 ? (
|
||||
{isMultiStage && !selectedDomain ? (
|
||||
<div
|
||||
style={{
|
||||
padding: "0.75rem",
|
||||
textAlign: "center",
|
||||
color: "var(--ctp-subtext0)",
|
||||
fontSize: "0.8rem",
|
||||
}}
|
||||
>
|
||||
Select a domain to see categories
|
||||
</div>
|
||||
) : entries.length === 0 ? (
|
||||
<div
|
||||
style={{
|
||||
padding: "0.75rem",
|
||||
@@ -90,9 +186,7 @@ export function CategoryPicker({
|
||||
backgroundColor: isSelected
|
||||
? "rgba(203,166,247,0.12)"
|
||||
: "transparent",
|
||||
color: isSelected
|
||||
? "var(--ctp-mauve)"
|
||||
: "var(--ctp-text)",
|
||||
color: isSelected ? "var(--ctp-mauve)" : "var(--ctp-text)",
|
||||
fontWeight: isSelected ? 600 : 400,
|
||||
transition: "background-color 0.1s",
|
||||
}}
|
||||
|
||||
@@ -1,16 +1,29 @@
|
||||
import { useState, useEffect, useCallback } from "react";
|
||||
import { useState, useCallback } from "react";
|
||||
import { get, post, put } from "../../api/client";
|
||||
import type { Project } from "../../api/types";
|
||||
import type {
|
||||
Project,
|
||||
FormFieldDescriptor,
|
||||
FormFieldGroup,
|
||||
} from "../../api/types";
|
||||
import { TagInput, type TagOption } from "../TagInput";
|
||||
import { CategoryPicker } from "./CategoryPicker";
|
||||
import { FileDropZone } from "./FileDropZone";
|
||||
import { useCategories } from "../../hooks/useCategories";
|
||||
import { useFormDescriptor } from "../../hooks/useFormDescriptor";
|
||||
import {
|
||||
useFileUpload,
|
||||
type PendingAttachment,
|
||||
} from "../../hooks/useFileUpload";
|
||||
import { useAuth } from "../../hooks/useAuth";
|
||||
|
||||
// Item-level field names that are sent as top-level API fields, not properties.
|
||||
const ITEM_LEVEL_FIELDS = new Set([
|
||||
"item_type",
|
||||
"description",
|
||||
"sourcing_type",
|
||||
"long_description",
|
||||
"projects",
|
||||
]);
|
||||
|
||||
interface CreateItemPaneProps {
|
||||
onCreated: (partNumber: string) => void;
|
||||
onCancel: () => void;
|
||||
@@ -18,22 +31,13 @@ interface CreateItemPaneProps {
|
||||
|
||||
export function CreateItemPane({ onCreated, onCancel }: CreateItemPaneProps) {
|
||||
const { user } = useAuth();
|
||||
const { categories } = useCategories();
|
||||
const { descriptor, categories } = useFormDescriptor();
|
||||
const { upload } = useFileUpload();
|
||||
|
||||
// Form state.
|
||||
const [itemType, setItemType] = useState("part");
|
||||
// Single form state for all fields (item-level + properties).
|
||||
const [category, setCategory] = useState("");
|
||||
const [description, setDescription] = useState("");
|
||||
const [sourcingType, setSourcingType] = useState("manufactured");
|
||||
const [sourcingLink, setSourcingLink] = useState("");
|
||||
const [longDescription, setLongDescription] = useState("");
|
||||
const [standardCost, setStandardCost] = useState("");
|
||||
const [fields, setFields] = useState<Record<string, string>>({});
|
||||
const [selectedProjects, setSelectedProjects] = useState<string[]>([]);
|
||||
const [catProps, setCatProps] = useState<Record<string, string>>({});
|
||||
const [catPropDefs, setCatPropDefs] = useState<
|
||||
Record<string, { type: string }>
|
||||
>({});
|
||||
|
||||
// Attachments.
|
||||
const [attachments, setAttachments] = useState<PendingAttachment[]>([]);
|
||||
@@ -44,27 +48,33 @@ export function CreateItemPane({ onCreated, onCancel }: CreateItemPaneProps) {
|
||||
const [saving, setSaving] = useState(false);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
// Load category-specific properties.
|
||||
useEffect(() => {
|
||||
if (!category) {
|
||||
setCatPropDefs({});
|
||||
setCatProps({});
|
||||
return;
|
||||
const setField = (name: string, value: string) =>
|
||||
setFields((prev) => ({ ...prev, [name]: value }));
|
||||
|
||||
const getField = (name: string) => fields[name] ?? "";
|
||||
|
||||
// Derive item_type from category using derived_from_category mapping
|
||||
const deriveItemType = (cat: string): string => {
|
||||
if (!cat || !descriptor?.item_fields?.item_type?.derived_from_category) {
|
||||
return getField("item_type") || "part";
|
||||
}
|
||||
get<Record<string, { type: string }>>(
|
||||
`/api/schemas/kindred-rd/properties?category=${encodeURIComponent(category)}`,
|
||||
)
|
||||
.then((defs) => {
|
||||
setCatPropDefs(defs);
|
||||
const defaults: Record<string, string> = {};
|
||||
for (const key of Object.keys(defs)) defaults[key] = "";
|
||||
setCatProps(defaults);
|
||||
})
|
||||
.catch(() => {
|
||||
setCatPropDefs({});
|
||||
setCatProps({});
|
||||
});
|
||||
}, [category]);
|
||||
const mapping = descriptor.item_fields.item_type.derived_from_category;
|
||||
const prefix = cat[0]!;
|
||||
return mapping[prefix] ?? mapping["default"] ?? "part";
|
||||
};
|
||||
|
||||
const handleCategoryChange = (cat: string) => {
|
||||
setCategory(cat);
|
||||
// Auto-derive item_type when category changes
|
||||
if (descriptor?.item_fields?.item_type?.derived_from_category) {
|
||||
const derived = cat
|
||||
? (descriptor.item_fields.item_type.derived_from_category[cat[0]!] ??
|
||||
descriptor.item_fields.item_type.derived_from_category["default"] ??
|
||||
"part")
|
||||
: "part";
|
||||
setField("item_type", derived);
|
||||
}
|
||||
};
|
||||
|
||||
const searchProjects = useCallback(
|
||||
async (query: string): Promise<TagOption[]> => {
|
||||
@@ -90,10 +100,8 @@ export function CreateItemPane({ onCreated, onCancel }: CreateItemPaneProps) {
|
||||
const startIdx = attachments.length;
|
||||
setAttachments((prev) => [...prev, ...files]);
|
||||
|
||||
// Upload each file.
|
||||
files.forEach((f, i) => {
|
||||
const idx = startIdx + i;
|
||||
// Mark uploading.
|
||||
setAttachments((prev) =>
|
||||
prev.map((a, j) =>
|
||||
j === idx ? { ...a, uploadStatus: "uploading" } : a,
|
||||
@@ -155,12 +163,15 @@ export function CreateItemPane({ onCreated, onCancel }: CreateItemPaneProps) {
|
||||
setSaving(true);
|
||||
setError(null);
|
||||
|
||||
// Split fields into item-level and properties
|
||||
const properties: Record<string, unknown> = {};
|
||||
for (const [k, v] of Object.entries(catProps)) {
|
||||
for (const [k, v] of Object.entries(fields)) {
|
||||
if (!v) continue;
|
||||
const def = catPropDefs[k];
|
||||
if (def?.type === "number") properties[k] = Number(v);
|
||||
else if (def?.type === "boolean") properties[k] = v === "true";
|
||||
if (ITEM_LEVEL_FIELDS.has(k)) continue; // handled separately
|
||||
// Coerce type from descriptor
|
||||
const fieldDef = findFieldDef(k);
|
||||
if (fieldDef?.type === "number") properties[k] = Number(v);
|
||||
else if (fieldDef?.type === "boolean") properties[k] = v === "true";
|
||||
else properties[k] = v;
|
||||
}
|
||||
|
||||
@@ -168,14 +179,12 @@ export function CreateItemPane({ onCreated, onCancel }: CreateItemPaneProps) {
|
||||
const result = await post<{ part_number: string }>("/api/items", {
|
||||
schema: "kindred-rd",
|
||||
category,
|
||||
description,
|
||||
item_type: itemType,
|
||||
description: getField("description") || undefined,
|
||||
item_type: deriveItemType(category),
|
||||
projects: selectedProjects.length > 0 ? selectedProjects : undefined,
|
||||
properties: Object.keys(properties).length > 0 ? properties : undefined,
|
||||
sourcing_type: sourcingType || undefined,
|
||||
sourcing_link: sourcingLink || undefined,
|
||||
long_description: longDescription || undefined,
|
||||
standard_cost: standardCost ? Number(standardCost) : undefined,
|
||||
sourcing_type: getField("sourcing_type") || undefined,
|
||||
long_description: getField("long_description") || undefined,
|
||||
});
|
||||
|
||||
const pn = result.part_number;
|
||||
@@ -219,6 +228,33 @@ export function CreateItemPane({ onCreated, onCancel }: CreateItemPaneProps) {
|
||||
}
|
||||
};
|
||||
|
||||
// Find field definition from descriptor (global groups + category groups).
|
||||
function findFieldDef(name: string): FormFieldDescriptor | undefined {
|
||||
if (descriptor?.field_groups) {
|
||||
for (const group of descriptor.field_groups) {
|
||||
const f = group.fields.find((fd) => fd.name === name);
|
||||
if (f) return f;
|
||||
}
|
||||
}
|
||||
if (descriptor?.category_field_groups && category) {
|
||||
const prefix = category[0]!;
|
||||
const catGroups = descriptor.category_field_groups[prefix];
|
||||
if (catGroups) {
|
||||
for (const group of catGroups) {
|
||||
const f = group.fields.find((fd) => fd.name === name);
|
||||
if (f) return f;
|
||||
}
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// Get category-specific field groups for the selected category.
|
||||
const catFieldGroups: FormFieldGroup[] =
|
||||
category && descriptor?.category_field_groups
|
||||
? (descriptor.category_field_groups[category[0]!] ?? [])
|
||||
: [];
|
||||
|
||||
return (
|
||||
<div style={{ display: "flex", flexDirection: "column", height: "100%" }}>
|
||||
{/* Header */}
|
||||
@@ -262,130 +298,52 @@ export function CreateItemPane({ onCreated, onCancel }: CreateItemPaneProps) {
|
||||
<div style={{ overflow: "auto", padding: "0.75rem" }}>
|
||||
{error && <div style={errorStyle}>{error}</div>}
|
||||
|
||||
{/* Identity section */}
|
||||
<SectionHeader>Identity</SectionHeader>
|
||||
<div style={fieldGridStyle}>
|
||||
<FormGroup label="Type *">
|
||||
<select
|
||||
value={itemType}
|
||||
onChange={(e) => setItemType(e.target.value)}
|
||||
style={inputStyle}
|
||||
>
|
||||
<option value="part">Part</option>
|
||||
<option value="assembly">Assembly</option>
|
||||
<option value="consumable">Consumable</option>
|
||||
<option value="tool">Tool</option>
|
||||
</select>
|
||||
</FormGroup>
|
||||
<FormGroup label="Description">
|
||||
<input
|
||||
value={description}
|
||||
onChange={(e) => setDescription(e.target.value)}
|
||||
style={inputStyle}
|
||||
placeholder="Item description"
|
||||
/>
|
||||
</FormGroup>
|
||||
<div style={{ gridColumn: "1 / -1" }}>
|
||||
<FormGroup label="Category *">
|
||||
<CategoryPicker
|
||||
value={category}
|
||||
onChange={setCategory}
|
||||
categories={categories}
|
||||
/>
|
||||
</FormGroup>
|
||||
</div>
|
||||
</div>
|
||||
{/* Category picker */}
|
||||
<SectionHeader>Category *</SectionHeader>
|
||||
<CategoryPicker
|
||||
value={category}
|
||||
onChange={handleCategoryChange}
|
||||
categories={categories}
|
||||
stages={descriptor?.category_picker?.stages}
|
||||
/>
|
||||
|
||||
{/* Sourcing section */}
|
||||
<SectionHeader>Sourcing</SectionHeader>
|
||||
<div style={fieldGridStyle}>
|
||||
<FormGroup label="Sourcing Type">
|
||||
<select
|
||||
value={sourcingType}
|
||||
onChange={(e) => setSourcingType(e.target.value)}
|
||||
style={inputStyle}
|
||||
>
|
||||
<option value="manufactured">Manufactured</option>
|
||||
<option value="purchased">Purchased</option>
|
||||
</select>
|
||||
</FormGroup>
|
||||
<FormGroup label="Standard Cost">
|
||||
<input
|
||||
type="number"
|
||||
step="0.01"
|
||||
value={standardCost}
|
||||
onChange={(e) => setStandardCost(e.target.value)}
|
||||
style={inputStyle}
|
||||
placeholder="0.00"
|
||||
/>
|
||||
</FormGroup>
|
||||
<div style={{ gridColumn: "1 / -1" }}>
|
||||
<FormGroup label="Sourcing Link">
|
||||
<input
|
||||
value={sourcingLink}
|
||||
onChange={(e) => setSourcingLink(e.target.value)}
|
||||
style={inputStyle}
|
||||
placeholder="https://..."
|
||||
/>
|
||||
</FormGroup>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Details section */}
|
||||
<SectionHeader>Details</SectionHeader>
|
||||
<FormGroup label="Long Description">
|
||||
<textarea
|
||||
value={longDescription}
|
||||
onChange={(e) => setLongDescription(e.target.value)}
|
||||
style={{ ...inputStyle, minHeight: 60, resize: "vertical" }}
|
||||
placeholder="Detailed description..."
|
||||
/>
|
||||
</FormGroup>
|
||||
<FormGroup label="Projects">
|
||||
<TagInput
|
||||
value={selectedProjects}
|
||||
onChange={setSelectedProjects}
|
||||
placeholder="Search projects\u2026"
|
||||
searchFn={searchProjects}
|
||||
/>
|
||||
</FormGroup>
|
||||
|
||||
{/* Category properties */}
|
||||
{Object.keys(catPropDefs).length > 0 && (
|
||||
<>
|
||||
<SectionHeader>
|
||||
{categories[category] ?? category} Properties
|
||||
</SectionHeader>
|
||||
{/* Dynamic field groups from descriptor */}
|
||||
{descriptor?.field_groups?.map((group) => (
|
||||
<div key={group.key}>
|
||||
<SectionHeader>{group.label}</SectionHeader>
|
||||
<div style={fieldGridStyle}>
|
||||
{Object.entries(catPropDefs).map(([key, def]) => (
|
||||
<FormGroup key={key} label={key}>
|
||||
{def.type === "boolean" ? (
|
||||
<select
|
||||
value={catProps[key] ?? ""}
|
||||
onChange={(e) =>
|
||||
setCatProps({ ...catProps, [key]: e.target.value })
|
||||
}
|
||||
style={inputStyle}
|
||||
>
|
||||
<option value="">---</option>
|
||||
<option value="true">true</option>
|
||||
<option value="false">false</option>
|
||||
</select>
|
||||
) : (
|
||||
<input
|
||||
type={def.type === "number" ? "number" : "text"}
|
||||
value={catProps[key] ?? ""}
|
||||
onChange={(e) =>
|
||||
setCatProps({ ...catProps, [key]: e.target.value })
|
||||
}
|
||||
style={inputStyle}
|
||||
/>
|
||||
)}
|
||||
</FormGroup>
|
||||
))}
|
||||
{group.fields.map((field) =>
|
||||
renderField(
|
||||
field,
|
||||
getField(field.name),
|
||||
(v) => setField(field.name, v),
|
||||
selectedProjects,
|
||||
setSelectedProjects,
|
||||
searchProjects,
|
||||
),
|
||||
)}
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
|
||||
{/* Category-specific field groups */}
|
||||
{catFieldGroups.map((group) => (
|
||||
<div key={group.key}>
|
||||
<SectionHeader>{group.label}</SectionHeader>
|
||||
<div style={fieldGridStyle}>
|
||||
{group.fields.map((field) =>
|
||||
renderField(
|
||||
field,
|
||||
getField(field.name),
|
||||
(v) => setField(field.name, v),
|
||||
selectedProjects,
|
||||
setSelectedProjects,
|
||||
searchProjects,
|
||||
),
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
|
||||
{/* Right: sidebar */}
|
||||
@@ -461,6 +419,138 @@ export function CreateItemPane({ onCreated, onCancel }: CreateItemPaneProps) {
|
||||
);
|
||||
}
|
||||
|
||||
// --- Field renderer ---
|
||||
|
||||
function renderField(
|
||||
field: FormFieldDescriptor,
|
||||
value: string,
|
||||
onChange: (v: string) => void,
|
||||
selectedProjects: string[],
|
||||
setSelectedProjects: (v: string[]) => void,
|
||||
searchProjects: (q: string) => Promise<{ id: string; label: string }[]>,
|
||||
) {
|
||||
const widget =
|
||||
field.widget ?? (field.type === "boolean" ? "checkbox" : "text");
|
||||
|
||||
// Projects field gets special tag_input treatment
|
||||
if (widget === "tag_input") {
|
||||
return (
|
||||
<div key={field.name} style={{ gridColumn: "1 / -1" }}>
|
||||
<FormGroup label={field.label}>
|
||||
<TagInput
|
||||
value={selectedProjects}
|
||||
onChange={setSelectedProjects}
|
||||
placeholder="Search projects\u2026"
|
||||
searchFn={searchProjects}
|
||||
/>
|
||||
</FormGroup>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (widget === "textarea") {
|
||||
return (
|
||||
<div key={field.name} style={{ gridColumn: "1 / -1" }}>
|
||||
<FormGroup label={field.label}>
|
||||
<textarea
|
||||
value={value}
|
||||
onChange={(e) => onChange(e.target.value)}
|
||||
style={{ ...inputStyle, minHeight: 60, resize: "vertical" }}
|
||||
placeholder={field.description ?? ""}
|
||||
/>
|
||||
</FormGroup>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (widget === "select" && field.options) {
|
||||
return (
|
||||
<FormGroup key={field.name} label={field.label}>
|
||||
<select
|
||||
value={value || (field.default != null ? String(field.default) : "")}
|
||||
onChange={(e) => onChange(e.target.value)}
|
||||
style={inputStyle}
|
||||
>
|
||||
{!field.required && <option value="">---</option>}
|
||||
{field.options.map((opt) => (
|
||||
<option key={opt} value={opt}>
|
||||
{opt}
|
||||
</option>
|
||||
))}
|
||||
</select>
|
||||
</FormGroup>
|
||||
);
|
||||
}
|
||||
|
||||
if (widget === "checkbox") {
|
||||
return (
|
||||
<FormGroup key={field.name} label={field.label}>
|
||||
<select
|
||||
value={value}
|
||||
onChange={(e) => onChange(e.target.value)}
|
||||
style={inputStyle}
|
||||
>
|
||||
<option value="">---</option>
|
||||
<option value="true">Yes</option>
|
||||
<option value="false">No</option>
|
||||
</select>
|
||||
</FormGroup>
|
||||
);
|
||||
}
|
||||
|
||||
if (widget === "currency") {
|
||||
return (
|
||||
<FormGroup
|
||||
key={field.name}
|
||||
label={`${field.label}${field.currency ? ` (${field.currency})` : ""}`}
|
||||
>
|
||||
<input
|
||||
type="number"
|
||||
step="0.01"
|
||||
value={value}
|
||||
onChange={(e) => onChange(e.target.value)}
|
||||
style={inputStyle}
|
||||
placeholder="0.00"
|
||||
/>
|
||||
</FormGroup>
|
||||
);
|
||||
}
|
||||
|
||||
if (widget === "url") {
|
||||
return (
|
||||
<div key={field.name} style={{ gridColumn: "1 / -1" }}>
|
||||
<FormGroup label={field.label}>
|
||||
<input
|
||||
type="url"
|
||||
value={value}
|
||||
onChange={(e) => onChange(e.target.value)}
|
||||
style={inputStyle}
|
||||
placeholder="https://..."
|
||||
/>
|
||||
</FormGroup>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Default: text or number input
|
||||
const inputType = field.type === "number" ? "number" : "text";
|
||||
const placeholder = field.unit
|
||||
? `${field.description ?? ""} (${field.unit})`
|
||||
: (field.description ?? "");
|
||||
|
||||
return (
|
||||
<FormGroup key={field.name} label={field.label}>
|
||||
<input
|
||||
type={inputType}
|
||||
value={value}
|
||||
onChange={(e) => onChange(e.target.value)}
|
||||
style={inputStyle}
|
||||
placeholder={placeholder}
|
||||
/>
|
||||
</FormGroup>
|
||||
);
|
||||
}
|
||||
|
||||
// --- Sub-components ---
|
||||
|
||||
function SectionHeader({ children }: { children: React.ReactNode }) {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
import { get, put } from '../../api/client';
|
||||
import type { Item } from '../../api/types';
|
||||
import { useState, useEffect } from "react";
|
||||
import { get, put } from "../../api/client";
|
||||
import type { Item } from "../../api/types";
|
||||
|
||||
interface EditItemPaneProps {
|
||||
partNumber: string;
|
||||
@@ -8,17 +8,19 @@ interface EditItemPaneProps {
|
||||
onCancel: () => void;
|
||||
}
|
||||
|
||||
export function EditItemPane({ partNumber, onSaved, onCancel }: EditItemPaneProps) {
|
||||
export function EditItemPane({
|
||||
partNumber,
|
||||
onSaved,
|
||||
onCancel,
|
||||
}: EditItemPaneProps) {
|
||||
const [loading, setLoading] = useState(true);
|
||||
const [saving, setSaving] = useState(false);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [pn, setPN] = useState('');
|
||||
const [itemType, setItemType] = useState('');
|
||||
const [description, setDescription] = useState('');
|
||||
const [sourcingType, setSourcingType] = useState('');
|
||||
const [sourcingLink, setSourcingLink] = useState('');
|
||||
const [longDescription, setLongDescription] = useState('');
|
||||
const [standardCost, setStandardCost] = useState('');
|
||||
const [pn, setPN] = useState("");
|
||||
const [itemType, setItemType] = useState("");
|
||||
const [description, setDescription] = useState("");
|
||||
const [sourcingType, setSourcingType] = useState("");
|
||||
const [longDescription, setLongDescription] = useState("");
|
||||
|
||||
useEffect(() => {
|
||||
setLoading(true);
|
||||
@@ -27,12 +29,10 @@ export function EditItemPane({ partNumber, onSaved, onCancel }: EditItemPaneProp
|
||||
setPN(item.part_number);
|
||||
setItemType(item.item_type);
|
||||
setDescription(item.description);
|
||||
setSourcingType(item.sourcing_type ?? '');
|
||||
setSourcingLink(item.sourcing_link ?? '');
|
||||
setLongDescription(item.long_description ?? '');
|
||||
setStandardCost(item.standard_cost != null ? String(item.standard_cost) : '');
|
||||
setSourcingType(item.sourcing_type ?? "");
|
||||
setLongDescription(item.long_description ?? "");
|
||||
})
|
||||
.catch(() => setError('Failed to load item'))
|
||||
.catch(() => setError("Failed to load item"))
|
||||
.finally(() => setLoading(false));
|
||||
}, [partNumber]);
|
||||
|
||||
@@ -45,54 +45,97 @@ export function EditItemPane({ partNumber, onSaved, onCancel }: EditItemPaneProp
|
||||
item_type: itemType || undefined,
|
||||
description: description || undefined,
|
||||
sourcing_type: sourcingType || undefined,
|
||||
sourcing_link: sourcingLink || undefined,
|
||||
long_description: longDescription || undefined,
|
||||
standard_cost: standardCost ? Number(standardCost) : undefined,
|
||||
});
|
||||
onSaved();
|
||||
} catch (e) {
|
||||
setError(e instanceof Error ? e.message : 'Failed to save item');
|
||||
setError(e instanceof Error ? e.message : "Failed to save item");
|
||||
} finally {
|
||||
setSaving(false);
|
||||
}
|
||||
};
|
||||
|
||||
if (loading) return <div style={{ padding: '1rem', color: 'var(--ctp-subtext0)' }}>Loading...</div>;
|
||||
if (loading)
|
||||
return (
|
||||
<div style={{ padding: "1rem", color: "var(--ctp-subtext0)" }}>
|
||||
Loading...
|
||||
</div>
|
||||
);
|
||||
|
||||
return (
|
||||
<div style={{ display: 'flex', flexDirection: 'column', height: '100%' }}>
|
||||
<div style={{
|
||||
display: 'flex', alignItems: 'center', gap: '0.75rem',
|
||||
padding: '0.5rem 0.75rem',
|
||||
borderBottom: '1px solid var(--ctp-surface1)',
|
||||
backgroundColor: 'var(--ctp-mantle)',
|
||||
flexShrink: 0,
|
||||
}}>
|
||||
<span style={{ color: 'var(--ctp-blue)', fontWeight: 600, fontSize: '0.9rem' }}>Edit {partNumber}</span>
|
||||
<div style={{ display: "flex", flexDirection: "column", height: "100%" }}>
|
||||
<div
|
||||
style={{
|
||||
display: "flex",
|
||||
alignItems: "center",
|
||||
gap: "0.75rem",
|
||||
padding: "0.5rem 0.75rem",
|
||||
borderBottom: "1px solid var(--ctp-surface1)",
|
||||
backgroundColor: "var(--ctp-mantle)",
|
||||
flexShrink: 0,
|
||||
}}
|
||||
>
|
||||
<span
|
||||
style={{
|
||||
color: "var(--ctp-blue)",
|
||||
fontWeight: 600,
|
||||
fontSize: "0.9rem",
|
||||
}}
|
||||
>
|
||||
Edit {partNumber}
|
||||
</span>
|
||||
<span style={{ flex: 1 }} />
|
||||
<button onClick={() => void handleSave()} disabled={saving} style={{
|
||||
padding: '0.3rem 0.75rem', fontSize: '0.8rem', border: 'none', borderRadius: '0.3rem',
|
||||
backgroundColor: 'var(--ctp-blue)', color: 'var(--ctp-crust)', cursor: 'pointer',
|
||||
opacity: saving ? 0.6 : 1,
|
||||
}}>
|
||||
{saving ? 'Saving...' : 'Save'}
|
||||
<button
|
||||
onClick={() => void handleSave()}
|
||||
disabled={saving}
|
||||
style={{
|
||||
padding: "0.3rem 0.75rem",
|
||||
fontSize: "0.8rem",
|
||||
border: "none",
|
||||
borderRadius: "0.3rem",
|
||||
backgroundColor: "var(--ctp-blue)",
|
||||
color: "var(--ctp-crust)",
|
||||
cursor: "pointer",
|
||||
opacity: saving ? 0.6 : 1,
|
||||
}}
|
||||
>
|
||||
{saving ? "Saving..." : "Save"}
|
||||
</button>
|
||||
<button onClick={onCancel} style={headerBtnStyle}>
|
||||
Cancel
|
||||
</button>
|
||||
<button onClick={onCancel} style={headerBtnStyle}>Cancel</button>
|
||||
</div>
|
||||
|
||||
<div style={{ flex: 1, overflow: 'auto', padding: '0.75rem' }}>
|
||||
<div style={{ flex: 1, overflow: "auto", padding: "0.75rem" }}>
|
||||
{error && (
|
||||
<div style={{ color: 'var(--ctp-red)', backgroundColor: 'rgba(243,139,168,0.1)', padding: '0.5rem', borderRadius: '0.3rem', marginBottom: '0.5rem', fontSize: '0.85rem' }}>
|
||||
<div
|
||||
style={{
|
||||
color: "var(--ctp-red)",
|
||||
backgroundColor: "rgba(243,139,168,0.1)",
|
||||
padding: "0.5rem",
|
||||
borderRadius: "0.3rem",
|
||||
marginBottom: "0.5rem",
|
||||
fontSize: "0.85rem",
|
||||
}}
|
||||
>
|
||||
{error}
|
||||
</div>
|
||||
)}
|
||||
|
||||
<FormGroup label="Part Number">
|
||||
<input value={pn} onChange={(e) => setPN(e.target.value)} style={inputStyle} />
|
||||
<input
|
||||
value={pn}
|
||||
onChange={(e) => setPN(e.target.value)}
|
||||
style={inputStyle}
|
||||
/>
|
||||
</FormGroup>
|
||||
|
||||
<FormGroup label="Type">
|
||||
<select value={itemType} onChange={(e) => setItemType(e.target.value)} style={inputStyle}>
|
||||
<select
|
||||
value={itemType}
|
||||
onChange={(e) => setItemType(e.target.value)}
|
||||
style={inputStyle}
|
||||
>
|
||||
<option value="part">Part</option>
|
||||
<option value="assembly">Assembly</option>
|
||||
<option value="document">Document</option>
|
||||
@@ -101,11 +144,19 @@ export function EditItemPane({ partNumber, onSaved, onCancel }: EditItemPaneProp
|
||||
</FormGroup>
|
||||
|
||||
<FormGroup label="Description">
|
||||
<input value={description} onChange={(e) => setDescription(e.target.value)} style={inputStyle} />
|
||||
<input
|
||||
value={description}
|
||||
onChange={(e) => setDescription(e.target.value)}
|
||||
style={inputStyle}
|
||||
/>
|
||||
</FormGroup>
|
||||
|
||||
<FormGroup label="Sourcing Type">
|
||||
<select value={sourcingType} onChange={(e) => setSourcingType(e.target.value)} style={inputStyle}>
|
||||
<select
|
||||
value={sourcingType}
|
||||
onChange={(e) => setSourcingType(e.target.value)}
|
||||
style={inputStyle}
|
||||
>
|
||||
<option value="">—</option>
|
||||
<option value="manufactured">Manufactured</option>
|
||||
<option value="purchased">Purchased</option>
|
||||
@@ -113,38 +164,57 @@ export function EditItemPane({ partNumber, onSaved, onCancel }: EditItemPaneProp
|
||||
</select>
|
||||
</FormGroup>
|
||||
|
||||
<FormGroup label="Sourcing Link">
|
||||
<input value={sourcingLink} onChange={(e) => setSourcingLink(e.target.value)} style={inputStyle} placeholder="URL" />
|
||||
</FormGroup>
|
||||
|
||||
<FormGroup label="Standard Cost">
|
||||
<input type="number" step="0.01" value={standardCost} onChange={(e) => setStandardCost(e.target.value)} style={inputStyle} placeholder="0.00" />
|
||||
</FormGroup>
|
||||
|
||||
<FormGroup label="Long Description">
|
||||
<textarea value={longDescription} onChange={(e) => setLongDescription(e.target.value)} style={{ ...inputStyle, minHeight: 80, resize: 'vertical' }} />
|
||||
<textarea
|
||||
value={longDescription}
|
||||
onChange={(e) => setLongDescription(e.target.value)}
|
||||
style={{ ...inputStyle, minHeight: 80, resize: "vertical" }}
|
||||
/>
|
||||
</FormGroup>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function FormGroup({ label, children }: { label: string; children: React.ReactNode }) {
|
||||
function FormGroup({
|
||||
label,
|
||||
children,
|
||||
}: {
|
||||
label: string;
|
||||
children: React.ReactNode;
|
||||
}) {
|
||||
return (
|
||||
<div style={{ marginBottom: '0.6rem' }}>
|
||||
<label style={{ display: 'block', fontSize: '0.75rem', color: 'var(--ctp-subtext0)', marginBottom: '0.2rem' }}>{label}</label>
|
||||
<div style={{ marginBottom: "0.6rem" }}>
|
||||
<label
|
||||
style={{
|
||||
display: "block",
|
||||
fontSize: "0.75rem",
|
||||
color: "var(--ctp-subtext0)",
|
||||
marginBottom: "0.2rem",
|
||||
}}
|
||||
>
|
||||
{label}
|
||||
</label>
|
||||
{children}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
const inputStyle: React.CSSProperties = {
|
||||
width: '100%', padding: '0.35rem 0.5rem', fontSize: '0.85rem',
|
||||
backgroundColor: 'var(--ctp-base)', border: '1px solid var(--ctp-surface1)',
|
||||
borderRadius: '0.3rem', color: 'var(--ctp-text)',
|
||||
width: "100%",
|
||||
padding: "0.35rem 0.5rem",
|
||||
fontSize: "0.85rem",
|
||||
backgroundColor: "var(--ctp-base)",
|
||||
border: "1px solid var(--ctp-surface1)",
|
||||
borderRadius: "0.3rem",
|
||||
color: "var(--ctp-text)",
|
||||
};
|
||||
|
||||
const headerBtnStyle: React.CSSProperties = {
|
||||
background: 'none', border: 'none', cursor: 'pointer',
|
||||
color: 'var(--ctp-subtext1)', fontSize: '0.8rem', padding: '0.2rem 0.4rem',
|
||||
background: "none",
|
||||
border: "none",
|
||||
cursor: "pointer",
|
||||
color: "var(--ctp-subtext1)",
|
||||
fontSize: "0.8rem",
|
||||
padding: "0.2rem 0.4rem",
|
||||
};
|
||||
|
||||
@@ -1,12 +1,15 @@
|
||||
import { useState, useRef } from 'react';
|
||||
import type { CSVImportResult } from '../../api/types';
|
||||
import { useState, useRef } from "react";
|
||||
import type { CSVImportResult } from "../../api/types";
|
||||
|
||||
interface ImportItemsPaneProps {
|
||||
onImported: () => void;
|
||||
onCancel: () => void;
|
||||
}
|
||||
|
||||
export function ImportItemsPane({ onImported, onCancel }: ImportItemsPaneProps) {
|
||||
export function ImportItemsPane({
|
||||
onImported,
|
||||
onCancel,
|
||||
}: ImportItemsPaneProps) {
|
||||
const [file, setFile] = useState<File | null>(null);
|
||||
const [skipExisting, setSkipExisting] = useState(false);
|
||||
const [importing, setImporting] = useState(false);
|
||||
@@ -21,19 +24,22 @@ export function ImportItemsPane({ onImported, onCancel }: ImportItemsPaneProps)
|
||||
setError(null);
|
||||
|
||||
const formData = new FormData();
|
||||
formData.append('file', file);
|
||||
if (dryRun) formData.append('dry_run', 'true');
|
||||
if (skipExisting) formData.append('skip_existing', 'true');
|
||||
formData.append("file", file);
|
||||
if (dryRun) formData.append("dry_run", "true");
|
||||
if (skipExisting) formData.append("skip_existing", "true");
|
||||
|
||||
try {
|
||||
const res = await fetch('/api/items/import', {
|
||||
method: 'POST',
|
||||
credentials: 'include',
|
||||
const res = await fetch("/api/items/import", {
|
||||
method: "POST",
|
||||
credentials: "include",
|
||||
body: formData,
|
||||
});
|
||||
const data = await res.json() as CSVImportResult;
|
||||
const data = (await res.json()) as CSVImportResult;
|
||||
if (!res.ok) {
|
||||
setError((data as unknown as { message?: string }).message ?? `HTTP ${res.status}`);
|
||||
setError(
|
||||
(data as unknown as { message?: string }).message ??
|
||||
`HTTP ${res.status}`,
|
||||
);
|
||||
} else {
|
||||
setResult(data);
|
||||
if (dryRun) {
|
||||
@@ -43,48 +49,85 @@ export function ImportItemsPane({ onImported, onCancel }: ImportItemsPaneProps)
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
setError(e instanceof Error ? e.message : 'Import failed');
|
||||
setError(e instanceof Error ? e.message : "Import failed");
|
||||
} finally {
|
||||
setImporting(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div style={{ display: 'flex', flexDirection: 'column', height: '100%' }}>
|
||||
<div style={{
|
||||
display: 'flex', alignItems: 'center', gap: '0.75rem',
|
||||
padding: '0.5rem 0.75rem',
|
||||
borderBottom: '1px solid var(--ctp-surface1)',
|
||||
backgroundColor: 'var(--ctp-mantle)',
|
||||
flexShrink: 0,
|
||||
}}>
|
||||
<span style={{ color: 'var(--ctp-yellow)', fontWeight: 600, fontSize: '0.9rem' }}>Import Items (CSV)</span>
|
||||
<div style={{ display: "flex", flexDirection: "column", height: "100%" }}>
|
||||
<div
|
||||
style={{
|
||||
display: "flex",
|
||||
alignItems: "center",
|
||||
gap: "0.75rem",
|
||||
padding: "0.5rem 0.75rem",
|
||||
borderBottom: "1px solid var(--ctp-surface1)",
|
||||
backgroundColor: "var(--ctp-mantle)",
|
||||
flexShrink: 0,
|
||||
}}
|
||||
>
|
||||
<span
|
||||
style={{
|
||||
color: "var(--ctp-yellow)",
|
||||
fontWeight: 600,
|
||||
fontSize: "0.9rem",
|
||||
}}
|
||||
>
|
||||
Import Items (CSV)
|
||||
</span>
|
||||
<span style={{ flex: 1 }} />
|
||||
<button onClick={onCancel} style={headerBtnStyle}>Cancel</button>
|
||||
<button onClick={onCancel} style={headerBtnStyle}>
|
||||
Cancel
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div style={{ flex: 1, overflow: 'auto', padding: '0.75rem' }}>
|
||||
<div style={{ flex: 1, overflow: "auto", padding: "0.75rem" }}>
|
||||
{error && (
|
||||
<div style={{ color: 'var(--ctp-red)', backgroundColor: 'rgba(243,139,168,0.1)', padding: '0.5rem', borderRadius: '0.3rem', marginBottom: '0.5rem', fontSize: '0.85rem' }}>
|
||||
<div
|
||||
style={{
|
||||
color: "var(--ctp-red)",
|
||||
backgroundColor: "rgba(243,139,168,0.1)",
|
||||
padding: "0.5rem",
|
||||
borderRadius: "0.3rem",
|
||||
marginBottom: "0.5rem",
|
||||
fontSize: "0.85rem",
|
||||
}}
|
||||
>
|
||||
{error}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Instructions */}
|
||||
<div style={{ fontSize: '0.8rem', color: 'var(--ctp-subtext0)', marginBottom: '0.75rem' }}>
|
||||
<p style={{ marginBottom: '0.25rem' }}>Upload a CSV file with items to import.</p>
|
||||
<p>Required column: <strong style={{ color: 'var(--ctp-text)' }}>category</strong></p>
|
||||
<p>Optional: description, projects, sourcing_type, sourcing_link, long_description, standard_cost, + property columns</p>
|
||||
<div
|
||||
style={{
|
||||
fontSize: "0.8rem",
|
||||
color: "var(--ctp-subtext0)",
|
||||
marginBottom: "0.75rem",
|
||||
}}
|
||||
>
|
||||
<p style={{ marginBottom: "0.25rem" }}>
|
||||
Upload a CSV file with items to import.
|
||||
</p>
|
||||
<p>
|
||||
Required column:{" "}
|
||||
<strong style={{ color: "var(--ctp-text)" }}>category</strong>
|
||||
</p>
|
||||
<p>
|
||||
Optional: description, projects, sourcing_type, long_description, +
|
||||
property columns (including sourcing_link, standard_cost)
|
||||
</p>
|
||||
<a
|
||||
href="/api/items/template.csv"
|
||||
style={{ color: 'var(--ctp-sapphire)', fontSize: '0.8rem' }}
|
||||
style={{ color: "var(--ctp-sapphire)", fontSize: "0.8rem" }}
|
||||
>
|
||||
Download CSV template
|
||||
</a>
|
||||
</div>
|
||||
|
||||
{/* File input */}
|
||||
<div style={{ marginBottom: '0.75rem' }}>
|
||||
<div style={{ marginBottom: "0.75rem" }}>
|
||||
<input
|
||||
ref={fileRef}
|
||||
type="file"
|
||||
@@ -94,76 +137,144 @@ export function ImportItemsPane({ onImported, onCancel }: ImportItemsPaneProps)
|
||||
setResult(null);
|
||||
setValidated(false);
|
||||
}}
|
||||
style={{ display: 'none' }}
|
||||
style={{ display: "none" }}
|
||||
/>
|
||||
<button
|
||||
onClick={() => fileRef.current?.click()}
|
||||
style={{
|
||||
padding: '0.75rem 1.5rem', border: '2px dashed var(--ctp-surface2)',
|
||||
borderRadius: '0.5rem', backgroundColor: 'var(--ctp-surface0)',
|
||||
color: 'var(--ctp-subtext1)', cursor: 'pointer', width: '100%',
|
||||
fontSize: '0.85rem',
|
||||
padding: "0.75rem 1.5rem",
|
||||
border: "2px dashed var(--ctp-surface2)",
|
||||
borderRadius: "0.5rem",
|
||||
backgroundColor: "var(--ctp-surface0)",
|
||||
color: "var(--ctp-subtext1)",
|
||||
cursor: "pointer",
|
||||
width: "100%",
|
||||
fontSize: "0.85rem",
|
||||
}}
|
||||
>
|
||||
{file ? file.name : 'Choose CSV file...'}
|
||||
{file ? file.name : "Choose CSV file..."}
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* Options */}
|
||||
<label style={{ display: 'flex', alignItems: 'center', gap: '0.4rem', fontSize: '0.85rem', color: 'var(--ctp-subtext1)', marginBottom: '0.75rem' }}>
|
||||
<input type="checkbox" checked={skipExisting} onChange={(e) => setSkipExisting(e.target.checked)} />
|
||||
<label
|
||||
style={{
|
||||
display: "flex",
|
||||
alignItems: "center",
|
||||
gap: "0.4rem",
|
||||
fontSize: "0.85rem",
|
||||
color: "var(--ctp-subtext1)",
|
||||
marginBottom: "0.75rem",
|
||||
}}
|
||||
>
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={skipExisting}
|
||||
onChange={(e) => setSkipExisting(e.target.checked)}
|
||||
/>
|
||||
Skip existing items
|
||||
</label>
|
||||
|
||||
{/* Actions */}
|
||||
<div style={{ display: 'flex', gap: '0.5rem', marginBottom: '0.75rem' }}>
|
||||
<div
|
||||
style={{ display: "flex", gap: "0.5rem", marginBottom: "0.75rem" }}
|
||||
>
|
||||
{!validated ? (
|
||||
<button
|
||||
onClick={() => void doImport(true)}
|
||||
disabled={!file || importing}
|
||||
style={{
|
||||
padding: '0.4rem 0.75rem', fontSize: '0.85rem', border: 'none', borderRadius: '0.3rem',
|
||||
backgroundColor: 'var(--ctp-yellow)', color: 'var(--ctp-crust)', cursor: 'pointer',
|
||||
opacity: (!file || importing) ? 0.5 : 1,
|
||||
padding: "0.4rem 0.75rem",
|
||||
fontSize: "0.85rem",
|
||||
border: "none",
|
||||
borderRadius: "0.3rem",
|
||||
backgroundColor: "var(--ctp-yellow)",
|
||||
color: "var(--ctp-crust)",
|
||||
cursor: "pointer",
|
||||
opacity: !file || importing ? 0.5 : 1,
|
||||
}}
|
||||
>
|
||||
{importing ? 'Validating...' : 'Validate (Dry Run)'}
|
||||
{importing ? "Validating..." : "Validate (Dry Run)"}
|
||||
</button>
|
||||
) : (
|
||||
<button
|
||||
onClick={() => void doImport(false)}
|
||||
disabled={importing || (result?.error_count ?? 0) > 0}
|
||||
style={{
|
||||
padding: '0.4rem 0.75rem', fontSize: '0.85rem', border: 'none', borderRadius: '0.3rem',
|
||||
backgroundColor: 'var(--ctp-green)', color: 'var(--ctp-crust)', cursor: 'pointer',
|
||||
opacity: (importing || (result?.error_count ?? 0) > 0) ? 0.5 : 1,
|
||||
padding: "0.4rem 0.75rem",
|
||||
fontSize: "0.85rem",
|
||||
border: "none",
|
||||
borderRadius: "0.3rem",
|
||||
backgroundColor: "var(--ctp-green)",
|
||||
color: "var(--ctp-crust)",
|
||||
cursor: "pointer",
|
||||
opacity: importing || (result?.error_count ?? 0) > 0 ? 0.5 : 1,
|
||||
}}
|
||||
>
|
||||
{importing ? 'Importing...' : 'Import Now'}
|
||||
{importing ? "Importing..." : "Import Now"}
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Results */}
|
||||
{result && (
|
||||
<div style={{ padding: '0.5rem', backgroundColor: 'var(--ctp-surface0)', borderRadius: '0.4rem', fontSize: '0.8rem' }}>
|
||||
<p>Total rows: <strong>{result.total_rows}</strong></p>
|
||||
<p>Success: <strong style={{ color: 'var(--ctp-green)' }}>{result.success_count}</strong></p>
|
||||
<div
|
||||
style={{
|
||||
padding: "0.5rem",
|
||||
backgroundColor: "var(--ctp-surface0)",
|
||||
borderRadius: "0.4rem",
|
||||
fontSize: "0.8rem",
|
||||
}}
|
||||
>
|
||||
<p>
|
||||
Total rows: <strong>{result.total_rows}</strong>
|
||||
</p>
|
||||
<p>
|
||||
Success:{" "}
|
||||
<strong style={{ color: "var(--ctp-green)" }}>
|
||||
{result.success_count}
|
||||
</strong>
|
||||
</p>
|
||||
{result.error_count > 0 && (
|
||||
<p>Errors: <strong style={{ color: 'var(--ctp-red)' }}>{result.error_count}</strong></p>
|
||||
<p>
|
||||
Errors:{" "}
|
||||
<strong style={{ color: "var(--ctp-red)" }}>
|
||||
{result.error_count}
|
||||
</strong>
|
||||
</p>
|
||||
)}
|
||||
{result.errors && result.errors.length > 0 && (
|
||||
<div style={{ marginTop: '0.5rem', maxHeight: 200, overflow: 'auto' }}>
|
||||
<div
|
||||
style={{
|
||||
marginTop: "0.5rem",
|
||||
maxHeight: 200,
|
||||
overflow: "auto",
|
||||
}}
|
||||
>
|
||||
{result.errors.map((err, i) => (
|
||||
<div key={i} style={{ color: 'var(--ctp-red)', fontSize: '0.75rem', padding: '0.1rem 0' }}>
|
||||
Row {err.row}{err.field ? ` [${err.field}]` : ''}: {err.message}
|
||||
<div
|
||||
key={i}
|
||||
style={{
|
||||
color: "var(--ctp-red)",
|
||||
fontSize: "0.75rem",
|
||||
padding: "0.1rem 0",
|
||||
}}
|
||||
>
|
||||
Row {err.row}
|
||||
{err.field ? ` [${err.field}]` : ""}: {err.message}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
{result.created_items && result.created_items.length > 0 && (
|
||||
<div style={{ marginTop: '0.5rem', color: 'var(--ctp-green)', fontSize: '0.75rem' }}>
|
||||
Created: {result.created_items.join(', ')}
|
||||
<div
|
||||
style={{
|
||||
marginTop: "0.5rem",
|
||||
color: "var(--ctp-green)",
|
||||
fontSize: "0.75rem",
|
||||
}}
|
||||
>
|
||||
Created: {result.created_items.join(", ")}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
@@ -174,6 +285,10 @@ export function ImportItemsPane({ onImported, onCancel }: ImportItemsPaneProps)
|
||||
}
|
||||
|
||||
const headerBtnStyle: React.CSSProperties = {
|
||||
background: 'none', border: 'none', cursor: 'pointer',
|
||||
color: 'var(--ctp-subtext1)', fontSize: '0.8rem', padding: '0.2rem 0.4rem',
|
||||
background: "none",
|
||||
border: "none",
|
||||
cursor: "pointer",
|
||||
color: "var(--ctp-subtext1)",
|
||||
fontSize: "0.8rem",
|
||||
padding: "0.2rem 0.4rem",
|
||||
};
|
||||
|
||||
@@ -110,15 +110,19 @@ export function MainTab({ item, onReload, isEditor }: MainTabProps) {
|
||||
{row("Description", item.description)}
|
||||
{row("Type", item.item_type)}
|
||||
{row("Sourcing", item.sourcing_type || "—")}
|
||||
{item.sourcing_link &&
|
||||
{item.properties?.sourcing_link != null &&
|
||||
row(
|
||||
"Source Link",
|
||||
<a href={item.sourcing_link} target="_blank" rel="noreferrer">
|
||||
{item.sourcing_link}
|
||||
<a
|
||||
href={String(item.properties.sourcing_link)}
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
>
|
||||
{String(item.properties.sourcing_link)}
|
||||
</a>,
|
||||
)}
|
||||
{item.standard_cost != null &&
|
||||
row("Std Cost", `$${item.standard_cost.toFixed(2)}`)}
|
||||
{item.properties?.standard_cost != null &&
|
||||
row("Std Cost", `$${Number(item.properties.standard_cost).toFixed(2)}`)}
|
||||
{row("Revision", `Rev ${item.current_revision}`)}
|
||||
{row("Created", formatDate(item.created_at))}
|
||||
{row("Updated", formatDate(item.updated_at))}
|
||||
|
||||
@@ -1,28 +0,0 @@
|
||||
import { useState, useEffect } from "react";
|
||||
import { get } from "../api/client";
|
||||
import type { Schema } from "../api/types";
|
||||
|
||||
// Module-level cache to avoid refetching across mounts.
|
||||
let cached: Record<string, string> | null = null;
|
||||
|
||||
export function useCategories() {
|
||||
const [categories, setCategories] = useState<Record<string, string>>(
|
||||
cached ?? {},
|
||||
);
|
||||
const [loading, setLoading] = useState(cached === null);
|
||||
|
||||
useEffect(() => {
|
||||
if (cached) return;
|
||||
get<Schema>("/api/schemas/kindred-rd")
|
||||
.then((schema) => {
|
||||
const seg = schema.segments.find((s) => s.name === "category");
|
||||
const vals = seg?.values ?? {};
|
||||
cached = vals;
|
||||
setCategories(vals);
|
||||
})
|
||||
.catch(() => {})
|
||||
.finally(() => setLoading(false));
|
||||
}, []);
|
||||
|
||||
return { categories, loading };
|
||||
}
|
||||
37
web/src/hooks/useFormDescriptor.ts
Normal file
37
web/src/hooks/useFormDescriptor.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import { useState, useEffect } from "react";
|
||||
import { get } from "../api/client";
|
||||
import type { FormDescriptor } from "../api/types";
|
||||
|
||||
// Module-level cache to avoid refetching across mounts.
|
||||
let cached: FormDescriptor | null = null;
|
||||
|
||||
export function useFormDescriptor(schemaName = "kindred-rd") {
|
||||
const [descriptor, setDescriptor] = useState<FormDescriptor | null>(cached);
|
||||
const [loading, setLoading] = useState(cached === null);
|
||||
|
||||
useEffect(() => {
|
||||
if (cached) return;
|
||||
get<FormDescriptor>(`/api/schemas/${encodeURIComponent(schemaName)}/form`)
|
||||
.then((desc) => {
|
||||
cached = desc;
|
||||
setDescriptor(desc);
|
||||
})
|
||||
.catch(() => {})
|
||||
.finally(() => setLoading(false));
|
||||
}, [schemaName]);
|
||||
|
||||
// Derive flat categories map from the category_picker stages
|
||||
const categories: Record<string, string> = {};
|
||||
if (descriptor?.category_picker) {
|
||||
const subcatStage = descriptor.category_picker.stages.find(
|
||||
(s) => s.values_by_domain,
|
||||
);
|
||||
if (subcatStage?.values_by_domain) {
|
||||
for (const domainVals of Object.values(subcatStage.values_by_domain)) {
|
||||
Object.assign(categories, domainVals);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { descriptor, categories, loading };
|
||||
}
|
||||
Reference in New Issue
Block a user