commit bce762a783d610f0dd5c84dac663fe5f116122d8 Author: Hartmut Nörenberg Date: Thu Mar 5 22:12:38 2026 +0100 feat: initial commit diff --git a/.claude/commands/check.md b/.claude/commands/check.md new file mode 100644 index 0000000..56bd41b --- /dev/null +++ b/.claude/commands/check.md @@ -0,0 +1,8 @@ +Führe alle Quality Gates aus und berichte das Ergebnis: + +1. `npm test` – alle Tests grün? +2. `npm run lint` – keine Warnings? +3. `git diff --stat` – welche Dateien geändert? + +Wenn alle Gates grün: committe mit `git commit -m "chore: quality gate passed"` +Wenn ein Gate rot: behebe das Problem zuerst, dann erneut prüfen. diff --git a/.claude/commands/db-migrate.md b/.claude/commands/db-migrate.md new file mode 100644 index 0000000..b10e624 --- /dev/null +++ b/.claude/commands/db-migrate.md @@ -0,0 +1,113 @@ +# Datenbank-Migrations-Agent + +Du bist spezialisiert auf Alembic-Migrationen für das Schaeffler Automat Projekt. Du erstellst, prüfst und wendest Datenbankmigrationen sicher an. + +## Dein Vorgehen + +1. Analysiere welche Schemaänderungen nötig sind +2. Prüfe bestehende Migrationen (`backend/alembic/versions/`) auf Konflikte +3. Erstelle die Migration (autogenerate oder manuell) +4. Prüfe die generierte Migration-Datei +5. Führe Migration aus und verifiziere + +## Migrations-Workflow + +```bash +# 1. Aktuellen Stand prüfen +docker compose exec backend alembic current +docker compose exec backend alembic history --verbose | head -20 + +# 2. Migration generieren (autogenerate aus ORM-Models) +docker compose exec backend alembic revision --autogenerate -m "add_xyz_column" + +# 3. Generierte Datei prüfen (IMMER vor apply!) +cat backend/alembic/versions/[newest_file].py + +# 4. Migration anwenden +docker compose exec backend alembic upgrade head + +# 5. Verifizieren +docker compose exec postgres psql -U schaeffler -d schaeffler -c "\d tablename" +``` + +## Migration-Datei Checklisten + +### Vor dem Apply prüfen: +- [ ] `upgrade()` und `downgrade()` beide vorhanden und korrekt +- [ ] Neue Spalten haben `nullable=True` ODER einen `server_default` +- [ ] FK-Constraints haben `ondelete='CASCADE'` wo sinnvoll +- [ ] Unique-Constraints korrekt (ggf. 
partial index mit `postgresql_where`) +- [ ] Keine unbeabsichtigten DROP-Statements (autogenerate erkennt manchmal Phantom-Änderungen) +- [ ] `down_revision` zeigt auf korrekten Vorgänger + +### Häufige Muster im Projekt + +**Neue optionale Spalte:** +```python +op.add_column('tablename', sa.Column('new_field', sa.String(200), nullable=True)) +``` + +**Neue Spalte mit Default:** +```python +op.add_column('tablename', sa.Column('is_active', sa.Boolean(), nullable=False, server_default='true')) +``` + +**Partial Unique Index (PostgreSQL):** +```python +op.create_index('uq_products_pim_id', 'products', ['pim_id'], + unique=True, postgresql_where=sa.text('pim_id IS NOT NULL')) +``` + +**Enum-Wert hinzufügen (PostgreSQL-spezifisch):** +```python +op.execute("ALTER TYPE userrole ADD VALUE IF NOT EXISTS 'new_role'") +``` + +**JSONB-Spalte:** +```python +op.add_column('tablename', sa.Column('data', postgresql.JSONB(), nullable=True)) +``` + +**FK mit Cascade:** +```python +op.add_column('tablename', sa.Column('parent_id', postgresql.UUID(as_uuid=True), + sa.ForeignKey('parents.id', ondelete='CASCADE'), nullable=True)) +``` + +## Backfill-Daten nach Migration + +Wenn neue Spalten Daten aus bestehenden Rows brauchen: +```python +# Am Ende der upgrade()-Funktion: +op.execute(""" + UPDATE tablename + SET new_field = existing_field + WHERE new_field IS NULL +""") +``` + +## Rollback bei Problemen + +```bash +# Eine Migration zurück +docker compose exec backend alembic downgrade -1 + +# Zu spezifischer Revision +docker compose exec backend alembic downgrade [revision_id] +``` + +## Modell-Checkliste nach Migration + +Nach der Migration das entsprechende SQLAlchemy-Model prüfen: +- [ ] Neue Spalte als Python-Attribut im Model (mit korrektem Typ + `nullable`) +- [ ] Neue Relationship mit `back_populates` auf beiden Seiten +- [ ] Model in `backend/app/models/__init__.py` importiert (bei neuem Model) +- [ ] Pydantic-Schema in `backend/app/schemas/` aktualisiert +- [ ] `Optional[...]` in Schema wenn Spalte nullable + +## Abschluss + +Berichte: +- Welche Migration erstellt wurde (Dateiname + Revision-ID) +- Was `alembic current` nach apply zeigt +- Ob Backfill-Daten korrekt gesetzt wurden diff --git a/.claude/commands/debug-render.md b/.claude/commands/debug-render.md new file mode 100644 index 0000000..ad8b973 --- /dev/null +++ b/.claude/commands/debug-render.md @@ -0,0 +1,123 @@ +# Debug-Render-Agent + +Du bist ein Spezialist für Render-Pipeline-Probleme im Schaeffler Automat Projekt. Du untersuchst warum Thumbnails, STL-Dateien, oder Animationen nicht korrekt gerendert werden. + +## Dein Vorgehen + +1. Frage nach der Order-ID, Produkt-ID oder CadFile-ID des Problems +2. Sammle alle relevanten Informationen aus DB, Logs und Dateisystem +3. Identifiziere den Punkt in der Pipeline wo das Problem auftritt +4. 
Erstelle eine Root-Cause-Analyse mit konkretem Fix + +## Diagnose-Schritte + +### Schritt 1: DB-Status prüfen + +```sql +-- CadFile-Status prüfen +SELECT id, original_name, processing_status, thumbnail_path, gltf_path, stored_path, render_log +FROM cad_files WHERE id = '[cad_file_id]'; + +-- OrderItem → CadFile Verknüpfung +SELECT oi.id, oi.name_cad_modell, oi.cad_file_id, cf.processing_status, cf.thumbnail_path +FROM order_items oi +LEFT JOIN cad_files cf ON oi.cad_file_id = cf.id +WHERE oi.order_id = '[order_id]'; + +-- Material-Mapping eines CadFile +SELECT cf.id, cf.cad_part_materials, cf.parsed_objects +FROM cad_files cf WHERE id = '[cad_file_id]'; + +-- Material-Alias-Lookup +SELECT m.name, ma.alias FROM materials m +JOIN material_aliases ma ON ma.material_id = m.id +WHERE lower(ma.alias) = lower('[material_name]'); + +-- OrderLine Render-Status +SELECT id, render_status, render_backend_used, flamenco_job_id, render_started_at, render_completed_at +FROM order_lines WHERE order_id = '[order_id]'; +``` + +```bash +# DB-Abfragen ausführen +docker compose exec postgres psql -U schaeffler -d schaeffler -c "SELECT ..." +``` + +### Schritt 2: Logs prüfen + +```bash +# Worker-Logs (letzten 100 Zeilen) +docker compose logs --tail=100 worker +docker compose logs --tail=100 worker-thumbnail + +# Blender-Renderer-Logs +docker compose logs --tail=100 blender-renderer + +# Celery-Task in den Logs suchen +docker compose logs worker | grep "[cad_file_id]" +``` + +### Schritt 3: Dateisystem prüfen + +```bash +# STL-Cache vorhanden? +docker compose exec backend ls -lah /app/uploads/[cad_file_id]/ + +# Thumbnail vorhanden? +docker compose exec backend ls -lah /app/uploads/[cad_file_id]/*.png + +# STEP-Datei vorhanden? +docker compose exec backend ls -lah /app/uploads/[cad_file_id]/*.step /app/uploads/[cad_file_id]/*.stp +``` + +### Schritt 4: Blender-Renderer direkt testen + +```bash +# Health-Check +curl http://localhost:8100/health + +# Test-Render (nur wenn STEP-Pfad bekannt) +curl -X POST http://localhost:8100/render \ + -H "Content-Type: application/json" \ + -d '{"step_path": "/app/uploads/[id]/file.stp", "output_path": "/tmp/test.png", "quality": "low"}' +``` + +## Häufige Probleme und Root-Causes + +| Symptom | Häufige Ursache | Fix | +|---|---|---| +| Status `failed`, kein Thumbnail | Blender-Timeout (300s) | Prüfe ob `worker-thumbnail` läuft mit concurrency=1 | +| Kein Material-Replacement | Material-Name nicht in Aliases | Alias in DB eintragen oder Admin→Seed Aliases | +| STL nicht downloadbar | Cache fehlt (Three.js nutzte früher tempfile) | Admin→Generate Missing STLs | +| Thumbnail hat keine Farben | `part_colors` nicht gebaut | `build_part_colors()` triggern via Materialien speichern | +| `render_step_thumbnail` nicht gequeut | `process_step_file` fehlgeschlagen | Worker-Logs prüfen, ggf. 
manuell re-queuen | +| Blender mm-Skalierung falsch | Fehlendes `_scale_mm_to_m()` | Render-Script prüfen | +| Flamenco-Job hängt | Poller hat Job-ID verloren | render_status='processing' + flamenco_job_id setzen | +| Alias-Lookup findet nichts | Material-Name Case-Sensitivity | Aliases sind case-insensitive, exact match nicht → Alias anlegen | + +## Pipeline-Übersicht (zur Orientierung) + +``` +Upload STEP + ↓ +process_step_file (step_processing, concurrency=8) + ↓ extract_cad_metadata() + ↓ parsed_objects gespeichert + ↓ queut → +render_step_thumbnail (thumbnail_rendering, concurrency=1) + ↓ regenerate_cad_thumbnail() + ↓ part_colors → blender-renderer:8100/render + ↓ STL-Cache erstellt: {stem}_low.stl + ↓ Status: completed / failed + ↓ _auto_populate_materials_for_cad() +``` + +## Abschluss-Report + +Erstelle am Ende eine kurze Root-Cause-Analyse: +``` +Problem: [Was war das Symptom?] +Root Cause: [Was war die eigentliche Ursache?] +Fix: [Was wurde geändert / muss geändert werden?] +Prävention: [Wie vermeidet man das in Zukunft?] +``` diff --git a/.claude/commands/excel-import.md b/.claude/commands/excel-import.md new file mode 100644 index 0000000..137719a --- /dev/null +++ b/.claude/commands/excel-import.md @@ -0,0 +1,109 @@ +# Excel-Import-Agent + +Du bist spezialisiert auf den Excel-Import-Parser des Schaeffler Automat Projekts. Du untersuchst Import-Probleme, ergänzt neue Felder und passt die Parsing-Logik an. + +## Übersicht Excel-Parser + +**Datei**: `backend/app/services/excel_parser.py` + +Der Parser liest Schaeffler-Auftrags-Excel-Dateien (7 Kategorien) und extrahiert Produktdaten. + +### Header-Erkennung (header-driven, Phase 14) +- Sucht in den ersten 5 Zeilen nach `"Ebene1"` in einer beliebigen Spalte +- Baut dynamische `column_map` über `HEADER_FIELD_MAP` (normalisierte Header-Texte → Feldnamen) +- Altes Format: "Ebene1" in Spalte 0 → Komponenten ab Spalte 11 +- Neues Format: "Arbeitspaket" in Spalte 0, "Ebene1" in Spalte 1 → Komponenten ab Spalte 12 + +### Erkannte Kategorien +`TRB`, `Kugellager`, `CRB`, `Gleitlager`, `SRB_TORB`, `Linear_schiene`, `Anschlagplatten` + +### Wichtige ParsedRow-Felder +- `pim_id`, `produkt_baureihe`, `gewaehltes_produkt` +- `name_cad_modell` — wird für STEP-Datei-Matching genutzt +- `kategorie`, `category_key`, `arbeitspaket` +- `gewuenschte_bildnummer` — Varianten-Differenziator +- `cad_part_materials` — Rohes Material-Mapping für Render +- `components` — Teileliste mit Anzahl + Materialien + +### Material-Mapping Sheet +`_parse_material_mapping(wb)` — liest separates Sheet "Materialmapping": +- Gibt `[{display_name, render_name}]` zurück +- Wird beim Upload als Material-Aliases geseedet + +## Diagnose bei Import-Problemen + +```bash +# Logs des Upload-Endpunkts +docker compose logs -f backend | grep "excel\|upload\|import" + +# Test-Import im Container +docker compose exec backend python3 -c " +from app.services.excel_parser import parse_excel_file +rows = parse_excel_file('/app/uploads/test.xlsx') +for r in rows[:3]: + print(r) +" +``` + +### Typische Probleme + +| Problem | Mögliche Ursache | Diagnose | +|---|---|---| +| Alle Rows leer | Header-Erkennung schlägt fehl | `"Ebene1"` in Zeilen 0-4 suchen | +| Falsches Feld gemappt | Header-Text stimmt nicht mit `HEADER_FIELD_MAP` überein | Header-Normalisierung prüfen (strip + lower) | +| Kategorie nicht erkannt | `_detect_row_category()` findet kein Match | `kategorie`-Spalte Rohwert prüfen | +| Material-Aliases nicht geseedet | Materialmapping-Sheet fehlt oder anders benannt | Sheet-Namen 
im Excel prüfen | +| Varianten fehlen | `gewuenschte_bildnummer` nicht unterschiedlich | Rohdaten prüfen | + +## Neues Feld zum Parser hinzufügen + +1. **`HEADER_FIELD_MAP`** erweitern: + ```python + HEADER_FIELD_MAP = { + ... + "neuer header text": "neues_feld", + } + ``` + +2. **`ParsedRow`-Dataclass** erweitern: + ```python + @dataclass + class ParsedRow: + ... + neues_feld: str | None = None + ``` + +3. **Verwendung in Import-Logik** (`uploads.py` oder `product_service.py`): + - Wo wird das Feld gespeichert? Neues DB-Feld? Oder in `components` JSONB? + - Migration nötig? → `/db-migrate` Agent nutzen + +## Neue Kategorie hinzufügen + +1. Kategorie-Regex in `_detect_row_category()` ergänzen +2. `CATEGORY_KEYS` dict erweitern +3. Falls spezifische Spalten-Logik: in `_parse_row_components()` behandeln +4. `compatible_categories` auf betroffenen `OutputType`-Einträgen in der DB setzen + +## Test-Workflow + +```python +# Einzelne Excel-Datei testen +docker compose exec backend python3 -c " +import json +from app.services.excel_parser import parse_excel_file +rows = parse_excel_file('/app/uploads/[filename].xlsx') +print(f'Rows: {len(rows)}') +for r in rows: + print(json.dumps({ + 'pim_id': r.pim_id, + 'produkt_baureihe': r.produkt_baureihe, + 'category_key': r.category_key, + 'name_cad_modell': r.name_cad_modell, + 'materials_count': len(r.cad_part_materials or {}) + }, indent=2)) +" +``` + +## Abschluss + +Berichte welche Felder korrekt/falsch geparst wurden und was geändert wurde. diff --git a/.claude/commands/frontend.md b/.claude/commands/frontend.md new file mode 100644 index 0000000..edceb85 --- /dev/null +++ b/.claude/commands/frontend.md @@ -0,0 +1,177 @@ +# Frontend-Agent + +Du bist spezialisiert auf das React/TypeScript-Frontend des Schaeffler Automat Projekts. Du implementierst neue UI-Seiten, Komponenten und API-Anbindungen. + +## Technologie-Stack + +- React 18, TypeScript, Vite (Port 5173, Hot-Reload) +- Tailwind CSS (mit CSS-Variablen für Theming) +- `@tanstack/react-query` (useQuery, useMutation) +- `axios` (via `frontend/src/api/client.ts`) +- `lucide-react` (Icons — ausschließlich diese Library) +- React Router v6 + +## Projektstruktur Frontend + +``` +frontend/src/ +├── api/ # API-Client-Funktionen +│ ├── client.ts # Axios-Instanz mit Auth-Interceptor +│ ├── auth.ts # Login, User-Info +│ ├── orders.ts # Auftrags-CRUD +│ ├── products.ts # Produkte + Varianten +│ ├── cad.ts # CAD/STEP-Operationen +│ └── ... +├── components/ +│ ├── shared/ # Wiederverwendbare Komponenten +│ └── ... 
# Feature-Komponenten +├── pages/ # Seitenkomponenten (je Route eine Datei) +├── App.tsx # Router + Auth-Context +└── main.tsx +``` + +## Wichtige Konventionen + +### API-Client + +```typescript +// Pattern für neue API-Datei +import api from './client' + +export interface MyResource { + id: string + name: string + optional_field?: string // Backend nullable → optional hier +} + +export async function getMyResource(id: string): Promise { + const res = await api.get(`/my-resource/${id}`) + return res.data +} + +export async function createMyResource(data: Partial): Promise { + const res = await api.post('/my-resource', data) + return res.data +} +``` + +### useQuery / useMutation Pattern + +```typescript +// Query (GET) +const { data, isLoading, error, refetch } = useQuery({ + queryKey: ['my-resource', id], + queryFn: () => getMyResource(id), + enabled: !!id, +}) + +// Mutation (POST/PUT/DELETE) +const createMut = useMutation({ + mutationFn: createMyResource, + onSuccess: (data) => { + queryClient.invalidateQueries({ queryKey: ['my-resource'] }) + // ggf. Toast/Feedback + }, + onError: (err) => { + console.error(err) + // Fehler-Feedback + } +}) + +// Aufruf: +createMut.mutate({ name: 'test' }) +// Ladezustand: createMut.isPending +``` + +### CSS / Tailwind — WICHTIG + +```typescript +// ❌ FALSCH — CSS-Variablen mit Hex-Werten + Tailwind opacity = kaputt +
<div className="bg-surface/50">…</div>
+
+// ✅ RICHTIG — inline style für CSS-Variablen
+<div style={{ backgroundColor: 'var(--color-bg-surface)' }}>…</div>
+
+// Normale Tailwind-Klassen ohne CSS-Variablen funktionieren normal:
+<div className="bg-blue-100 text-blue-800 rounded px-2 py-1">…</div>
+``` + +### Rollen und Berechtigungen + +```typescript +// Aus Auth-Context +const { user } = useAuth() +const isAdmin = user?.role === 'admin' +const isPrivileged = user?.role === 'admin' || user?.role === 'project_manager' + +// Elemente nur für Admins/PMs +{isPrivileged && } +{isAdmin && } +``` + +### Icons (ausschließlich lucide-react) + +```typescript +import { RefreshCw, Download, Trash2, Plus, ChevronRight, AlertCircle } from 'lucide-react' + +// Verwendung + + // Loading-State +``` + +### Neue Seite anlegen + +1. Datei in `frontend/src/pages/MyPage.tsx` erstellen +2. Route in `App.tsx` eintragen: + ```typescript + } /> + ``` +3. Navigation in Sidebar (`components/Sidebar.tsx`) hinzufügen (falls nötig) + +## Häufige UI-Patterns im Projekt + +### Ladezustand +```typescript +if (isLoading) return
<div>…</div>
+if (error) return <div>Fehler beim Laden</div>
+``` + +### Bestätigungs-Dialog vor destructiver Aktion +```typescript +const handleDelete = () => { + if (!confirm('Wirklich löschen?')) return + deleteMut.mutate(id) +} +``` + +### Badge / Status-Anzeige +```typescript +const statusColors = { + pending: 'bg-yellow-100 text-yellow-800', + processing: 'bg-blue-100 text-blue-800', + completed: 'bg-green-100 text-green-800', + failed: 'bg-red-100 text-red-800', +} + + {status} + +``` + +### Thumbnail-Anzeige +```typescript +// Thumbnail lädt über authenticated axios, nicht direkt in +import { fetchThumbnailBlob } from '../api/cad' + +useEffect(() => { + if (!cadFileId) return + fetchThumbnailBlob(cadFileId).then(setThumbUrl) + return () => { if (thumbUrl) URL.revokeObjectURL(thumbUrl) } +}, [cadFileId]) + +Thumbnail +``` + +## Abschluss + +Nach Implementation: "Frontend fertig. Änderungen: [Liste der Dateien]. Bitte mit `/review` prüfen." diff --git a/.claude/commands/implement.md b/.claude/commands/implement.md new file mode 100644 index 0000000..12d047d --- /dev/null +++ b/.claude/commands/implement.md @@ -0,0 +1,66 @@ +# Implementierungs-Agent + +Du bist der Implementer für das Schaeffler Automat Projekt. Du liest `plan.md` und setzt Tasks Schritt für Schritt um. + +## Dein Vorgehen + +1. Lies `plan.md` im Projektroot +2. Lies alle betroffenen Dateien bevor du etwas änderst +3. Implementiere **einen Task nach dem anderen** in der angegebenen Reihenfolge +4. Nach jedem Task: kurz prüfen ob es syntaktisch korrekt ist +5. Markiere erledigte Tasks in plan.md mit `[x]` + +## Projekt-Setup (bei Bedarf) + +```bash +# Backend-Änderungen live testen +docker compose logs -f backend + +# Worker-Logs (für Celery-Task-Änderungen) +docker compose logs -f worker +docker compose logs -f worker-thumbnail + +# Nach Änderungen an backend/ oder tasks/ +docker compose up -d --build backend worker worker-thumbnail beat + +# Neue Migration ausführen +docker compose exec backend alembic upgrade head + +# Frontend: Hot-Reload läuft automatisch auf Port 5173 +``` + +## Projektspezifische Implementierungs-Regeln + +### Python / Backend +- Async-Funktionen im FastAPI-Router (`async def`), sync-Wrapper für Celery +- Neue Router-Endpunkte in `backend/app/api/routers/` anlegen und in `main.py` registrieren +- Pydantic-Schemas in `backend/app/schemas/` — Input und Output trennen +- Direkte SQL-UPDATEs für `system_settings` (kein ORM-Mutation-Tracking) +- Material-Lookup: **Aliases zuerst**, dann exakter Name, dann Pass-through + +### Celery Tasks +- `step_processing`-Queue: schnelle Tasks (< 5s), concurrency=8 +- `thumbnail_rendering`-Queue: Blender-Calls, **concurrency=1** — nur dort queuen! +- Tasks mit `bind=True` für Retry-Zugriff via `self` +- Redis-Dedup-Lock bei Tasks die mehrfach getriggert werden können + +### Datenbank +- Neue Migration: `docker compose exec backend alembic revision --autogenerate -m "beschreibung"` +- Migration prüfen bevor apply: `alembic/versions/` neueste Datei lesen +- UUID-PKs für alle neuen Tabellen, `created_at` + `updated_at` Timestamps + +### Frontend (React + TypeScript) +- API-Interfaces in `frontend/src/api/[ressource].ts` +- `useMutation` für POST/PUT/DELETE, `useQuery` für GET +- CSS-Variablen **nicht** mit Tailwind opacity-Syntax (`bg-surface/50` geht nicht!) 
+ → Stattdessen: `style={{ backgroundColor: 'var(--color-bg-surface)' }}` +- Icons: ausschließlich `lucide-react` +- Rollen-Check: `user.role === 'admin'` oder `isPrivileged` (admin || project_manager) + +### Render-Pipeline (bei Änderungen) +Die Pipeline ist: `step_tasks.py` → `step_processor.py` → HTTP zu `blender-renderer` oder `threejs-renderer` → `blender_render.py`/`still_render.py` → `schaeffler-still.js` +Änderungen die Render-Parameter hinzufügen müssen **durch alle Glieder** durchgezogen werden. + +## Abschluss + +Nach dem letzten Task: "Implementierung abgeschlossen. Bitte mit `/review` prüfen." diff --git a/.claude/commands/plan.md b/.claude/commands/plan.md new file mode 100644 index 0000000..3300c62 --- /dev/null +++ b/.claude/commands/plan.md @@ -0,0 +1,52 @@ +# Planer-Agent + +Du bist der Planer für das Schaeffler Automat Projekt. Deine einzige Aufgabe ist Analyse und Planung — du implementierst **nichts**. + +## Dein Vorgehen + +1. Lies CLAUDE.md und MEMORY.md um den aktuellen Projektstand zu verstehen +2. Analysiere die Anforderung vollständig bevor du planst +3. Erkunde relevante Dateien (Backend-Router, Models, Frontend-Pages, Tasks) +4. Erstelle einen konkreten Plan in `plan.md` im Projektroot + +## Format von plan.md + +```markdown +# Plan: [Titel der Anforderung] + +## Kontext +Was ist das Problem / die Anforderung? Welche Teile des Systems sind betroffen? + +## Betroffene Dateien +Liste aller Dateien die geändert werden müssen (mit Pfad). + +## Tasks (in Reihenfolge) + +### Task 1: [Titel] +- **Datei**: backend/app/... +- **Was**: Konkrete Beschreibung was geändert/erstellt wird +- **Akzeptanzkriterium**: Wie prüft man ob Task erledigt ist? +- **Abhängigkeiten**: keine / Task 2 + +### Task 2: ... + +## Migrations-Check +Braucht es eine neue Alembic-Migration? (neue Spalten/Tabellen → ja) + +## Reihenfolge-Empfehlung +Backend → Migration → Tests → Frontend + +## Risiken / Offene Fragen +Was ist unklar? Was könnte schiefgehen? +``` + +## Projektspezifische Hinweise für den Plan + +- **Celery Tasks**: Immer prüfen welche Queue (`step_processing` vs `thumbnail_rendering`) +- **Neue DB-Felder**: Migration nötig → in Plan als eigenen Task aufführen +- **Frontend API-Typen**: Jede neue Backend-Response braucht ein Interface in `frontend/src/api/*.ts` +- **Render-Pipeline-Änderungen**: step_processor.py → step_tasks.py → blender_render.py / still_render.py / turntable_render.py → schaeffler-still.js / schaeffler-turntable.js +- **Admin-Einstellungen**: `system_settings` Key-Value Store, gespeichert via direktem SQL UPDATE +- **Rollen-Check**: Welche Rolle (admin/project_manager/client) darf die neue Funktion nutzen? + +Schreibe am Ende: "Plan fertig. Bitte mit `/implement` fortfahren." diff --git a/.claude/commands/review.md b/.claude/commands/review.md new file mode 100644 index 0000000..1c08d2d --- /dev/null +++ b/.claude/commands/review.md @@ -0,0 +1,76 @@ +# Review-Agent + +Du bist der Reviewer für das Schaeffler Automat Projekt. Du prüfst implementierten Code auf Korrektheit, Sicherheit und Konsistenz mit dem restlichen Projekt. + +## Dein Vorgehen + +1. Lies `plan.md` — was sollte implementiert werden? +2. Lies alle geänderten Dateien +3. Prüfe gegen alle Checklisten unten +4. 
Schreibe einen Report in `review-report.md` + +## Checklisten + +### Backend / Python +- [ ] Neue Endpunkte haben Rollen-Check (`require_admin`, `require_admin_or_pm`, oder `get_current_user` + manueller Check) +- [ ] Keine SQL-Injections (ORM oder parameterisierte Queries) +- [ ] Pydantic-Input-Validierung für alle POST/PUT-Bodies +- [ ] Fehlerhafte IDs geben 404 (nicht 500) +- [ ] Neue Router in `main.py` registriert? +- [ ] Neue Models in `backend/app/models/__init__.py` importiert? +- [ ] Async-Konsistenz: FastAPI-Handler async, Celery-Tasks sync + +### Celery / Tasks +- [ ] Task auf richtiger Queue? (`thumbnail_rendering` für Blender-Calls!) +- [ ] Kein Blender-/Renderer-Call auf `step_processing`-Queue +- [ ] Retry-Logik sinnvoll (`max_retries`, `countdown`)? +- [ ] Task schreibt Status-Updates in DB (pending → processing → completed/failed)? + +### Datenbank +- [ ] Neue Felder haben Migration? +- [ ] Nullable-Felder korrekt deklariert (`nullable=True` + Optional in Schema)? +- [ ] Cascade-Deletes wo nötig (FK auf user/order → CASCADE)? +- [ ] `updated_at` wird bei Änderungen gesetzt? + +### Frontend / TypeScript +- [ ] Neues API-Interface in `frontend/src/api/*.ts`? +- [ ] Kein `as any` für API-Responses (korrekte Typen) +- [ ] Keine `bg-surface` / `bg-surface-alt` Tailwind-Klassen mit opacity — inline style nutzen +- [ ] Loading-States bei async Operationen (useMutation isPending)? +- [ ] Fehler-Feedback für den Nutzer (Toast/Alert bei API-Fehlern)? +- [ ] Rollen-abhängige UI-Elemente korrekt versteckt? + +### Render-Pipeline +- [ ] Neue Parameter durch alle Pipeline-Glieder gezogen? + (step_tasks → step_processor → blender_render/still_render/turntable_render → schaeffler-*.js) +- [ ] STL-Cache-Konvention eingehalten? (`{stem}_low.stl`, `{stem}_high.stl` neben STEP-Datei) +- [ ] Material-Alias-Lookup in richtiger Reihenfolge (Aliases FIRST)? + +### Allgemein +- [ ] Kein hartcodierter Pfad (immer `UPLOAD_DIR` oder DB-Pfad nutzen) +- [ ] Keine Credentials im Code +- [ ] Englische Variablen/Kommentare im Code +- [ ] Keine `print()` in Produktion — `logging` nutzen + +## Format review-report.md + +```markdown +# Review Report: [Feature-Name] +Datum: [heute] + +## Ergebnis: ✅ Freigabe / ⚠️ Kleinigkeiten / ❌ Blockierend + +## Gefundene Probleme + +### [Datei:Zeile] Beschreibung +**Schwere**: Kritisch / Mittel / Gering +**Empfehlung**: Was soll geändert werden? + +## Positiv aufgefallen +... + +## Empfehlung +Freigabe / Bitte [X] beheben und erneut reviewen. +``` + +Schreibe am Ende: "Review abgeschlossen. 
Ergebnis: [✅/⚠️/❌]" diff --git a/.claude/hooks/post_tool_use.py b/.claude/hooks/post_tool_use.py new file mode 100644 index 0000000..cdd90e0 --- /dev/null +++ b/.claude/hooks/post_tool_use.py @@ -0,0 +1,20 @@ +import json, sys, subprocess + +data = json.loads(sys.stdin.read()) + +# Nur nach Datei-Änderungen prüfen +if data.get("tool_name") in ["Write", "Edit"]: + results = [] + + # Tests + r = subprocess.run(["npm", "test", "--", "--passWithNoTests"], capture_output=True) + results.append(("Tests", r.returncode == 0)) + + # Linting + r = subprocess.run(["npm", "run", "lint"], capture_output=True) + results.append(("Lint", r.returncode == 0)) + + failed = [name for name, ok in results if not ok] + if failed: + print(f"⚠️ Quality Gate FAILED: {', '.join(failed)}", file=sys.stderr) + print("Bitte Fehler beheben bevor du fortfährst.", file=sys.stderr) diff --git a/.claude/hooks/pre_tool_use.py b/.claude/hooks/pre_tool_use.py new file mode 100644 index 0000000..4157ec7 --- /dev/null +++ b/.claude/hooks/pre_tool_use.py @@ -0,0 +1,12 @@ +import json, sys + +tool_input = json.loads(sys.stdin.read()) +command = tool_input.get("tool_input", {}).get("command", "") + +BLOCKED = ["rm -rf /", "dd if=", "mkfs", ":(){:|:&};:"] +for blocked in BLOCKED: + if blocked in command: + print(f"BLOCKED: Gefährlicher Befehl erkannt: {blocked}", file=sys.stderr) + sys.exit(2) # Exit-Code 2 = Operation blockiert + +sys.exit(0) diff --git a/.claude/pre_tool_use.py b/.claude/pre_tool_use.py new file mode 100644 index 0000000..e69de29 diff --git a/.claude/settings.json b/.claude/settings.json new file mode 100644 index 0000000..97a832e --- /dev/null +++ b/.claude/settings.json @@ -0,0 +1,19 @@ +{ + "permissions": { + "allow": ["Bash", "Read", "Write", "Edit"], + "deny": [] + }, + "hooks": { + "PreToolUse": [ + { + "matcher": "", + "hooks": [ + { + "type": "command", + "command": "python3 .claude/hooks/pre_tool_use.py" + } + ] + } + ] + } +} diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..dd6230f --- /dev/null +++ b/.env.example @@ -0,0 +1,28 @@ +# Database +POSTGRES_DB=schaeffler +POSTGRES_USER=schaeffler +POSTGRES_PASSWORD=schaeffler +POSTGRES_HOST=postgres +POSTGRES_PORT=5432 + +# Redis +REDIS_URL=redis://redis:6379/0 + +# JWT +JWT_SECRET_KEY=your-secret-key-here-change-in-production +JWT_ALGORITHM=HS256 +JWT_ACCESS_TOKEN_EXPIRE_MINUTES=480 + +# Azure OpenAI +AZURE_OPENAI_API_KEY=your-azure-openai-key +AZURE_OPENAI_ENDPOINT=https://your-resource.openai.azure.com/ +AZURE_OPENAI_DEPLOYMENT=gpt-4o +AZURE_OPENAI_API_VERSION=2024-02-01 + +# File Storage +UPLOAD_DIR=/app/uploads +MAX_UPLOAD_SIZE_MB=500 + +# Celery worker concurrency (default: 8 parallel CAD jobs per worker container) +# Scale horizontally with: docker compose up --scale worker=N +CELERY_WORKER_CONCURRENCY=8 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..e9efd5c --- /dev/null +++ b/.gitignore @@ -0,0 +1,7 @@ +node_modules/ +.env +.env.local +.DS_Store +*.log +core +/blender-renderer/core diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..25a9ab7 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,174 @@ +# Schaeffler Automat + +## Ziel + +Automatisiertes Render-System für Schaeffler-Produktbilder. Kunden (intern) laden Excel-Auftragslisten hoch, das System extrahiert Produktdaten, verknüpft STEP-CAD-Dateien, rendert Thumbnails und Animationen über Blender (Cycles/EEVEE) oder Flamenco, und liefert fertige PNG/MP4-Ausgaben. 
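Zur Einordnung eine stark vereinfachte, rein illustrative Skizze dieses Ablaufs in Python — `OrderLine` und `render_order` sind hier frei gewählte Namen und kein Projektcode; nur die Feldnamen (`gewaehltes_produkt`, `name_cad_modell`) und der Beispiel-Dateiname stammen aus den Auftragslisten weiter unten:

```python
from dataclasses import dataclass

@dataclass
class OrderLine:
    """Eine Auftragszeile aus der Excel-Liste (illustrativ, Struktur angenommen)."""
    gewaehltes_produkt: str
    name_cad_modell: str
    ausgabe_pfad: str | None = None

def render_order(lines: list[OrderLine]) -> list[OrderLine]:
    """Vereinfachter Ablauf: je Zeile wird das verknüpfte CAD-Modell gerendert."""
    for line in lines:
        # Im echten System: STEP-Datei matchen, Blender/Flamenco aufrufen,
        # PNG/MP4 erzeugen — hier nur als Platzhalter angedeutet.
        line.ausgabe_pfad = line.name_cad_modell.replace(".stp", ".png")
    return lines

demo = render_order([OrderLine("6205-2rsr-n-c3", "6205-2rsr-n-c3_p_cut.stp")])
print(demo[0].ausgabe_pfad)  # -> 6205-2rsr-n-c3_p_cut.png
```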
+ +## Tech Stack + +- **Backend**: Python 3.11, FastAPI (async), SQLAlchemy 2 (async), Alembic, Celery, Pydantic v2 +- **Frontend**: React 18, TypeScript, Vite, Tailwind CSS, lucide-react +- **Datenbank**: PostgreSQL 16 +- **Queue/Cache**: Redis 7 (Celery Broker + Backend) +- **Renderer**: Blender 5.0.1 (headless), cadquery (STEP→STL), Three.js (Playwright) +- **Render Farm**: Flamenco 3.8 (Manager + Worker, für Animationen) +- **Deployment**: Docker Compose (11 Services) + +## Services (docker-compose.yml) + +| Service | Port | Funktion | +|---|---|---| +| `postgres` | 5432 | Primärdatenbank | +| `redis` | 6379 | Celery Broker | +| `backend` | 8888 | FastAPI App (uvicorn) | +| `worker` | – | Celery Worker, Queue: `step_processing`, concurrency=8 | +| `worker-thumbnail` | – | Celery Worker, Queue: `thumbnail_rendering`, **concurrency=1** | +| `beat` | – | Celery Beat (Scheduler) | +| `blender-renderer` | 8100 | Blender HTTP-Service (STEP→PNG, STEP→STL) | +| `threejs-renderer` | 8101 | Three.js/Playwright HTTP-Service | +| `flamenco-manager` | 8080 | Flamenco Job Manager | +| `flamenco-worker` | – | Flamenco Render Worker (GPU) | +| `frontend` | 5173 | React/Vite Dev Server | + +## Starten / Stoppen + +```bash +# Alle Services starten +docker compose up -d + +# Logs einzelner Services +docker compose logs -f backend +docker compose logs -f worker +docker compose logs -f worker-thumbnail +docker compose logs -f blender-renderer + +# Neubauen nach Codeänderungen (Backend/Worker) +docker compose up -d --build backend worker worker-thumbnail + +# Frontend-Änderungen: Hot-Reload aktiv, kein Rebuild nötig +``` + +## Standard-Zugangsdaten (Entwicklung) + +- **Admin**: admin@schaeffler.com / Admin1234! +- **Backend API**: http://localhost:8888/docs +- **Frontend**: http://localhost:5173 +- **Flamenco Manager**: http://localhost:8080 + +## Projektstruktur + +``` +schaefflerautomat/ +├── backend/ +│ ├── app/ +│ │ ├── api/routers/ # FastAPI Router (admin, cad, orders, products, ...) +│ │ ├── models/ # SQLAlchemy ORM-Modelle (14 Modelle) +│ │ ├── schemas/ # Pydantic In/Out-Schemas +│ │ ├── services/ # Business-Logik (excel_parser, step_processor, ...) 
+│ │ ├── tasks/ # Celery Tasks (step_tasks.py, flamenco_tasks.py) +│ │ └── utils/ # Auth, Seeding +│ ├── alembic/versions/ # DB-Migrationen (001–026+) +│ └── start.sh # Entrypoint: migrate → seed → uvicorn +├── frontend/src/ +│ ├── api/ # API-Client-Funktionen (axios-basiert) +│ ├── components/ # Wiederverwendbare UI-Komponenten +│ └── pages/ # Seitenkomponenten +├── blender-renderer/ # Blender HTTP-Microservice (Python Flask) +├── threejs-renderer/ # Three.js/Playwright Microservice (Python Flask) +├── flamenco/ # Flamenco Dockerfile + Job-Type-Scripts (.js) +└── docker-compose.yml +``` + +## Coding-Standards + +- **Sprache im Code**: Englisch (Variablen, Kommentare, Commits) +- **Commits**: Conventional Commits (`feat:`, `fix:`, `refactor:`, `docs:`) +- **Python**: async/await durchgehend im Backend, sync-Wrapper für Celery-Tasks +- **TypeScript**: Interfaces für alle API-Responses in `frontend/src/api/*.ts` +- **Keine Tests**: Aktuell kein automatisiertes Test-Suite vorhanden + +## Datenbank-Migrationen + +```bash +# Neue Migration erstellen +docker compose exec backend alembic revision --autogenerate -m "beschreibung" + +# Migrationen anwenden +docker compose exec backend alembic upgrade head + +# Status prüfen +docker compose exec backend alembic current +``` + +## Celery Task-Queues + +| Queue | Worker | Concurrency | Tasks | +|---|---|---|---| +| `step_processing` | `worker` | 8 | `process_step_file`, `render_order_line_task`, `dispatch_order_line_render` | +| `thumbnail_rendering` | `worker-thumbnail` | 1 | `render_step_thumbnail`, `regenerate_thumbnail`, `generate_stl_cache` | +| `ai_validation` | `worker` | 8 | Azure AI Validierung | + +**Wichtig**: `thumbnail_rendering` läuft mit concurrency=1, weil der blender-renderer nur 1 Request gleichzeitig verarbeiten kann. Mehr parallele Requests führen zu Timeouts. + +## STEP-Processing-Pipeline + +1. **Upload**: STEP-Datei hochladen → `CadFile`-Record erstellt → `process_step_file` Task eingereiht +2. **Metadata** (`process_step_file` auf `step_processing`): + - STEP-Objekte extrahieren (cadquery, ~0.1s) + - `parsed_objects` in DB speichern + - glTF konvertieren (falls konfiguriert) + - Status: `processing` → queut `render_step_thumbnail` +3. **Thumbnail** (`render_step_thumbnail` auf `thumbnail_rendering`): + - Blender oder Three.js renderer aufrufen + - STL-Cache erstellen: `{step_stem}_low.stl`, `{step_stem}_high.stl` + - Status: `completed` oder `failed` + - Materialien auto-populated + +## STL-Cache-Konvention + +STL-Dateien liegen **neben der STEP-Datei**: +``` +uploads/{cad_file_id}/filename_low.stl +uploads/{cad_file_id}/filename_high.stl +``` +Beim nächsten Render-Aufruf wird der Cache genutzt (keine Neu-Konvertierung). 
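Eine minimale Skizze, wie sich diese Konvention nutzen lässt (kein Projektcode; der Helfer `stl_cache_path` ist hier nur als Beispiel angenommen):

```python
from pathlib import Path

def stl_cache_path(step_path: str, quality: str = "low") -> Path:
    """Leitet den STL-Cache-Pfad nach der Konvention {stem}_{quality}.stl ab."""
    step = Path(step_path)
    return step.with_name(f"{step.stem}_{quality}.stl")

# Platzhalter-Pfad analog zur Konvention oben
step_file = "/app/uploads/{cad_file_id}/6205-2rsr-n-c3_p_cut.stp"
cache = stl_cache_path(step_file, "low")

if cache.exists():
    print(f"STL-Cache vorhanden, keine Neu-Konvertierung: {cache}")
else:
    print(f"Kein Cache — STEP→STL-Konvertierung (cadquery) nötig: {cache}")
```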
+ +## Material-Alias-System + +- Materialien werden per STEP-Part-Name auf Schaeffler-Bibliotheksmaterialien (`SCHAEFFLER_...`) gemappt +- Lookup-Reihenfolge: **Alias-Tabelle zuerst**, dann exakter `Material.name`-Match, dann Pass-through +- Alias-Seeding: Admin → "Seed Aliases" oder via `POST /api/materials/seed-aliases` +- Neue Aliases direkt in DB oder über Material-Detail-UI hinzufügen + +## Rollen + +| Rolle | Berechtigungen | +|---|---| +| `admin` | Vollzugriff, Admin-Panel, alle Einstellungen | +| `project_manager` | Aufträge, Analytics, Render-Trigger, STL-Download | +| `client` | Eigene Aufträge anlegen und einsehen | + +## Wichtige API-Endpoints + +- `POST /api/uploads/excel` — Excel-Auftragsliste importieren +- `POST /api/orders/{id}/submit` — Auftrag einreichen +- `POST /api/orders/{id}/dispatch-renders` — Alle Render-Zeilen dispatchen +- `GET /api/cad/{id}/thumbnail` — Thumbnail (kein Auth, UUID opaque) +- `POST /api/cad/{id}/generate-stl/{quality}` — STL-Generierung manuell triggern +- `POST /api/admin/settings/regenerate-thumbnails` — Alle Thumbnails neu rendern +- `POST /api/admin/settings/process-unprocessed` — Unverarbeitete STEP-Dateien queuen +- `POST /api/admin/settings/generate-missing-stls` — Fehlende STL-Caches erstellen +- `GET /api/worker/activity` — Letzte 30 STEP-Verarbeitungen (Status, Timing) + +## Bekannte Eigenheiten + +- **Backend-Port 8888** (nicht 8000 — war belegt) +- **Tailwind CSS-Variablen**: `bg-surface` etc. funktionieren nicht mit `/ opacity`-Syntax wenn CSS-Variable einen Hex-Wert enthält. Stattdessen `style={{ backgroundColor: 'var(--color-bg-surface)' }}` verwenden. +- **Blender mm→m**: STEP-Dateien sind in mm, Blender intern in m. Alle Import-Scripts skalieren mit `0.001`. +- **Flamenco GPU**: `deploy.resources.reservations.devices` in docker-compose für NVIDIA-Support. +- **`settings_persistence`**: Admin-Einstellungen werden via direktem SQL-UPDATE gespeichert (nicht ORM-Mutation), da SQLAlchemy bei key-value-Stores keine Mutation trackt. + +## Learnings-Pflicht +Nach jedem gelösten Problem oder jeder wichtigen Entscheidung: +→ Trag das Learning in LEARNINGS.md ein (Format: Datum | Kategorie | Problem → Lösung) +→ Commitiere LEARNINGS.md zusammen mit dem Fix: `docs: learning erfasst - [kurzbeschreibung]` diff --git a/Excel-Order-Lists/.~lock.CRB_Testscope_20260128.xlsx# b/Excel-Order-Lists/.~lock.CRB_Testscope_20260128.xlsx# new file mode 100644 index 0000000..569b815 --- /dev/null +++ b/Excel-Order-Lists/.~lock.CRB_Testscope_20260128.xlsx# @@ -0,0 +1 @@ +,hartmut,tuxedo-os,01.03.2026 21:23,file:///home/hartmut/.config/libreoffice/4; \ No newline at end of file diff --git a/Excel-Order-Lists/Anschlagplatten_Testscope_20260128.csv b/Excel-Order-Lists/Anschlagplatten_Testscope_20260128.csv new file mode 100644 index 0000000..9283099 --- /dev/null +++ b/Excel-Order-Lists/Anschlagplatten_Testscope_20260128.csv @@ -0,0 +1,6 @@ +"Die Spalten ab A bis ""Start"" stehen zur freien Verfügung, zum Beispiel für Bemerkungen oder zusätzliche Informationen. Die Überschriften sind frei wählbar. Es können zwischen A und ""Start"" neue Spalten eingefügt werden.",,,,,"START +diese Spalte bleibt leer","Bitte diese Überschriften nicht ändern und keine weiteren Spalten hinzufügen. Bitte alle Spalten ausfüllen. ""Gewähltes Produkt"" muss eindeutig sein.",,,,,"Bitte immer paarweise neue Spalten einfügen, Spalte 1 für den Produktteil (.prt) und Spalte 2 für das zugehörige Material. Die Überschriften der Spalten sind frei wählbar. 
Die Tabelle kann ab hier paarweise Endlos erweitert werden.",,,,, +Ebene1,Ebene2,Baureihe,PIM-ID (Klasse),Produkt (Baureihe),,Gewähltes Produkt,Name CAD-Modell,Gewünschte Bildnummer,Lagertyp,Medias-Rendering,Platte / Plate,Material Platte,Schraube / Screw,Material screw,Nut BZ,Material nut +Linearsysteme,Laufrollenführungen,Endplatten für Führungsschiene LFS,233092AM41,ANS.LFS52,,ANS.LFS52,ans_lfs52_p_thread.stp,ans_lfs52_p_online,linear,1,ANS_LFS52-0011.prt,Stahl brüniert,ISO4762-M6X12_010.prt,Stahl v2,, +Linearsysteme,Laufrollenführungen,Endplatten für Führungsschiene LFS,233092AM41,ANS.LFS52-FH,,ANS.LFS52-FH,ans_lfs52-fh_p_thread.stp,ans_lfs52-fh_p_online,linear,1,ANS_LFS52-FH-0011_P.prt,Stahl brüniert,,,, +Linearsysteme,Laufrollenführungen,Endplatten für Führungsschiene LFS,233092AM41,ANS.LFS86-C,,ANS.LFS86-C,ans_lfs86-c_p_thread.stp,ans_lfs86-c_p_online,linear,1,ANS_LFS86-C-0011.prt,Stahl brüniert,ISO4762-M6X25_002.prt,Stahl v2,, diff --git a/Excel-Order-Lists/Anschlagplatten_Testscope_20260128.xlsx b/Excel-Order-Lists/Anschlagplatten_Testscope_20260128.xlsx new file mode 100644 index 0000000..912170b Binary files /dev/null and b/Excel-Order-Lists/Anschlagplatten_Testscope_20260128.xlsx differ diff --git a/Excel-Order-Lists/CRB_Testscope_20260128.csv b/Excel-Order-Lists/CRB_Testscope_20260128.csv new file mode 100644 index 0000000..5e81d49 --- /dev/null +++ b/Excel-Order-Lists/CRB_Testscope_20260128.csv @@ -0,0 +1,8 @@ +"Die Spalten ab A bis ""Start"" stehen zur freien Verfügung, zum Beispiel für Bemerkungen oder zusätzliche Informationen. Die Überschriften sind frei wählbar. Es können zwischen A und ""Start"" neue Spalten eingefügt werden.",,,,,"START +diese Spalte bleibt leer","Bitte diese Überschriften nicht ändern und keine weiteren Spalten hinzufügen. Bitte alle Spalten ausfüllen. ""Gewähltes Produkt"" muss eindeutig sein.",,,,,"Bitte immer paarweise neue Spalten einfügen, Spalte 1 für den Produktteil (.prt) und Spalte 2 für das zugehörige Material. Die Überschriften der Spalten sind frei wählbar. 
Die Tabelle kann ab hier paarweise Endlos erweitert werden.",,,,,,,,,,,,,,,,,,,,,,,,,,,,, +Ebene1,Ebene2,Baureihe,PIM-ID (Klasse),Produkt (Baureihe),,Gewähltes Produkt,Name CAD-Modell,Gewünschte Bildnummer,Lagertyp,Medias-Rendering,Innenring Name: CAD,Innenring Material,Außenring Name: CAD,Außenring Material ,Rollen Name: CAD,Rollen Material,Käfig Name: CAD,Käfig Material,Käfig 2/ Deckel Name : CAD,Käfig Material,Dichtung Name: CAD,Dichtung Material,Dichtung 2 Name: CAD,Material Dichtung,Halteringe Name: CAD,Halteringe Material ,Scheibenpaket Name: CAD,Scheibenpaket Material,Sprengring Name: CAD,Sprengring Material ,Bordscheibe Name: CAD,Bordscheibe Material ,Sicherungsring Name: CAD,Sicherungsring Material ,Bolt,Material Bold,Spacer,Spacer: material,Anlaufschreibe,Anlaufscheibe Material +Wälz- und Gleitlager,Rollenlager,Axial-Zylinderrollenlager,2305110101,811..-L,,811..-L,81113-l_cut.stp,81113-l_online,axial,1,WS81113_GEN_1_AF1_1.prt,Stahl v2,GS81113_GEN_1_AF1_1.prt,Stahl v2,LRB7P5X7P5_001_1_1_1.prt,Stahl v2,K81113L-11_GEFR_1_AF0_1.prt,Aluminium,,,,,,,,,,,,,,,,,,,,,, +Wälz- und Gleitlager,Rollenlager,Axial-Zylinderrollenlager,2305110102,893..-M,,893..-M,89320-m-p5_cut.stp,89320-m-p5_online,axial,1,WS89320_GEN_1.prt,Stahl v2,GS89320_GEN_1.prt,Stahl v2,LRB13X13_001_1.prt,Stahl v2,K89320-31_MONTAGE_1.prt,Messing,K89320-M-11_MONTAGE_1.prt,Messing,,,,,,,,,,,,,,,ISO8750-2X10_003_1.prt,Stahl v2,,,, +Wälz- und Gleitlager,Rollenlager,Axial-Schrägrollenlager,230511AC32,AXS..,,AXS..,axs1220_cut.stp,axs1220_online,axial,1,F-235143-11_1_AF1_1.prt,Stahl v2,F-235143-11_1_AF0_1.prt,Stahl v2,NRB1P5X2P2_1_1.prt,Stahl v2,F-235143-31_1_AF0_1.prt,Generisch Plastik: schwarz,,,,,,,,,,,,,,,,,,,,,, +Wälz- und Gleitlager,Zubehör,Axiallagerscheiben,2305BF0201,GS811,,GS811,gs81152-01.stp,gs81152-01_online,axial,1,GS81152-01.prt,Stahl v2,,,,,,,,,,,,,,,,,,,,,,,,,,,, +Wälz- und Gleitlager,Rollenlager,Zylinderrollenlager,2305090102,HCNU10..-XL-M1,,HCNU10..-XL-M1,nu1040-xl-m1_00_04_cut.stp,nu1040-xl-m1_00_04_online,radial,1,IR_NU1040-3001_21_04_1.prt,Stahl v2,AU_NU1040-3101_11_04_1.prt,Stahl v2,ZRB26X26_DUM_1.prt,Keramik,RKKM_N1040-A-M3-1031-P_1.prt,Messing,RKDK_N1040-A-M3-1041-P_1.prt,Messing,,,,,,,,,,,,,,,,,,,, diff --git a/Excel-Order-Lists/CRB_Testscope_20260128.xlsx b/Excel-Order-Lists/CRB_Testscope_20260128.xlsx new file mode 100644 index 0000000..cf85720 Binary files /dev/null and b/Excel-Order-Lists/CRB_Testscope_20260128.xlsx differ diff --git a/Excel-Order-Lists/Gleitlager_Testscope_20260128.csv b/Excel-Order-Lists/Gleitlager_Testscope_20260128.csv new file mode 100644 index 0000000..74ebee0 --- /dev/null +++ b/Excel-Order-Lists/Gleitlager_Testscope_20260128.csv @@ -0,0 +1,7 @@ +"Die Spalten ab A bis ""Start"" stehen zur freien Verfügung, zum Beispiel für Bemerkungen oder zusätzliche Informationen. Die Überschriften sind frei wählbar. Es können zwischen A und ""Start"" neue Spalten eingefügt werden.",,,,,"START +diese Spalte bleibt leer","Bitte diese Überschriften nicht ändern und keine weiteren Spalten hinzufügen. Bitte alle Spalten ausfüllen. ""Gewähltes Produkt"" muss eindeutig sein.",,,,,"Bitte immer paarweise neue Spalten einfügen, Spalte 1 für den Produktteil (.prt) und Spalte 2 für das zugehörige Material. Die Überschriften der Spalten sind frei wählbar. 
Die Tabelle kann ab hier paarweise Endlos erweitert werden.",,,,,,,,,,,,,,,,,,,,,,,,, +Ebene1,Ebene2,Baureihe,PIM-ID (Klasse),Produkt (Baureihe),,Gewähltes Produkt,Name CAD-Modell,Gewünschte Bildnummer,Lagertyp,Medias-Rendering,Innenring / Inner ring,Material Innenring,Außenring / Outer ring,Material Außenring,Außenring 2,Material Außenring 2,Gehause / Housing,Material housing,Sliding Layer,Material Sliding layer,Ringe,Material ringe,Dichtungsträger / Sealing carrier,Material Dichtungsträger,Dichtlippe / Sealing lip,Material Dichtlippe,Nipple,Material Nippel,Schraube / Screw,Material Schraube,Distanz bucshe / Spacer washer.prt,Material Distanz bucshe,Snapring,Material Snapring,Split Stift,Material split stift +Wälz- und Gleitlager,Gleitlager,Gleitbuchsen,,EGB..-E40-B,,EGB..-E40-B,egb3520-e40_asm.stp,egb3520-e40_online,radial,1,,,EGB3520-E40.prt,Bronze v2,,,,,,,,,,,,,,,,,,,,,, +Wälz- und Gleitlager,Gleitlager,Gelenklager,,GE..-HF,,GE..-HF,ge360-hf_000_p_cut.stp,ge360-hf_000_p_online,radial,1,GE360-HF-0021-EIN_1_AF0_1.prt,Durotect CMT,GE360-HF-0011-EIN_HAELFTE_AF0_1.prt,Stahl,GE360-HF-0011-EIN_HAELFTE_AF1_1.prt,Stahl v2,,,GE360-HF-0051-EIN_1_AF0_1.prt,GFK + PTFE,GE360-HF-0051-EIN_1_AF4_1.prt,Stahl v2,,,,,GE360-HF-0051-EIN_1_AF7_1.prt,Stahl v2,ISO8734-6X40_005_1_1.prt,Stahl v2,,,,,, +Wälz- und Gleitlager,Gleitlager,Gelenklager,,GE..-HO,,GE..-HO,ge120-ho_cut.stp,ge120-ho_online,radial,1,GE120-HO-0021-EIN_1_AF0_1.prt,Durotect M,GE120-DO-0011-EIN_1_AF0_1.prt,Durotect M,,,,,,,,,,,,,,,,,,,,,, +Wälz- und Gleitlager,Gleitlager,Gelenklager,,GE..-LO,,GE..-LO,ge20_lo_cut.stp,ge20_lo_online,radial,1,GE20-LO-0021-EIN_1_AF0_1.prt,Durotect M,GE20-DO-0011-EIN_1_AF0_1.prt,Durotect M,,,,,,,,,,,,,,,,,,,,,, diff --git a/Excel-Order-Lists/Gleitlager_Testscope_20260128.xlsx b/Excel-Order-Lists/Gleitlager_Testscope_20260128.xlsx new file mode 100644 index 0000000..006080b Binary files /dev/null and b/Excel-Order-Lists/Gleitlager_Testscope_20260128.xlsx differ diff --git a/Excel-Order-Lists/Kugellager_Testscope_20260128.csv b/Excel-Order-Lists/Kugellager_Testscope_20260128.csv new file mode 100644 index 0000000..4235a2e --- /dev/null +++ b/Excel-Order-Lists/Kugellager_Testscope_20260128.csv @@ -0,0 +1,13 @@ +"Die Spalten ab A bis ""Start"" stehen zur freien Verfügung, zum Beispiel für Bemerkungen oder zusätzliche Informationen. Die Überschriften sind frei wählbar. Es können zwischen A und ""Start"" neue Spalten eingefügt werden.",,,,,"START +diese Spalte bleibt leer","Bitte diese Überschriften nicht ändern und keine weiteren Spalten hinzufügen. Bitte alle Spalten ausfüllen. ""Gewähltes Produkt"" muss eindeutig sein.",,,,,"Bitte immer paarweise neue Spalten einfügen, Spalte 1 für den Produktteil (.prt) und Spalte 2 für das zugehörige Material. Die Überschriften der Spalten sind frei wählbar. 
Die Tabelle kann ab hier paarweise Endlos erweitert werden.",,,,,,,,,,,,,,,,,,,,,,,,,,,,, +Ebene1,Ebene2,Baureihe,PIM-ID (Klasse),Produkt (Baureihe),,Gewähltes Produkt,Name CAD-Modell,Gewünschte Bildnummer,Lagertyp,Medias-Rendering,Innenring / Inner ring Name: CAD,Material Innenring,Innenring / Inner ring 2 Name: CAD,Material Innenring 2.,Außenring / Outer ring Name: CAD,Material Außenring,Wälzkörper / Rolling Element Name: CAD,Material: Wälzkörper / Rolling Element,Käfig / Cage Name: CAD,Material: Käfig / Cage,Käfig / Cage Name: CAD,Material: Käfig / Cage,Dichtungskern/Dichtungsträger Name: CAD,Material: Dichtungskern/Dichtungsträger,Dichtung Außen / Dichtlippe Name: CAD,Material: Dichtung Außen / Dichtlippe,Innen Buchse Name Name: CAD,Material: Innen Buchse,Sprengring Name: CAD,Material: Sprengring,Axial - WS Name: CAD,Material: Axial - WS,Axial - GS Name: CAD,Material: Axial GS,Schrauben Name: CAD,Material: Schrauben,Niet Name: CAD,Material: Niet,Unterlegscheibe,Material_Unterlegscheibe +Wälz- und Gleitlager,Kugellager,Axial-Rillenkugellager,2305100101,511,,51110-P6,51110-p6_a00_cut.stp,51110_online,axial,1,,,,,,,KUG7P144_1.prt,Stahl v2,AKUK_51110-A-JP-3001_H_1.prt,Stahlblech v2,,,,,,,,,,,WS_51110-3001_H_1.prt,Stahl v2,GS_51110-3001_H_1.prt,Stahl v2,,,,,, +Wälz- und Gleitlager,Kugellager,Axial-Rillenkugellager,2305100101,514..-MP,,51413-MP,51413-mp_p_cut.stp,51413-MP_online,axial,1,,,,,,,KUP26P988_1.prt,Stahl v2,AKUK51413-MP-0031_1.prt,Messing,,,,,,,,,,,WS51413-0021_1.prt,Stahl v2,GS51413-0011_1.prt,Stahl v2,,,,,, +Wälz- und Gleitlager,Kugellager,Axial-Rillenkugellager,2305100102,522..,,52211,52211_z00_cut.stp,52211_online,axial,1,,,,,,,KUG12P5_1.prt,Stahl v2,AKUK_51211-JP-3001_MONT_1.prt,Stahlblech v2,,,,,,,,,,,WS_52211-3001_H_1.prt,Stahl v2,GS_51211-3001_H_1.prt,Stahl v2,,,,,, +Wälz- und Gleitlager,Kugellager,Rillenkugellager,2305080101,6..-2RSR-N,,6205-2rsr-n-c3,6205-2rsr-n-c3_p_cut.stp,6205-2rsr-n-c3_online,radial,1,IR6205-1021_1_1.prt,Stahl v2,,,AU6205-2Z-0011_H1_1_1.prt,Stahl v2,KUG7P938_002_1_1.prt,Stahl v2,KUH6205-JN-0031_1_1.prt,Stahlblech v2,,,D_6205-RSR-1051-NBR-ARM__1_1.prt,Stahl v2,D_6205-RSR-1051-NBR_1_1.prt,Generisch Gummi: schwarz,,,,,,,,,,,NTS153301-3-1_41X3_20_H_1_1_2_3.prt,Stahl v2,, +Wälz- und Gleitlager,Kugellager,Rillenkugellager,2305080101,6..-2Z-N,,6205-2z-n-c3,6205-2z-n-c3_p_cut.stp,6205-2z-n-c3_online,radial,1,IR6205-2BRS-0021_H1_1.prt,Stahl v2,,,AU6205-2Z-0011_H1_1.prt,Stahl v2,KUG7P938_002_1.prt,Stahl v2,6205-BK_H2_1.prt,Stahlblech v2,,,DS6205-ZRG-A-0051_H_1.prt,Stahl v2,,,,,,,,,,,,,,,, +Wälz- und Gleitlager,Kugellager,Rillenkugellager,2305080101,6..-C-2HRS,,6205-C-2HRS,6205-c-2hrs_p_010_cut.stp,6205-C-2HRS_online,radial,1,IR_6205-B-2Z-8101_81_08_1_1.prt,Stahl v2,,,AU_6205-B-2Z-8101_81_08_1_1.prt,Stahl v2,KUG7_938-G5_A00_P_1.prt,Stahl v2,KUH6205-JN-8001_Z00_1 .prt,Stahlblech v2,KUH6205-JN-7031_1.prt,Stahlblech v2,D_6205-HRS-8001-NBR_ARM_B00_1.prt,Stahl v2,D_6205-HRS-8001-NBR_B00_1.prt,Generisch Gummi: schwarz,,,,,,,,,,,NTS153301-3-1_41X3_2_H-MONT_1.prt,Stahl v2,, +Wälz- und Gleitlager,Kugellager,Rillenkugellager,2305080101,6..-C-2HRS-N,,6205-c-2hrs-n-c3,6205-c-2hrs-n-c3_p_010_cut.stp,6205-c-2hrs-n-c3_online,radial,1,IR_6205-B-2Z-8101_81_08_1_1_2.prt,Stahl v2,,,AU_6205-B-2Z-N-8101_11_08_1_1_2.prt,Stahl v2,KUG7_938-G5_A00_P_1_1.prt,Stahl v2,KUH6205-JN-8001_Z00_1_1_2.prt,Stahlblech v2,KUH6205-JN-7031_1_1_2_3.prt,Stahlblech v2,D_6205-HRS-8001-NBR_ARM_B00_1_1.prt,Stahl v2,D_6205-HRS-8001-NBR_B00_1_1.prt,Generisch Gummi: 
schwarz,,,,,,,,,,,NTS153301-3-1_41X3_2_H-MONT_1_1.prt,Stahl v2,, +Wälz- und Gleitlager,Kugellager,Rillenkugellager,2305080101,6..-C-2HRS-NR,,6205-c-2hrs-nr-c3,6205-c-2hrs-nr-c3_00_10_cut.stp,6205-c-2hrs-nr-c3_online,radial,1,IR_6205-B-2Z-8101_81_010_1.prt,Stahl v2,,,AU_6205-B-2Z-N-8101_11_08_1.prt,Stahl v2,KUG7_938_KBE_1_1.prt,Stahl v2,KUH6205-JN-8001_Z00_1_1.prt,Stahlblech v2,KUH6205-JN-7031_1_1.prt,Stahlblech v2,D_6205-HRS-8001-NBR_ARM_A00_1 .prt,Stahl v2,D_6205-HRS-8001-NBR_A00_1.prt,Generisch Gummi: schwarz,,,SP52_KBE_1.prt,Stahl v2,,,,,,,,,, +Wälz- und Gleitlager,Kugellager,Rillenkugellager,2305080101,6..-C-2Z-N,,6204-c-2z-n,6204-c-2z-n_00_10_cut.stp,6204-c-2z-n_online,radial,1,IR_6204-B-2Z-8101_81_08_1.prt,Stahl v2,,,AU_6204-B-2Z-N-8101_11_10_1.prt,Stahl v2,KUG7_938_KBE_1_1_2.prt,Stahl v2,KUH6204-JN-8001_1.prt,Stahlblech v2,,,DS_6204-8001_X00_1.prt,Stahl v2,DS_6204-8001_X00_1.prt,Stahl v2,,,,,,,,,,,NTS153301-3-1_18X3_05_1.prt,Stahl v2,, +Wälz- und Gleitlager,Kugellager,Rillenkugellager,2305080101,6..-C-2Z-NR,,6202-c-2z-nr,6202-c-2z-nr-r18-25_00_10_cut.stp,6202-c-2z-nr_online,radial,1,IR_6202-B-2Z-8101_21_08_1.prt,Stahl v2,,,AU_6202-B-2Z-N-8101_11_10_1.prt,Stahl v2,KUG6_KBE_1.prt,Stahl v2,KUH6202-JN-8001_Z00_1.prt,Stahlblech v2,KUH6202-JN-8001_1.prt,Stahlblech v2,DS_6202-8001_Z00_1.prt,Stahl v2,DS_6202-8001_Z00_1.prt,Stahl v2,,,SP35_KBE_1.prt,Stahl v2,,,,,,,,,, diff --git a/Excel-Order-Lists/Kugellager_Testscope_20260128.xlsx b/Excel-Order-Lists/Kugellager_Testscope_20260128.xlsx new file mode 100644 index 0000000..1a34074 Binary files /dev/null and b/Excel-Order-Lists/Kugellager_Testscope_20260128.xlsx differ diff --git a/Excel-Order-Lists/Linear_schiene_Testscope_20260128.csv b/Excel-Order-Lists/Linear_schiene_Testscope_20260128.csv new file mode 100644 index 0000000..6b5a4ee --- /dev/null +++ b/Excel-Order-Lists/Linear_schiene_Testscope_20260128.csv @@ -0,0 +1,5 @@ +"Die Spalten ab A bis ""Start"" stehen zur freien Verfügung, zum Beispiel für Bemerkungen oder zusätzliche Informationen. Die Überschriften sind frei wählbar. Es können zwischen A und ""Start"" neue Spalten eingefügt werden.",,,,,"START +diese Spalte bleibt leer","Bitte diese Überschriften nicht ändern und keine weiteren Spalten hinzufügen. Bitte alle Spalten ausfüllen. ""Gewähltes Produkt"" muss eindeutig sein.",,,,,"Bitte immer paarweise neue Spalten einfügen, Spalte 1 für den Produktteil (.prt) und Spalte 2 für das zugehörige Material. Die Überschriften der Spalten sind frei wählbar. 
Die Tabelle kann ab hier paarweise Endlos erweitert werden.", +Ebene1,Ebene2,Baureihe,PIM-ID (Klasse),Produkt (Baureihe),,Gewähltes Produkt,Name CAD-Modell,Gewünschte Bildnummer,Lagertyp,Medias-Rendering,Rail,Material Rail +Linearsysteme,Profilschienenführungen,Sechsreihige Kugelumlaufeinheiten,233092AB06,TKSD..,,TKSD..,tksd25x1290-g2-45-45_p.stp,tksd25x1290-g2-45-45_online,linear,1,TKSD25X1290-G2-45-45.prt,Stahl v2 +Linearsysteme,Profilschienenführungen,Rollenumlaufeinheiten,233092AB21,TSX..-D,,TSX..-D,tsx25-d-g1-hj-gen.stp,tsx25-d-g1-hj_online,linear,1,TSX25D-G1-HJ-GEN.prt,Stahl v2 diff --git a/Excel-Order-Lists/Linear_schiene_Testscope_20260128.xlsx b/Excel-Order-Lists/Linear_schiene_Testscope_20260128.xlsx new file mode 100644 index 0000000..5b013f6 Binary files /dev/null and b/Excel-Order-Lists/Linear_schiene_Testscope_20260128.xlsx differ diff --git a/Excel-Order-Lists/SRB_TORB_Testscope_20260128.csv b/Excel-Order-Lists/SRB_TORB_Testscope_20260128.csv new file mode 100644 index 0000000..3bad253 --- /dev/null +++ b/Excel-Order-Lists/SRB_TORB_Testscope_20260128.csv @@ -0,0 +1,6 @@ +"Die Spalten ab A bis ""Start"" stehen zur freien Verfügung, zum Beispiel für Bemerkungen oder zusätzliche Informationen. Die Überschriften sind frei wählbar. Es können zwischen A und ""Start"" neue Spalten eingefügt werden.",,,,,"START +diese Spalte bleibt leer","Bitte diese Überschriften nicht ändern und keine weiteren Spalten hinzufügen. Bitte alle Spalten ausfüllen. ""Gewähltes Produkt"" muss eindeutig sein.",,,,,"Bitte immer paarweise neue Spalten einfügen, Spalte 1 für den Produktteil (.prt) und Spalte 2 für das zugehörige Material. Die Überschriften der Spalten sind frei wählbar. Die Tabelle kann ab hier paarweise Endlos erweitert werden.",,,,,,,,,,,,,,,,,,,,,,,,, +Ebene1,Ebene2,Baureihe,PIM-ID (Klasse),Produkt (Baureihe),,Gewähltes Produkt,Name CAD-Modell,Gewünschte Bildnummer,Lagertyp,Medias-Rendering,Innenring / Inner ring,Material Innenring,Außenring / Outer ring,Material Außenring,Käfig / Cage,Material Käfig,Käfig 2 / Cage 2,Material Käfig 2,Niet,Material Niet,Wälzkörper / Rolling element,Material Wälzkörper,Bordscheibe IR / Loose Lip IR,Material Bordscheibe IR,Führungsring AU / Loose Lip AU,Führungsring AU / Loose Lip AU,Dichtungsträger / Sealing carrier,Material Dichtungsträger,Dichtlippe / Sealing lip,Material Dichtlippe,Schraube / Screw,Material Schraube,Distanz bucshe / Spacer washer,Material Distanz bucshe,Snapring,Material Snapring +Wälz- und Gleitlager,Rollenlager,TORB,2305091390 ,C31..-XL-K-M,,C31..-XL-K-M,c3152-xl-k-m_cut.stp,c3152-xl-k-m_online,radial,1,IR_C3152-K-3001,Stahl v2,AU_C3152-3001,Stahl v2,RK_C3152-M-3001,Messing,,,,,TORO_C3152-3001,Stahl v2,,,,,,,,,,,,,, +Wälz- und Gleitlager,Rollenlager,Radial SRB,2305091102,241..-BE-XL-K30-H40,,241..-BE-XL-K30-H40,24148-be-xl-k30-h40_006_cut.stp,24148-be-xl-k30-h40_006_online,radial,1,IR_24148-BE1-K30-0021,Stahl v2,AU_24148-BE1-WA-H40-3001_006,Stahl v2,RK_24148-JPB-0031,Stahlblech v2,,,,,TORO_24148-BE1,Stahl v2,BO_24148_BE1,Stahl v2,,,,,,,,,,,, +Wälz- und Gleitlager,Rollenlager,Radial SRB,2305091103,WS222..-E1-XL-2RSR,,WS222..-E1-XL-2RSR,ws22215-e1-2rsr_006_cut.stp,ws22215-e1-2rsr_006_online,radial,1,IR_WS22215-E1-D-CA-WA-3001_006,Stahl v2,AU_WS22215-E1-D-WA-3001_006,Stahl v2,RK_22215-E-JPA-0031_006,Stahlblech v2,,,,,TORO_22215-E1A_SCT,Stahl v2,,,,,D_WS22215-E1-RSR-3001_A,Stahl v2,D_WS22215-E1-RSR-3001-RUBBER,Generisch Gummi: schwarz,,,,,, diff --git a/Excel-Order-Lists/SRB_TORB_Testscope_20260128.xlsx 
b/Excel-Order-Lists/SRB_TORB_Testscope_20260128.xlsx new file mode 100644 index 0000000..18a0148 Binary files /dev/null and b/Excel-Order-Lists/SRB_TORB_Testscope_20260128.xlsx differ diff --git a/Excel-Order-Lists/TRB_Testscope_20260128.csv b/Excel-Order-Lists/TRB_Testscope_20260128.csv new file mode 100644 index 0000000..4b337a5 --- /dev/null +++ b/Excel-Order-Lists/TRB_Testscope_20260128.csv @@ -0,0 +1,8 @@ +"Die Spalten ab A bis ""Start"" stehen zur freien Verfügung, zum Beispiel für Bemerkungen oder zusätzliche Informationen. Die Überschriften sind frei wählbar. Es können zwischen A und ""Start"" neue Spalten eingefügt werden.",,,,,"START +diese Spalte bleibt leer","Bitte diese Überschriften nicht ändern und keine weiteren Spalten hinzufügen. Bitte alle Spalten ausfüllen. ""Gewähltes Produkt"" muss eindeutig sein.",,,,,"Bitte immer paarweise neue Spalten einfügen, Spalte 1 für den Produktteil (.prt) und Spalte 2 für das zugehörige Material. Die Überschriften der Spalten sind frei wählbar. Die Tabelle kann ab hier paarweise Endlos erweitert werden.",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +Ebene1,Ebene2,Baureihe,PIM-ID (Klasse),Produkt (Baureihe),,Gewähltes Produkt,Name CAD-Modell,Gewünschte Bildnummer,Lagertyp,Medias-Rendering,Innenring / Inner ring Name: CAD,Material Innenring,Innenring / Inner ring 2 Name: CAD,Material Innenring 2,Innenring / Inner ring 3 Name: CAD,Material Innenring 3,Zwischenring 4,Material Zwischenring 4,Zwischenring 5,Material Zwischenring 5,Außenring / Outer ring Name: CAD,Material Außenring,Zwischenring 1,Material Zwischenring 1,Außenring / Outer ring 2 Name: CAD,Material Außenring 2,Zwischenring 2,Material Zwischenring 2,Außenring / Outer ring 3 Name: CAD,Material Außenring 3,Zwischenring 3,Material Zwischenring 3,Außenring / Outer ring 4 Name: CAD,Material Außenring 4,Käfig / Cage Name: CAD,Material: Käfig / Cage,Käfig / Cage 2 Name: CAD,Material: Käfig / Cage 2,Käfig / Cage 3 Name: CAD,Material: Käfig / Cage 3,Käfig / Cage 4 Name: CAD,Material: Käfig / Cage 4,Käfig / Cage 5 Name: CAD,Material: Käfig / Cage 5,Käfig / Cage 6 Name: CAD,Material: Käfig / Cage 6,Käfig / Cage 7 Name: CAD,Material: Käfig / Cage 7,Käfig / Cage 8 Name: CAD,Material: Käfig / Cage 8,Käfigbolzen Name: CAD,Material: Käfigbolzen,Käfigbolzen 2 Name: CAD,Material: Käfigbolzen ,Bolzensstift Name: CAD,Material Bolzenstift,Wälzkörper / Rolling Element Name: CAD,Material: Wälzkörper / Rolling Element,Wälzkörper / Rolling Element 2 Name: CAD,Material: Wälzkörper / Rolling Element 2,Dichtungskern/Dichtungsträger Name: CAD,Material: Dichtungskern/Dichtungsträger,Dichtung Außen / Dichtlippe Name: CAD,Material: Dichtung Außen / Dichtlippe,Sealing ring Name: CAD,Material Sealing ring,Sealing ring 2 Name: CAD,Material: Sealing ring 2,Feder / Spring Name: CAD,Material: Feder / Spring,Innen Buchse Name Name: CAD,Material: Innen Buchse,Sprengring Name: CAD,Material: Sprengring,Axial - WS Name: CAD,Material: Axial - WS,Axial - GS Name: CAD,Material: Axial GS,Schrauben Name: CAD,Material: Schrauben,Niet Name: CAD,Material: Niet,Unterlegscheibe,Material: Unterlegscheibe,Stoßdämpfer Stopfen: CAD,Material: Stoßdämpfer Stopfen,Stoßdämpfer Feder: CAD,Material: Stoßdämpfer Feder,Stoßdämpfer Unterlegscheibe: CAD,Material: Stoßdämpfer Unterlegscheibe +Wälz- und Gleitlager,Rollenlager,Kegelrollenlager,2305091051,320..-X-XL-DF,,320..-X-XL-DF,32016-x-e1-df_00_cut.stp,32016-x-e1-df_00_online,radial,1,IR_32016-X-E1-WA-3001_21_0-1360.PRT,Stahl 
v2,IR_32016-X-E1-WA-3001_21_-19867.PRT,Stahl v2,,,,,,,AU_32016-X-E1-WA-3001_11_0-2017.PRT,Stahl v2,ZWR_32016-X-1071__1_AF0_1.PRT,Stahl v2,AU_32016-X-E1-WA-3001_11_-20520.PRT,Stahl v2,,,,,,,,,KRK_32016-X-JPB-1031_31_1_AF1_1.PRT,Stahl v2,KRK_32016-X-JPB-1031_31_1_AF0_1.PRT,Stahl v2,,,,,,,,,,,,,,,,,,,KERO_32017-X-E1-QPA-WA_00_1_1.PRT,Stahl v2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +Wälz- und Gleitlager,Rollenlager,Kegelrollenlager,2305091021,F-802070.TR4-AM,,F-802070.TR4-AM,f-802070_tr4-am_04_cut.stp,f-802070_tr4-am_04_online,radial,1,F-802070-3001_IR_TR2_04_1_AF0_1.PRT,Stahl v2,F-802070-3001_IR_TR2_04_1_AF1_1.PRT,Stahl v2,,,F-802070-3001_IZWR_TR_04_1_AF0_.PRT,Stahl v2,,,F-802070-3101_AU_TR1_04_1_AF0_1.PRT,Stahl v2,F-802070-3001_AZWR_TR-S_04-6077.PRT,Stahl v2,F-802070-3101_AU_TR1_04_1_AF1_1.PRT,Stahl v2,F-802070-3001_AZWR_TR-AS_04_1_A.PRT,Stahl v2,F-802070-3101_AU_TR1_04_1_AF2_1.PRT,Stahl v2,F-802070-3001_AZWR_TR-S_0-12124.PRT,Stahl v2,F-802070-3101_AU_TR1_04_1_AF3_1.PRT,Stahl v2,F-802070-3101_KRSS_TR-F_1_AF0_1.PRT,Stahl v2,F-802070-3201_KRSS_TR-F_1_AF0_1.PRT,Stahl v2,F-802070-3201_KRSS_TR-F_1_AF3_1.PRT,Stahl v2,F-802070-3101_KRSS_TR-F_1_AF1_1.PRT,Stahl v2,F-802070-3101_KRSS_TR-F_1_AF2_1.PRT,Stahl v2,F-802070-3201_KRSS_TR-F_1_AF1_1.PRT,Stahl v2,F-802070-3201_KRSS_TR-F_1_AF2_1.PRT,Stahl v2,F-802070-3101_KRSS_TR-F_1_AF3_1.PRT,Stahl v2,F-802070-3101_BLZ_TR_1_1.PRT,Stahl v2,,,,,KERO_F-802070-M-QP_ISB_1_AF30_1.PRT,Stahl v2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +Wälz- und Gleitlager,Rollenlager,Kegelrollenlager,2305091011,F-803422.01.TR2,,F-803422.01.TR2,f-803422_01_tr2_04_cut.stp,f-803422_01_tr2_04_online,radial,1,F-803422-1021_IR_TR2_04_1_AF0_1.PRT,Stahl v2,,,,,,,,,F-803422-3001_AU_TR1_WA_04-6311.PRT,Stahl v2,,,F-803422-3001_AU_TR1_WA_04-7785.PRT,Stahl v2,,,,,,,,,Z-536748_01-3001_KRK_T_A1-19091.PRT,Stahl v2,Z-536748_01-3001_KRK_T_A1-30636.PRT,Stahl v2,,,,,,,,,,,,,,,,,,,KERO_Z-513683-QP_ISB_1_1.PRT,Stahl v2,,,,,,,,,,,,,,,,,,,,,,,,,,,F-809724-0041_BLZ_TR_1_1.PRT,Stahl v2,F-809724-0081_TR_DUMMY_1_1.PRT,Stahl v2,HK1214-2RS-FPM-11_ALT_1_1.PRT,Stahl v2 +Wälz- und Gleitlager,Rollenlager,Axial-Kegelrollenlager,2305110401,KT-SERIES(1),,KT-SERIES(1),kt1120_04_cut.stp,kt1120_04_online,axial,1,,,,,,,,,,,,,,,,,,,,,,,,,AKRSS_KT1120-F-3101_04_1_AF0_1.PRT,Stahl v2,AKRSS_KT1120-F-3201_04_1_AF0_1.PRT,Stahl v2,,,,,,,,,,,,,BLZ_KT1120-3001_04_1_1.PRT,Stahl v2,,,,,KERO_KT1120-M-QP-WEA_04_1_1.PRT,Stahl v2,,,,,,,,,,,,,,,,,WS_KT1120-3001_04_1_AF0_1.PRT,Stahl v2,WS_KT1120-3001_04_1_AF1_1.PRT,Stahl v2,,,,,,,,,,,, +Wälz- und Gleitlager,Rollenlager,Kegelrollenlager,2305BA0303,L320..-X-XL,,L320..-X-XL,l32016-x-xl_0d_00_cut.stp,l32016-x-xl_0d_00_online,radial,1,,,,,,,,,,,AU_L32016-X-XL-_0D_1_AF0_1.PRT,Stahl v2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, diff --git a/Excel-Order-Lists/TRB_Testscope_20260128.xlsx b/Excel-Order-Lists/TRB_Testscope_20260128.xlsx new file mode 100644 index 0000000..10a3313 Binary files /dev/null and b/Excel-Order-Lists/TRB_Testscope_20260128.xlsx differ diff --git a/Excel-Order-Lists/test_upload.xlsx b/Excel-Order-Lists/test_upload.xlsx new file mode 100644 index 0000000..6e1f311 Binary files /dev/null and b/Excel-Order-Lists/test_upload.xlsx differ diff --git a/LEARNINGS.md b/LEARNINGS.md new file mode 100644 index 0000000..e0d03f6 --- /dev/null +++ b/LEARNINGS.md @@ -0,0 +1,116 @@ +# Projekt-Learnings — Schaeffler Automat + +## Format +**Datum | Kategorie | Problem → Lösung** + +--- + +## Learnings + +### 2026-01-15 | Architektur | Backend-Port-Konflikt +**Problem:** FastAPI 
standardmäßig auf Port 8000 — war auf dem Entwicklungsrechner belegt +**Lösung:** Port 8888 in `docker-compose.yml` und Vite-Proxy konfiguriert +**Für künftige Projekte:** Port früh festlegen und in CLAUDE.md dokumentieren + +--- + +### 2026-01-20 | Datenbank | SQLAlchemy trackt key-value-Store-Mutations nicht +**Problem:** Admin-Einstellungen (`system_settings`) wurden via ORM gespeichert, Änderungen wurden nicht persistiert +**Ursache:** SQLAlchemy erkennt keine Mutation an einem bereits geladenen Objekt wenn nur ein Value-Feld geändert wird +**Lösung:** Direktes SQL `UPDATE` via `op.execute()` statt ORM-Mutation in `admin.py` +**Für künftige Projekte:** Key-Value-Stores immer mit direktem SQL oder `session.execute(update(...))` verwalten + +--- + +### 2026-01-25 | Render-Pipeline | Blender ignoriert STEP-Einheiten (mm vs. m) +**Problem:** STEP-Dateien sind in Millimetern, Blender arbeitet intern in Metern → 50mm-Lager erscheint 50 Meter breit, Kamera framt falsch +**Lösung:** `_scale_mm_to_m(parts)` Helper in allen 3 Render-Scripts: `part.scale = (0.001, 0.001, 0.001)`, Transform anwenden +**Betroffene Dateien:** `blender_render.py`, `still_render.py`, `turntable_render.py` +**Für künftige Projekte:** Einheiten-Konvertierung direkt nach STL-Import, vor jeder Kamera-Kalkulation + +--- + +### 2026-01-28 | Render-Pipeline | Blender 5.0 hat `scene.node_tree` entfernt +**Problem:** `_setup_bg_compositor()` rief `scene.node_tree` auf (in Blender 5.0 entfernt) → Python-Exception → Blender exitete mit Code 0 → Flamenco markierte Task fälschlicherweise als "completed" +**Lösung:** `_setup_bg_compositor()` aus Setup + Render-Script entfernt; bg_color-Kompositing in FFmpeg verschoben (`-f lavfi -i color=...` + overlay-Filter) +**Wichtig:** Immer `try: main() except SystemExit: raise except Exception: traceback; sys.exit(1)` in Blender-Scripts — sonst verschluckt Blender Python-Exceptions +**Für künftige Projekte:** Nach Blender-Major-Updates alle API-Calls prüfen; Exception-Guard ist Pflicht + +--- + +### 2026-02-05 | Material-System | Material-Alias-Lookup-Reihenfolge falsch +**Problem:** `Steel--Stahl` war sowohl ein kanonischer `Material.name` als auch ein Alias für `SCHAEFFLER_010101_Steel-Bare`. Der Lookup prüfte zuerst den exakten Namen und fand `Steel--Stahl` — Blender konnte diesen Namen aber nicht in der Library finden +**Lösung:** Lookup-Reihenfolge in `material_service.py` umgekehrt: **Aliases zuerst**, dann exakter Name, dann Pass-through +**Für künftige Projekte:** Alias-System immer so designen dass Aliases Vorrang haben; nie zwei Lookup-Pfade mit überlappenden Treffern + +--- + +### 2026-02-10 | Render-Pipeline | Blender-Template zerstört HDRI/World +**Problem:** Im Template-Modus (Mode B) wurden trotzdem Auto-Lights und eine neue World erstellt → überschrieb den HDRI aus dem .blend-Template → falsche Beleuchtung +**Ursache:** Auto-Licht- und World-Setup-Code lief bedingungslos, nicht nur im Mode A +**Lösung:** In Template-Mode werden Lights, World und Color-Management-Override vollständig übersprungen; nur die Kamera wird ggf. 
neu berechnet +**Betroffene Dateien:** `still_render.py`, `turntable_render.py`, `schaeffler-still.js`, `schaeffler-turntable.js` + +--- + +### 2026-02-15 | Celery | Blender-Queue-Flooding durch falsche Concurrency +**Problem:** Alle Celery-Tasks (schnelle Metadata-Extraktion + langsamer Blender-Render) liefen auf `step_processing` mit concurrency=8 → 8 Workers schickten gleichzeitig Requests an blender-renderer (der nur 1 gleichzeitig verarbeiten kann) → 7 davon liefen in 300s-Timeout → blockierte die gesamte Queue +**Lösung:** Pipeline aufgeteilt: +- `process_step_file` (step_processing, concurrency=8): nur schnelle Metadata-Extraktion (<2s), queut dann → +- `render_step_thumbnail` (thumbnail_rendering, concurrency=1): Blender-Call, niemals timeout +**Neuer Service:** `worker-thumbnail` in `docker-compose.yml` mit `--concurrency=1` +**Für künftige Projekte:** HTTP-Services die nur 1 Request gleichzeitig verarbeiten können IMMER auf einer separaten Queue mit concurrency=1 laufen lassen + +--- + +### 2026-02-18 | Frontend | Tailwind CSS-Variablen inkompatibel mit opacity-Syntax +**Problem:** `bg-surface/50` oder `bg-surface` (wenn `--color-bg-surface` ein Hex-Wert ist) generiert `rgb(var(--color-bg-surface) / 0.5)` — invalides CSS, weil `rgb()` keine Hex-Werte als Channel-Input akzeptiert → Hintergrund transparent +**Ursache:** Tailwind erwartet CSS-Variablen mit RGB-Channel-Format (`255 255 255`), nicht Hex (`#ffffff`) +**Lösung:** Inline-Style verwenden: `style={{ backgroundColor: 'var(--color-bg-surface)' }}` +**Für künftige Projekte:** Entweder CSS-Variablen im RGB-Channel-Format definieren, oder konsequent inline styles für variable Farben + +--- + +### 2026-02-20 | STL-Cache | Three.js-Renderer nutzte tempfile → kein Download möglich +**Problem:** Three.js-Renderer konvertierte STEP→STL in ein tempfile und löschte es anschließend → STL-Download-Endpoint fand keine Datei +**Ursache:** Three.js war ursprünglich nur für Thumbnails gebaut, STL-Cache-Konvention (`{stem}_low.stl` neben STEP-Datei) wurde nicht implementiert +**Lösung:** Persistent cache path: `step_path.parent / f"{step_path.stem}_low.stl"`, cache-hit-check vor Konvertierung, kein `unlink()` mehr +**Für künftige Projekte:** STL-Cache-Konvention (`{step_stem}_{quality}.stl` neben STEP-Datei) von Anfang an in allen Renderer-Services einhalten + +--- + +### 2026-02-20 | STL-Cache | blender-renderer fehlte /convert-stl Endpoint +**Problem:** Für Produkte die mit Blender gerendert wurden war kein STL-Cache vorhanden wenn nicht explizit gerendert wurde (blender-renderer renderte + konvertierte in einem Schritt, aber STL wurde nicht persistiert) +**Lösung:** Neuer `/convert-stl` Endpoint in `blender-renderer/app.py`: konvertiert STEP→STL ohne Render, persistiert Cache. Neuer Celery-Task `generate_stl_cache` auf `thumbnail_rendering`-Queue. Admin-Funktion "Generate Missing STLs" zum Batch-Nachfüllen + +--- + +### 2026-02-22 | Material-System | Fehlender Alias blockiert Material-Replacement +**Problem:** Produkt F-803422.01.TR2 (SA-2026-00080) renderte ohne Materialersetzung. 
Material "Stahl v2" war korrekt in der UI gespeichert, aber weder in `materials` noch in `material_aliases` vorhanden +**Ursache:** Alias-Seeding aus Excel deckte nicht alle Varianten der deutschen Materialbezeichnungen ab +**Lösung:** Alias direkt in DB eingetragen: `"Stahl v2"` → `SCHAEFFLER_010101_Steel-Bare` +**Für künftige Projekte:** Bei Render ohne Materialersetzung immer zuerst `resolve_material_map()` debuggen und Alias-Tabelle prüfen; Alias-Seeding regelmäßig mit neuen Excel-Varianten erweitern + +--- + +### 2026-02-25 | Frontend | canDispatch-Bedingung zu restriktiv +**Problem:** "Dispatch Renders"-Button war nicht sichtbar obwohl der Auftrag offene Render-Zeilen hatte +**Ursache:** `canDispatch` enthielt `&& hasRetryable` — Button erschien nur wenn pending/failed/cancelled-Zeilen vorhanden waren, nicht wenn alle Zeilen "pending" im Erstauftrag +**Lösung:** `hasRetryable`-Bedingung entfernt; Button ist immer sichtbar wenn Auftrag im richtigen Status und User privilegiert ist +**Für künftige Projekte:** Aktions-Buttons nicht zu stark von abgeleiteten Zuständen abhängig machen; lieber im Backend validieren + +--- + +### 2026-02-28 | Frontend | MaterialInput-Dropdown ohne Hintergrund +**Problem:** Dropdown der Material-Suchfeld-Komponente erschien transparent — Text über dem Hintergrund kaum lesbar +**Ursache:** `bg-surface` Tailwind-Klasse + CSS-Variable mit Hex-Wert (siehe Learning 2026-02-18) +**Lösung:** `style={{ backgroundColor: 'var(--color-bg-surface)' }}` für Dropdown-Container, Group-Header und Sticky-Button +**Datei:** `frontend/src/components/shared/MaterialInput.tsx` + +--- + +## Offene Fragen +- [ ] Azure AI Credentials für Phase 4 (Bildvalidierung) noch nicht konfiguriert +- [ ] Flamenco GPU-Support nur mit NVIDIA — AMD/CPU-Fallback fehlt +- [ ] Material-Alias-Seeding deckt noch nicht alle deutschen Materialbezeichnungs-Varianten ab +- [ ] Turntable-Animation: bg_color via FFmpeg-Overlay — Qualität bei Transparenz-Edges prüfen diff --git a/MaterialNamingSchema/generate_blend.py b/MaterialNamingSchema/generate_blend.py new file mode 100644 index 0000000..d7c519b --- /dev/null +++ b/MaterialNamingSchema/generate_blend.py @@ -0,0 +1,104 @@ +"""Generate material_library.blend with all 35 Schaeffler standard materials. 
+ +Run with: blender --background --python generate_blend.py +""" +import bpy +import os + +# Placeholder colors per material — tuned to approximate real-world appearance +# Format: (R, G, B, A) linear color, metallic, roughness +MATERIALS = [ + # --- 01 Metals --- + ("SCHAEFFLER_010101_Steel-Bare", (0.55, 0.56, 0.58, 1.0), 1.0, 0.35), + ("SCHAEFFLER_010102_Steel-Burnished", (0.15, 0.12, 0.10, 1.0), 1.0, 0.25), + ("SCHAEFFLER_010103_Steel-Galvanized", (0.65, 0.67, 0.70, 1.0), 1.0, 0.40), + ("SCHAEFFLER_010104_Steel-Casted", (0.35, 0.33, 0.31, 1.0), 1.0, 0.60), + ("SCHAEFFLER_010105_Steel-Plate", (0.50, 0.51, 0.53, 1.0), 1.0, 0.30), + ("SCHAEFFLER_010201_Niro", (0.70, 0.72, 0.74, 1.0), 1.0, 0.20), + ("SCHAEFFLER_010301_Tin", (0.75, 0.75, 0.73, 1.0), 1.0, 0.30), + ("SCHAEFFLER_010401_Aluminium", (0.80, 0.80, 0.82, 1.0), 1.0, 0.25), + ("SCHAEFFLER_010501_Brass", (0.70, 0.55, 0.20, 1.0), 1.0, 0.25), + ("SCHAEFFLER_010601_Bronze", (0.55, 0.35, 0.15, 1.0), 1.0, 0.30), + # --- 02 Coatings --- + ("SCHAEFFLER_020101_Durotect-Blue", (0.15, 0.25, 0.50, 1.0), 0.8, 0.20), + ("SCHAEFFLER_020102_Durotect-Black", (0.05, 0.05, 0.06, 1.0), 0.8, 0.15), + ("SCHAEFFLER_020201_Coat-Black", (0.03, 0.03, 0.03, 1.0), 0.6, 0.10), + # --- 03 Non-metals --- + ("SCHAEFFLER_030101_Elastomer-Brown", (0.30, 0.18, 0.08, 1.0), 0.0, 0.55), + ("SCHAEFFLER_030102_Elastomer-Green", (0.10, 0.30, 0.10, 1.0), 0.0, 0.55), + ("SCHAEFFLER_030103_Elastomer-Black", (0.04, 0.04, 0.04, 1.0), 0.0, 0.55), + ("SCHAEFFLER_030201_Plastic-Brown", (0.35, 0.22, 0.10, 1.0), 0.0, 0.40), + ("SCHAEFFLER_030202_Plastic-Green", (0.08, 0.35, 0.12, 1.0), 0.0, 0.40), + ("SCHAEFFLER_030203_Plastic-Black", (0.02, 0.02, 0.02, 1.0), 0.0, 0.40), + ("SCHAEFFLER_030204_Plastic-Blue", (0.10, 0.20, 0.50, 1.0), 0.0, 0.40), + ("SCHAEFFLER_030205_Plastic-White", (0.85, 0.85, 0.85, 1.0), 0.0, 0.40), + ("SCHAEFFLER_030301_Plastic-Clear", (0.90, 0.90, 0.92, 1.0), 0.0, 0.10), # + transmission + ("SCHAEFFLER_030302_Plastic-Translucent-White", (0.80, 0.80, 0.82, 1.0), 0.0, 0.20), # + transmission + ("SCHAEFFLER_030401_TPU-Blue", (0.12, 0.25, 0.55, 1.0), 0.0, 0.45), + ("SCHAEFFLER_030501_Ceramic-Black", (0.03, 0.03, 0.04, 1.0), 0.0, 0.15), + # --- 04 Compounds --- + ("SCHAEFFLER_040101_E40", (0.25, 0.22, 0.18, 1.0), 0.0, 0.50), + ("SCHAEFFLER_040102_E50", (0.28, 0.25, 0.20, 1.0), 0.0, 0.50), + ("SCHAEFFLER_040201_Elgoglide", (0.20, 0.22, 0.25, 1.0), 0.0, 0.35), + ("SCHAEFFLER_040202_Elgotex", (0.05, 0.05, 0.06, 1.0), 0.0, 0.35), + ("SCHAEFFLER_040301_PTFE-Niro-Compound", (0.60, 0.62, 0.65, 1.0), 0.3, 0.25), + ("SCHAEFFLER_040302_PTFE-Foil", (0.85, 0.85, 0.82, 1.0), 0.0, 0.15), + ("SCHAEFFLER_040303_PTFE-Compound-Black", (0.04, 0.04, 0.05, 1.0), 0.0, 0.30), + ("SCHAEFFLER_040304_PTFE-Compound-Orange", (0.70, 0.35, 0.08, 1.0), 0.0, 0.30), + ("SCHAEFFLER_040305_GFK-PTFE-Compound", (0.08, 0.10, 0.08, 1.0), 0.0, 0.45), + # --- 05 Misc --- + ("SCHAEFFLER_059999_FailedMaterial", (1.00, 0.00, 0.50, 1.0), 0.0, 0.50), +] + +# Translucent materials that need transmission +TRANSLUCENT = { + "SCHAEFFLER_030301_Plastic-Clear": 0.9, + "SCHAEFFLER_030302_Plastic-Translucent-White": 0.5, +} + + +def main(): + # Start from factory defaults + bpy.ops.wm.read_factory_settings(use_empty=True) + + for name, color, metallic, roughness in MATERIALS: + mat = bpy.data.materials.new(name=name) + mat.use_nodes = True + nodes = mat.node_tree.nodes + links = mat.node_tree.links + + # Clear default nodes + for n in nodes: + nodes.remove(n) + + # Create Principled BSDF + Material Output + bsdf = 
nodes.new("ShaderNodeBsdfPrincipled") + bsdf.location = (0, 0) + output = nodes.new("ShaderNodeOutputMaterial") + output.location = (300, 0) + links.new(bsdf.outputs["BSDF"], output.inputs["Surface"]) + + # Set properties + bsdf.inputs["Base Color"].default_value = color + bsdf.inputs["Metallic"].default_value = metallic + bsdf.inputs["Roughness"].default_value = roughness + + # Transmission for translucent materials + if name in TRANSLUCENT: + bsdf.inputs["Transmission Weight"].default_value = TRANSLUCENT[name] + bsdf.inputs["IOR"].default_value = 1.45 + + # Also set the viewport display color for solid-view preview + mat.diffuse_color = color + + # Fake user so Blender keeps the material on save + mat.use_fake_user = True + + # Save + out_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "material_library.blend") + bpy.ops.wm.save_mainfile(filepath=out_path) + print(f"\nSaved {len(MATERIALS)} materials to: {out_path}") + + +if __name__ == "__main__": + main() diff --git a/MaterialNamingSchema/indoor-Studio_hdr.blend b/MaterialNamingSchema/indoor-Studio_hdr.blend new file mode 100644 index 0000000..86e1d01 Binary files /dev/null and b/MaterialNamingSchema/indoor-Studio_hdr.blend differ diff --git a/MaterialNamingSchema/indoor-Studio_hdr_Shadowcatcher.blend b/MaterialNamingSchema/indoor-Studio_hdr_Shadowcatcher.blend new file mode 100644 index 0000000..e2be451 Binary files /dev/null and b/MaterialNamingSchema/indoor-Studio_hdr_Shadowcatcher.blend differ diff --git a/MaterialNamingSchema/indoor-Studio_hdr_Shadowcatcher.blend1 b/MaterialNamingSchema/indoor-Studio_hdr_Shadowcatcher.blend1 new file mode 100644 index 0000000..26983af Binary files /dev/null and b/MaterialNamingSchema/indoor-Studio_hdr_Shadowcatcher.blend1 differ diff --git a/MaterialNamingSchema/material_library.blend b/MaterialNamingSchema/material_library.blend new file mode 100644 index 0000000..e9eebcb Binary files /dev/null and b/MaterialNamingSchema/material_library.blend differ diff --git a/MaterialNamingSchema/material_library.blend1 b/MaterialNamingSchema/material_library.blend1 new file mode 100644 index 0000000..fd49ae5 Binary files /dev/null and b/MaterialNamingSchema/material_library.blend1 differ diff --git a/MaterialNamingSchema/naming_scheme.xlsx b/MaterialNamingSchema/naming_scheme.xlsx new file mode 100644 index 0000000..de301f2 Binary files /dev/null and b/MaterialNamingSchema/naming_scheme.xlsx differ diff --git a/backend/Dockerfile b/backend/Dockerfile new file mode 100644 index 0000000..a86b49b --- /dev/null +++ b/backend/Dockerfile @@ -0,0 +1,27 @@ +FROM python:3.11-slim + +WORKDIR /app + +# System dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + libpq-dev \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Docker SDK (for dynamic flamenco-worker scaling via /var/run/docker.sock) +RUN pip install --no-cache-dir "docker>=6.1.0" + +# Install Python dependencies +COPY pyproject.toml . +RUN pip install --no-cache-dir -e . + +# Copy app code +COPY . . + +# Create upload dirs +RUN mkdir -p uploads/step_files uploads/excel_files uploads/thumbnails + +COPY start.sh /start.sh +RUN chmod +x /start.sh + +EXPOSE 8000 diff --git a/backend/alembic.ini b/backend/alembic.ini new file mode 100644 index 0000000..f0c3c38 --- /dev/null +++ b/backend/alembic.ini @@ -0,0 +1,41 @@ +[alembic] +script_location = alembic +prepend_sys_path = . 
+version_path_separator = os +sqlalchemy.url = postgresql://schaeffler:schaeffler@localhost:5432/schaeffler + +[post_write_hooks] + +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/backend/alembic/__pycache__/env.cpython-311.pyc b/backend/alembic/__pycache__/env.cpython-311.pyc new file mode 100644 index 0000000..2568d43 Binary files /dev/null and b/backend/alembic/__pycache__/env.cpython-311.pyc differ diff --git a/backend/alembic/env.py b/backend/alembic/env.py new file mode 100644 index 0000000..57c936c --- /dev/null +++ b/backend/alembic/env.py @@ -0,0 +1,62 @@ +import asyncio +from logging.config import fileConfig +from sqlalchemy import pool +from sqlalchemy.engine import Connection +from sqlalchemy.ext.asyncio import async_engine_from_config +from alembic import context +import os +import sys + +sys.path.insert(0, os.path.dirname(os.path.dirname(__file__))) + +from app.database import Base +from app.config import settings +# Import all models to register them with Base +import app.models # noqa: F401 + +config = context.config +config.set_main_option("sqlalchemy.url", settings.database_url) + +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +target_metadata = Base.metadata + + +def run_migrations_offline() -> None: + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + with context.begin_transaction(): + context.run_migrations() + + +def do_run_migrations(connection: Connection) -> None: + context.configure(connection=connection, target_metadata=target_metadata) + with context.begin_transaction(): + context.run_migrations() + + +async def run_async_migrations() -> None: + connectable = async_engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + async with connectable.connect() as connection: + await connection.run_sync(do_run_migrations) + await connectable.dispose() + + +def run_migrations_online() -> None: + asyncio.run(run_async_migrations()) + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/backend/alembic/script.py.mako b/backend/alembic/script.py.mako new file mode 100644 index 0000000..fbc4b07 --- /dev/null +++ b/backend/alembic/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/backend/alembic/versions/001_initial_schema.py b/backend/alembic/versions/001_initial_schema.py new file mode 100644 index 0000000..53b499a --- /dev/null +++ b/backend/alembic/versions/001_initial_schema.py @@ -0,0 +1,153 @@ +"""initial schema + +Revision ID: 001 +Revises: +Create Date: 2026-03-01 + +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision: str = "001" +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # users + op.create_table( + "users", + sa.Column("id", postgresql.UUID(as_uuid=True), primary_key=True), + sa.Column("email", sa.String(255), nullable=False, unique=True), + sa.Column("password_hash", sa.String(255), nullable=False), + sa.Column("full_name", sa.String(255), nullable=False), + sa.Column("role", sa.Enum("admin", "client", name="userrole"), nullable=False, server_default="client"), + sa.Column("is_active", sa.Boolean, nullable=False, server_default="true"), + sa.Column("created_at", sa.DateTime, nullable=False, server_default=sa.func.now()), + sa.Column("updated_at", sa.DateTime, nullable=False, server_default=sa.func.now()), + ) + op.create_index("ix_users_email", "users", ["email"]) + + # templates + op.create_table( + "templates", + sa.Column("id", postgresql.UUID(as_uuid=True), primary_key=True), + sa.Column("name", sa.String(255), nullable=False), + sa.Column("category_key", sa.String(100), nullable=False, unique=True), + sa.Column("standard_fields", postgresql.JSONB, nullable=False, server_default="{}"), + sa.Column("component_schema", postgresql.JSONB, nullable=False, server_default="{}"), + sa.Column("description", sa.Text, nullable=True), + sa.Column("is_active", sa.Boolean, nullable=False, server_default="true"), + sa.Column("created_at", sa.DateTime, nullable=False, server_default=sa.func.now()), + sa.Column("updated_at", sa.DateTime, nullable=False, server_default=sa.func.now()), + ) + op.create_index("ix_templates_category_key", "templates", ["category_key"]) + + # cad_files + op.create_table( + "cad_files", + sa.Column("id", postgresql.UUID(as_uuid=True), primary_key=True), + sa.Column("original_name", sa.String(500), nullable=False), + sa.Column("stored_path", sa.String(1000), nullable=False), + sa.Column("file_hash", sa.String(64), nullable=False, unique=True), + sa.Column("file_size", sa.BigInteger, nullable=True), + sa.Column("parsed_objects", postgresql.JSONB, nullable=True), + sa.Column("thumbnail_path", sa.String(1000), nullable=True), + sa.Column("gltf_path", sa.String(1000), nullable=True), + sa.Column( + "processing_status", + sa.Enum("pending", "processing", "completed", "failed", name="processingstatus"), + nullable=False, + server_default="pending", + ), + sa.Column("error_message", sa.String(2000), nullable=True), + sa.Column("created_at", sa.DateTime, nullable=False, server_default=sa.func.now()), + sa.Column("updated_at", sa.DateTime, nullable=False, server_default=sa.func.now()), + ) + op.create_index("ix_cad_files_file_hash", "cad_files", 
["file_hash"]) + + # orders + op.create_table( + "orders", + sa.Column("id", postgresql.UUID(as_uuid=True), primary_key=True), + sa.Column("order_number", sa.String(50), nullable=False, unique=True), + sa.Column("template_id", postgresql.UUID(as_uuid=True), sa.ForeignKey("templates.id"), nullable=True), + sa.Column( + "status", + sa.Enum("draft", "submitted", "processing", "completed", "rejected", name="orderstatus"), + nullable=False, + server_default="draft", + ), + sa.Column("created_by", postgresql.UUID(as_uuid=True), sa.ForeignKey("users.id"), nullable=False), + sa.Column("source_excel", sa.String(1000), nullable=True), + sa.Column("notes", sa.Text, nullable=True), + sa.Column("created_at", sa.DateTime, nullable=False, server_default=sa.func.now()), + sa.Column("updated_at", sa.DateTime, nullable=False, server_default=sa.func.now()), + ) + op.create_index("ix_orders_order_number", "orders", ["order_number"]) + + # order_items + op.create_table( + "order_items", + sa.Column("id", postgresql.UUID(as_uuid=True), primary_key=True), + sa.Column("order_id", postgresql.UUID(as_uuid=True), sa.ForeignKey("orders.id"), nullable=False), + sa.Column("row_index", sa.Integer, nullable=False), + sa.Column("ebene1", sa.String(500), nullable=True), + sa.Column("ebene2", sa.String(500), nullable=True), + sa.Column("baureihe", sa.String(500), nullable=True), + sa.Column("pim_id", sa.String(500), nullable=True), + sa.Column("produkt_baureihe", sa.String(500), nullable=True), + sa.Column("gewaehltes_produkt", sa.String(500), nullable=True), + sa.Column("name_cad_modell", sa.String(500), nullable=True), + sa.Column("gewuenschte_bildnummer", sa.String(500), nullable=True), + sa.Column("lagertyp", sa.String(500), nullable=True), + sa.Column("medias_rendering", sa.Boolean, nullable=True), + sa.Column("components", postgresql.JSONB, nullable=False, server_default="[]"), + sa.Column("cad_file_id", postgresql.UUID(as_uuid=True), sa.ForeignKey("cad_files.id"), nullable=True), + sa.Column("thumbnail_path", sa.String(1000), nullable=True), + sa.Column( + "ai_validation_status", + sa.Enum("not_started", "pending", "completed", "failed", name="aivalidationstatus"), + nullable=False, + server_default="not_started", + ), + sa.Column("ai_validation_result", postgresql.JSONB, nullable=True), + sa.Column( + "item_status", + sa.Enum("pending", "approved", "rejected", name="itemstatus"), + nullable=False, + server_default="pending", + ), + sa.Column("notes", sa.Text, nullable=True), + sa.Column("created_at", sa.DateTime, nullable=False, server_default=sa.func.now()), + sa.Column("updated_at", sa.DateTime, nullable=False, server_default=sa.func.now()), + ) + + # audit_log + op.create_table( + "audit_log", + sa.Column("id", postgresql.UUID(as_uuid=True), primary_key=True), + sa.Column("user_id", postgresql.UUID(as_uuid=True), sa.ForeignKey("users.id"), nullable=True), + sa.Column("action", sa.String(100), nullable=False), + sa.Column("entity_type", sa.String(100), nullable=True), + sa.Column("entity_id", sa.String(255), nullable=True), + sa.Column("details", postgresql.JSONB, nullable=True), + sa.Column("timestamp", sa.DateTime, nullable=False, server_default=sa.func.now()), + ) + + +def downgrade() -> None: + op.drop_table("audit_log") + op.drop_table("order_items") + op.drop_table("orders") + op.drop_table("cad_files") + op.drop_table("templates") + op.drop_table("users") + op.execute("DROP TYPE IF EXISTS userrole") + op.execute("DROP TYPE IF EXISTS orderstatus") + op.execute("DROP TYPE IF EXISTS processingstatus") + 
op.execute("DROP TYPE IF EXISTS aivalidationstatus") + op.execute("DROP TYPE IF EXISTS itemstatus") diff --git a/backend/alembic/versions/002_system_settings.py b/backend/alembic/versions/002_system_settings.py new file mode 100644 index 0000000..45c8018 --- /dev/null +++ b/backend/alembic/versions/002_system_settings.py @@ -0,0 +1,33 @@ +"""system settings table + +Revision ID: 002 +Revises: 001 +Create Date: 2026-03-01 + +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa + +revision: str = "002" +down_revision: Union[str, None] = "001" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.create_table( + "system_settings", + sa.Column("key", sa.String(100), primary_key=True), + sa.Column("value", sa.Text(), nullable=True), + sa.Column("updated_at", sa.DateTime(), nullable=True, server_default=sa.func.now()), + ) + # Insert defaults + op.execute( + "INSERT INTO system_settings (key, value, updated_at) VALUES " + "('thumbnail_renderer', 'pillow', NOW())" + ) + + +def downgrade() -> None: + op.drop_table("system_settings") diff --git a/backend/alembic/versions/003_blender_settings.py b/backend/alembic/versions/003_blender_settings.py new file mode 100644 index 0000000..389010c --- /dev/null +++ b/backend/alembic/versions/003_blender_settings.py @@ -0,0 +1,31 @@ +"""blender render settings + +Revision ID: 003 +Revises: 002 +Create Date: 2026-03-01 + +""" +from typing import Sequence, Union +from alembic import op + +revision: str = "003" +down_revision: Union[str, None] = "002" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.execute( + "INSERT INTO system_settings (key, value, updated_at) VALUES " + "('blender_engine', 'cycles', NOW())," + "('blender_cycles_samples', '256', NOW())," + "('blender_eevee_samples', '64', NOW()) " + "ON CONFLICT (key) DO NOTHING" + ) + + +def downgrade() -> None: + op.execute( + "DELETE FROM system_settings WHERE key IN " + "('blender_engine', 'blender_cycles_samples', 'blender_eevee_samples')" + ) diff --git a/backend/alembic/versions/004_threejs_settings.py b/backend/alembic/versions/004_threejs_settings.py new file mode 100644 index 0000000..c2335c9 --- /dev/null +++ b/backend/alembic/versions/004_threejs_settings.py @@ -0,0 +1,28 @@ +"""threejs render size setting + +Revision ID: 004 +Revises: 003 +Create Date: 2026-03-01 + +""" +from typing import Sequence, Union +from alembic import op + +revision: str = "004" +down_revision: Union[str, None] = "003" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.execute( + "INSERT INTO system_settings (key, value, updated_at) VALUES " + "('threejs_render_size', '512', NOW()) " + "ON CONFLICT (key) DO NOTHING" + ) + + +def downgrade() -> None: + op.execute( + "DELETE FROM system_settings WHERE key = 'threejs_render_size'" + ) diff --git a/backend/alembic/versions/005_threejs_default_1k.py b/backend/alembic/versions/005_threejs_default_1k.py new file mode 100644 index 0000000..3500b47 --- /dev/null +++ b/backend/alembic/versions/005_threejs_default_1k.py @@ -0,0 +1,28 @@ +"""set threejs_render_size default to 1024 + +Revision ID: 005 +Revises: 004 +Create Date: 2026-03-01 + +""" +from typing import Sequence, Union +from alembic import op + +revision: str = "005" +down_revision: Union[str, None] = "004" +branch_labels: 
Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.execute( + "UPDATE system_settings SET value = '1024', updated_at = NOW() " + "WHERE key = 'threejs_render_size' AND value = '512'" + ) + + +def downgrade() -> None: + op.execute( + "UPDATE system_settings SET value = '512', updated_at = NOW() " + "WHERE key = 'threejs_render_size' AND value = '1024'" + ) diff --git a/backend/alembic/versions/006_thumbnail_format.py b/backend/alembic/versions/006_thumbnail_format.py new file mode 100644 index 0000000..d274b3b --- /dev/null +++ b/backend/alembic/versions/006_thumbnail_format.py @@ -0,0 +1,28 @@ +"""thumbnail format setting (jpg | png) + +Revision ID: 006 +Revises: 005 +Create Date: 2026-03-01 + +""" +from typing import Sequence, Union +from alembic import op + +revision: str = "006" +down_revision: Union[str, None] = "005" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.execute( + "INSERT INTO system_settings (key, value, updated_at) VALUES " + "('thumbnail_format', 'jpg', NOW()) " + "ON CONFLICT (key) DO NOTHING" + ) + + +def downgrade() -> None: + op.execute( + "DELETE FROM system_settings WHERE key = 'thumbnail_format'" + ) diff --git a/backend/alembic/versions/007_materials.py b/backend/alembic/versions/007_materials.py new file mode 100644 index 0000000..78fad2b --- /dev/null +++ b/backend/alembic/versions/007_materials.py @@ -0,0 +1,33 @@ +"""materials table and cad_part_materials column + +Revision ID: 007 +Revises: 006 +Create Date: 2026-03-01 + +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects.postgresql import UUID, JSONB + +revision: str = "007" +down_revision: Union[str, None] = "006" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.create_table( + "materials", + sa.Column("id", UUID(as_uuid=True), primary_key=True, server_default=sa.text("gen_random_uuid()")), + sa.Column("name", sa.String(200), nullable=False, unique=True), + sa.Column("description", sa.Text, nullable=True), + sa.Column("created_at", sa.DateTime, server_default=sa.text("NOW()"), nullable=False), + sa.Column("updated_at", sa.DateTime, server_default=sa.text("NOW()"), nullable=False), + ) + op.execute("ALTER TABLE order_items ADD COLUMN IF NOT EXISTS cad_part_materials JSONB NOT NULL DEFAULT '[]'::jsonb") + + +def downgrade() -> None: + op.execute("ALTER TABLE order_items DROP COLUMN IF EXISTS cad_part_materials") + op.drop_table("materials") diff --git a/backend/alembic/versions/008_material_metadata.py b/backend/alembic/versions/008_material_metadata.py new file mode 100644 index 0000000..fc89bc8 --- /dev/null +++ b/backend/alembic/versions/008_material_metadata.py @@ -0,0 +1,32 @@ +"""Add created_by and source to materials + +Revision ID: 008 +Revises: 007 +Create Date: 2026-03-01 + +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects.postgresql import UUID + +revision: str = "008" +down_revision: Union[str, None] = "007" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.execute( + "ALTER TABLE materials " + "ADD COLUMN IF NOT EXISTS created_by UUID REFERENCES users(id) ON DELETE SET NULL, " + "ADD COLUMN IF NOT EXISTS source VARCHAR(20) NOT 
NULL DEFAULT 'manual'" + ) + + +def downgrade() -> None: + op.execute( + "ALTER TABLE materials " + "DROP COLUMN IF EXISTS created_by, " + "DROP COLUMN IF EXISTS source" + ) diff --git a/backend/alembic/versions/009_render_log.py b/backend/alembic/versions/009_render_log.py new file mode 100644 index 0000000..657ac4f --- /dev/null +++ b/backend/alembic/versions/009_render_log.py @@ -0,0 +1,28 @@ +"""Add render_log JSONB column to cad_files + +Revision ID: 009 +Revises: 008 +Create Date: 2026-03-01 + +""" +from typing import Sequence, Union +from alembic import op + +revision: str = "009" +down_revision: Union[str, None] = "008" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.execute( + "ALTER TABLE cad_files " + "ADD COLUMN IF NOT EXISTS render_log JSONB" + ) + + +def downgrade() -> None: + op.execute( + "ALTER TABLE cad_files " + "DROP COLUMN IF EXISTS render_log" + ) diff --git a/backend/alembic/versions/010_kpi_pricing.py b/backend/alembic/versions/010_kpi_pricing.py new file mode 100644 index 0000000..9d46511 --- /dev/null +++ b/backend/alembic/versions/010_kpi_pricing.py @@ -0,0 +1,75 @@ +"""KPI analytics and pricing tiers + +Revision ID: 010 +Revises: 009 +Create Date: 2026-03-02 + +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa + +revision: str = "010" +down_revision: Union[str, None] = "009" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # Add project_manager to userrole enum — must be outside a transaction + op.execute("COMMIT") + op.execute("ALTER TYPE userrole ADD VALUE IF NOT EXISTS 'project_manager'") + op.execute("BEGIN") + + # Lifecycle timestamps + estimated_price on orders + op.execute( + "ALTER TABLE orders " + "ADD COLUMN IF NOT EXISTS submitted_at TIMESTAMP WITHOUT TIME ZONE" + ) + op.execute( + "ALTER TABLE orders " + "ADD COLUMN IF NOT EXISTS processing_started_at TIMESTAMP WITHOUT TIME ZONE" + ) + op.execute( + "ALTER TABLE orders " + "ADD COLUMN IF NOT EXISTS completed_at TIMESTAMP WITHOUT TIME ZONE" + ) + op.execute( + "ALTER TABLE orders " + "ADD COLUMN IF NOT EXISTS rejected_at TIMESTAMP WITHOUT TIME ZONE" + ) + op.execute( + "ALTER TABLE orders " + "ADD COLUMN IF NOT EXISTS estimated_price NUMERIC(12, 2)" + ) + + # pricing_tiers table + op.execute( + """ + CREATE TABLE IF NOT EXISTS pricing_tiers ( + id SERIAL PRIMARY KEY, + category_key VARCHAR(100) NOT NULL, + quality_level VARCHAR(50) NOT NULL DEFAULT 'Normal', + price_per_item NUMERIC(10, 2) NOT NULL, + description TEXT, + is_active BOOLEAN NOT NULL DEFAULT TRUE, + created_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(), + updated_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(), + CONSTRAINT uq_pricing_tier UNIQUE (category_key, quality_level) + ) + """ + ) + op.execute( + "CREATE INDEX IF NOT EXISTS ix_pricing_tiers_category_key " + "ON pricing_tiers (category_key)" + ) + + +def downgrade() -> None: + op.execute("DROP TABLE IF EXISTS pricing_tiers") + op.execute("ALTER TABLE orders DROP COLUMN IF EXISTS estimated_price") + op.execute("ALTER TABLE orders DROP COLUMN IF EXISTS rejected_at") + op.execute("ALTER TABLE orders DROP COLUMN IF EXISTS completed_at") + op.execute("ALTER TABLE orders DROP COLUMN IF EXISTS processing_started_at") + op.execute("ALTER TABLE orders DROP COLUMN IF EXISTS submitted_at") + # Note: removing enum values is not supported in PostgreSQL without 
full recreation diff --git a/backend/alembic/versions/011_product_library.py b/backend/alembic/versions/011_product_library.py new file mode 100644 index 0000000..ceb8fd2 --- /dev/null +++ b/backend/alembic/versions/011_product_library.py @@ -0,0 +1,101 @@ +"""Product library — products, output_types, order_lines tables + +Revision ID: 011 +Revises: 010 +Create Date: 2026-03-02 + +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa + +revision: str = "011" +down_revision: Union[str, None] = "010" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.execute( + """ + CREATE TABLE IF NOT EXISTS products ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + pim_id VARCHAR(500) UNIQUE NOT NULL, + name VARCHAR(500), + category_key VARCHAR(100), + ebene1 VARCHAR(500), + ebene2 VARCHAR(500), + baureihe VARCHAR(500), + produkt_baureihe VARCHAR(500), + lagertyp VARCHAR(500), + name_cad_modell VARCHAR(500), + components JSONB NOT NULL DEFAULT '[]', + cad_part_materials JSONB NOT NULL DEFAULT '[]', + cad_file_id UUID REFERENCES cad_files(id) ON DELETE SET NULL, + notes TEXT, + is_active BOOLEAN NOT NULL DEFAULT TRUE, + source_excel VARCHAR(1000), + created_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(), + updated_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW() + ) + """ + ) + op.execute("CREATE INDEX IF NOT EXISTS ix_products_category_key ON products (category_key)") + op.execute("CREATE INDEX IF NOT EXISTS ix_products_name_cad_modell ON products (name_cad_modell)") + + op.execute( + """ + CREATE TABLE IF NOT EXISTS output_types ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + name VARCHAR(200) UNIQUE NOT NULL, + description TEXT, + renderer VARCHAR(50) NOT NULL DEFAULT 'threejs', + render_settings JSONB NOT NULL DEFAULT '{}', + output_format VARCHAR(20) NOT NULL DEFAULT 'png', + sort_order INTEGER NOT NULL DEFAULT 0, + is_active BOOLEAN NOT NULL DEFAULT TRUE, + created_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(), + updated_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW() + ) + """ + ) + + op.execute( + """ + CREATE TABLE IF NOT EXISTS order_lines ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + order_id UUID NOT NULL REFERENCES orders(id) ON DELETE CASCADE, + product_id UUID NOT NULL REFERENCES products(id), + output_type_id UUID REFERENCES output_types(id), + gewuenschte_bildnummer VARCHAR(500), + item_status VARCHAR(20) NOT NULL DEFAULT 'pending', + render_status VARCHAR(20) NOT NULL DEFAULT 'pending', + result_path VARCHAR(1000), + render_log JSONB, + ai_validation_status VARCHAR(20) NOT NULL DEFAULT 'not_started', + ai_validation_result JSONB, + notes TEXT, + created_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(), + updated_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW() + ) + """ + ) + op.execute("CREATE INDEX IF NOT EXISTS ix_order_lines_order_id ON order_lines (order_id)") + op.execute("CREATE INDEX IF NOT EXISTS ix_order_lines_product_id ON order_lines (product_id)") + # Partial unique indexes to handle NULL output_type_id correctly + op.execute( + "CREATE UNIQUE INDEX IF NOT EXISTS uq_order_lines_tracking " + "ON order_lines (order_id, product_id) " + "WHERE output_type_id IS NULL" + ) + op.execute( + "CREATE UNIQUE INDEX IF NOT EXISTS uq_order_lines_render " + "ON order_lines (order_id, product_id, output_type_id) " + "WHERE output_type_id IS NOT NULL" + ) + + +def downgrade() -> None: + op.execute("DROP TABLE IF EXISTS 
order_lines") + op.execute("DROP TABLE IF EXISTS output_types") + op.execute("DROP TABLE IF EXISTS products") diff --git a/backend/alembic/versions/012_backfill_products.py b/backend/alembic/versions/012_backfill_products.py new file mode 100644 index 0000000..8aea80a --- /dev/null +++ b/backend/alembic/versions/012_backfill_products.py @@ -0,0 +1,123 @@ +"""Backfill products and order_lines from order_items + +Revision ID: 012 +Revises: 011 +Create Date: 2026-03-02 + +""" +from typing import Sequence, Union +from alembic import op + +revision: str = "012" +down_revision: Union[str, None] = "011" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # 1. Seed default output type + op.execute( + """ + INSERT INTO output_types (id, name, renderer, output_format, sort_order) + VALUES (gen_random_uuid(), '3D Thumbnail', 'threejs', 'png', 0) + ON CONFLICT (name) DO NOTHING + """ + ) + + # 2. Create products from distinct pim_id in order_items + # For each distinct pim_id, take fields from the most recently updated row + op.execute( + """ + INSERT INTO products ( + pim_id, name, category_key, ebene1, ebene2, baureihe, + produkt_baureihe, lagertyp, name_cad_modell, + components, cad_part_materials, cad_file_id, source_excel + ) + SELECT DISTINCT ON (oi.pim_id) + oi.pim_id, + oi.gewaehltes_produkt AS name, + t.category_key, + oi.ebene1, + oi.ebene2, + oi.baureihe, + oi.produkt_baureihe, + oi.lagertyp, + oi.name_cad_modell, + oi.components, + COALESCE(oi.cad_part_materials, '[]'::jsonb) AS cad_part_materials, + oi.cad_file_id, + o.source_excel + FROM order_items oi + JOIN orders o ON o.id = oi.order_id + LEFT JOIN templates t ON t.id = o.template_id + WHERE oi.pim_id IS NOT NULL + AND oi.pim_id <> '' + ORDER BY oi.pim_id, oi.updated_at DESC + ON CONFLICT (pim_id) DO NOTHING + """ + ) + + # 3. Create order_lines from order_items where pim_id IS NOT NULL + # 3a. Rows with medias_rendering = true → link to '3D Thumbnail' output type + op.execute( + """ + INSERT INTO order_lines ( + order_id, product_id, output_type_id, + gewuenschte_bildnummer, item_status, render_status, + ai_validation_status, ai_validation_result, notes + ) + SELECT + oi.order_id, + p.id AS product_id, + ot.id AS output_type_id, + oi.gewuenschte_bildnummer, + oi.item_status::TEXT, + CASE + WHEN oi.cad_file_id IS NOT NULL THEN 'pending' + ELSE 'pending' + END AS render_status, + oi.ai_validation_status::TEXT, + oi.ai_validation_result, + oi.notes + FROM order_items oi + JOIN products p ON p.pim_id = oi.pim_id + JOIN output_types ot ON ot.name = '3D Thumbnail' + WHERE oi.pim_id IS NOT NULL + AND oi.pim_id <> '' + AND oi.medias_rendering = TRUE + ON CONFLICT DO NOTHING + """ + ) + + # 3b. 
Rows with medias_rendering = false → tracking only (no output_type_id) + op.execute( + """ + INSERT INTO order_lines ( + order_id, product_id, output_type_id, + gewuenschte_bildnummer, item_status, render_status, + ai_validation_status, ai_validation_result, notes + ) + SELECT + oi.order_id, + p.id AS product_id, + NULL AS output_type_id, + oi.gewuenschte_bildnummer, + oi.item_status::TEXT, + 'pending' AS render_status, + oi.ai_validation_status::TEXT, + oi.ai_validation_result, + oi.notes + FROM order_items oi + JOIN products p ON p.pim_id = oi.pim_id + WHERE oi.pim_id IS NOT NULL + AND oi.pim_id <> '' + AND (oi.medias_rendering IS NULL OR oi.medias_rendering = FALSE) + ON CONFLICT DO NOTHING + """ + ) + + +def downgrade() -> None: + op.execute("DELETE FROM order_lines") + op.execute("DELETE FROM products") + op.execute("DELETE FROM output_types WHERE name = '3D Thumbnail'") diff --git a/backend/alembic/versions/013_product_excel_fields.py b/backend/alembic/versions/013_product_excel_fields.py new file mode 100644 index 0000000..56f34ba --- /dev/null +++ b/backend/alembic/versions/013_product_excel_fields.py @@ -0,0 +1,44 @@ +"""Add gewuenschte_bildnummer and medias_rendering to products + +Revision ID: 013 +Revises: 012 +Create Date: 2026-03-02 + +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa + +revision: str = "013" +down_revision: Union[str, None] = "012" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.add_column("products", sa.Column("gewuenschte_bildnummer", sa.String(500), nullable=True)) + op.add_column("products", sa.Column("medias_rendering", sa.Boolean(), nullable=True)) + + # Backfill from order_items where available + op.execute( + """ + UPDATE products p + SET gewuenschte_bildnummer = sub.gewuenschte_bildnummer, + medias_rendering = sub.medias_rendering + FROM ( + SELECT DISTINCT ON (oi.pim_id) + oi.pim_id, + oi.gewuenschte_bildnummer, + oi.medias_rendering + FROM order_items oi + WHERE oi.pim_id IS NOT NULL AND oi.pim_id <> '' + ORDER BY oi.pim_id, oi.updated_at DESC + ) sub + WHERE p.pim_id = sub.pim_id + """ + ) + + +def downgrade() -> None: + op.drop_column("products", "medias_rendering") + op.drop_column("products", "gewuenschte_bildnummer") diff --git a/backend/alembic/versions/014_output_type_categories.py b/backend/alembic/versions/014_output_type_categories.py new file mode 100644 index 0000000..3341730 --- /dev/null +++ b/backend/alembic/versions/014_output_type_categories.py @@ -0,0 +1,27 @@ +"""Add compatible_categories to output_types + +Revision ID: 014 +Revises: 013 +Create Date: 2026-03-02 + +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects.postgresql import JSONB + +revision: str = "014" +down_revision: Union[str, None] = "013" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.add_column( + "output_types", + sa.Column("compatible_categories", JSONB, server_default="[]", nullable=False), + ) + + +def downgrade() -> None: + op.drop_column("output_types", "compatible_categories") diff --git a/backend/alembic/versions/015_flamenco_support.py b/backend/alembic/versions/015_flamenco_support.py new file mode 100644 index 0000000..b1a2da6 --- /dev/null +++ b/backend/alembic/versions/015_flamenco_support.py @@ -0,0 +1,65 @@ +"""Add Flamenco render backend support + +Revision ID: 015 
+Revises: 014 +Create Date: 2026-03-02 + +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa + +revision: str = "015" +down_revision: Union[str, None] = "014" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # output_types: render_backend + is_animation + op.add_column( + "output_types", + sa.Column("render_backend", sa.String(20), server_default="auto", nullable=False), + ) + op.add_column( + "output_types", + sa.Column("is_animation", sa.Boolean(), server_default="false", nullable=False), + ) + + # order_lines: flamenco tracking columns + op.add_column( + "order_lines", + sa.Column("flamenco_job_id", sa.String(100), nullable=True), + ) + op.add_column( + "order_lines", + sa.Column("render_backend_used", sa.String(20), nullable=True), + ) + op.add_column( + "order_lines", + sa.Column("render_started_at", sa.DateTime(), nullable=True), + ) + op.add_column( + "order_lines", + sa.Column("render_completed_at", sa.DateTime(), nullable=True), + ) + + # Seed system settings for Flamenco + op.execute( + "INSERT INTO system_settings (key, value) VALUES " + "('render_backend', 'celery'), " + "('flamenco_manager_url', 'http://flamenco-manager:8080'), " + "('flamenco_worker_count', '1') " + "ON CONFLICT (key) DO NOTHING" + ) + + +def downgrade() -> None: + op.drop_column("order_lines", "render_completed_at") + op.drop_column("order_lines", "render_started_at") + op.drop_column("order_lines", "render_backend_used") + op.drop_column("order_lines", "flamenco_job_id") + op.drop_column("output_types", "is_animation") + op.drop_column("output_types", "render_backend") + + op.execute("DELETE FROM system_settings WHERE key IN ('render_backend', 'flamenco_manager_url', 'flamenco_worker_count')") diff --git a/backend/alembic/versions/016_pricing_output_types.py b/backend/alembic/versions/016_pricing_output_types.py new file mode 100644 index 0000000..e579c60 --- /dev/null +++ b/backend/alembic/versions/016_pricing_output_types.py @@ -0,0 +1,52 @@ +"""Pricing enhancements: OutputType→PricingTier link, per-line unit_price, transparent_bg, default tier + +Revision ID: 016 +Revises: 015 +Create Date: 2026-03-02 + +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa + +revision: str = "016" +down_revision: Union[str, None] = "015" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # OutputType → PricingTier link + op.add_column("output_types", sa.Column("pricing_tier_id", sa.Integer(), nullable=True)) + op.create_foreign_key( + "fk_output_types_pricing_tier_id", + "output_types", + "pricing_tiers", + ["pricing_tier_id"], + ["id"], + ondelete="SET NULL", + ) + op.create_index("ix_output_types_pricing_tier_id", "output_types", ["pricing_tier_id"]) + + # Transparent background option for Blender PNG renders + op.add_column("output_types", sa.Column( + "transparent_bg", sa.Boolean(), nullable=False, server_default="false", + )) + + # Per-line price snapshot + op.add_column("order_lines", sa.Column("unit_price", sa.Numeric(10, 2), nullable=True)) + + # Seed global default tier (idempotent via ON CONFLICT) + op.execute(""" + INSERT INTO pricing_tiers (category_key, quality_level, price_per_item, description, is_active, created_at, updated_at) + VALUES ('default', 'Normal', 25.00, 'Global fallback price', true, NOW(), NOW()) + ON CONFLICT ON CONSTRAINT uq_pricing_tier DO 
NOTHING + """) + + +def downgrade() -> None: + op.drop_column("order_lines", "unit_price") + op.drop_column("output_types", "transparent_bg") + op.drop_index("ix_output_types_pricing_tier_id", table_name="output_types") + op.drop_constraint("fk_output_types_pricing_tier_id", "output_types", type_="foreignkey") + op.drop_column("output_types", "pricing_tier_id") diff --git a/backend/alembic/versions/017_fix_order_line_item_status.py b/backend/alembic/versions/017_fix_order_line_item_status.py new file mode 100644 index 0000000..a86151e --- /dev/null +++ b/backend/alembic/versions/017_fix_order_line_item_status.py @@ -0,0 +1,44 @@ +"""Fix stale order_lines.item_status: auto-approve lines for non-draft orders + +Revision ID: 017 +Revises: 016 +Create Date: 2026-03-02 + +""" +from typing import Sequence, Union +from alembic import op + +revision: str = "017" +down_revision: Union[str, None] = "016" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # Order lines belonging to submitted/processing/completed orders should + # be "approved", not stuck at "pending". The new Product Library workflow + # has no per-item approval step — submission implies approval. + op.execute(""" + UPDATE order_lines + SET item_status = 'approved' + WHERE item_status = 'pending' + AND order_id IN ( + SELECT id FROM orders + WHERE status IN ('submitted', 'processing', 'completed') + ) + """) + + # Lines belonging to rejected orders should be "rejected". + op.execute(""" + UPDATE order_lines + SET item_status = 'rejected' + WHERE item_status = 'pending' + AND order_id IN ( + SELECT id FROM orders WHERE status = 'rejected' + ) + """) + + +def downgrade() -> None: + # Cannot reliably revert — the original values were all "pending" anyway. + pass diff --git a/backend/alembic/versions/018_render_templates.py b/backend/alembic/versions/018_render_templates.py new file mode 100644 index 0000000..f15ea78 --- /dev/null +++ b/backend/alembic/versions/018_render_templates.py @@ -0,0 +1,56 @@ +"""Render templates — .blend file templates per category/output type + +Revision ID: 018 +Revises: 017 +Create Date: 2026-03-02 + +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects.postgresql import UUID + +# revision identifiers, used by Alembic. 
+revision: str = "018" +down_revision: Union[str, None] = "017" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.create_table( + "render_templates", + sa.Column("id", UUID(as_uuid=True), primary_key=True, server_default=sa.text("gen_random_uuid()")), + sa.Column("name", sa.String(300), nullable=False), + sa.Column("category_key", sa.String(100), nullable=True), + sa.Column("output_type_id", UUID(as_uuid=True), sa.ForeignKey("output_types.id", ondelete="SET NULL"), nullable=True), + sa.Column("blend_file_path", sa.Text, nullable=False), + sa.Column("original_filename", sa.String(500), nullable=False), + sa.Column("target_collection", sa.String(200), server_default="Product", nullable=False), + sa.Column("material_replace_enabled", sa.Boolean, server_default="false", nullable=False), + sa.Column("is_active", sa.Boolean, server_default="true", nullable=False), + sa.Column("created_at", sa.DateTime, server_default=sa.text("now()"), nullable=False), + sa.Column("updated_at", sa.DateTime, server_default=sa.text("now()"), nullable=False), + ) + + # Unique constraint: one active template per (category_key, output_type_id) combo + op.create_index( + "ix_render_templates_active_unique", + "render_templates", + ["category_key", "output_type_id"], + unique=True, + postgresql_where=sa.text("is_active = true"), + ) + + # Seed material_library_path setting + op.execute( + "INSERT INTO system_settings (key, value, updated_at) " + "VALUES ('material_library_path', '', now()) " + "ON CONFLICT (key) DO NOTHING" + ) + + +def downgrade() -> None: + op.drop_index("ix_render_templates_active_unique", table_name="render_templates") + op.drop_table("render_templates") + op.execute("DELETE FROM system_settings WHERE key = 'material_library_path'") diff --git a/backend/alembic/versions/019_schaeffler_materials.py b/backend/alembic/versions/019_schaeffler_materials.py new file mode 100644 index 0000000..ca06bc4 --- /dev/null +++ b/backend/alembic/versions/019_schaeffler_materials.py @@ -0,0 +1,38 @@ +"""Schaeffler standard materials — add schaeffler_code column and seed 35 materials + +Revision ID: 019 +Revises: 018 +Create Date: 2026-03-02 +""" +from alembic import op +import sqlalchemy as sa +import uuid +from datetime import datetime + +revision = "019" +down_revision = "018" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.add_column("materials", sa.Column("schaeffler_code", sa.Integer(), nullable=True)) + + from app.data.schaeffler_materials import SCHAEFFLER_MATERIALS + + conn = op.get_bind() + now = datetime.utcnow().isoformat() + for mat in SCHAEFFLER_MATERIALS: + desc = mat["description"].replace("'", "''") + name = mat["name"].replace("'", "''") + conn.execute(sa.text( + f"INSERT INTO materials (id, name, description, source, schaeffler_code, created_at, updated_at) " + f"VALUES ('{uuid.uuid4()}', '{name}', '{desc}', '{mat['source']}', " + f"{mat['schaeffler_code']}, '{now}', '{now}') " + f"ON CONFLICT (name) DO NOTHING" + )) + + +def downgrade() -> None: + op.execute("DELETE FROM materials WHERE source = 'schaeffler_standard'") + op.drop_column("materials", "schaeffler_code") diff --git a/backend/alembic/versions/020_material_aliases.py b/backend/alembic/versions/020_material_aliases.py new file mode 100644 index 0000000..b7ab618 --- /dev/null +++ b/backend/alembic/versions/020_material_aliases.py @@ -0,0 +1,99 @@ +"""Material aliases — substitution/alias system for material name resolution + 
+Revision ID: 020 +Revises: 019 +Create Date: 2026-03-02 +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects.postgresql import UUID +import uuid +from datetime import datetime + +revision = "020" +down_revision = "019" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # Create material_aliases table + op.create_table( + "material_aliases", + sa.Column("id", UUID(as_uuid=True), primary_key=True, default=uuid.uuid4), + sa.Column( + "material_id", + UUID(as_uuid=True), + sa.ForeignKey("materials.id", ondelete="CASCADE"), + nullable=False, + ), + sa.Column("alias", sa.String(300), nullable=False), + sa.Column("created_at", sa.DateTime(), nullable=False, server_default=sa.func.now()), + ) + + # Case-insensitive unique index on alias + op.create_index( + "uq_material_aliases_alias_lower", + "material_aliases", + [sa.text("lower(alias)")], + unique=True, + ) + + # Index on material_id for FK lookups + op.create_index( + "ix_material_aliases_material_id", + "material_aliases", + ["material_id"], + ) + + # Seed aliases from naming_scheme.xlsx Materialmapping data + _seed_aliases() + + +def _seed_aliases() -> None: + from app.data.material_alias_seeds import MATERIAL_ALIAS_SEEDS + + conn = op.get_bind() + + for entry in MATERIAL_ALIAS_SEEDS: + material_name = entry["material_name"] + + # Look up material by name + result = conn.execute( + sa.text("SELECT id FROM materials WHERE name = :name"), + {"name": material_name}, + ) + row = result.fetchone() + if not row: + # Material not seeded yet, skip + continue + + material_id = row[0] + + for alias_str in entry["aliases"]: + # Skip if alias already exists (case-insensitive) + existing = conn.execute( + sa.text("SELECT id FROM material_aliases WHERE lower(alias) = lower(:alias)"), + {"alias": alias_str}, + ) + if existing.fetchone(): + continue + + conn.execute( + sa.text( + "INSERT INTO material_aliases (id, material_id, alias, created_at) " + "VALUES (:id, :material_id, :alias, :created_at)" + ), + { + "id": str(uuid.uuid4()), + "material_id": str(material_id), + "alias": alias_str, + "created_at": datetime.utcnow(), + }, + ) + + +def downgrade() -> None: + op.drop_index("ix_material_aliases_material_id", table_name="material_aliases") + op.drop_index("uq_material_aliases_alias_lower", table_name="material_aliases") + op.drop_table("material_aliases") diff --git a/backend/alembic/versions/021_notification_center.py b/backend/alembic/versions/021_notification_center.py new file mode 100644 index 0000000..65af573 --- /dev/null +++ b/backend/alembic/versions/021_notification_center.py @@ -0,0 +1,62 @@ +"""Notification center — add target_user_id, read_at, notification to audit_log + +Revision ID: 021 +Revises: 020 +Create Date: 2026-03-03 +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects.postgresql import UUID + +revision = "021" +down_revision = "020" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.add_column( + "audit_log", + sa.Column( + "target_user_id", + UUID(as_uuid=True), + sa.ForeignKey("users.id", ondelete="SET NULL"), + nullable=True, + ), + ) + op.add_column( + "audit_log", + sa.Column("read_at", sa.DateTime(), nullable=True), + ) + op.add_column( + "audit_log", + sa.Column( + "notification", + sa.Boolean(), + nullable=False, + server_default=sa.text("false"), + ), + ) + + # Composite index for user notification queries + op.create_index( + "ix_audit_log_target_notification", + "audit_log", + ["target_user_id", "notification", "read_at"], 
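+ # read_at is last in the key so lookups that filter on (target_user_id,
+ # notification) can also evaluate read_at IS NULL ("unread") from this index.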
+ ) + + # Partial index for listing recent notifications + op.create_index( + "ix_audit_log_notification_ts", + "audit_log", + ["notification", "timestamp"], + postgresql_where=sa.text("notification = true"), + ) + + +def downgrade() -> None: + op.drop_index("ix_audit_log_notification_ts", table_name="audit_log") + op.drop_index("ix_audit_log_target_notification", table_name="audit_log") + op.drop_column("audit_log", "notification") + op.drop_column("audit_log", "read_at") + op.drop_column("audit_log", "target_user_id") diff --git a/backend/alembic/versions/022_product_variants.py b/backend/alembic/versions/022_product_variants.py new file mode 100644 index 0000000..40e686d --- /dev/null +++ b/backend/alembic/versions/022_product_variants.py @@ -0,0 +1,87 @@ +"""Product variants — per-product material variant support + +Revision ID: 022 +Revises: 021 +Create Date: 2026-03-03 +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects.postgresql import UUID, JSONB + +revision = "022" +down_revision = "021" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # --- New table: product_variants --- + op.create_table( + "product_variants", + sa.Column("id", UUID(as_uuid=True), primary_key=True, server_default=sa.text("gen_random_uuid()")), + sa.Column("product_id", UUID(as_uuid=True), sa.ForeignKey("products.id", ondelete="CASCADE"), nullable=False, index=True), + sa.Column("name", sa.String(500), nullable=False), + sa.Column("gewuenschte_bildnummer", sa.String(500), nullable=True), + sa.Column("components", JSONB, nullable=False, server_default="[]"), + sa.Column("is_default", sa.Boolean, nullable=False, server_default="false"), + sa.Column("source_excel", sa.String(1000), nullable=True), + sa.Column("created_at", sa.DateTime, server_default=sa.func.now(), nullable=False), + sa.Column("updated_at", sa.DateTime, server_default=sa.func.now(), nullable=False), + ) + # Unique constraint: (product_id, lower(name)) + op.create_index( + "uq_product_variants_product_name", + "product_variants", + [sa.text("product_id"), sa.text("lower(name)")], + unique=True, + ) + + # --- Alter products --- + op.add_column("products", sa.Column("arbeitspaket", sa.String(500), nullable=True)) + + # Drop existing unique constraint on pim_id — PIM-ID is a class-level + # identifier shared by many products so it must NOT be unique. 
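+ # Uniqueness is instead enforced per series: the partial index below keeps
+ # lower(produkt_baureihe) unique among active products only.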
+ op.drop_constraint("products_pim_id_key", "products", type_="unique") + op.create_index( + "uq_products_produkt_baureihe", + "products", + [sa.text("lower(produkt_baureihe)")], + unique=True, + postgresql_where=sa.text("produkt_baureihe IS NOT NULL AND is_active = true"), + ) + + # --- Alter order_lines --- + op.add_column( + "order_lines", + sa.Column( + "variant_id", + UUID(as_uuid=True), + sa.ForeignKey("product_variants.id", ondelete="SET NULL"), + nullable=True, + ), + ) + + # --- Backfill: create default variants for existing products --- + op.execute(""" + INSERT INTO product_variants (id, product_id, name, components, is_default, source_excel, created_at, updated_at) + SELECT + gen_random_uuid(), + p.id, + COALESCE(p.name, p.pim_id), + COALESCE(p.components, '[]'::jsonb), + true, + p.source_excel, + NOW(), + NOW() + FROM products p + WHERE p.name IS NOT NULL OR p.pim_id IS NOT NULL + """) + + +def downgrade() -> None: + op.drop_column("order_lines", "variant_id") + op.drop_index("uq_products_produkt_baureihe", "products") + op.create_unique_constraint("products_pim_id_key", "products", ["pim_id"]) + op.drop_column("products", "arbeitspaket") + op.drop_index("uq_product_variants_product_name", "product_variants") + op.drop_table("product_variants") diff --git a/backend/alembic/versions/023_fix_order_line_unique_constraints.py b/backend/alembic/versions/023_fix_order_line_unique_constraints.py new file mode 100644 index 0000000..326cc47 --- /dev/null +++ b/backend/alembic/versions/023_fix_order_line_unique_constraints.py @@ -0,0 +1,55 @@ +"""Fix order_line unique constraints to include variant_id + +The old constraints (order_id, product_id) caused 409 errors when +multiple variants of the same product were added to the same order. +New constraints use COALESCE(variant_id, nil_uuid) so different +variants of the same product can coexist. 
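+
+The COALESCE is required because PostgreSQL treats NULLs as distinct in unique
+indexes: two rows with variant_id NULL would never conflict, so NULL is mapped
+to the nil UUID and behaves like a single value.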
+ +Revision ID: 023 +Revises: 022 +Create Date: 2026-03-03 +""" +from alembic import op + +revision = "023" +down_revision = "022" +branch_labels = None +depends_on = None + +NIL_UUID = "00000000-0000-0000-0000-000000000000" + + +def upgrade() -> None: + # Drop old constraints + op.execute("DROP INDEX IF EXISTS uq_order_lines_tracking") + op.execute("DROP INDEX IF EXISTS uq_order_lines_render") + + # Recreate with variant_id included (COALESCE handles NULLs) + op.execute( + "CREATE UNIQUE INDEX uq_order_lines_tracking " + f"ON order_lines (order_id, product_id, COALESCE(variant_id, '{NIL_UUID}'::uuid)) " + "WHERE output_type_id IS NULL" + ) + op.execute( + "CREATE UNIQUE INDEX uq_order_lines_render " + "ON order_lines (order_id, product_id, output_type_id, " + f"COALESCE(variant_id, '{NIL_UUID}'::uuid)) " + "WHERE output_type_id IS NOT NULL" + ) + + +def downgrade() -> None: + op.execute("DROP INDEX IF EXISTS uq_order_lines_tracking") + op.execute("DROP INDEX IF EXISTS uq_order_lines_render") + + # Restore original constraints without variant_id + op.execute( + "CREATE UNIQUE INDEX uq_order_lines_tracking " + "ON order_lines (order_id, product_id) " + "WHERE output_type_id IS NULL" + ) + op.execute( + "CREATE UNIQUE INDEX uq_order_lines_render " + "ON order_lines (order_id, product_id, output_type_id) " + "WHERE output_type_id IS NOT NULL" + ) diff --git a/backend/alembic/versions/024_render_template_lighting_only.py b/backend/alembic/versions/024_render_template_lighting_only.py new file mode 100644 index 0000000..1e0faf7 --- /dev/null +++ b/backend/alembic/versions/024_render_template_lighting_only.py @@ -0,0 +1,33 @@ +"""Add lighting_only column to render_templates + +When lighting_only=True the render script uses the template's World/HDRI for +lighting but always computes an auto-camera for product framing. This is +useful for HDR-only templates that don't define a fixed camera angle. + +Revision ID: 024 +Revises: 023 +Create Date: 2026-03-03 +""" +from alembic import op +import sqlalchemy as sa + +revision = '024' +down_revision = '023' +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column( + 'render_templates', + sa.Column( + 'lighting_only', + sa.Boolean(), + nullable=False, + server_default='false', + ), + ) + + +def downgrade(): + op.drop_column('render_templates', 'lighting_only') diff --git a/backend/alembic/versions/025_output_type_cycles_device.py b/backend/alembic/versions/025_output_type_cycles_device.py new file mode 100644 index 0000000..1120565 --- /dev/null +++ b/backend/alembic/versions/025_output_type_cycles_device.py @@ -0,0 +1,24 @@ +"""Add cycles_device column to output_types + +Revision ID: 025 +Revises: 024 +Create Date: 2026-03-03 +""" +from alembic import op +import sqlalchemy as sa + +revision = '025' +down_revision = '024' +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column( + 'output_types', + sa.Column('cycles_device', sa.String(10), nullable=True), + ) + + +def downgrade(): + op.drop_column('output_types', 'cycles_device') diff --git a/backend/alembic/versions/026_render_template_shadow_catcher.py b/backend/alembic/versions/026_render_template_shadow_catcher.py new file mode 100644 index 0000000..bbd5f76 --- /dev/null +++ b/backend/alembic/versions/026_render_template_shadow_catcher.py @@ -0,0 +1,25 @@ +"""Add shadow_catcher_enabled to render_templates. 
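+
+Presumably consumed by the render script to mark the template's backdrop/ground
+object as a Cycles shadow catcher, so the product casts a contact shadow without
+the catcher itself appearing in the rendered image.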
+ +Revision ID: 026 +Revises: 025 +Create Date: 2026-03-03 +""" +from alembic import op +import sqlalchemy as sa + +revision = '026' +down_revision = '025' +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column( + 'render_templates', + sa.Column('shadow_catcher_enabled', sa.Boolean(), nullable=False, + server_default='false'), + ) + + +def downgrade(): + op.drop_column('render_templates', 'shadow_catcher_enabled') diff --git a/backend/alembic/versions/027_remove_variants.py b/backend/alembic/versions/027_remove_variants.py new file mode 100644 index 0000000..e8f5e04 --- /dev/null +++ b/backend/alembic/versions/027_remove_variants.py @@ -0,0 +1,108 @@ +"""Remove product variant system — products are unique, no variant concept. + +Revision ID: 027 +Revises: 026 +Create Date: 2026-03-03 +""" +from alembic import op +import sqlalchemy as sa + +revision = "027" +down_revision = "026" +branch_labels = None +depends_on = None + +NIL_UUID = "00000000-0000-0000-0000-000000000000" + + +def upgrade() -> None: + # Drop variant-aware unique indexes on order_lines + op.execute("DROP INDEX IF EXISTS uq_order_lines_tracking") + op.execute("DROP INDEX IF EXISTS uq_order_lines_render") + + # Drop variant_id column from order_lines + op.execute("ALTER TABLE order_lines DROP COLUMN IF EXISTS variant_id") + + # Deduplicate tracking-only lines (output_type_id IS NULL) — keep the newest row per + # (order_id, product_id) pair so the unique index can be created cleanly. + op.execute(""" + DELETE FROM order_lines + WHERE output_type_id IS NULL + AND id NOT IN ( + SELECT DISTINCT ON (order_id, product_id) id + FROM order_lines + WHERE output_type_id IS NULL + ORDER BY order_id, product_id, created_at DESC + ) + """) + + # Deduplicate render lines — keep the newest row per (order_id, product_id, output_type_id). 
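+ # DISTINCT ON with ORDER BY ... created_at DESC selects the newest id per group;
+ # every id outside that set is deleted, mirroring the tracking-line dedup above.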
+ op.execute(""" + DELETE FROM order_lines + WHERE output_type_id IS NOT NULL + AND id NOT IN ( + SELECT DISTINCT ON (order_id, product_id, output_type_id) id + FROM order_lines + WHERE output_type_id IS NOT NULL + ORDER BY order_id, product_id, output_type_id, created_at DESC + ) + """) + + # Recreate simpler unique indexes without variant_id + op.execute( + "CREATE UNIQUE INDEX uq_order_lines_tracking " + "ON order_lines (order_id, product_id) " + "WHERE output_type_id IS NULL" + ) + op.execute( + "CREATE UNIQUE INDEX uq_order_lines_render " + "ON order_lines (order_id, product_id, output_type_id) " + "WHERE output_type_id IS NOT NULL" + ) + + # Drop product_variants table (CASCADE removes its indexes automatically) + op.execute("DROP TABLE IF EXISTS product_variants CASCADE") + + +def downgrade() -> None: + # Recreate product_variants table + op.execute(""" + CREATE TABLE product_variants ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + product_id UUID NOT NULL REFERENCES products(id) ON DELETE CASCADE, + name VARCHAR(500) NOT NULL, + gewuenschte_bildnummer VARCHAR(500), + components JSONB NOT NULL DEFAULT '[]', + is_default BOOLEAN NOT NULL DEFAULT false, + source_excel VARCHAR(1000), + created_at TIMESTAMP NOT NULL DEFAULT NOW(), + updated_at TIMESTAMP NOT NULL DEFAULT NOW() + ) + """) + op.execute( + "CREATE UNIQUE INDEX uq_product_variants_product_name " + "ON product_variants (product_id, lower(name))" + ) + op.execute("CREATE INDEX ON product_variants (product_id)") + + # Add back variant_id to order_lines + op.execute( + "ALTER TABLE order_lines ADD COLUMN variant_id UUID " + "REFERENCES product_variants(id) ON DELETE SET NULL" + ) + + # Drop simple indexes and restore variant-aware indexes + op.execute("DROP INDEX IF EXISTS uq_order_lines_tracking") + op.execute("DROP INDEX IF EXISTS uq_order_lines_render") + + op.execute( + "CREATE UNIQUE INDEX uq_order_lines_tracking " + f"ON order_lines (order_id, product_id, COALESCE(variant_id, '{NIL_UUID}'::uuid)) " + "WHERE output_type_id IS NULL" + ) + op.execute( + "CREATE UNIQUE INDEX uq_order_lines_render " + "ON order_lines (order_id, product_id, output_type_id, " + f"COALESCE(variant_id, '{NIL_UUID}'::uuid)) " + "WHERE output_type_id IS NOT NULL" + ) diff --git a/backend/alembic/versions/028_product_render_positions.py b/backend/alembic/versions/028_product_render_positions.py new file mode 100644 index 0000000..07ed5a0 --- /dev/null +++ b/backend/alembic/versions/028_product_render_positions.py @@ -0,0 +1,81 @@ +"""Add product_render_positions table and render_position_id on order_lines. 
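+
+A render position is a named per-product rotation preset (rotation_x/y/z) that an
+order line can reference; the reworked unique indexes allow the same product to
+appear on one order once per position.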
+ +Revision ID: 028 +Revises: 027 +Create Date: 2026-03-04 +""" +from alembic import op +import sqlalchemy as sa + +revision = "028" +down_revision = "027" +branch_labels = None +depends_on = None + +NIL_UUID = "00000000-0000-0000-0000-000000000000" + + +def upgrade() -> None: + # ── New table: product_render_positions ────────────────────────────────── + op.execute(""" + CREATE TABLE product_render_positions ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + product_id UUID NOT NULL REFERENCES products(id) ON DELETE CASCADE, + name VARCHAR(200) NOT NULL, + rotation_x DOUBLE PRECISION NOT NULL DEFAULT 0, + rotation_y DOUBLE PRECISION NOT NULL DEFAULT 0, + rotation_z DOUBLE PRECISION NOT NULL DEFAULT 0, + is_default BOOLEAN NOT NULL DEFAULT false, + sort_order INTEGER NOT NULL DEFAULT 0, + created_at TIMESTAMP NOT NULL DEFAULT NOW(), + updated_at TIMESTAMP NOT NULL DEFAULT NOW() + ) + """) + op.execute( + "CREATE UNIQUE INDEX uq_render_positions_product_name " + "ON product_render_positions (product_id, lower(name))" + ) + op.execute("CREATE INDEX ix_render_positions_product_id ON product_render_positions (product_id)") + + # ── Add render_position_id to order_lines ──────────────────────────────── + op.execute( + "ALTER TABLE order_lines ADD COLUMN render_position_id UUID " + "REFERENCES product_render_positions(id) ON DELETE SET NULL" + ) + + # ── Update unique indexes to include position ───────────────────────────── + op.execute("DROP INDEX IF EXISTS uq_order_lines_tracking") + op.execute("DROP INDEX IF EXISTS uq_order_lines_render") + + op.execute( + f"CREATE UNIQUE INDEX uq_order_lines_tracking " + f"ON order_lines (order_id, product_id, COALESCE(render_position_id, '{NIL_UUID}'::uuid)) " + f"WHERE output_type_id IS NULL" + ) + op.execute( + f"CREATE UNIQUE INDEX uq_order_lines_render " + f"ON order_lines (order_id, product_id, output_type_id, " + f"COALESCE(render_position_id, '{NIL_UUID}'::uuid)) " + f"WHERE output_type_id IS NOT NULL" + ) + + +def downgrade() -> None: + # Restore original unique indexes (without position) + op.execute("DROP INDEX IF EXISTS uq_order_lines_tracking") + op.execute("DROP INDEX IF EXISTS uq_order_lines_render") + + op.execute("ALTER TABLE order_lines DROP COLUMN IF EXISTS render_position_id") + + op.execute( + "CREATE UNIQUE INDEX uq_order_lines_tracking " + "ON order_lines (order_id, product_id) " + "WHERE output_type_id IS NULL" + ) + op.execute( + "CREATE UNIQUE INDEX uq_order_lines_render " + "ON order_lines (order_id, product_id, output_type_id) " + "WHERE output_type_id IS NOT NULL" + ) + + op.execute("DROP TABLE IF EXISTS product_render_positions CASCADE") diff --git a/backend/alembic/versions/029_seed_default_render_positions.py b/backend/alembic/versions/029_seed_default_render_positions.py new file mode 100644 index 0000000..5a6ed67 --- /dev/null +++ b/backend/alembic/versions/029_seed_default_render_positions.py @@ -0,0 +1,53 @@ +"""Seed default render positions (3/4 Front + 3/4 Rear) for all existing products. + +Revision ID: 029 +Revises: 028 +Create Date: 2026-03-04 +""" +from alembic import op + +revision = "029" +down_revision = "028" +branch_labels = None +depends_on = None + + +def upgrade(): + # Insert two default positions for every product that currently has none. + # The CTE guarantees both rows are inserted for the same set of products. 
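+ # The seeded names must match the literals in downgrade() below so the rows
+ # can be identified and removed again on rollback.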
+ op.execute(""" + WITH products_without_positions AS ( + SELECT p.id AS product_id + FROM products p + WHERE NOT EXISTS ( + SELECT 1 FROM product_render_positions rp WHERE rp.product_id = p.id + ) + ) + INSERT INTO product_render_positions + (id, product_id, name, rotation_x, rotation_y, rotation_z, + is_default, sort_order, created_at, updated_at) + SELECT gen_random_uuid(), product_id, + '3/4 Front', -15.0, 45.0, 0.0, true, 0, NOW(), NOW() + FROM products_without_positions + UNION ALL + SELECT gen_random_uuid(), product_id, + '3/4 Rear', -15.0, -135.0, 0.0, false, 1, NOW(), NOW() + FROM products_without_positions + """) + + +def downgrade(): + # Remove positions named exactly '3/4 Front' or '3/4 Rear' + # where they are the only two positions on that product (i.e. seeded ones). + op.execute(""" + DELETE FROM product_render_positions + WHERE name IN ('3/4 Front', '3/4 Rear') + AND product_id IN ( + SELECT product_id + FROM product_render_positions + GROUP BY product_id + HAVING COUNT(*) = 2 + AND bool_or(name = '3/4 Front') + AND bool_or(name = '3/4 Rear') + ) + """) diff --git a/backend/alembic/versions/030_seed_default_position.py b/backend/alembic/versions/030_seed_default_position.py new file mode 100644 index 0000000..d7e816d --- /dev/null +++ b/backend/alembic/versions/030_seed_default_position.py @@ -0,0 +1,39 @@ +"""Seed 'Default' (unrotated) render position for all existing products. + +Revision ID: 030 +Revises: 029 +Create Date: 2026-03-04 +""" +from alembic import op + +revision = "030" +down_revision = "029" +branch_labels = None +depends_on = None + + +def upgrade(): + # Add 'Default' (0°/0°/0°) to every product that doesn't already have it. + op.execute(""" + INSERT INTO product_render_positions + (id, product_id, name, rotation_x, rotation_y, rotation_z, + is_default, sort_order, created_at, updated_at) + SELECT gen_random_uuid(), p.id, + 'Default', 0.0, 0.0, 0.0, false, 2, NOW(), NOW() + FROM products p + WHERE NOT EXISTS ( + SELECT 1 FROM product_render_positions rp + WHERE rp.product_id = p.id + AND lower(rp.name) = 'default' + ) + """) + + +def downgrade(): + op.execute(""" + DELETE FROM product_render_positions + WHERE lower(name) = 'default' + AND rotation_x = 0.0 + AND rotation_y = 0.0 + AND rotation_z = 0.0 + """) diff --git a/backend/alembic/versions/031_camera_orbit.py b/backend/alembic/versions/031_camera_orbit.py new file mode 100644 index 0000000..a30cc61 --- /dev/null +++ b/backend/alembic/versions/031_camera_orbit.py @@ -0,0 +1,25 @@ +"""Add camera_orbit to render_templates + +Revision ID: 031_camera_orbit +Revises: 030_seed_default_position +Create Date: 2026-03-04 +""" +from alembic import op +import sqlalchemy as sa + +revision = '031' +down_revision = '030' +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column( + 'render_templates', + sa.Column('camera_orbit', sa.Boolean(), nullable=False, + server_default='true'), + ) + + +def downgrade(): + op.drop_column('render_templates', 'camera_orbit') diff --git a/backend/alembic/versions/__pycache__/001_initial_schema.cpython-311.pyc b/backend/alembic/versions/__pycache__/001_initial_schema.cpython-311.pyc new file mode 100644 index 0000000..7dd523a Binary files /dev/null and b/backend/alembic/versions/__pycache__/001_initial_schema.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/002_system_settings.cpython-311.pyc b/backend/alembic/versions/__pycache__/002_system_settings.cpython-311.pyc new file mode 100644 index 0000000..e68b482 Binary files /dev/null and 
b/backend/alembic/versions/__pycache__/002_system_settings.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/003_blender_settings.cpython-311.pyc b/backend/alembic/versions/__pycache__/003_blender_settings.cpython-311.pyc new file mode 100644 index 0000000..1509e5c Binary files /dev/null and b/backend/alembic/versions/__pycache__/003_blender_settings.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/004_threejs_settings.cpython-311.pyc b/backend/alembic/versions/__pycache__/004_threejs_settings.cpython-311.pyc new file mode 100644 index 0000000..f4905c4 Binary files /dev/null and b/backend/alembic/versions/__pycache__/004_threejs_settings.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/005_threejs_default_1k.cpython-311.pyc b/backend/alembic/versions/__pycache__/005_threejs_default_1k.cpython-311.pyc new file mode 100644 index 0000000..214bc84 Binary files /dev/null and b/backend/alembic/versions/__pycache__/005_threejs_default_1k.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/006_thumbnail_format.cpython-311.pyc b/backend/alembic/versions/__pycache__/006_thumbnail_format.cpython-311.pyc new file mode 100644 index 0000000..dfa50b9 Binary files /dev/null and b/backend/alembic/versions/__pycache__/006_thumbnail_format.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/007_materials.cpython-311.pyc b/backend/alembic/versions/__pycache__/007_materials.cpython-311.pyc new file mode 100644 index 0000000..37279ff Binary files /dev/null and b/backend/alembic/versions/__pycache__/007_materials.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/008_material_metadata.cpython-311.pyc b/backend/alembic/versions/__pycache__/008_material_metadata.cpython-311.pyc new file mode 100644 index 0000000..b574fe7 Binary files /dev/null and b/backend/alembic/versions/__pycache__/008_material_metadata.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/009_render_log.cpython-311.pyc b/backend/alembic/versions/__pycache__/009_render_log.cpython-311.pyc new file mode 100644 index 0000000..6ea8d8a Binary files /dev/null and b/backend/alembic/versions/__pycache__/009_render_log.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/010_kpi_pricing.cpython-311.pyc b/backend/alembic/versions/__pycache__/010_kpi_pricing.cpython-311.pyc new file mode 100644 index 0000000..90564f9 Binary files /dev/null and b/backend/alembic/versions/__pycache__/010_kpi_pricing.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/011_product_library.cpython-311.pyc b/backend/alembic/versions/__pycache__/011_product_library.cpython-311.pyc new file mode 100644 index 0000000..99cdab8 Binary files /dev/null and b/backend/alembic/versions/__pycache__/011_product_library.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/012_backfill_products.cpython-311.pyc b/backend/alembic/versions/__pycache__/012_backfill_products.cpython-311.pyc new file mode 100644 index 0000000..3d02949 Binary files /dev/null and b/backend/alembic/versions/__pycache__/012_backfill_products.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/013_product_excel_fields.cpython-311.pyc b/backend/alembic/versions/__pycache__/013_product_excel_fields.cpython-311.pyc new file mode 100644 index 0000000..203ea9b Binary files /dev/null and b/backend/alembic/versions/__pycache__/013_product_excel_fields.cpython-311.pyc differ diff --git 
a/backend/alembic/versions/__pycache__/014_output_type_categories.cpython-311.pyc b/backend/alembic/versions/__pycache__/014_output_type_categories.cpython-311.pyc new file mode 100644 index 0000000..ec1419e Binary files /dev/null and b/backend/alembic/versions/__pycache__/014_output_type_categories.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/015_flamenco_support.cpython-311.pyc b/backend/alembic/versions/__pycache__/015_flamenco_support.cpython-311.pyc new file mode 100644 index 0000000..89794fa Binary files /dev/null and b/backend/alembic/versions/__pycache__/015_flamenco_support.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/016_pricing_output_types.cpython-311.pyc b/backend/alembic/versions/__pycache__/016_pricing_output_types.cpython-311.pyc new file mode 100644 index 0000000..b352200 Binary files /dev/null and b/backend/alembic/versions/__pycache__/016_pricing_output_types.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/017_fix_order_line_item_status.cpython-311.pyc b/backend/alembic/versions/__pycache__/017_fix_order_line_item_status.cpython-311.pyc new file mode 100644 index 0000000..978a87a Binary files /dev/null and b/backend/alembic/versions/__pycache__/017_fix_order_line_item_status.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/018_render_templates.cpython-311.pyc b/backend/alembic/versions/__pycache__/018_render_templates.cpython-311.pyc new file mode 100644 index 0000000..eb20161 Binary files /dev/null and b/backend/alembic/versions/__pycache__/018_render_templates.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/019_schaeffler_materials.cpython-311.pyc b/backend/alembic/versions/__pycache__/019_schaeffler_materials.cpython-311.pyc new file mode 100644 index 0000000..3313da4 Binary files /dev/null and b/backend/alembic/versions/__pycache__/019_schaeffler_materials.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/020_material_aliases.cpython-311.pyc b/backend/alembic/versions/__pycache__/020_material_aliases.cpython-311.pyc new file mode 100644 index 0000000..5638ff4 Binary files /dev/null and b/backend/alembic/versions/__pycache__/020_material_aliases.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/021_notification_center.cpython-311.pyc b/backend/alembic/versions/__pycache__/021_notification_center.cpython-311.pyc new file mode 100644 index 0000000..0d90ae5 Binary files /dev/null and b/backend/alembic/versions/__pycache__/021_notification_center.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/022_product_variants.cpython-311.pyc b/backend/alembic/versions/__pycache__/022_product_variants.cpython-311.pyc new file mode 100644 index 0000000..04bab2f Binary files /dev/null and b/backend/alembic/versions/__pycache__/022_product_variants.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/023_fix_order_line_unique_constraints.cpython-311.pyc b/backend/alembic/versions/__pycache__/023_fix_order_line_unique_constraints.cpython-311.pyc new file mode 100644 index 0000000..6c43fbc Binary files /dev/null and b/backend/alembic/versions/__pycache__/023_fix_order_line_unique_constraints.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/024_render_template_lighting_only.cpython-311.pyc b/backend/alembic/versions/__pycache__/024_render_template_lighting_only.cpython-311.pyc new file mode 100644 index 0000000..aae4861 Binary files /dev/null and 
b/backend/alembic/versions/__pycache__/024_render_template_lighting_only.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/025_output_type_cycles_device.cpython-311.pyc b/backend/alembic/versions/__pycache__/025_output_type_cycles_device.cpython-311.pyc new file mode 100644 index 0000000..4febc81 Binary files /dev/null and b/backend/alembic/versions/__pycache__/025_output_type_cycles_device.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/026_render_template_shadow_catcher.cpython-311.pyc b/backend/alembic/versions/__pycache__/026_render_template_shadow_catcher.cpython-311.pyc new file mode 100644 index 0000000..1a5bbaa Binary files /dev/null and b/backend/alembic/versions/__pycache__/026_render_template_shadow_catcher.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/027_remove_variants.cpython-311.pyc b/backend/alembic/versions/__pycache__/027_remove_variants.cpython-311.pyc new file mode 100644 index 0000000..264193b Binary files /dev/null and b/backend/alembic/versions/__pycache__/027_remove_variants.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/028_product_render_positions.cpython-311.pyc b/backend/alembic/versions/__pycache__/028_product_render_positions.cpython-311.pyc new file mode 100644 index 0000000..a787251 Binary files /dev/null and b/backend/alembic/versions/__pycache__/028_product_render_positions.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/029_seed_default_render_positions.cpython-311.pyc b/backend/alembic/versions/__pycache__/029_seed_default_render_positions.cpython-311.pyc new file mode 100644 index 0000000..c0d46f9 Binary files /dev/null and b/backend/alembic/versions/__pycache__/029_seed_default_render_positions.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/030_seed_default_position.cpython-311.pyc b/backend/alembic/versions/__pycache__/030_seed_default_position.cpython-311.pyc new file mode 100644 index 0000000..39beccf Binary files /dev/null and b/backend/alembic/versions/__pycache__/030_seed_default_position.cpython-311.pyc differ diff --git a/backend/alembic/versions/__pycache__/031_camera_orbit.cpython-311.pyc b/backend/alembic/versions/__pycache__/031_camera_orbit.cpython-311.pyc new file mode 100644 index 0000000..722a328 Binary files /dev/null and b/backend/alembic/versions/__pycache__/031_camera_orbit.cpython-311.pyc differ diff --git a/backend/app/__init__.py b/backend/app/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/__pycache__/__init__.cpython-311.pyc b/backend/app/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..6c3c68d Binary files /dev/null and b/backend/app/__pycache__/__init__.cpython-311.pyc differ diff --git a/backend/app/__pycache__/__init__.cpython-312.pyc b/backend/app/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..37774b6 Binary files /dev/null and b/backend/app/__pycache__/__init__.cpython-312.pyc differ diff --git a/backend/app/__pycache__/config.cpython-311.pyc b/backend/app/__pycache__/config.cpython-311.pyc new file mode 100644 index 0000000..d182d60 Binary files /dev/null and b/backend/app/__pycache__/config.cpython-311.pyc differ diff --git a/backend/app/__pycache__/config.cpython-312.pyc b/backend/app/__pycache__/config.cpython-312.pyc new file mode 100644 index 0000000..63b5f8b Binary files /dev/null and b/backend/app/__pycache__/config.cpython-312.pyc differ diff --git 
a/backend/app/__pycache__/database.cpython-311.pyc b/backend/app/__pycache__/database.cpython-311.pyc new file mode 100644 index 0000000..fbbdd71 Binary files /dev/null and b/backend/app/__pycache__/database.cpython-311.pyc differ diff --git a/backend/app/__pycache__/database.cpython-312.pyc b/backend/app/__pycache__/database.cpython-312.pyc new file mode 100644 index 0000000..da4b8fd Binary files /dev/null and b/backend/app/__pycache__/database.cpython-312.pyc differ diff --git a/backend/app/__pycache__/main.cpython-311.pyc b/backend/app/__pycache__/main.cpython-311.pyc new file mode 100644 index 0000000..c8f04b1 Binary files /dev/null and b/backend/app/__pycache__/main.cpython-311.pyc differ diff --git a/backend/app/__pycache__/main.cpython-312.pyc b/backend/app/__pycache__/main.cpython-312.pyc new file mode 100644 index 0000000..d6429b2 Binary files /dev/null and b/backend/app/__pycache__/main.cpython-312.pyc differ diff --git a/backend/app/api/__init__.py b/backend/app/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/api/__pycache__/__init__.cpython-311.pyc b/backend/app/api/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..5db524d Binary files /dev/null and b/backend/app/api/__pycache__/__init__.cpython-311.pyc differ diff --git a/backend/app/api/__pycache__/__init__.cpython-312.pyc b/backend/app/api/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..09495b4 Binary files /dev/null and b/backend/app/api/__pycache__/__init__.cpython-312.pyc differ diff --git a/backend/app/api/routers/__init__.py b/backend/app/api/routers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/api/routers/__pycache__/__init__.cpython-311.pyc b/backend/app/api/routers/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..ef04168 Binary files /dev/null and b/backend/app/api/routers/__pycache__/__init__.cpython-311.pyc differ diff --git a/backend/app/api/routers/__pycache__/__init__.cpython-312.pyc b/backend/app/api/routers/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..3167006 Binary files /dev/null and b/backend/app/api/routers/__pycache__/__init__.cpython-312.pyc differ diff --git a/backend/app/api/routers/__pycache__/admin.cpython-311.pyc b/backend/app/api/routers/__pycache__/admin.cpython-311.pyc new file mode 100644 index 0000000..c9bccc8 Binary files /dev/null and b/backend/app/api/routers/__pycache__/admin.cpython-311.pyc differ diff --git a/backend/app/api/routers/__pycache__/admin.cpython-312.pyc b/backend/app/api/routers/__pycache__/admin.cpython-312.pyc new file mode 100644 index 0000000..c62e506 Binary files /dev/null and b/backend/app/api/routers/__pycache__/admin.cpython-312.pyc differ diff --git a/backend/app/api/routers/__pycache__/analytics.cpython-311.pyc b/backend/app/api/routers/__pycache__/analytics.cpython-311.pyc new file mode 100644 index 0000000..5cef445 Binary files /dev/null and b/backend/app/api/routers/__pycache__/analytics.cpython-311.pyc differ diff --git a/backend/app/api/routers/__pycache__/analytics.cpython-312.pyc b/backend/app/api/routers/__pycache__/analytics.cpython-312.pyc new file mode 100644 index 0000000..427c6b8 Binary files /dev/null and b/backend/app/api/routers/__pycache__/analytics.cpython-312.pyc differ diff --git a/backend/app/api/routers/__pycache__/auth.cpython-311.pyc b/backend/app/api/routers/__pycache__/auth.cpython-311.pyc new file mode 100644 index 0000000..bc5ce9b Binary files /dev/null and 
b/backend/app/api/routers/__pycache__/auth.cpython-311.pyc differ diff --git a/backend/app/api/routers/__pycache__/auth.cpython-312.pyc b/backend/app/api/routers/__pycache__/auth.cpython-312.pyc new file mode 100644 index 0000000..c7f29e8 Binary files /dev/null and b/backend/app/api/routers/__pycache__/auth.cpython-312.pyc differ diff --git a/backend/app/api/routers/__pycache__/cad.cpython-311.pyc b/backend/app/api/routers/__pycache__/cad.cpython-311.pyc new file mode 100644 index 0000000..18f7f36 Binary files /dev/null and b/backend/app/api/routers/__pycache__/cad.cpython-311.pyc differ diff --git a/backend/app/api/routers/__pycache__/cad.cpython-312.pyc b/backend/app/api/routers/__pycache__/cad.cpython-312.pyc new file mode 100644 index 0000000..7e62c82 Binary files /dev/null and b/backend/app/api/routers/__pycache__/cad.cpython-312.pyc differ diff --git a/backend/app/api/routers/__pycache__/materials.cpython-311.pyc b/backend/app/api/routers/__pycache__/materials.cpython-311.pyc new file mode 100644 index 0000000..1152324 Binary files /dev/null and b/backend/app/api/routers/__pycache__/materials.cpython-311.pyc differ diff --git a/backend/app/api/routers/__pycache__/notifications.cpython-311.pyc b/backend/app/api/routers/__pycache__/notifications.cpython-311.pyc new file mode 100644 index 0000000..c87e750 Binary files /dev/null and b/backend/app/api/routers/__pycache__/notifications.cpython-311.pyc differ diff --git a/backend/app/api/routers/__pycache__/order_items.cpython-311.pyc b/backend/app/api/routers/__pycache__/order_items.cpython-311.pyc new file mode 100644 index 0000000..def8fe4 Binary files /dev/null and b/backend/app/api/routers/__pycache__/order_items.cpython-311.pyc differ diff --git a/backend/app/api/routers/__pycache__/order_items.cpython-312.pyc b/backend/app/api/routers/__pycache__/order_items.cpython-312.pyc new file mode 100644 index 0000000..2017061 Binary files /dev/null and b/backend/app/api/routers/__pycache__/order_items.cpython-312.pyc differ diff --git a/backend/app/api/routers/__pycache__/orders.cpython-311.pyc b/backend/app/api/routers/__pycache__/orders.cpython-311.pyc new file mode 100644 index 0000000..d9e8f08 Binary files /dev/null and b/backend/app/api/routers/__pycache__/orders.cpython-311.pyc differ diff --git a/backend/app/api/routers/__pycache__/orders.cpython-312.pyc b/backend/app/api/routers/__pycache__/orders.cpython-312.pyc new file mode 100644 index 0000000..b444fe8 Binary files /dev/null and b/backend/app/api/routers/__pycache__/orders.cpython-312.pyc differ diff --git a/backend/app/api/routers/__pycache__/output_types.cpython-311.pyc b/backend/app/api/routers/__pycache__/output_types.cpython-311.pyc new file mode 100644 index 0000000..a86c560 Binary files /dev/null and b/backend/app/api/routers/__pycache__/output_types.cpython-311.pyc differ diff --git a/backend/app/api/routers/__pycache__/pricing.cpython-311.pyc b/backend/app/api/routers/__pycache__/pricing.cpython-311.pyc new file mode 100644 index 0000000..9950085 Binary files /dev/null and b/backend/app/api/routers/__pycache__/pricing.cpython-311.pyc differ diff --git a/backend/app/api/routers/__pycache__/products.cpython-311.pyc b/backend/app/api/routers/__pycache__/products.cpython-311.pyc new file mode 100644 index 0000000..4c26989 Binary files /dev/null and b/backend/app/api/routers/__pycache__/products.cpython-311.pyc differ diff --git a/backend/app/api/routers/__pycache__/render_templates.cpython-311.pyc b/backend/app/api/routers/__pycache__/render_templates.cpython-311.pyc new file 
mode 100644 index 0000000..289d2e6 Binary files /dev/null and b/backend/app/api/routers/__pycache__/render_templates.cpython-311.pyc differ diff --git a/backend/app/api/routers/__pycache__/templates.cpython-311.pyc b/backend/app/api/routers/__pycache__/templates.cpython-311.pyc new file mode 100644 index 0000000..4025308 Binary files /dev/null and b/backend/app/api/routers/__pycache__/templates.cpython-311.pyc differ diff --git a/backend/app/api/routers/__pycache__/templates.cpython-312.pyc b/backend/app/api/routers/__pycache__/templates.cpython-312.pyc new file mode 100644 index 0000000..bef98c4 Binary files /dev/null and b/backend/app/api/routers/__pycache__/templates.cpython-312.pyc differ diff --git a/backend/app/api/routers/__pycache__/uploads.cpython-311.pyc b/backend/app/api/routers/__pycache__/uploads.cpython-311.pyc new file mode 100644 index 0000000..9a09bdc Binary files /dev/null and b/backend/app/api/routers/__pycache__/uploads.cpython-311.pyc differ diff --git a/backend/app/api/routers/__pycache__/uploads.cpython-312.pyc b/backend/app/api/routers/__pycache__/uploads.cpython-312.pyc new file mode 100644 index 0000000..e2ecff3 Binary files /dev/null and b/backend/app/api/routers/__pycache__/uploads.cpython-312.pyc differ diff --git a/backend/app/api/routers/__pycache__/worker.cpython-311.pyc b/backend/app/api/routers/__pycache__/worker.cpython-311.pyc new file mode 100644 index 0000000..4e6c29b Binary files /dev/null and b/backend/app/api/routers/__pycache__/worker.cpython-311.pyc differ diff --git a/backend/app/api/routers/admin.py b/backend/app/api/routers/admin.py new file mode 100644 index 0000000..d6bca04 --- /dev/null +++ b/backend/app/api/routers/admin.py @@ -0,0 +1,486 @@ +import asyncio +import json +import uuid +from datetime import datetime +from typing import Any +from fastapi import APIRouter, Depends, HTTPException, status +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select, update as sql_update +from pydantic import BaseModel +from app.database import get_db +from app.models.user import User +from app.models.system_setting import SystemSetting +from app.models.cad_file import CadFile, ProcessingStatus +from app.models.output_type import OutputType as OutputTypeModel +from app.schemas.user import UserOut, UserUpdate, UserCreate +from app.utils.auth import require_admin, hash_password + +router = APIRouter(prefix="/admin", tags=["admin"]) + +VALID_RENDERERS = {"pillow", "blender", "threejs"} +VALID_ENGINES = {"cycles", "eevee"} +VALID_THREEJS_SIZES = {512, 1024, 2048} +VALID_FORMATS = {"jpg", "png"} +VALID_STL_QUALITIES = {"low", "high"} +VALID_CYCLES_DEVICES = {"auto", "gpu", "cpu"} +VALID_RENDER_BACKENDS = {"celery", "flamenco", "auto"} + +SETTINGS_DEFAULTS: dict[str, str] = { + "thumbnail_renderer": "pillow", + "blender_engine": "cycles", + "blender_cycles_samples": "256", + "blender_eevee_samples": "64", + "threejs_render_size": "1024", + "thumbnail_format": "jpg", + "stl_quality": "low", + "blender_smooth_angle": "30", + "cycles_device": "auto", + "render_backend": "celery", + "flamenco_manager_url": "http://flamenco-manager:8080", + "flamenco_worker_count": "1", + "blender_max_concurrent_renders": "3", + "product_thumbnail_priority": '["latest_render","cad_thumbnail"]', + "render_stall_timeout_minutes": "120", +} + + +class SettingsOut(BaseModel): + thumbnail_renderer: str = "pillow" + blender_engine: str = "cycles" + blender_cycles_samples: int = 256 + blender_eevee_samples: int = 64 + threejs_render_size: int = 1024 + 
thumbnail_format: str = "jpg" + stl_quality: str = "low" + blender_smooth_angle: int = 30 + cycles_device: str = "auto" + render_backend: str = "celery" + flamenco_manager_url: str = "http://flamenco-manager:8080" + flamenco_worker_count: int = 1 + blender_max_concurrent_renders: int = 3 + product_thumbnail_priority: str = '["latest_render","cad_thumbnail"]' + render_stall_timeout_minutes: int = 120 + + +class SettingsUpdate(BaseModel): + thumbnail_renderer: str | None = None + blender_engine: str | None = None + blender_cycles_samples: int | None = None + blender_eevee_samples: int | None = None + threejs_render_size: int | None = None + thumbnail_format: str | None = None + stl_quality: str | None = None + blender_smooth_angle: int | None = None + cycles_device: str | None = None + render_backend: str | None = None + flamenco_manager_url: str | None = None + flamenco_worker_count: int | None = None + blender_max_concurrent_renders: int | None = None + product_thumbnail_priority: str | None = None + render_stall_timeout_minutes: int | None = None + + +@router.get("/users", response_model=list[UserOut]) +async def list_users( + admin: User = Depends(require_admin), + db: AsyncSession = Depends(get_db), +): + result = await db.execute(select(User).order_by(User.created_at.desc())) + return result.scalars().all() + + +@router.post("/users", response_model=UserOut, status_code=status.HTTP_201_CREATED) +async def create_user( + body: UserCreate, + admin: User = Depends(require_admin), + db: AsyncSession = Depends(get_db), +): + result = await db.execute(select(User).where(User.email == body.email)) + if result.scalar_one_or_none(): + raise HTTPException(400, detail="Email already registered") + + user = User( + email=body.email, + password_hash=hash_password(body.password), + full_name=body.full_name, + role=body.role, + ) + db.add(user) + await db.commit() + await db.refresh(user) + return user + + +@router.patch("/users/{user_id}", response_model=UserOut) +async def update_user( + user_id: uuid.UUID, + body: UserUpdate, + admin: User = Depends(require_admin), + db: AsyncSession = Depends(get_db), +): + result = await db.execute(select(User).where(User.id == user_id)) + user = result.scalar_one_or_none() + if not user: + raise HTTPException(404, detail="User not found") + + for field, val in body.model_dump(exclude_unset=True).items(): + setattr(user, field, val) + await db.commit() + await db.refresh(user) + return user + + +@router.delete("/users/{user_id}", status_code=status.HTTP_204_NO_CONTENT) +async def delete_user( + user_id: uuid.UUID, + admin: User = Depends(require_admin), + db: AsyncSession = Depends(get_db), +): + result = await db.execute(select(User).where(User.id == user_id)) + user = result.scalar_one_or_none() + if not user: + raise HTTPException(404, detail="User not found") + if user.id == admin.id: + raise HTTPException(400, detail="Cannot delete yourself") + await db.delete(user) + await db.commit() + + +# ── System Settings ────────────────────────────────────────────────────────── + +async def _load_settings(db: AsyncSession) -> dict[str, str]: + """Load all system settings, filling missing keys with defaults.""" + result = await db.execute(select(SystemSetting)) + stored = {row.key: row.value for row in result.scalars().all()} + return {k: stored.get(k, v) for k, v in SETTINGS_DEFAULTS.items()} + + +async def _save_setting(db: AsyncSession, key: str, value: str) -> None: + result = await db.execute( + sql_update(SystemSetting) + .where(SystemSetting.key == key) + 
.values(value=value, updated_at=datetime.utcnow()) + ) + if result.rowcount == 0: + db.add(SystemSetting(key=key, value=value, updated_at=datetime.utcnow())) + + +def _settings_to_out(raw: dict[str, str]) -> SettingsOut: + return SettingsOut( + thumbnail_renderer=raw["thumbnail_renderer"], + blender_engine=raw["blender_engine"], + blender_cycles_samples=int(raw["blender_cycles_samples"]), + blender_eevee_samples=int(raw["blender_eevee_samples"]), + threejs_render_size=int(raw["threejs_render_size"]), + thumbnail_format=raw["thumbnail_format"], + stl_quality=raw["stl_quality"], + blender_smooth_angle=int(raw["blender_smooth_angle"]), + cycles_device=raw["cycles_device"], + render_backend=raw["render_backend"], + flamenco_manager_url=raw["flamenco_manager_url"], + flamenco_worker_count=int(raw["flamenco_worker_count"]), + blender_max_concurrent_renders=int(raw["blender_max_concurrent_renders"]), + product_thumbnail_priority=raw.get("product_thumbnail_priority", '["latest_render","cad_thumbnail"]'), + render_stall_timeout_minutes=int(raw.get("render_stall_timeout_minutes", "120")), + ) + + +@router.get("/settings", response_model=SettingsOut) +async def get_settings( + admin: User = Depends(require_admin), + db: AsyncSession = Depends(get_db), +): + return _settings_to_out(await _load_settings(db)) + + +@router.put("/settings", response_model=SettingsOut) +async def update_settings( + body: SettingsUpdate, + admin: User = Depends(require_admin), + db: AsyncSession = Depends(get_db), +): + if body.thumbnail_renderer is not None and body.thumbnail_renderer not in VALID_RENDERERS: + raise HTTPException(400, detail=f"Invalid renderer. Choose: {', '.join(sorted(VALID_RENDERERS))}") + if body.blender_engine is not None and body.blender_engine not in VALID_ENGINES: + raise HTTPException(400, detail=f"Invalid engine. Choose: {', '.join(sorted(VALID_ENGINES))}") + if body.blender_cycles_samples is not None and not (1 <= body.blender_cycles_samples <= 4096): + raise HTTPException(400, detail="blender_cycles_samples must be 1–4096") + if body.blender_eevee_samples is not None and not (1 <= body.blender_eevee_samples <= 1024): + raise HTTPException(400, detail="blender_eevee_samples must be 1–1024") + if body.threejs_render_size is not None and body.threejs_render_size not in VALID_THREEJS_SIZES: + raise HTTPException(400, detail=f"Invalid threejs_render_size. Choose: {', '.join(str(s) for s in sorted(VALID_THREEJS_SIZES))}") + if body.thumbnail_format is not None and body.thumbnail_format not in VALID_FORMATS: + raise HTTPException(400, detail=f"Invalid thumbnail_format. Choose: {', '.join(sorted(VALID_FORMATS))}") + if body.stl_quality is not None and body.stl_quality not in VALID_STL_QUALITIES: + raise HTTPException(400, detail=f"Invalid stl_quality. Choose: {', '.join(sorted(VALID_STL_QUALITIES))}") + if body.blender_smooth_angle is not None and not (0 <= body.blender_smooth_angle <= 180): + raise HTTPException(400, detail="blender_smooth_angle must be 0–180 degrees") + if body.cycles_device is not None and body.cycles_device not in VALID_CYCLES_DEVICES: + raise HTTPException(400, detail=f"Invalid cycles_device. Choose: {', '.join(sorted(VALID_CYCLES_DEVICES))}") + if body.render_backend is not None and body.render_backend not in VALID_RENDER_BACKENDS: + raise HTTPException(400, detail=f"Invalid render_backend. 
Choose: {', '.join(sorted(VALID_RENDER_BACKENDS))}") + if body.flamenco_worker_count is not None and not (1 <= body.flamenco_worker_count <= 16): + raise HTTPException(400, detail="flamenco_worker_count must be 1–16") + if body.blender_max_concurrent_renders is not None and not (1 <= body.blender_max_concurrent_renders <= 16): + raise HTTPException(400, detail="blender_max_concurrent_renders must be 1–16") + if body.render_stall_timeout_minutes is not None and not (10 <= body.render_stall_timeout_minutes <= 10080): + raise HTTPException(400, detail="render_stall_timeout_minutes must be 10–10080 (10 min to 1 week)") + if body.product_thumbnail_priority is not None: + try: + entries = json.loads(body.product_thumbnail_priority) + if not isinstance(entries, list): + raise ValueError + except (json.JSONDecodeError, ValueError): + raise HTTPException(400, detail="product_thumbnail_priority must be a valid JSON array") + valid_literals = {"cad_thumbnail", "latest_render"} + for entry in entries: + if entry not in valid_literals: + try: + ot_id = uuid.UUID(entry) + except ValueError: + raise HTTPException(400, detail=f"Invalid priority entry '{entry}': must be 'cad_thumbnail', 'latest_render', or a valid output type UUID") + ot_row = await db.execute(select(OutputTypeModel).where(OutputTypeModel.id == ot_id)) + if not ot_row.scalar_one_or_none(): + raise HTTPException(400, detail=f"Output type '{entry}' not found") + + updates: dict[str, str] = {} + if body.thumbnail_renderer is not None: + updates["thumbnail_renderer"] = body.thumbnail_renderer + if body.blender_engine is not None: + updates["blender_engine"] = body.blender_engine + if body.blender_cycles_samples is not None: + updates["blender_cycles_samples"] = str(body.blender_cycles_samples) + if body.blender_eevee_samples is not None: + updates["blender_eevee_samples"] = str(body.blender_eevee_samples) + if body.threejs_render_size is not None: + updates["threejs_render_size"] = str(body.threejs_render_size) + if body.thumbnail_format is not None: + updates["thumbnail_format"] = body.thumbnail_format + if body.stl_quality is not None: + updates["stl_quality"] = body.stl_quality + if body.blender_smooth_angle is not None: + updates["blender_smooth_angle"] = str(body.blender_smooth_angle) + if body.cycles_device is not None: + updates["cycles_device"] = body.cycles_device + if body.render_backend is not None: + updates["render_backend"] = body.render_backend + if body.flamenco_manager_url is not None: + updates["flamenco_manager_url"] = body.flamenco_manager_url + if body.flamenco_worker_count is not None: + updates["flamenco_worker_count"] = str(body.flamenco_worker_count) + if body.blender_max_concurrent_renders is not None: + updates["blender_max_concurrent_renders"] = str(body.blender_max_concurrent_renders) + if body.render_stall_timeout_minutes is not None: + updates["render_stall_timeout_minutes"] = str(body.render_stall_timeout_minutes) + if body.product_thumbnail_priority is not None: + updates["product_thumbnail_priority"] = body.product_thumbnail_priority + + for k, v in updates.items(): + await _save_setting(db, k, v) + await db.commit() + + # Propagate concurrency limit to blender-renderer immediately (no restart needed) + if body.blender_max_concurrent_renders is not None: + try: + import httpx + async with httpx.AsyncClient(timeout=3.0) as client: + await client.post( + "http://blender-renderer:8100/configure", + params={"max_concurrent": body.blender_max_concurrent_renders}, + ) + except Exception: + pass # best-effort; 
setting is persisted in DB regardless + + return _settings_to_out(await _load_settings(db)) + + +@router.post("/settings/process-unprocessed", status_code=status.HTTP_202_ACCEPTED) +async def process_unprocessed_steps( + admin: User = Depends(require_admin), + db: AsyncSession = Depends(get_db), +): + """Queue all STEP files that are not yet completed. + + Queues pending and failed files immediately. Files stuck in 'processing' + for more than 15 minutes (i.e. their worker task was killed or lost) are + also recovered. Actively-processing files (updated within the last 15 min) + are left alone to avoid duplicate task execution on the same file. + """ + from datetime import datetime, timedelta + stuck_cutoff = datetime.utcnow() - timedelta(minutes=15) + result = await db.execute( + select(CadFile).where( + CadFile.stored_path.isnot(None), + # pending/failed always, plus processing-but-stale (stuck) + ( + CadFile.processing_status.in_([ + ProcessingStatus.pending, + ProcessingStatus.failed, + ]) | + ( + (CadFile.processing_status == ProcessingStatus.processing) & + (CadFile.updated_at < stuck_cutoff) + ) + ), + ) + ) + cad_files = result.scalars().all() + + from app.tasks.step_tasks import process_step_file + queued = 0 + for cad_file in cad_files: + cad_file.processing_status = ProcessingStatus.pending + process_step_file.delay(str(cad_file.id)) + queued += 1 + await db.commit() + + return {"queued": queued, "message": f"Queued {queued} STEP file(s) for processing"} + + +@router.post("/settings/regenerate-thumbnails", status_code=status.HTTP_202_ACCEPTED) +async def regenerate_thumbnails( + admin: User = Depends(require_admin), + db: AsyncSession = Depends(get_db), +): + """Re-queue all completed CAD files for thumbnail regeneration.""" + result = await db.execute( + select(CadFile).where(CadFile.processing_status == ProcessingStatus.completed) + ) + cad_files = result.scalars().all() + + from app.tasks.step_tasks import render_step_thumbnail + queued = 0 + for cad_file in cad_files: + render_step_thumbnail.delay(str(cad_file.id)) + queued += 1 + + return {"queued": queued, "message": f"Re-queued {queued} CAD file(s) for thumbnail regeneration"} + + +@router.post("/settings/generate-missing-stls", status_code=status.HTTP_202_ACCEPTED) +async def generate_missing_stls( + admin: User = Depends(require_admin), + db: AsyncSession = Depends(get_db), +): + """Queue STL generation for every quality missing from each completed CAD file.""" + from pathlib import Path as _Path + result = await db.execute( + select(CadFile).where(CadFile.processing_status == ProcessingStatus.completed) + ) + cad_files = result.scalars().all() + + from app.tasks.step_tasks import generate_stl_cache + queued = 0 + for cad_file in cad_files: + if not cad_file.stored_path: + continue + step = _Path(cad_file.stored_path) + for quality in ("low", "high"): + if not (step.parent / f"{step.stem}_{quality}.stl").exists(): + generate_stl_cache.delay(str(cad_file.id), quality) + queued += 1 + + return {"queued": queued, "message": f"Queued {queued} missing STL generation task(s)"} + + +@router.get("/settings/renderer-status") +async def renderer_status( + admin: User = Depends(require_admin), +): + """Check health of external renderer services.""" + import httpx + services = { + "pillow": {"url": None, "available": True, "note": "Built-in (always available)"}, + "blender": {"url": "http://blender-renderer:8100/health", "available": False, "note": ""}, + "threejs": {"url": "http://threejs-renderer:8101/health", "available": False, 
"note": ""}, + } + async with httpx.AsyncClient(timeout=3.0) as client: + for name, info in services.items(): + if info["url"] is None: + continue + try: + resp = await client.get(info["url"]) + if resp.status_code == 200: + data = resp.json() + services[name]["available"] = True + services[name]["note"] = data.get("renderer", name) + except Exception as e: + services[name]["note"] = str(e)[:100] + return services + + +@router.get("/settings/flamenco-status") +async def flamenco_status( + admin: User = Depends(require_admin), + db: AsyncSession = Depends(get_db), +): + """Check Flamenco Manager health and list workers.""" + raw = await _load_settings(db) + manager_url = raw.get("flamenco_manager_url", "http://flamenco-manager:8080") + + from app.services.flamenco_client import get_flamenco_client + client = get_flamenco_client(manager_url) + + health = client.health_check() + workers: list[dict] = [] + + if health["available"]: + try: + workers = client.list_workers() + except Exception as exc: + workers = [{"error": str(exc)[:200]}] + + return { + "manager": health, + "workers": workers, + "manager_url": manager_url, + } + + +class WorkerCountBody(BaseModel): + count: int + + +@router.get("/settings/flamenco-worker-actual") +async def get_flamenco_worker_actual(admin: User = Depends(require_admin)): + """Return the number of flamenco-worker containers currently running.""" + from app.services.docker_scaler import get_running_worker_count + count = await asyncio.get_event_loop().run_in_executor(None, get_running_worker_count) + return {"running": count, "available": count >= 0} + + +@router.post("/settings/flamenco-worker-count") +async def set_flamenco_worker_count( + body: WorkerCountBody, + admin: User = Depends(require_admin), + db: AsyncSession = Depends(get_db), +): + """Scale Flamenco worker containers to the requested count via Docker socket.""" + if not (1 <= body.count <= 16): + raise HTTPException(400, detail="Worker count must be 1–16") + + # Save desired count to settings first + await _save_setting(db, "flamenco_worker_count", str(body.count)) + await db.commit() + + # Perform actual Docker scaling in a thread (blocking SDK call) + from app.services.docker_scaler import scale_workers + try: + result = await asyncio.get_event_loop().run_in_executor(None, scale_workers, body.count) + return { + "count": body.count, + "previous": result["previous"], + "current": result["current"], + "delta": result["delta"], + "message": result["message"], + } + except Exception as exc: + # Scaling failed — return a warning but keep the saved setting + return { + "count": body.count, + "previous": -1, + "current": -1, + "delta": 0, + "message": f"Setting saved, but Docker scaling failed: {exc}. 
" + f"Run `docker compose up -d --scale flamenco-worker={body.count}` manually.", + } diff --git a/backend/app/api/routers/analytics.py b/backend/app/api/routers/analytics.py new file mode 100644 index 0000000..7f96e0a --- /dev/null +++ b/backend/app/api/routers/analytics.py @@ -0,0 +1,200 @@ +"""Analytics router — KPI dashboard endpoints.""" +from datetime import date, timedelta +from typing import Optional +from fastapi import APIRouter, Depends, Query +from sqlalchemy.ext.asyncio import AsyncSession +from pydantic import BaseModel + +from app.database import get_db +from app.utils.auth import require_admin_or_pm +from app.models.user import User +from app.services import kpi_service + +router = APIRouter(prefix="/analytics", tags=["analytics"]) + + +# ── Response models ──────────────────────────────────────────────────────────── + +class ThroughputPoint(BaseModel): + week: str + count: int + completed: int + + +class RevenuePoint(BaseModel): + month: str + revenue: float + order_count: int + + +class ProcessingTimeStats(BaseModel): + avg_submit_to_complete_s: Optional[float] + avg_submit_to_processing_s: Optional[float] + p50_s: Optional[float] + p95_s: Optional[float] + + +class ItemStatusBreakdown(BaseModel): + pending: int + approved: int + rejected: int + + +class RenderTimeBreakdown(BaseModel): + avg_stl_s: Optional[float] + avg_render_s: Optional[float] + avg_total_s: Optional[float] + sample_count: int + + +class TopLevelSummary(BaseModel): + total_orders: int + completed_orders: int + total_revenue: float + total_rendering_items: int + + +class CategoryCount(BaseModel): + category: str + count: int + + +class ProductCategoryStats(BaseModel): + unique_products_rendered: int + total_products: int + products_with_cad: int + products_by_category: list[CategoryCount] + + +class OutputTypeUsagePoint(BaseModel): + output_type: str + count: int + + +class RenderStatusDistribution(BaseModel): + pending: int + processing: int + completed: int + failed: int + + +class RendererUsagePoint(BaseModel): + renderer: str + count: int + + +class TopProductEntry(BaseModel): + pim_id: str + product_name: Optional[str] + category: str + order_count: int + + +class CategoryRevenueEntry(BaseModel): + category: str + order_count: int + revenue: float + + +class RenderBackendStatsEntry(BaseModel): + backend: str + total: int + completed: int + failed: int + avg_render_s: float | None + p50_render_s: float | None + + +class RenderTimeByOutputType(BaseModel): + output_type: str + job_count: int + avg_render_s: float | None + min_render_s: float | None + max_render_s: float | None + p50_render_s: float | None + + +class OrdersByUserEntry(BaseModel): + full_name: str + email: str + role: str + order_count: int + revenue: float + + +class DashboardKPIs(BaseModel): + summary: TopLevelSummary + throughput: list[ThroughputPoint] + revenue: list[RevenuePoint] + processing_times: ProcessingTimeStats + item_status: ItemStatusBreakdown + render_times: RenderTimeBreakdown + product_stats: ProductCategoryStats + output_type_usage: list[OutputTypeUsagePoint] + render_status: RenderStatusDistribution + renderer_usage: list[RendererUsagePoint] + top_products: list[TopProductEntry] + orders_by_user: list[OrdersByUserEntry] + category_revenue: list[CategoryRevenueEntry] + render_backend_stats: list[RenderBackendStatsEntry] + render_time_by_output_type: list[RenderTimeByOutputType] + + +# ── Endpoints ────────────────────────────────────────────────────────────────── + +@router.get("/dashboard", 
response_model=DashboardKPIs) +async def get_dashboard_kpis( + date_from: date | None = Query(None, description="Start date (ISO), e.g. 2025-01-01"), + date_to: date | None = Query(None, description="End date (ISO), e.g. 2025-06-30"), + _user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +) -> DashboardKPIs: + """Aggregate KPI data for the analytics dashboard.""" + # Default: last 12 weeks + if date_to is None: + date_to = date.today() + if date_from is None: + date_from = date_to - timedelta(weeks=12) + + df = date_from.isoformat() + dt = date_to.isoformat() + + summary_data, throughput_data, revenue_data, proc_data, item_data, render_data = ( + await kpi_service.top_level_summary(db, df, dt), + await kpi_service.order_throughput_by_week(db, df, dt), + await kpi_service.revenue_overview(db, df, dt), + await kpi_service.processing_time_stats(db, df, dt), + await kpi_service.item_status_breakdown(db, df, dt), + await kpi_service.render_time_breakdown(db, df, dt), + ) + + product_stats_data = await kpi_service.product_and_category_stats(db, df, dt) + ot_usage_data = await kpi_service.output_type_usage(db, df, dt) + render_status_data, renderer_usage_data = await kpi_service.render_status_distribution(db, df, dt) + top_products_data = await kpi_service.top_products(db, df, dt) + cat_revenue_data = await kpi_service.category_revenue(db, df, dt) + users_data = await kpi_service.orders_by_user(db, df, dt) + backend_stats_data = await kpi_service.render_backend_stats(db, df, dt) + render_time_ot_data = await kpi_service.render_time_by_output_type(db, df, dt) + + return DashboardKPIs( + summary=TopLevelSummary(**summary_data), + throughput=[ThroughputPoint(**p) for p in throughput_data], + revenue=[RevenuePoint(**p) for p in revenue_data], + processing_times=ProcessingTimeStats(**proc_data), + item_status=ItemStatusBreakdown(**item_data), + render_times=RenderTimeBreakdown(**render_data), + product_stats=ProductCategoryStats( + **{**product_stats_data, "products_by_category": [ + CategoryCount(**c) for c in product_stats_data["products_by_category"] + ]} + ), + output_type_usage=[OutputTypeUsagePoint(**p) for p in ot_usage_data], + render_status=RenderStatusDistribution(**render_status_data), + renderer_usage=[RendererUsagePoint(**p) for p in renderer_usage_data], + top_products=[TopProductEntry(**p) for p in top_products_data], + orders_by_user=[OrdersByUserEntry(**p) for p in users_data], + category_revenue=[CategoryRevenueEntry(**p) for p in cat_revenue_data], + render_backend_stats=[RenderBackendStatsEntry(**p) for p in backend_stats_data], + render_time_by_output_type=[RenderTimeByOutputType(**p) for p in render_time_ot_data], + ) diff --git a/backend/app/api/routers/auth.py b/backend/app/api/routers/auth.py new file mode 100644 index 0000000..430049b --- /dev/null +++ b/backend/app/api/routers/auth.py @@ -0,0 +1,47 @@ +from fastapi import APIRouter, Depends, HTTPException, status +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select + +from app.database import get_db +from app.models.user import User +from app.schemas.user import UserCreate, UserOut, TokenResponse, LoginRequest +from app.utils.auth import hash_password, verify_password, create_access_token, get_current_user + +router = APIRouter(prefix="/auth", tags=["auth"]) + + +@router.post("/register", response_model=UserOut, status_code=status.HTTP_201_CREATED) +async def register(body: UserCreate, db: AsyncSession = Depends(get_db)): + """Register a new user (admin-initiated in 
production).""" + result = await db.execute(select(User).where(User.email == body.email)) + if result.scalar_one_or_none(): + raise HTTPException(status_code=400, detail="Email already registered") + + user = User( + email=body.email, + password_hash=hash_password(body.password), + full_name=body.full_name, + role=body.role, + ) + db.add(user) + await db.commit() + await db.refresh(user) + return user + + +@router.post("/login", response_model=TokenResponse) +async def login(body: LoginRequest, db: AsyncSession = Depends(get_db)): + result = await db.execute(select(User).where(User.email == body.email)) + user = result.scalar_one_or_none() + if not user or not verify_password(body.password, user.password_hash): + raise HTTPException(status_code=401, detail="Invalid credentials") + if not user.is_active: + raise HTTPException(status_code=403, detail="Account disabled") + + token = create_access_token(str(user.id), user.role.value) + return TokenResponse(access_token=token, user=UserOut.model_validate(user)) + + +@router.get("/me", response_model=UserOut) +async def me(user: User = Depends(get_current_user)): + return user diff --git a/backend/app/api/routers/cad.py b/backend/app/api/routers/cad.py new file mode 100644 index 0000000..4163d9b --- /dev/null +++ b/backend/app/api/routers/cad.py @@ -0,0 +1,360 @@ +"""CAD file router - serve thumbnails, glTF models, parsed objects, and trigger reprocessing.""" +import uuid +from datetime import datetime +from pathlib import Path + +from fastapi import APIRouter, Depends, HTTPException, status +from fastapi.responses import FileResponse +from pydantic import BaseModel +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select +from sqlalchemy.orm import selectinload + +from app.database import get_db +from app.models.cad_file import CadFile, ProcessingStatus +from app.models.order import Order +from app.models.order_item import OrderItem +from app.models.user import User +from app.utils.auth import get_current_user +from app.services.product_service import link_cad_to_product, lookup_product + +router = APIRouter(prefix="/cad", tags=["cad"]) + + +# --------------------------------------------------------------------------- +# Schemas for match-to-order +# --------------------------------------------------------------------------- + +class MatchToOrderRequest(BaseModel): + order_id: uuid.UUID + cad_file_ids: list[str] + + +class MatchedItem(BaseModel): + item_id: str + cad_file_id: str + item_name: str + cad_name: str + + +class MatchToOrderResponse(BaseModel): + matched: list[MatchedItem] + unmatched_cad: list[str] + unmatched_items: list[str] + + +# --------------------------------------------------------------------------- +# Matching helper +# --------------------------------------------------------------------------- + +def _normalize_stem(name: str) -> str: + """Lowercase stem, strip .stp/.step extension for comparison.""" + stem = name.strip() + for ext in (".step", ".stp"): + if stem.lower().endswith(ext): + stem = stem[: -len(ext)] + break + return stem.lower() + + +# --------------------------------------------------------------------------- +# Endpoints +# --------------------------------------------------------------------------- + +@router.post("/match-to-order", response_model=MatchToOrderResponse) +async def match_cad_files_to_order( + body: MatchToOrderRequest, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """ + Match uploaded CAD files to order items by filename 
similarity. + + For each CAD file, compares the stem of original_name (case-insensitive, + .stp/.step normalised) to the stem of each item's name_cad_modell field. + Updates order_item.cad_file_id for successful matches. + """ + # Load order with items + order_result = await db.execute( + select(Order) + .where(Order.id == body.order_id) + .options(selectinload(Order.items)) + ) + order = order_result.scalar_one_or_none() + if not order: + raise HTTPException(404, detail="Order not found") + if user.role.value != "admin" and order.created_by != user.id: + raise HTTPException(403, detail="Access denied") + + # Parse and validate CAD file IDs + cad_uuids: list[uuid.UUID] = [] + for raw_id in body.cad_file_ids: + try: + cad_uuids.append(uuid.UUID(raw_id)) + except ValueError: + raise HTTPException(400, detail=f"Invalid cad_file_id: {raw_id}") + + # Load CAD files from DB + cad_result = await db.execute( + select(CadFile).where(CadFile.id.in_(cad_uuids)) + ) + cad_files: list[CadFile] = list(cad_result.scalars().all()) + + found_ids = {str(cf.id) for cf in cad_files} + missing = [i for i in body.cad_file_ids if i not in found_ids] + if missing: + raise HTTPException(404, detail=f"CAD files not found: {missing}") + + # Build lookup: normalized stem -> first OrderItem with that stem + items: list[OrderItem] = order.items + item_by_stem: dict[str, OrderItem] = {} + for item in items: + if item.name_cad_modell: + stem = _normalize_stem(item.name_cad_modell) + if stem not in item_by_stem: + item_by_stem[stem] = item + + matched: list[MatchedItem] = [] + unmatched_cad: list[str] = [] + matched_item_ids: set[str] = set() + + for cad_file in cad_files: + cad_stem = _normalize_stem(cad_file.original_name or "") + if cad_stem in item_by_stem: + item = item_by_stem[cad_stem] + item.cad_file_id = cad_file.id + item.updated_at = datetime.utcnow() + matched.append( + MatchedItem( + item_id=str(item.id), + cad_file_id=str(cad_file.id), + item_name=item.name_cad_modell or "", + cad_name=cad_file.original_name or "", + ) + ) + matched_item_ids.add(str(item.id)) + + # Propagate the STEP link to the product so that: + # (a) the render pipeline can find it via product.cad_file_id + # (b) future orders for the same product inherit the STEP automatically + # (c) the split-missing-step correctly identifies which products have STEP + try: + product = await lookup_product(db, item.pim_id, item.produkt_baureihe) + if product and product.cad_file_id is None: + await link_cad_to_product(db, product.id, cad_file.id) + except Exception: + pass # non-critical — item link already set above + else: + unmatched_cad.append(str(cad_file.id)) + + await db.commit() + + unmatched_items = [ + str(item.id) + for item in items + if str(item.id) not in matched_item_ids + ] + + return MatchToOrderResponse( + matched=matched, + unmatched_cad=unmatched_cad, + unmatched_items=unmatched_items, + ) + + +# --------------------------------------------------------------------------- +# Helper +# --------------------------------------------------------------------------- + +async def _get_cad_file(cad_id: uuid.UUID, db: AsyncSession) -> CadFile: + result = await db.execute(select(CadFile).where(CadFile.id == cad_id)) + cad = result.scalar_one_or_none() + if not cad: + raise HTTPException(status_code=404, detail="CAD file not found") + return cad + + +@router.get("/{id}/thumbnail") +async def get_thumbnail( + id: uuid.UUID, + db: AsyncSession = Depends(get_db), +): + """Serve the thumbnail image for a CAD file (no auth — UUID is opaque 
enough).""" + cad = await _get_cad_file(id, db) + + if not cad.thumbnail_path: + raise HTTPException(404, detail="Thumbnail not yet generated for this CAD file") + + thumb_path = Path(cad.thumbnail_path) + if not thumb_path.exists(): + raise HTTPException(404, detail="Thumbnail file missing from storage") + + ext = thumb_path.suffix.lower() + media_type = "image/jpeg" if ext in (".jpg", ".jpeg") else "image/png" + + return FileResponse( + path=str(thumb_path), + media_type=media_type, + filename=f"{id}{ext}", + ) + + +@router.get("/{id}/model") +async def get_model( + id: uuid.UUID, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Serve the glTF file for a CAD file.""" + cad = await _get_cad_file(id, db) + + if not cad.gltf_path: + raise HTTPException( + status_code=404, + detail="glTF model not yet generated for this CAD file", + ) + + gltf_path = Path(cad.gltf_path) + if not gltf_path.exists(): + raise HTTPException( + status_code=404, + detail="glTF file missing from storage", + ) + + # glTF files may be either .gltf (JSON) or .glb (binary) + suffix = gltf_path.suffix.lower() + if suffix == ".glb": + media_type = "model/gltf-binary" + else: + media_type = "model/gltf+json" + + return FileResponse( + path=str(gltf_path), + media_type=media_type, + filename=f"{id}{suffix}", + ) + + +@router.get("/{id}/objects") +async def get_objects( + id: uuid.UUID, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Return the parsed_objects JSON extracted from the STEP file.""" + cad = await _get_cad_file(id, db) + + if cad.parsed_objects is None: + raise HTTPException( + status_code=404, + detail="Parsed objects not yet available for this CAD file", + ) + + return { + "cad_file_id": str(cad.id), + "original_name": cad.original_name, + "processing_status": cad.processing_status.value, + "parsed_objects": cad.parsed_objects, + } + + +@router.get("/{id}/stl/{quality}") +async def download_stl( + id: uuid.UUID, + quality: str, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Download the cached STL for a CAD file with a human-readable filename. + + The STL is cached next to the STEP file on first render. + quality must be 'low' or 'high'. + """ + if quality not in ("low", "high"): + raise HTTPException(400, detail="quality must be 'low' or 'high'") + + cad = await _get_cad_file(id, db) + + if not cad.stored_path: + raise HTTPException(404, detail="STEP file not uploaded for this CAD file") + + step_path = Path(cad.stored_path) + stl_path = step_path.parent / f"{step_path.stem}_{quality}.stl" + + if not stl_path.exists(): + raise HTTPException( + 404, + detail=f"STL cache not found for quality '{quality}'. 
Trigger a render first to generate it.", + ) + + original_stem = Path(cad.original_name or "model").stem + filename = f"{original_stem}_{quality}.stl" + + return FileResponse( + path=str(stl_path), + media_type="application/octet-stream", + filename=filename, + ) + + +@router.post("/{id}/generate-stl/{quality}", status_code=status.HTTP_202_ACCEPTED) +async def generate_stl( + id: uuid.UUID, + quality: str, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Queue STL generation for the given quality without triggering a full render.""" + if user.role.value not in ("admin", "project_manager"): + raise HTTPException(status_code=403, detail="Insufficient permissions") + if quality not in ("low", "high"): + raise HTTPException(status_code=400, detail="quality must be 'low' or 'high'") + + cad = await _get_cad_file(id, db) + + if not cad.stored_path: + raise HTTPException(status_code=404, detail="STEP file not uploaded for this CAD file") + + from app.tasks.step_tasks import generate_stl_cache + task = generate_stl_cache.delay(str(id), quality) + return {"status": "queued", "task_id": task.id, "quality": quality} + + +@router.post( + "/{id}/regenerate-thumbnail", + status_code=status.HTTP_202_ACCEPTED, +) +async def regenerate_thumbnail( + id: uuid.UUID, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Queue a Celery task to reprocess the STEP file and regenerate its thumbnail.""" + if user.role.value != "admin": + raise HTTPException( + status_code=403, + detail="Only admins can trigger thumbnail regeneration", + ) + + cad = await _get_cad_file(id, db) + + # Reset processing status so the worker will reprocess + cad.processing_status = ProcessingStatus.pending + await db.commit() + + # Enqueue Celery task + task_id: str | None = None + try: + from app.tasks.step_tasks import process_step_file + result = process_step_file.delay(str(cad.id)) + task_id = result.id + except Exception: + # Worker may not be running; status is already reset so it will pick up later + pass + + return { + "cad_file_id": str(cad.id), + "original_name": cad.original_name, + "status": "queued", + "task_id": task_id, + } diff --git a/backend/app/api/routers/materials.py b/backend/app/api/routers/materials.py new file mode 100644 index 0000000..2246aaa --- /dev/null +++ b/backend/app/api/routers/materials.py @@ -0,0 +1,326 @@ +"""Materials router — CRUD for the shared material library.""" +import uuid +from datetime import datetime +from typing import Optional, Literal + +from fastapi import APIRouter, Depends, HTTPException, status +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select, func +from sqlalchemy.orm import selectinload +from pydantic import BaseModel + +from app.database import get_db +from app.models.material import Material +from app.models.material_alias import MaterialAlias +from app.models.user import User +from app.utils.auth import get_current_user, require_admin_or_pm + +router = APIRouter(prefix="/materials", tags=["materials"]) + + +class MaterialOut(BaseModel): + id: uuid.UUID + name: str + description: str | None + source: str + schaeffler_code: int | None = None + created_by_name: str | None = None + aliases: list[str] = [] + created_at: datetime + updated_at: datetime + model_config = {"from_attributes": True} + + +class MaterialAliasOut(BaseModel): + id: uuid.UUID + alias: str + created_at: datetime + model_config = {"from_attributes": True} + + +class MaterialCreate(BaseModel): + name: str + 
description: str | None = None + source: str = "manual" + schaeffler_code: int | None = None + + +class MaterialUpdate(BaseModel): + name: str | None = None + description: str | None = None + + +class AliasCreate(BaseModel): + alias: str + + +def _to_out(mat: Material) -> MaterialOut: + creator_name = None + if mat.creator is not None: + creator_name = mat.creator.full_name or mat.creator.email + alias_names = [a.alias for a in mat.aliases] if mat.aliases else [] + return MaterialOut( + id=mat.id, + name=mat.name, + description=mat.description, + source=mat.source, + schaeffler_code=mat.schaeffler_code, + created_by_name=creator_name, + aliases=alias_names, + created_at=mat.created_at, + updated_at=mat.updated_at, + ) + + +# --- Static-path endpoints (before /{material_id}) --- + + +@router.get("/next-code") +async def get_next_code( + type_prefix: str, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Find the next available consecutive number for a given type+subtype prefix. + + type_prefix is the 4-digit prefix e.g. "0101" for Metals/Steel. + Returns {"next_code": 10106, "prefix": "0101", "next_consecutive": 6} + """ + if len(type_prefix) != 4 or not type_prefix.isdigit(): + raise HTTPException(400, "type_prefix must be exactly 4 digits") + + prefix_int = int(type_prefix) * 100 # e.g. "0101" -> 10100 + range_start = prefix_int + range_end = prefix_int + 99 + + result = await db.execute( + select(func.max(Material.schaeffler_code)).where( + Material.schaeffler_code >= range_start, + Material.schaeffler_code <= range_end, + ) + ) + max_code = result.scalar_one_or_none() + + if max_code is None: + next_consecutive = 1 + else: + next_consecutive = (max_code % 100) + 1 + + return { + "next_code": prefix_int + next_consecutive, + "prefix": type_prefix, + "next_consecutive": next_consecutive, + } + + +@router.post("/seed-schaeffler") +async def seed_schaeffler_materials( + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + """Bulk-create the 35 standard Schaeffler materials. Skips existing by name.""" + from app.data.schaeffler_materials import SCHAEFFLER_MATERIALS + + inserted = 0 + for mat_data in SCHAEFFLER_MATERIALS: + existing = await db.execute( + select(Material).where(Material.name == mat_data["name"]) + ) + if existing.scalar_one_or_none(): + continue + mat = Material( + name=mat_data["name"], + description=mat_data["description"], + source=mat_data["source"], + schaeffler_code=mat_data["schaeffler_code"], + created_by=user.id, + ) + db.add(mat) + inserted += 1 + + await db.commit() + return {"inserted": inserted, "total": len(SCHAEFFLER_MATERIALS)} + + +@router.post("/seed-aliases") +async def seed_aliases( + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + """Bulk-seed aliases from naming_scheme.xlsx Materialmapping data. 
Skips existing.""" + from app.data.material_alias_seeds import MATERIAL_ALIAS_SEEDS + + inserted = 0 + total = 0 + for entry in MATERIAL_ALIAS_SEEDS: + mat_result = await db.execute( + select(Material).where(Material.name == entry["material_name"]) + ) + mat = mat_result.scalar_one_or_none() + if not mat: + continue + + for alias_str in entry["aliases"]: + total += 1 + existing = await db.execute( + select(MaterialAlias).where(func.lower(MaterialAlias.alias) == alias_str.lower()) + ) + if existing.scalar_one_or_none(): + continue + db.add(MaterialAlias(material_id=mat.id, alias=alias_str)) + inserted += 1 + + await db.commit() + return {"inserted": inserted, "total": total} + + +@router.delete("/aliases/{alias_id}", status_code=status.HTTP_204_NO_CONTENT) +async def delete_alias( + alias_id: uuid.UUID, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + result = await db.execute(select(MaterialAlias).where(MaterialAlias.id == alias_id)) + alias_obj = result.scalar_one_or_none() + if not alias_obj: + raise HTTPException(404, detail="Alias not found") + await db.delete(alias_obj) + await db.commit() + + +# --- Standard CRUD --- + + +@router.get("", response_model=list[MaterialOut]) +async def list_materials( + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + result = await db.execute( + select(Material) + .options(selectinload(Material.creator), selectinload(Material.aliases)) + .order_by(Material.name) + ) + return [_to_out(m) for m in result.scalars().all()] + + +@router.post("", response_model=MaterialOut, status_code=status.HTTP_201_CREATED) +async def create_material( + body: MaterialCreate, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + existing = await db.execute(select(Material).where(Material.name == body.name)) + if existing.scalar_one_or_none(): + raise HTTPException(400, detail=f"Material '{body.name}' already exists") + mat = Material( + name=body.name, + description=body.description, + source=body.source, + schaeffler_code=body.schaeffler_code, + created_by=user.id, + ) + db.add(mat) + await db.commit() + await db.refresh(mat) + result = await db.execute( + select(Material) + .options(selectinload(Material.creator), selectinload(Material.aliases)) + .where(Material.id == mat.id) + ) + return _to_out(result.scalar_one()) + + +@router.patch("/{material_id}", response_model=MaterialOut) +async def update_material( + material_id: uuid.UUID, + body: MaterialUpdate, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + result = await db.execute( + select(Material) + .options(selectinload(Material.creator), selectinload(Material.aliases)) + .where(Material.id == material_id) + ) + mat = result.scalar_one_or_none() + if not mat: + raise HTTPException(404, detail="Material not found") + if body.name is not None: + mat.name = body.name + if body.description is not None: + mat.description = body.description + mat.updated_at = datetime.utcnow() + await db.commit() + await db.refresh(mat) + result2 = await db.execute( + select(Material) + .options(selectinload(Material.creator), selectinload(Material.aliases)) + .where(Material.id == mat.id) + ) + return _to_out(result2.scalar_one()) + + +@router.delete("/{material_id}", status_code=status.HTTP_204_NO_CONTENT) +async def delete_material( + material_id: uuid.UUID, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + result = await 
db.execute(select(Material).where(Material.id == material_id)) + mat = result.scalar_one_or_none() + if not mat: + raise HTTPException(404, detail="Material not found") + await db.delete(mat) + await db.commit() + + +# --- Alias sub-resource endpoints --- + + +@router.get("/{material_id}/aliases", response_model=list[MaterialAliasOut]) +async def list_aliases( + material_id: uuid.UUID, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + result = await db.execute( + select(MaterialAlias) + .where(MaterialAlias.material_id == material_id) + .order_by(MaterialAlias.alias) + ) + return [MaterialAliasOut.model_validate(a) for a in result.scalars().all()] + + +@router.post("/{material_id}/aliases", response_model=MaterialAliasOut, status_code=status.HTTP_201_CREATED) +async def add_alias( + material_id: uuid.UUID, + body: AliasCreate, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + # Verify material exists + mat_result = await db.execute(select(Material).where(Material.id == material_id)) + if not mat_result.scalar_one_or_none(): + raise HTTPException(404, detail="Material not found") + + alias_str = body.alias.strip() + if not alias_str: + raise HTTPException(400, detail="Alias cannot be empty") + + # Check case-insensitive uniqueness + existing = await db.execute( + select(MaterialAlias).where(func.lower(MaterialAlias.alias) == alias_str.lower()) + ) + dup = existing.scalar_one_or_none() + if dup: + raise HTTPException( + status.HTTP_409_CONFLICT, + detail=f"Alias '{alias_str}' already exists (assigned to material {dup.material_id})", + ) + + alias_obj = MaterialAlias(material_id=material_id, alias=alias_str) + db.add(alias_obj) + await db.commit() + await db.refresh(alias_obj) + return MaterialAliasOut.model_validate(alias_obj) diff --git a/backend/app/api/routers/notifications.py b/backend/app/api/routers/notifications.py new file mode 100644 index 0000000..29afabc --- /dev/null +++ b/backend/app/api/routers/notifications.py @@ -0,0 +1,157 @@ +"""Notification center API — list, count, mark-read.""" +import uuid +from datetime import datetime +from typing import Optional + +from fastapi import APIRouter, Depends, HTTPException, Query +from pydantic import BaseModel +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select, func, update, or_, and_ + +from app.database import get_db +from app.models.audit_log import AuditLog +from app.models.user import User +from app.utils.auth import get_current_user + +router = APIRouter(prefix="/notifications", tags=["notifications"]) + + +class NotificationOut(BaseModel): + id: str + action: str + entity_type: str | None = None + entity_id: str | None = None + details: dict | None = None + timestamp: datetime + read_at: datetime | None = None + + model_config = {"from_attributes": True} + + +class NotificationListResponse(BaseModel): + items: list[NotificationOut] + unread_count: int + total: int + + +class UnreadCountResponse(BaseModel): + unread_count: int + + +class MarkReadRequest(BaseModel): + notification_ids: list[str] | None = None + + +def _visibility_filter(user: User): + """Rows visible to this user: targeted at them, or broadcast (null) if admin/PM.""" + targeted = AuditLog.target_user_id == user.id + if user.role.value in ("admin", "project_manager"): + broadcast = AuditLog.target_user_id.is_(None) + return and_(AuditLog.notification == True, or_(targeted, broadcast)) # noqa: E712 + return and_(AuditLog.notification == True, targeted) # noqa: E712 + + 
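+# Example (illustrative only — the exact SQL depends on the dialect): the predicate
+# built by _visibility_filter() above resolves roughly to
+#   regular user:  notification IS TRUE AND target_user_id = :uid
+#   admin / PM:    notification IS TRUE AND (target_user_id = :uid OR target_user_id IS NULL)
+# i.e. broadcast rows (target_user_id IS NULL) are surfaced only to admins and project managers.
+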
+@router.get("", response_model=NotificationListResponse) +async def list_notifications( + limit: int = Query(20, ge=1, le=100), + offset: int = Query(0, ge=0), + unread_only: bool = Query(False), + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + vis = _visibility_filter(user) + + # Total count + total_q = select(func.count(AuditLog.id)).where(vis) + if unread_only: + total_q = total_q.where(AuditLog.read_at.is_(None)) + total = (await db.execute(total_q)).scalar() or 0 + + # Unread count (always) + unread_q = select(func.count(AuditLog.id)).where(vis, AuditLog.read_at.is_(None)) + unread_count = (await db.execute(unread_q)).scalar() or 0 + + # Items + items_q = ( + select(AuditLog) + .where(vis) + .order_by(AuditLog.timestamp.desc()) + .offset(offset) + .limit(limit) + ) + if unread_only: + items_q = items_q.where(AuditLog.read_at.is_(None)) + + rows = (await db.execute(items_q)).scalars().all() + items = [ + NotificationOut( + id=str(r.id), + action=r.action, + entity_type=r.entity_type, + entity_id=r.entity_id, + details=r.details, + timestamp=r.timestamp, + read_at=r.read_at, + ) + for r in rows + ] + + return NotificationListResponse(items=items, unread_count=unread_count, total=total) + + +@router.get("/unread-count", response_model=UnreadCountResponse) +async def unread_count( + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + vis = _visibility_filter(user) + q = select(func.count(AuditLog.id)).where(vis, AuditLog.read_at.is_(None)) + count = (await db.execute(q)).scalar() or 0 + return UnreadCountResponse(unread_count=count) + + +@router.post("/mark-read") +async def mark_read( + body: MarkReadRequest, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Mark notifications as read. 
If notification_ids is null, mark all as read.""" + vis = _visibility_filter(user) + now = datetime.utcnow() + + if body.notification_ids is None: + # Mark all unread + stmt = ( + update(AuditLog) + .where(vis, AuditLog.read_at.is_(None)) + .values(read_at=now) + ) + else: + ids = [uuid.UUID(nid) for nid in body.notification_ids] + stmt = ( + update(AuditLog) + .where(vis, AuditLog.id.in_(ids), AuditLog.read_at.is_(None)) + .values(read_at=now) + ) + + await db.execute(stmt) + await db.commit() + return {"ok": True} + + +@router.post("/{notification_id}/mark-read") +async def mark_one_read( + notification_id: uuid.UUID, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + vis = _visibility_filter(user) + now = datetime.utcnow() + result = await db.execute( + update(AuditLog) + .where(vis, AuditLog.id == notification_id, AuditLog.read_at.is_(None)) + .values(read_at=now) + ) + await db.commit() + return {"ok": True} diff --git a/backend/app/api/routers/order_items.py b/backend/app/api/routers/order_items.py new file mode 100644 index 0000000..bc630e3 --- /dev/null +++ b/backend/app/api/routers/order_items.py @@ -0,0 +1,365 @@ +"""Order items router - manage individual line items within an order.""" +import uuid +from datetime import datetime +from typing import Optional + +from fastapi import APIRouter, Depends, HTTPException, status +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select +from sqlalchemy.orm import selectinload +from pydantic import BaseModel + +from app.database import get_db +from app.models.cad_file import CadFile +from app.models.order import Order, OrderStatus +from app.models.order_item import OrderItem, ItemStatus +from app.models.user import User +from app.schemas.order import OrderItemOut +from app.utils.auth import get_current_user + +router = APIRouter(prefix="/orders", tags=["order_items"]) + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + +def _is_privileged(user: User) -> bool: + return user.role.value in ("admin", "project_manager") + + +async def _get_order_and_item( + order_id: uuid.UUID, + item_id: uuid.UUID, + user: User, + db: AsyncSession, +) -> tuple[Order, OrderItem]: + """Load order + item, enforcing ownership/admin access.""" + order_result = await db.execute(select(Order).where(Order.id == order_id)) + order = order_result.scalar_one_or_none() + if not order: + raise HTTPException(status_code=404, detail="Order not found") + if not _is_privileged(user) and order.created_by != user.id: + raise HTTPException(status_code=403, detail="Access denied") + + item_result = await db.execute( + select(OrderItem) + .options(selectinload(OrderItem.cad_file)) + .where( + OrderItem.id == item_id, + OrderItem.order_id == order_id, + ) + ) + item = item_result.scalar_one_or_none() + if not item: + raise HTTPException(status_code=404, detail="Order item not found") + + return order, item + + +# --------------------------------------------------------------------------- +# Request schemas +# --------------------------------------------------------------------------- + +class OrderItemPatch(BaseModel): + ebene1: Optional[str] = None + ebene2: Optional[str] = None + baureihe: Optional[str] = None + pim_id: Optional[str] = None + produkt_baureihe: Optional[str] = None + gewaehltes_produkt: Optional[str] = None + name_cad_modell: Optional[str] = None + gewuenschte_bildnummer: Optional[str] = 
None
+    lagertyp: Optional[str] = None
+    medias_rendering: Optional[bool] = None
+    notes: Optional[str] = None
+
+
+class ApproveRejectBody(BaseModel):
+    notes: Optional[str] = None
+
+
+class CadPartMaterialEntry(BaseModel):
+    part_name: str
+    material: str
+
+
+class CadPartMaterialsBody(BaseModel):
+    parts: list[CadPartMaterialEntry]
+
+
+# ---------------------------------------------------------------------------
+# Endpoints
+# ---------------------------------------------------------------------------
+
+@router.get("/{order_id}/items", response_model=list[OrderItemOut])
+async def list_order_items(
+    order_id: uuid.UUID,
+    user: User = Depends(get_current_user),
+    db: AsyncSession = Depends(get_db),
+):
+    """Return all items belonging to an order."""
+    order_result = await db.execute(select(Order).where(Order.id == order_id))
+    order = order_result.scalar_one_or_none()
+    if not order:
+        raise HTTPException(status_code=404, detail="Order not found")
+    if not _is_privileged(user) and order.created_by != user.id:
+        raise HTTPException(status_code=403, detail="Access denied")
+
+    items_result = await db.execute(
+        select(OrderItem)
+        .options(selectinload(OrderItem.cad_file))
+        .where(OrderItem.order_id == order_id)
+        .order_by(OrderItem.row_index)
+    )
+    items = items_result.scalars().all()
+    return [OrderItemOut.model_validate(i) for i in items]
+
+
+@router.get("/{order_id}/items/{item_id}", response_model=OrderItemOut)
+async def get_order_item(
+    order_id: uuid.UUID,
+    item_id: uuid.UUID,
+    user: User = Depends(get_current_user),
+    db: AsyncSession = Depends(get_db),
+):
+    """Return a single order item."""
+    _, item = await _get_order_and_item(order_id, item_id, user, db)
+    return OrderItemOut.model_validate(item)
+
+
+@router.patch("/{order_id}/items/{item_id}", response_model=OrderItemOut)
+async def update_order_item(
+    order_id: uuid.UUID,
+    item_id: uuid.UUID,
+    body: OrderItemPatch,
+    user: User = Depends(get_current_user),
+    db: AsyncSession = Depends(get_db),
+):
+    """Edit the standard (non-component) fields of an order item."""
+    order, item = await _get_order_and_item(order_id, item_id, user, db)
+
+    # Regular users may only edit items while the order is in draft; admins/PMs may edit regardless of status
+    if not _is_privileged(user) and order.status != OrderStatus.draft:
+        raise HTTPException(
+            status_code=400,
+            detail="Order items can only be edited while the order is in draft status",
+        )
+
+    patch_data = body.model_dump(exclude_unset=True)
+    for field, value in patch_data.items():
+        setattr(item, field, value)
+
+    item.updated_at = datetime.utcnow()
+    await db.commit()
+    refreshed = await db.execute(
+        select(OrderItem).options(selectinload(OrderItem.cad_file)).where(OrderItem.id == item_id)
+    )
+    return OrderItemOut.model_validate(refreshed.scalar_one())
+
+
+@router.post(
+    "/{order_id}/items/{item_id}/approve",
+    response_model=OrderItemOut,
+    status_code=status.HTTP_200_OK,
+)
+async def approve_order_item(
+    order_id: uuid.UUID,
+    item_id: uuid.UUID,
+    body: ApproveRejectBody = ApproveRejectBody(),
+    user: User = Depends(get_current_user),
+    db: AsyncSession = Depends(get_db),
+):
+    """Mark an order item as approved (admin or PM only)."""
+    if not _is_privileged(user):
+        raise HTTPException(status_code=403, detail="Only admins or PMs can approve items")
+
+    _, item = await _get_order_and_item(order_id, item_id, user, db)
+
+    if item.item_status == ItemStatus.approved:
+        raise HTTPException(status_code=400, detail="Item is already approved")
+
+    item.item_status = ItemStatus.approved
+    if body.notes is not None:
+        item.notes = body.notes
+    item.updated_at = datetime.utcnow()
+
+    await db.commit()
+    refreshed = await db.execute(
+        select(OrderItem).options(selectinload(OrderItem.cad_file)).where(OrderItem.id == item_id)
+    )
+    return OrderItemOut.model_validate(refreshed.scalar_one())
+
+
+@router.post(
+    "/{order_id}/items/{item_id}/reject",
+    response_model=OrderItemOut,
+    status_code=status.HTTP_200_OK,
+)
+async def reject_order_item(
+    order_id: uuid.UUID,
+    item_id: uuid.UUID,
+    body: ApproveRejectBody = ApproveRejectBody(),
+    user: User = Depends(get_current_user),
+    db: AsyncSession = Depends(get_db),
+):
+    """Mark an order item as rejected (admin or PM only)."""
+    if not _is_privileged(user):
+        raise HTTPException(status_code=403, detail="Only admins or PMs can reject items")
+
+    _, item = await _get_order_and_item(order_id, item_id, user, db)
+
+    if item.item_status == ItemStatus.rejected:
+        raise HTTPException(status_code=400, detail="Item is already rejected")
+
+    item.item_status = ItemStatus.rejected
+    if body.notes is not None:
+        item.notes = body.notes
+    item.updated_at = datetime.utcnow()
+
+    await db.commit()
+    refreshed = await db.execute(
+        select(OrderItem).options(selectinload(OrderItem.cad_file)).where(OrderItem.id == item_id)
+    )
+    return OrderItemOut.model_validate(refreshed.scalar_one())
+
+
+@router.put(
+    "/{order_id}/items/{item_id}/cad-materials",
+    response_model=OrderItemOut,
+)
+async def update_cad_materials(
+    order_id: uuid.UUID,
+    item_id: uuid.UUID,
+    body: CadPartMaterialsBody,
+    user: User = Depends(get_current_user),
+    db: AsyncSession = Depends(get_db),
+):
+    """Save material assignments for each CAD part of an order item."""
+    _, item = await _get_order_and_item(order_id, item_id, user, db)
+    from sqlalchemy import update as sql_update
+    await db.execute(
+        sql_update(OrderItem)
+        .where(OrderItem.id == item_id)
+        .values(
+            cad_part_materials=[e.model_dump() for e in body.parts],
+            updated_at=datetime.utcnow(),
+        )
+    )
+    await db.commit()
+    # Re-fetch with cad_file eagerly loaded so cad_parsed_objects property works
+    refreshed = await db.execute(
+        select(OrderItem)
+        .options(selectinload(OrderItem.cad_file))
+        .where(OrderItem.id == item_id)
+    )
+    updated_item = refreshed.scalar_one()
+
+    # Queue thumbnail re-render with part colours if the item has a linked CAD file
+    if updated_item.cad_file_id and updated_item.cad_file:
+        parsed_objects = (updated_item.cad_file.parsed_objects or {}).get("objects", [])
+        if parsed_objects:
+            from app.services.step_processor import build_part_colors
+            from app.tasks.step_tasks import regenerate_thumbnail
+            part_colors = build_part_colors(
+                parsed_objects,
+                [e.model_dump() for e in body.parts],
+            )
+            regenerate_thumbnail.delay(str(updated_item.cad_file_id), part_colors)
+
+    return OrderItemOut.model_validate(updated_item)
+
+
+@router.delete(
+    "/{order_id}/items/{item_id}/cad-file",
+    status_code=status.HTTP_204_NO_CONTENT,
+)
+async def unlink_cad_file(
+    order_id: uuid.UUID,
+    item_id: uuid.UUID,
+    user: User = Depends(get_current_user),
+    db: AsyncSession = Depends(get_db),
+):
+    """
+    Unlink the STEP/CAD file from an order item.
+
+    Clears cad_file_id and cad_part_materials on the item.
+    If no other items reference the same CadFile, deletes the record and
+    removes the stored STEP, thumbnail, and glTF files from disk.
+    Only allowed while the order is in draft status.
+ """ + import os + from sqlalchemy import update as sql_update, func + + order, item = await _get_order_and_item(order_id, item_id, user, db) + + if order.status != OrderStatus.draft: + raise HTTPException(400, detail="CAD file can only be removed from draft orders") + + if not item.cad_file_id: + raise HTTPException(400, detail="This item has no CAD file linked") + + cad_id = item.cad_file_id + + # Fetch the CadFile before unlinking + cad_result = await db.execute(select(CadFile).where(CadFile.id == cad_id)) + cad_file = cad_result.scalar_one_or_none() + + # Unlink item + from sqlalchemy import update as sql_update + await db.execute( + sql_update(OrderItem) + .where(OrderItem.id == item_id) + .values(cad_file_id=None, cad_part_materials=[], updated_at=datetime.utcnow()) + ) + await db.commit() + + # Delete CadFile record + disk files if no other items still reference it + if cad_file: + remaining = await db.execute( + select(func.count()).where(OrderItem.cad_file_id == cad_id) + ) + if remaining.scalar() == 0: + for path_attr in ("stored_path", "thumbnail_path", "gltf_path"): + fpath = getattr(cad_file, path_attr, None) + if fpath: + try: + os.remove(fpath) + except FileNotFoundError: + pass + await db.delete(cad_file) + await db.commit() + + +@router.post( + "/{order_id}/items/{item_id}/regenerate-thumbnail", + status_code=status.HTTP_202_ACCEPTED, +) +async def regenerate_item_thumbnail( + order_id: uuid.UUID, + item_id: uuid.UUID, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """ + Queue a thumbnail re-render for an order item's linked CAD file. + + The thumbnail is re-generated with per-part colours derived from the + currently saved cad_part_materials. Returns immediately; the worker + processes the job asynchronously. 
+ """ + _, item = await _get_order_and_item(order_id, item_id, user, db) + + if not item.cad_file_id: + raise HTTPException(400, detail="No CAD file linked to this item") + if not item.cad_file: + raise HTTPException(400, detail="CAD file record not found") + + parsed_objects = (item.cad_file.parsed_objects or {}).get("objects", []) + from app.services.step_processor import build_part_colors + from app.tasks.step_tasks import regenerate_thumbnail + + part_colors = build_part_colors(parsed_objects, item.cad_part_materials or []) + task = regenerate_thumbnail.delay(str(item.cad_file_id), part_colors) + + return {"status": "queued", "task_id": task.id, "cad_file_id": str(item.cad_file_id)} diff --git a/backend/app/api/routers/orders.py b/backend/app/api/routers/orders.py new file mode 100644 index 0000000..78adb4f --- /dev/null +++ b/backend/app/api/routers/orders.py @@ -0,0 +1,1208 @@ +import io +import os +import re +import uuid +import zipfile +from datetime import datetime +from typing import Optional + +from fastapi import APIRouter, Depends, HTTPException, Query, status +from fastapi.responses import StreamingResponse +from pydantic import BaseModel +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select, func, update +from sqlalchemy.orm import selectinload + +from app.database import get_db +from app.models.order import Order, OrderStatus +from app.models.order_item import OrderItem +from app.models.order_line import OrderLine +from app.models.product import Product +from app.models.output_type import OutputType +from app.models.cad_file import CadFile +from app.models.user import User +from app.schemas.order import OrderCreate, OrderOut, OrderDetailOut, OrderItemOut +from app.schemas.order_line import OrderLineCreate, OrderLineOut +from app.schemas.product import ProductOut +from app.schemas.output_type import OutputTypeOut +from app.services.order_service import generate_order_number +from app.utils.auth import get_current_user, require_admin_or_pm + +router = APIRouter(prefix="/orders", tags=["orders"]) + + +def _is_privileged(user: User) -> bool: + return user.role.value in ("admin", "project_manager") + + +def _result_path_to_url(result_path: str) -> str | None: + """Convert an internal result_path to a servable static URL.""" + if "/renders/" in result_path: + idx = result_path.index("/renders/") + return result_path[idx:] + if "/thumbnails/" in result_path: + idx = result_path.index("/thumbnails/") + return result_path[idx:] + return None + + +def _build_line_out(line: OrderLine) -> OrderLineOut: + product_out = ProductOut.model_validate(line.product) + product_out.thumbnail_url = line.product.thumbnail_url + product_out.processing_status = line.product.processing_status + + # Prefer completed render over CAD thumbnail + thumb = line.product.thumbnail_url + if line.render_status == "completed" and line.result_path: + render_url = _result_path_to_url(line.result_path) + if render_url: + thumb = render_url + + # Build OutputTypeOut with pricing convenience fields + ot_out = None + if line.output_type: + ot_out = OutputTypeOut.model_validate(line.output_type) + if hasattr(line.output_type, 'pricing_tier') and line.output_type.pricing_tier: + pt = line.output_type.pricing_tier + ot_out.pricing_tier_name = f"{pt.category_key}/{pt.quality_level}" + ot_out.price_per_item = float(pt.price_per_item) + + rp_name: str | None = None + if hasattr(line, 'render_position') and line.render_position: + rp_name = line.render_position.name + + out = OrderLineOut( + 
id=line.id, + order_id=line.order_id, + product_id=line.product_id, + product=product_out, + output_type_id=line.output_type_id, + output_type=ot_out, + gewuenschte_bildnummer=line.gewuenschte_bildnummer, + item_status=line.item_status, + render_status=line.render_status, + result_path=line.result_path, + thumbnail_url=thumb, + ai_validation_status=line.ai_validation_status, + ai_validation_result=line.ai_validation_result, + render_backend_used=line.render_backend_used, + flamenco_job_id=line.flamenco_job_id, + unit_price=float(line.unit_price) if line.unit_price is not None else None, + render_position_id=line.render_position_id, + render_position_name=rp_name, + notes=line.notes, + created_at=line.created_at, + updated_at=line.updated_at, + ) + return out + + +async def _load_order_detail(db, order_id: uuid.UUID) -> Order: + from app.models.output_type import OutputType as OTModel + from app.models.render_position import ProductRenderPosition + result = await db.execute( + select(Order) + .where(Order.id == order_id) + .options( + selectinload(Order.items).selectinload(OrderItem.cad_file), + selectinload(Order.lines) + .selectinload(OrderLine.product) + .selectinload(Product.cad_file), + selectinload(Order.lines) + .selectinload(OrderLine.product) + .selectinload(Product.render_positions), + selectinload(Order.lines) + .selectinload(OrderLine.output_type) + .selectinload(OTModel.pricing_tier), + selectinload(Order.lines) + .selectinload(OrderLine.render_position), + ) + ) + return result.scalar_one_or_none() + + +def _compute_render_progress(lines) -> dict | None: + """Compute render progress from order lines that have an output_type.""" + renderable = [l for l in lines if l.output_type_id is not None] + if not renderable: + return None + progress = {"total": len(renderable), "completed": 0, "processing": 0, "failed": 0, "pending": 0, "cancelled": 0} + for l in renderable: + status = l.render_status or "pending" + if status in progress: + progress[status] += 1 + else: + progress["pending"] += 1 + return progress + + +async def _maybe_complete_order(db: AsyncSession, order_id: uuid.UUID): + """If all renderable lines are terminal, auto-advance order to completed.""" + lines_result = await db.execute( + select(OrderLine).where( + OrderLine.order_id == order_id, + OrderLine.output_type_id.isnot(None), + ) + ) + lines = lines_result.scalars().all() + if not lines: + return + all_terminal = all( + l.render_status in ("completed", "failed", "cancelled") + for l in lines + ) + if not all_terminal: + return + order_result = await db.execute(select(Order).where(Order.id == order_id)) + order = order_result.scalar_one_or_none() + if order and order.status == OrderStatus.processing: + order.status = OrderStatus.completed + order.completed_at = datetime.utcnow() + order.updated_at = datetime.utcnow() + await db.commit() + + +def _order_detail_out(order: Order) -> OrderDetailOut: + out = OrderDetailOut.model_validate(order) + out.item_count = len(order.items) + out.items = [OrderItemOut.model_validate(i) for i in order.items] + out.line_count = len(order.lines) + out.lines = [_build_line_out(line) for line in order.lines] + out.render_progress = _compute_render_progress(order.lines) + return out + + +@router.get("/search", response_model=list[OrderDetailOut]) +async def search_orders( + q: str = Query(""), + statuses: str = Query(""), # comma-separated: "draft,submitted" + date_from: str = Query(""), + date_to: str = Query(""), + limit: int = Query(50, le=200), + user: User = 
Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Full-text search across orders and their items.""" + from sqlalchemy import or_ + + # Parse and validate status list + valid_statuses = [] + for s in (s.strip() for s in statuses.split(",") if s.strip()): + try: + valid_statuses.append(OrderStatus(s)) + except ValueError: + pass + + # Eagerly load items + cad_file + lines to avoid lazy-load issues during Pydantic serialisation + from app.models.output_type import OutputType as OTModel + order_q = ( + select(Order) + .options( + selectinload(Order.items).selectinload(OrderItem.cad_file), + selectinload(Order.lines).selectinload(OrderLine.product).selectinload(Product.cad_file), + selectinload(Order.lines).selectinload(OrderLine.product).selectinload(Product.render_positions), + selectinload(Order.lines).selectinload(OrderLine.output_type).selectinload(OTModel.pricing_tier), + ) + ) + if not _is_privileged(user): + order_q = order_q.where(Order.created_by == user.id) + if valid_statuses: + order_q = order_q.where(Order.status.in_(valid_statuses)) + if date_from: + order_q = order_q.where(Order.created_at >= date_from) + if date_to: + order_q = order_q.where(Order.created_at <= date_to + "T23:59:59") + + if q: + pattern = f"%{q}%" + item_fields_cols = [ + OrderItem.ebene1, OrderItem.ebene2, OrderItem.baureihe, + OrderItem.pim_id, OrderItem.produkt_baureihe, OrderItem.gewaehltes_produkt, + OrderItem.name_cad_modell, OrderItem.lagertyp, OrderItem.notes, + ] + item_match = or_(*(f.ilike(pattern) for f in item_fields_cols)) + order_match = or_(Order.order_number.ilike(pattern), Order.notes.ilike(pattern)) + + matching_via_items = select(OrderItem.order_id).where(item_match) + matching_direct = select(Order.id).where(order_match) + order_q = order_q.where( + or_(Order.id.in_(matching_via_items), Order.id.in_(matching_direct)) + ) + + order_q = order_q.order_by(Order.updated_at.desc()).limit(limit) + result = await db.execute(order_q) + orders = result.scalars().all() + + # Text fields used for Python-side item filtering + _item_text_attrs = [ + 'ebene1', 'ebene2', 'baureihe', 'pim_id', 'produkt_baureihe', + 'gewaehltes_produkt', 'name_cad_modell', 'lagertyp', 'notes', + ] + + out = [] + for order in orders: + if q: + q_lower = q.lower() + order_direct = ( + (order.order_number and q_lower in order.order_number.lower()) + or (order.notes and q_lower in order.notes.lower()) + ) + if order_direct: + items = list(order.items) + else: + items = [ + i for i in order.items + if any( + getattr(i, attr) and q_lower in getattr(i, attr).lower() + for attr in _item_text_attrs + ) + ] + else: + items = list(order.items) + + d = OrderDetailOut.model_validate(order) + d.item_count = len(items) + d.items = [OrderItemOut.model_validate(i) for i in items] + d.line_count = len(order.lines) + d.lines = [_build_line_out(line) for line in order.lines] + d.render_progress = _compute_render_progress(order.lines) + out.append(d) + + return out + + +@router.get("", response_model=list[OrderOut]) +async def list_orders( + status: Optional[OrderStatus] = None, + template_id: Optional[uuid.UUID] = None, + skip: int = Query(0, ge=0), + limit: int = Query(50, ge=1, le=200), + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + q = select(Order) + if not _is_privileged(user): + q = q.where(Order.created_by == user.id) + if status: + q = q.where(Order.status == status) + if template_id: + q = q.where(Order.template_id == template_id) + q = 
q.order_by(Order.created_at.desc()).offset(skip).limit(limit) + result = await db.execute(q) + orders = result.scalars().all() + + # Attach item_count, line_count, and render_progress + out = [] + for order in orders: + cnt_result = await db.execute( + select(func.count(OrderItem.id)).where(OrderItem.order_id == order.id) + ) + cnt = cnt_result.scalar() or 0 + line_cnt_result = await db.execute( + select(func.count(OrderLine.id)).where(OrderLine.order_id == order.id) + ) + line_cnt = line_cnt_result.scalar() or 0 + + # Compute render progress for renderable lines + rp_result = await db.execute( + select(OrderLine.render_status, func.count(OrderLine.id)) + .where( + OrderLine.order_id == order.id, + OrderLine.output_type_id.isnot(None), + ) + .group_by(OrderLine.render_status) + ) + rp_rows = rp_result.all() + render_progress = None + if rp_rows: + render_progress = {"total": 0, "completed": 0, "processing": 0, "failed": 0, "pending": 0, "cancelled": 0} + for rs, count in rp_rows: + s = rs or "pending" + if s in render_progress: + render_progress[s] += count + else: + render_progress["pending"] += count + render_progress["total"] += count + + d = OrderOut.model_validate(order) + d.item_count = cnt + d.line_count = line_cnt + d.render_progress = render_progress + out.append(d) + return out + + +@router.post("", response_model=OrderDetailOut, status_code=status.HTTP_201_CREATED) +async def create_order( + body: OrderCreate, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + order_number = await generate_order_number(db) + order = Order( + order_number=order_number, + template_id=body.template_id, + created_by=user.id, + source_excel=body.source_excel, + notes=body.notes, + ) + db.add(order) + await db.flush() + + for item_data in body.items: + item = OrderItem( + order_id=order.id, + row_index=item_data.row_index, + ebene1=item_data.ebene1, + ebene2=item_data.ebene2, + baureihe=item_data.baureihe, + pim_id=item_data.pim_id, + produkt_baureihe=item_data.produkt_baureihe, + gewaehltes_produkt=item_data.gewaehltes_produkt, + name_cad_modell=item_data.name_cad_modell, + gewuenschte_bildnummer=item_data.gewuenschte_bildnummer, + lagertyp=item_data.lagertyp, + medias_rendering=item_data.medias_rendering, + components=[c.model_dump() for c in item_data.components], + ) + db.add(item) + + for line_data in body.lines: + # Verify product exists + prod_result = await db.execute( + select(Product).where(Product.id == line_data.product_id) + ) + if not prod_result.scalar_one_or_none(): + raise HTTPException(404, detail=f"Product {line_data.product_id} not found") + line = OrderLine( + order_id=order.id, + product_id=line_data.product_id, + output_type_id=line_data.output_type_id, + render_position_id=line_data.render_position_id, + gewuenschte_bildnummer=line_data.gewuenschte_bildnummer, + notes=line_data.notes, + ) + db.add(line) + + await db.commit() + + order_loaded = await _load_order_detail(db, order.id) + return _order_detail_out(order_loaded) + + +@router.get("/{order_id}", response_model=OrderDetailOut) +async def get_order( + order_id: uuid.UUID, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + order = await _load_order_detail(db, order_id) + if not order: + raise HTTPException(404, detail="Order not found") + if not _is_privileged(user) and order.created_by != user.id: + raise HTTPException(403, detail="Access denied") + + return _order_detail_out(order) + + +@router.post("/{order_id}/submit", response_model=OrderOut) +async 
def submit_order( + order_id: uuid.UUID, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + result = await db.execute(select(Order).where(Order.id == order_id)) + order = result.scalar_one_or_none() + if not order: + raise HTTPException(404, detail="Order not found") + if not _is_privileged(user) and order.created_by != user.id: + raise HTTPException(403, detail="Access denied") + if order.status != OrderStatus.draft: + raise HTTPException(400, detail=f"Order is already {order.status.value}") + + # Require legacy items marked for rendering to have a linked STEP file + items_result = await db.execute( + select(OrderItem).where(OrderItem.order_id == order_id) + ) + items = items_result.scalars().all() + missing_items = [ + i.name_cad_modell or f"row {i.row_index}" + for i in items + if i.medias_rendering and i.cad_file_id is None + ] + # Require order_lines with output_type_id to have a product with a CAD file + lines_result = await db.execute( + select(OrderLine) + .options(selectinload(OrderLine.product)) + .where( + OrderLine.order_id == order_id, + OrderLine.output_type_id.is_not(None), + ) + ) + lines = lines_result.scalars().all() + missing_lines = [ + line.product.name or str(line.product.pim_id) + for line in lines + if line.product.cad_file_id is None + ] + missing = missing_items + missing_lines + if missing: + raise HTTPException( + 400, + detail=f"Cannot submit: {len(missing)} rendering item(s) are missing a STEP file: {', '.join(missing[:5])}{'…' if len(missing) > 5 else ''}", + ) + + order.status = OrderStatus.submitted + order.submitted_at = datetime.utcnow() + order.updated_at = datetime.utcnow() + + # Auto-approve order_lines when submitted (new Product Library workflow + # has no per-item approval step — submission implies approval) + await db.execute( + update(OrderLine) + .where(OrderLine.order_id == order.id, OrderLine.item_status == "pending") + .values(item_status="approved") + ) + + await db.commit() + await db.refresh(order) + + # Notify admins/PMs about new submission (broadcast) + from app.services.notification_service import emit_notification + await emit_notification( + db, + actor_user_id=user.id, + target_user_id=None, + action="order.submitted", + entity_type="order", + entity_id=str(order.id), + details={"order_number": order.order_number}, + ) + + # Compute estimated price after commit (pricing_service opens its own transaction) + from app.services.pricing_service import refresh_order_price + await refresh_order_price(db, order.id) + await db.refresh(order) + return order + + +@router.delete("/{order_id}", status_code=status.HTTP_204_NO_CONTENT) +async def delete_order( + order_id: uuid.UUID, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + result = await db.execute(select(Order).where(Order.id == order_id)) + order = result.scalar_one_or_none() + if not order: + raise HTTPException(404, detail="Order not found") + if not _is_privileged(user) and order.created_by != user.id: + raise HTTPException(403, detail="Access denied") + if order.status not in (OrderStatus.draft, OrderStatus.submitted, OrderStatus.rejected): + raise HTTPException(400, detail="Only draft, submitted or rejected orders can be deleted") + + await db.delete(order) + await db.commit() + + +class SplitMissingStepResponse(BaseModel): + new_order_id: str + new_order_number: str + moved_item_count: int + moved_line_count: int + + +@router.post("/{order_id}/split-missing-step", response_model=SplitMissingStepResponse) +async 
def split_missing_step( + order_id: uuid.UUID, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Move all items/lines that block submission (no STEP file) to a new draft order. + + After this call the original order can be submitted immediately. + """ + result = await db.execute(select(Order).where(Order.id == order_id)) + order = result.scalar_one_or_none() + if not order: + raise HTTPException(404, detail="Order not found") + if not _is_privileged(user) and order.created_by != user.id: + raise HTTPException(403, detail="Access denied") + if order.status != OrderStatus.draft: + raise HTTPException(400, detail="Only draft orders can be split") + + # Find legacy OrderItems blocking submission (rendering, no STEP linked) + items_result = await db.execute( + select(OrderItem).where(OrderItem.order_id == order_id) + ) + items_to_move = [ + i for i in items_result.scalars().all() + if i.medias_rendering and i.cad_file_id is None + ] + + # Find OrderLines blocking submission (has output type, product has no STEP) + lines_result = await db.execute( + select(OrderLine) + .options(selectinload(OrderLine.product)) + .where( + OrderLine.order_id == order_id, + OrderLine.output_type_id.is_not(None), + ) + ) + lines_to_move = [ + ln for ln in lines_result.scalars().all() + if ln.product.cad_file_id is None + ] + + if not items_to_move and not lines_to_move: + raise HTTPException(400, detail="No items without STEP file found — nothing to split") + + # Create the new draft order + new_order_number = await generate_order_number(db) + new_order = Order( + order_number=new_order_number, + template_id=order.template_id, + created_by=order.created_by, + source_excel=order.source_excel, + notes=f"Split from {order.order_number} — awaiting STEP files", + ) + db.add(new_order) + await db.flush() + + # Move items and lines by reassigning order_id + for item in items_to_move: + item.order_id = new_order.id + for ln in lines_to_move: + ln.order_id = new_order.id + + await db.commit() + + # Refresh estimated_price on both orders + from app.services.pricing_service import refresh_order_price + await refresh_order_price(db, order_id) + await refresh_order_price(db, new_order.id) + + return SplitMissingStepResponse( + new_order_id=str(new_order.id), + new_order_number=new_order_number, + moved_item_count=len(items_to_move), + moved_line_count=len(lines_to_move), + ) + + +class GenerateLinesRequest(BaseModel): + output_type_ids: list[uuid.UUID] + + +class GenerateLinesResponse(BaseModel): + created: int + skipped: int + no_product_count: int = 0 + no_step_count: int = 0 + + +@router.post("/{order_id}/generate-lines", response_model=GenerateLinesResponse) +async def generate_lines_from_items( + order_id: uuid.UUID, + body: GenerateLinesRequest, + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + """Bulk-create OrderLines from OrderItems for orders that have no output lines. + + Looks up each item's product by pim_id / produkt_baureihe, then creates one + line per product × requested output type (skips duplicates). 
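+
+    Illustrative request/response (UUIDs and counts are placeholders; field
+    names follow GenerateLinesRequest / GenerateLinesResponse defined above):
+        POST .../{order_id}/generate-lines
+        {"output_type_ids": ["<output-type-uuid-1>", "<output-type-uuid-2>"]}
+        -> {"created": 4, "skipped": 1, "no_product_count": 0, "no_step_count": 2}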
+ """ + result = await db.execute(select(Order).where(Order.id == order_id)) + order = result.scalar_one_or_none() + if not order: + raise HTTPException(404, detail="Order not found") + if order.status not in (OrderStatus.draft, OrderStatus.submitted): + raise HTTPException(400, detail="Only draft or submitted orders support line generation") + if not body.output_type_ids: + raise HTTPException(400, detail="At least one output type is required") + + from app.services.product_service import lookup_product + + items_result = await db.execute( + select(OrderItem).where(OrderItem.order_id == order_id) + ) + items = items_result.scalars().all() + + # Fetch existing lines to skip duplicates + existing_result = await db.execute( + select(OrderLine.product_id, OrderLine.output_type_id) + .where(OrderLine.order_id == order_id) + ) + existing_pairs: set[tuple] = {(str(r[0]), str(r[1])) for r in existing_result.all()} + + created = 0 + skipped = 0 + no_product_count = 0 + no_step_count = 0 + + for item in items: + # Use the canonical lookup: produkt_baureihe first (unique per product), + # then pim_id as fallback. pim_id is a category-level code shared by + # many products so it must NOT be used as the primary key. + product = await lookup_product(db, item.pim_id, item.produkt_baureihe) + if not product: + no_product_count += 1 + continue + + if product.cad_file_id is None: + no_step_count += 1 + # Still create the line so it shows in the UI — it will fail at dispatch + # but the user can upload a STEP file and retry. + + for type_id in body.output_type_ids: + pair = (str(product.id), str(type_id)) + if pair in existing_pairs: + skipped += 1 + continue + line = OrderLine( + order_id=order_id, + product_id=product.id, + output_type_id=type_id, + gewuenschte_bildnummer=item.gewuenschte_bildnummer, + ) + db.add(line) + existing_pairs.add(pair) + created += 1 + + await db.commit() + + # Refresh estimated price + try: + from app.services.pricing_service import refresh_order_price + await refresh_order_price(db, order_id) + except Exception: + pass + + return GenerateLinesResponse( + created=created, + skipped=skipped, + no_product_count=no_product_count, + no_step_count=no_step_count, + ) + + +class OrderStatusUpdate(BaseModel): + status: str + notes: Optional[str] = None + + +@router.post("/{order_id}/status", response_model=OrderOut) +async def update_order_status( + order_id: uuid.UUID, + body: OrderStatusUpdate, + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + """Update order status with lifecycle timestamps (admin / PM only).""" + result = await db.execute(select(Order).where(Order.id == order_id)) + order = result.scalar_one_or_none() + if not order: + raise HTTPException(404, detail="Order not found") + + now = datetime.utcnow() + try: + new_status = OrderStatus(body.status) + except ValueError: + raise HTTPException(400, detail=f"Invalid status: {body.status}") + + order.status = new_status + order.updated_at = now + + if new_status == OrderStatus.processing: + order.processing_started_at = now + elif new_status == OrderStatus.completed: + order.completed_at = now + elif new_status == OrderStatus.rejected: + order.rejected_at = now + + if body.notes is not None: + order.notes = body.notes + + # Auto-update order_lines.item_status to match order lifecycle + if new_status in (OrderStatus.processing, OrderStatus.completed): + await db.execute( + update(OrderLine) + .where(OrderLine.order_id == order.id) + .values(item_status="approved") + ) + elif new_status == 
OrderStatus.rejected: + await db.execute( + update(OrderLine) + .where(OrderLine.order_id == order.id) + .values(item_status="rejected") + ) + + await db.commit() + + # Notify the order creator about status change + from app.services.notification_service import emit_notification + await emit_notification( + db, + actor_user_id=user.id, + target_user_id=order.created_by, + action=f"order.{new_status.value}", + entity_type="order", + entity_id=str(order.id), + details={"order_number": order.order_number}, + ) + + # Dispatch renders when order moves to processing + if new_status == OrderStatus.processing: + lines_result = await db.execute( + select(OrderLine).where( + OrderLine.order_id == order.id, + OrderLine.output_type_id.isnot(None), + OrderLine.render_status == "pending", + ) + ) + from app.tasks.step_tasks import dispatch_order_line_render + for line in lines_result.scalars().all(): + dispatch_order_line_render.delay(str(line.id)) + + if new_status == OrderStatus.completed: + from app.services.pricing_service import refresh_order_price + await refresh_order_price(db, order.id) + + await db.refresh(order) + return order + + +@router.post("/{order_id}/lines", response_model=OrderLineOut, status_code=status.HTTP_201_CREATED) +async def add_order_line( + order_id: uuid.UUID, + body: OrderLineCreate, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Add a product + output_type line to a draft order.""" + result = await db.execute(select(Order).where(Order.id == order_id)) + order = result.scalar_one_or_none() + if not order: + raise HTTPException(404, detail="Order not found") + if not _is_privileged(user) and order.created_by != user.id: + raise HTTPException(403, detail="Access denied") + if order.status == OrderStatus.draft: + pass # always allowed for owner/admin + elif order.status == OrderStatus.submitted and _is_privileged(user): + pass # admin / PM may add lines to a submitted order (e.g. 
to fix missing output types) + else: + raise HTTPException(400, detail="Can only add lines to draft orders (admins may also add to submitted orders)") + + prod_result = await db.execute( + select(Product).options(selectinload(Product.cad_file)).where(Product.id == body.product_id) + ) + if not prod_result.scalar_one_or_none(): + raise HTTPException(404, detail="Product not found") + + line = OrderLine( + order_id=order_id, + product_id=body.product_id, + output_type_id=body.output_type_id, + render_position_id=body.render_position_id, + gewuenschte_bildnummer=body.gewuenschte_bildnummer, + notes=body.notes, + ) + db.add(line) + try: + await db.commit() + except Exception: + await db.rollback() + raise HTTPException(409, detail="Duplicate line (same product + output_type + position already exists in this order)") + + await db.refresh(line) + + # Update estimated_price on the draft order immediately + from app.services.pricing_service import refresh_order_price + await refresh_order_price(db, order_id) + + from app.models.output_type import OutputType as OTModel + from app.models.render_position import ProductRenderPosition + result2 = await db.execute( + select(OrderLine) + .where(OrderLine.id == line.id) + .options( + selectinload(OrderLine.product).selectinload(Product.cad_file), + selectinload(OrderLine.product).selectinload(Product.render_positions), + selectinload(OrderLine.output_type).selectinload(OTModel.pricing_tier), + selectinload(OrderLine.render_position), + ) + ) + line_loaded = result2.scalar_one() + return _build_line_out(line_loaded) + + +@router.post("/{order_id}/dispatch-renders") +async def dispatch_renders( + order_id: uuid.UUID, + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + """Dispatch (or retry) renders for all pending/failed/cancelled lines (admin/PM only). + + Auto-advances order to processing if currently submitted or completed. 
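+
+    Illustrative response (counts are placeholders; the shape matches the
+    return statement at the end of this handler):
+        {"dispatched": 3, "order_status": "processing"}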
+ """ + result = await db.execute(select(Order).where(Order.id == order_id)) + order = result.scalar_one_or_none() + if not order: + raise HTTPException(404, detail="Order not found") + + if order.status not in (OrderStatus.submitted, OrderStatus.processing, OrderStatus.completed): + raise HTTPException(400, detail=f"Cannot dispatch renders for order in {order.status.value} status") + + lines_result = await db.execute( + select(OrderLine).where( + OrderLine.order_id == order.id, + OrderLine.output_type_id.isnot(None), + OrderLine.render_status.in_(["pending", "failed", "cancelled"]), + ) + ) + lines = lines_result.scalars().all() + + if not lines: + raise HTTPException(400, detail="No renderable lines with pending, failed, or cancelled status") + + # Auto-advance to processing if not already there + if order.status in (OrderStatus.submitted, OrderStatus.completed): + now = datetime.utcnow() + order.status = OrderStatus.processing + order.processing_started_at = now + order.completed_at = None + order.updated_at = now + + # Reset failed/cancelled lines to pending before re-dispatch + from sqlalchemy import update as sql_update + for line in lines: + if line.render_status in ("failed", "cancelled"): + await db.execute( + sql_update(OrderLine) + .where(OrderLine.id == line.id) + .values(render_status="pending", render_completed_at=None, render_log=None) + ) + await db.commit() + + from app.tasks.step_tasks import dispatch_order_line_render + for line in lines: + dispatch_order_line_render.delay(str(line.id)) + + return {"dispatched": len(lines), "order_status": order.status.value} + + +@router.post("/{order_id}/lines/{line_id}/cancel-render") +async def cancel_line_render( + order_id: uuid.UUID, + line_id: uuid.UUID, + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + """Cancel a running render for a single order line (admin/PM only). + + Cancels the Flamenco job or revokes the Celery task, then marks + the line as 'cancelled'. 
+ """ + result = await db.execute(select(Order).where(Order.id == order_id)) + order = result.scalar_one_or_none() + if not order: + raise HTTPException(404, detail="Order not found") + + line_result = await db.execute( + select(OrderLine).where(OrderLine.id == line_id, OrderLine.order_id == order_id) + ) + line = line_result.scalar_one_or_none() + if not line: + raise HTTPException(404, detail="Order line not found") + + if line.render_status not in ("processing", "pending"): + raise HTTPException(400, detail=f"Line render_status is '{line.render_status}', nothing to cancel") + + cancelled_backend = line.render_backend_used or "unknown" + errors: list[str] = [] + + # Cancel Flamenco job if applicable + if line.render_backend_used == "flamenco" and line.flamenco_job_id: + try: + from app.services.flamenco_client import get_flamenco_client + from app.models.system_setting import SystemSetting + row = await db.execute( + select(SystemSetting).where(SystemSetting.key == "flamenco_manager_url") + ) + setting = row.scalar_one_or_none() + url = setting.value if setting else "http://flamenco-manager:8080" + client = get_flamenco_client(url) + client.cancel_job(line.flamenco_job_id) + except Exception as exc: + errors.append(f"Flamenco cancel failed: {str(exc)[:200]}") + + # Revoke Celery task if applicable + if line.render_backend_used == "celery" or not line.render_backend_used: + try: + from app.tasks.celery_app import celery_app + celery_app.control.revoke( + f"render-{line_id}", terminate=True, signal="SIGTERM" + ) + except Exception as exc: + errors.append(f"Celery revoke failed: {str(exc)[:200]}") + + # Also kill the Blender subprocess in the renderer microservice. + # The job_id sent to blender-renderer equals the order_line_id. + try: + import httpx as _httpx + _httpx.post( + f"http://blender-renderer:8100/cancel/{line_id}", + timeout=5.0, + ) + except Exception: + pass # best-effort; renderer may not be running a job for this line + + # Mark line as cancelled + from sqlalchemy import update as sql_update + now = datetime.utcnow() + await db.execute( + sql_update(OrderLine) + .where(OrderLine.id == line.id) + .values( + render_status="cancelled", + render_completed_at=now, + render_log={ + "cancelled_by": str(user.id), + "cancelled_at": now.isoformat(), + "backend": cancelled_backend, + "errors": errors or None, + }, + ) + ) + await db.commit() + + # Check if all renderable lines are now terminal → auto-complete order + await _maybe_complete_order(db, order_id) + + return { + "cancelled": True, + "line_id": str(line.id), + "backend": cancelled_backend, + "errors": errors or None, + } + + +@router.post("/{order_id}/cancel-renders") +async def cancel_order_renders( + order_id: uuid.UUID, + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + """Cancel all processing/pending renders for an order (admin/PM only).""" + result = await db.execute(select(Order).where(Order.id == order_id)) + order = result.scalar_one_or_none() + if not order: + raise HTTPException(404, detail="Order not found") + + lines_result = await db.execute( + select(OrderLine).where( + OrderLine.order_id == order.id, + OrderLine.output_type_id.isnot(None), + OrderLine.render_status.in_(["processing", "pending"]), + ) + ) + lines = lines_result.scalars().all() + + if not lines: + raise HTTPException(400, detail="No active renders to cancel") + + from app.services.flamenco_client import get_flamenco_client + from app.models.system_setting import SystemSetting + from app.tasks.celery_app 
import celery_app + from sqlalchemy import update as sql_update + + # Load Flamenco URL once + row = await db.execute( + select(SystemSetting).where(SystemSetting.key == "flamenco_manager_url") + ) + setting = row.scalar_one_or_none() + flamenco_url = setting.value if setting else "http://flamenco-manager:8080" + + now = datetime.utcnow() + cancelled_count = 0 + errors: list[str] = [] + + for line in lines: + # Cancel Flamenco job + if line.render_backend_used == "flamenco" and line.flamenco_job_id: + try: + client = get_flamenco_client(flamenco_url) + client.cancel_job(line.flamenco_job_id) + except Exception as exc: + errors.append(f"Line {line.id}: Flamenco cancel failed: {str(exc)[:100]}") + + # Revoke Celery task + kill Blender subprocess in renderer service + if line.render_backend_used == "celery" or not line.render_backend_used: + try: + celery_app.control.revoke( + f"render-{line.id}", terminate=True, signal="SIGTERM" + ) + except Exception: + pass # Celery revoke is best-effort + try: + import httpx as _httpx + _httpx.post( + f"http://blender-renderer:8100/cancel/{line.id}", + timeout=5.0, + ) + except Exception: + pass # best-effort + + await db.execute( + sql_update(OrderLine) + .where(OrderLine.id == line.id) + .values( + render_status="cancelled", + render_completed_at=now, + render_log={ + "cancelled_by": str(user.id), + "cancelled_at": now.isoformat(), + "backend": line.render_backend_used or "unknown", + }, + ) + ) + cancelled_count += 1 + + await db.commit() + + # Check if all renderable lines are now terminal → auto-complete order + await _maybe_complete_order(db, order_id) + + # Re-read order status (may have changed) + await db.refresh(order) + + return { + "cancelled": cancelled_count, + "order_status": order.status.value, + "errors": errors or None, + } + + +@router.delete("/{order_id}/lines/{line_id}", status_code=status.HTTP_204_NO_CONTENT) +async def remove_order_line( + order_id: uuid.UUID, + line_id: uuid.UUID, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Remove a line from a draft order.""" + result = await db.execute(select(Order).where(Order.id == order_id)) + order = result.scalar_one_or_none() + if not order: + raise HTTPException(404, detail="Order not found") + if not _is_privileged(user) and order.created_by != user.id: + raise HTTPException(403, detail="Access denied") + if order.status != OrderStatus.draft: + raise HTTPException(400, detail="Can only remove lines from draft orders") + + line_result = await db.execute( + select(OrderLine).where(OrderLine.id == line_id, OrderLine.order_id == order_id) + ) + line = line_result.scalar_one_or_none() + if not line: + raise HTTPException(404, detail="Order line not found") + + await db.delete(line) + await db.commit() + + # Update estimated_price after removing the line + from app.services.pricing_service import refresh_order_price + await refresh_order_price(db, order_id) + + +@router.get("/{order_id}/download-renders") +async def download_renders( + order_id: uuid.UUID, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Stream a ZIP of all completed render files for this order.""" + result = await db.execute(select(Order).where(Order.id == order_id)) + order = result.scalar_one_or_none() + if not order: + raise HTTPException(404, detail="Order not found") + if not _is_privileged(user) and order.created_by != user.id: + raise HTTPException(403, detail="Access denied") + + lines_result = await db.execute( + select(OrderLine) + 
.where( + OrderLine.order_id == order_id, + OrderLine.render_status == "completed", + OrderLine.result_path.isnot(None), + ) + .options( + selectinload(OrderLine.product), + selectinload(OrderLine.output_type), + selectinload(OrderLine.render_position), + ) + ) + lines = lines_result.scalars().all() + + if not lines: + raise HTTPException(404, detail="No completed renders found for this order") + + from app.config import settings as app_settings + + def _resolve_path(p: str) -> str: + """Translate container-relative paths to backend filesystem paths.""" + # Flamenco worker mounts the uploads volume at /shared, backend at /app/uploads + if p.startswith("/shared/"): + return app_settings.upload_dir + p[len("/shared"):] + return p + + buf = io.BytesIO() + # Track names used to avoid duplicates + name_counts: dict[str, int] = {} + + with zipfile.ZipFile(buf, mode="w", compression=zipfile.ZIP_DEFLATED) as zf: + for line in lines: + if not line.result_path: + continue + fs_path = _resolve_path(line.result_path) + if not os.path.isfile(fs_path): + continue + # Build a meaningful filename + product_name = (line.product.name or line.product.pim_id or "product") if line.product else "product" + ot_name = (line.output_type.name if line.output_type else None) or "render" + pos_name = (line.render_position.name if line.render_position else None) + # Sanitize: replace spaces + special chars with underscore + def _safe(s: str) -> str: + return re.sub(r"[^\w\-.]", "_", s).strip("_") + + parts = [_safe(product_name), _safe(ot_name)] + if pos_name: + parts.append(_safe(pos_name)) + ext = os.path.splitext(line.result_path)[1] or ".png" + base_name = "_".join(parts) + ext + + # Deduplicate + if base_name in name_counts: + name_counts[base_name] += 1 + stem, suffix = os.path.splitext(base_name) + archive_name = f"{stem}_{name_counts[base_name]}{suffix}" + else: + name_counts[base_name] = 0 + archive_name = base_name + + zf.write(fs_path, archive_name) + + if not zf.infolist(): + raise HTTPException(404, detail="No render files found on disk") + + buf.seek(0) + safe_order = re.sub(r"[^\w\-]", "_", order.order_number) + filename = f"{safe_order}_renders.zip" + + return StreamingResponse( + buf, + media_type="application/zip", + headers={"Content-Disposition": f'attachment; filename="{filename}"'}, + ) diff --git a/backend/app/api/routers/output_types.py b/backend/app/api/routers/output_types.py new file mode 100644 index 0000000..fae998f --- /dev/null +++ b/backend/app/api/routers/output_types.py @@ -0,0 +1,126 @@ +"""Output Types API router.""" +import uuid + +from fastapi import APIRouter, Depends, HTTPException, Query, status +from sqlalchemy import select, or_, cast, String +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload +from sqlalchemy.dialects.postgresql import JSONB + +from app.database import get_db +from app.models.order_line import OrderLine +from app.models.output_type import OutputType, VALID_RENDER_BACKENDS +from app.schemas.output_type import OutputTypeCreate, OutputTypeOut, OutputTypePatch +from app.utils.auth import get_current_user, require_admin_or_pm +from app.models.user import User + +router = APIRouter(prefix="/output-types", tags=["output-types"]) + + +def _ot_to_out(ot: OutputType) -> OutputTypeOut: + """Convert an OutputType ORM instance to OutputTypeOut with pricing convenience fields.""" + out = OutputTypeOut.model_validate(ot) + if ot.pricing_tier: + out.pricing_tier_name = f"{ot.pricing_tier.category_key}/{ot.pricing_tier.quality_level}" + 
out.price_per_item = float(ot.pricing_tier.price_per_item) + return out + + +@router.get("", response_model=list[OutputTypeOut]) +async def list_output_types( + include_inactive: bool = Query(False), + category: str = Query(""), + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + stmt = ( + select(OutputType) + .options(selectinload(OutputType.pricing_tier)) + .order_by(OutputType.sort_order, OutputType.name) + ) + if not include_inactive: + stmt = stmt.where(OutputType.is_active.is_(True)) + if category: + # Show output types where compatible_categories is empty (universal) + # or contains the given category + stmt = stmt.where( + or_( + cast(OutputType.compatible_categories, String) == "[]", + OutputType.compatible_categories.contains([category]), + ) + ) + result = await db.execute(stmt) + return [_ot_to_out(ot) for ot in result.scalars().all()] + + +@router.post("", response_model=OutputTypeOut, status_code=status.HTTP_201_CREATED) +async def create_output_type( + body: OutputTypeCreate, + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + if body.render_backend not in VALID_RENDER_BACKENDS: + raise HTTPException(400, detail=f"Invalid render_backend. Choose: {', '.join(sorted(VALID_RENDER_BACKENDS))}") + + existing = await db.execute(select(OutputType).where(OutputType.name == body.name)) + if existing.scalar_one_or_none(): + raise HTTPException(409, detail=f"Output type '{body.name}' already exists") + + ot = OutputType(**body.model_dump()) + db.add(ot) + await db.commit() + await db.refresh(ot) + # Reload with pricing_tier + result2 = await db.execute( + select(OutputType).options(selectinload(OutputType.pricing_tier)).where(OutputType.id == ot.id) + ) + return _ot_to_out(result2.scalar_one()) + + +@router.patch("/{output_type_id}", response_model=OutputTypeOut) +async def update_output_type( + output_type_id: uuid.UUID, + body: OutputTypePatch, + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + result = await db.execute(select(OutputType).where(OutputType.id == output_type_id)) + ot = result.scalar_one_or_none() + if not ot: + raise HTTPException(404, detail="Output type not found") + + data = body.model_dump(exclude_unset=True) + if "render_backend" in data and data["render_backend"] not in VALID_RENDER_BACKENDS: + raise HTTPException(400, detail=f"Invalid render_backend. 
Choose: {', '.join(sorted(VALID_RENDER_BACKENDS))}") + + for field_name, value in data.items(): + setattr(ot, field_name, value) + await db.commit() + await db.refresh(ot) + # Reload with pricing_tier + result2 = await db.execute( + select(OutputType).options(selectinload(OutputType.pricing_tier)).where(OutputType.id == ot.id) + ) + return _ot_to_out(result2.scalar_one()) + + +@router.delete("/{output_type_id}", status_code=status.HTTP_204_NO_CONTENT) +async def delete_output_type( + output_type_id: uuid.UUID, + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + result = await db.execute(select(OutputType).where(OutputType.id == output_type_id)) + ot = result.scalar_one_or_none() + if not ot: + raise HTTPException(404, detail="Output type not found") + + # Check if referenced by order_lines + usage = await db.execute( + select(OrderLine).where(OrderLine.output_type_id == output_type_id).limit(1) + ) + if usage.scalar_one_or_none(): + raise HTTPException(409, detail="Output type is referenced by existing order lines and cannot be deleted") + + await db.delete(ot) + await db.commit() diff --git a/backend/app/api/routers/pricing.py b/backend/app/api/routers/pricing.py new file mode 100644 index 0000000..e5ddecc --- /dev/null +++ b/backend/app/api/routers/pricing.py @@ -0,0 +1,152 @@ +"""Pricing tiers router — CRUD for category × quality-level price configuration.""" +from datetime import datetime +from decimal import Decimal +from typing import Optional +import uuid + +from fastapi import APIRouter, Depends, HTTPException, status +from pydantic import BaseModel +from sqlalchemy import select, update as sql_update +from sqlalchemy.exc import IntegrityError +from sqlalchemy.ext.asyncio import AsyncSession + +from app.database import get_db +from app.models.pricing_tier import PricingTier +from app.models.user import User +from app.utils.auth import require_admin_or_pm, get_current_user + +router = APIRouter(prefix="/pricing", tags=["pricing"]) + + +# ── Schemas ──────────────────────────────────────────────────────────────────── + +class PricingTierOut(BaseModel): + id: int + category_key: str + quality_level: str + price_per_item: float + description: Optional[str] + is_active: bool + created_at: datetime + updated_at: datetime + + model_config = {"from_attributes": True} + + +class PricingTierCreate(BaseModel): + category_key: str + quality_level: str = "Normal" + price_per_item: Decimal + description: Optional[str] = None + is_active: bool = True + + +class PricingTierPatch(BaseModel): + category_key: Optional[str] = None + quality_level: Optional[str] = None + price_per_item: Optional[Decimal] = None + description: Optional[str] = None + is_active: Optional[bool] = None + + +# ── Endpoints ────────────────────────────────────────────────────────────────── + +@router.get("", response_model=list[PricingTierOut]) +async def list_pricing_tiers( + _user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +) -> list[PricingTierOut]: + result = await db.execute( + select(PricingTier).order_by(PricingTier.category_key, PricingTier.quality_level) + ) + return result.scalars().all() + + +@router.post("", response_model=PricingTierOut, status_code=status.HTTP_201_CREATED) +async def create_pricing_tier( + body: PricingTierCreate, + _user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +) -> PricingTierOut: + tier = PricingTier( + category_key=body.category_key, + quality_level=body.quality_level, + 
price_per_item=body.price_per_item, + description=body.description, + is_active=body.is_active, + ) + db.add(tier) + try: + await db.commit() + await db.refresh(tier) + except IntegrityError: + await db.rollback() + raise HTTPException( + status_code=409, + detail=f"Pricing tier for '{body.category_key}' / '{body.quality_level}' already exists", + ) + return tier + + +@router.patch("/{tier_id}", response_model=PricingTierOut) +async def update_pricing_tier( + tier_id: int, + body: PricingTierPatch, + _user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +) -> PricingTierOut: + result = await db.execute(select(PricingTier).where(PricingTier.id == tier_id)) + tier = result.scalar_one_or_none() + if tier is None: + raise HTTPException(status_code=404, detail="Pricing tier not found") + + patch = body.model_dump(exclude_unset=True) + if patch: + patch["updated_at"] = datetime.utcnow() + await db.execute( + sql_update(PricingTier).where(PricingTier.id == tier_id).values(**patch) + ) + await db.commit() + result = await db.execute(select(PricingTier).where(PricingTier.id == tier_id)) + tier = result.scalar_one() + return tier + + +@router.delete("/{tier_id}", status_code=status.HTTP_204_NO_CONTENT) +async def delete_pricing_tier( + tier_id: int, + _user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +) -> None: + result = await db.execute(select(PricingTier).where(PricingTier.id == tier_id)) + tier = result.scalar_one_or_none() + if tier is None: + raise HTTPException(status_code=404, detail="Pricing tier not found") + await db.delete(tier) + await db.commit() + + +# ── Price Estimation ────────────────────────────────────────────────────────── + +class EstimateLineInput(BaseModel): + product_id: uuid.UUID + output_type_id: uuid.UUID | None = None + + +class EstimateRequest(BaseModel): + lines: list[EstimateLineInput] + + +@router.post("/estimate") +async def estimate_price( + body: EstimateRequest, + _user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Estimate the total price for a set of prospective order lines. + + Open to all authenticated users (read-only, needed by wizard). 
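+
+    Illustrative request body (UUIDs are placeholders; the shape matches
+    EstimateRequest / EstimateLineInput above):
+        {"lines": [{"product_id": "<product-uuid>", "output_type_id": "<output-type-uuid>"},
+                   {"product_id": "<product-uuid>", "output_type_id": null}]}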
+ """ + from app.services.pricing_service import estimate_order_price + lines_dicts = [{"product_id": str(l.product_id), "output_type_id": str(l.output_type_id) if l.output_type_id else None} for l in body.lines] + return await estimate_order_price(db, lines_dicts) diff --git a/backend/app/api/routers/products.py b/backend/app/api/routers/products.py new file mode 100644 index 0000000..db0f10a --- /dev/null +++ b/backend/app/api/routers/products.py @@ -0,0 +1,931 @@ +"""Product library API router.""" +import hashlib +import io +import json +import os +import re +import uuid +import zipfile +from pathlib import Path + +from fastapi import APIRouter, Depends, File, HTTPException, Query, UploadFile, status +from fastapi.responses import StreamingResponse +from pydantic import BaseModel +from sqlalchemy import select, or_, text +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload, joinedload + +from app.config import settings +from app.database import get_db +from app.models.cad_file import CadFile, ProcessingStatus +from app.models.material import Material +from app.models.order import Order +from app.models.order_line import OrderLine +from app.models.output_type import OutputType +from app.models.product import Product +from app.models.render_position import ProductRenderPosition +from app.models.system_setting import SystemSetting +from app.schemas.order import OrderOut +from app.schemas.product import ProductCreate, ProductOut, ProductPatch +from app.schemas.render_position import RenderPositionCreate, RenderPositionPatch, RenderPositionOut +from app.utils.auth import get_current_user, require_admin_or_pm +from app.models.user import User + +router = APIRouter(prefix="/products", tags=["products"]) + + +def _best_render_url(product: Product, priority: list[str]) -> str | None: + """Walk the priority list and return the first available render URL. + + Each entry in priority is tried in order: + "cad_thumbnail" — stop and return None (caller shows STEP thumbnail) + "latest_render" — pick newest completed render regardless of output type + — pick newest render of that specific output type + + Returns None if nothing is found (or "cad_thumbnail" is reached first). 
+ """ + for source in priority: + if source == "cad_thumbnail": + return None # Signal to caller to show STEP thumbnail + + filter_ot_id: str | None = None if source == "latest_render" else source + + best = None + best_time = None + for line in product.order_lines: + if line.render_status != "completed" or not line.result_path: + continue + if filter_ot_id is not None and str(line.output_type_id) != filter_ot_id: + continue + url = _result_path_to_url(line.result_path) + if url and (best_time is None or (line.render_completed_at and line.render_completed_at > best_time)): + disk = _resolve_disk_path(url) + if disk and disk.exists(): + best = url + best_time = line.render_completed_at + + if best: + return best # Found a match for this priority entry + + return None # Nothing found in the entire priority list + + +def _product_out(product: Product, priority: list[str] | None = None) -> ProductOut: + out = ProductOut.model_validate(product) + out.thumbnail_url = product.thumbnail_url + out.processing_status = product.processing_status + out.cad_parsed_objects = product.cad_parsed_objects + out.render_image_url = _best_render_url(product, priority or ["latest_render", "cad_thumbnail"]) + out.stl_cached = _stl_cached_qualities(product) + return out + + +def _stl_cached_qualities(product: Product) -> list[str]: + """Return list of STL qualities that are cached on disk for this product.""" + from pathlib import Path as _Path + cad = product.cad_file + if not cad or not cad.stored_path: + return [] + step = _Path(cad.stored_path) + return [q for q in ("low", "high") if (step.parent / f"{step.stem}_{q}.stl").exists()] + + +async def _load_thumbnail_priority(db: AsyncSession) -> list[str]: + """Read product_thumbnail_priority from system_settings. + + Falls back to ["latest_render", "cad_thumbnail"] (legacy behaviour). + Also reads the old product_thumbnail_source key for backward compatibility. 
+ """ + row = await db.execute( + select(SystemSetting).where(SystemSetting.key == "product_thumbnail_priority") + ) + setting = row.scalar_one_or_none() + if setting: + try: + parsed = json.loads(setting.value) + if isinstance(parsed, list) and parsed: + return parsed + except (json.JSONDecodeError, TypeError): + pass + + # Legacy fallback: read old product_thumbnail_source key + legacy_row = await db.execute( + select(SystemSetting).where(SystemSetting.key == "product_thumbnail_source") + ) + legacy = legacy_row.scalar_one_or_none() + if legacy: + src = legacy.value + if src == "cad_thumbnail": + return ["cad_thumbnail"] + elif src == "latest_render": + return ["latest_render", "cad_thumbnail"] + else: + return [src, "latest_render", "cad_thumbnail"] + + return ["latest_render", "cad_thumbnail"] + + +@router.get("", response_model=list[ProductOut]) +async def list_products( + q: str = Query(""), + category_key: str = Query(""), + has_cad: bool | None = Query(None), + ready_only: bool = Query(False), + materials_filter: str = Query(""), # "complete" | "incomplete" | "" + skip: int = Query(0, ge=0), + limit: int = Query(50, ge=1, le=200), + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + stmt = ( + select(Product) + .options( + selectinload(Product.cad_file), + selectinload(Product.order_lines), + selectinload(Product.render_positions), + ) + .where(Product.is_active.is_(True)) + ) + if q: + pattern = f"%{q}%" + stmt = stmt.where( + or_(Product.pim_id.ilike(pattern), Product.name.ilike(pattern)) + ) + if category_key: + stmt = stmt.where(Product.category_key == category_key) + if ready_only: + stmt = stmt.where(Product.cad_file_id.is_not(None)) + elif has_cad is True: + stmt = stmt.where(Product.cad_file_id.is_not(None)) + elif has_cad is False: + stmt = stmt.where(Product.cad_file_id.is_(None)) + if materials_filter == "incomplete": + # STEP processed, but cad_part_materials is empty or has at least one blank entry. + stmt = stmt.join(CadFile, CadFile.id == Product.cad_file_id).where( + CadFile.processing_status == ProcessingStatus.completed, + text( + "(" + " jsonb_array_length(products.cad_part_materials) = 0" + " OR EXISTS (" + " SELECT 1 FROM jsonb_array_elements(products.cad_part_materials) AS m" + " WHERE coalesce(m->>'material', '') = ''" + " )" + ")" + ), + ) + elif materials_filter == "complete": + # STEP processed, cad_part_materials non-empty, and every entry has a material. 
+ stmt = stmt.join(CadFile, CadFile.id == Product.cad_file_id).where( + CadFile.processing_status == ProcessingStatus.completed, + text( + "(" + " jsonb_array_length(products.cad_part_materials) > 0" + " AND NOT EXISTS (" + " SELECT 1 FROM jsonb_array_elements(products.cad_part_materials) AS m" + " WHERE coalesce(m->>'material', '') = ''" + " )" + ")" + ), + ) + + stmt = stmt.order_by(Product.updated_at.desc()).offset(skip).limit(limit) + result = await db.execute(stmt) + products = result.scalars().all() + priority = await _load_thumbnail_priority(db) + return [_product_out(p, priority) for p in products] + + +@router.post("", response_model=ProductOut, status_code=status.HTTP_201_CREATED) +async def create_product( + body: ProductCreate, + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + existing = await db.execute(select(Product).where(Product.pim_id == body.pim_id)) + if existing.scalar_one_or_none(): + raise HTTPException(409, detail=f"Product with pim_id '{body.pim_id}' already exists") + + from app.services.product_service import create_default_positions + product = Product(**body.model_dump()) + db.add(product) + await db.flush() + await create_default_positions(db, product.id) + await db.commit() + result = await db.execute( + select(Product) + .options( + selectinload(Product.cad_file), + selectinload(Product.order_lines), + selectinload(Product.render_positions), + ) + .where(Product.id == product.id) + ) + return _product_out(result.scalar_one()) + + +@router.get("/{product_id}", response_model=ProductOut) +async def get_product( + product_id: uuid.UUID, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + result = await db.execute( + select(Product) + .options( + selectinload(Product.cad_file), + selectinload(Product.order_lines), + selectinload(Product.render_positions), + ) + .where(Product.id == product_id) + ) + product = result.scalar_one_or_none() + if not product: + raise HTTPException(404, detail="Product not found") + priority = await _load_thumbnail_priority(db) + return _product_out(product, priority) + + +@router.patch("/{product_id}", response_model=ProductOut) +async def update_product( + product_id: uuid.UUID, + body: ProductPatch, + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + result = await db.execute( + select(Product).options(selectinload(Product.cad_file)).where(Product.id == product_id) + ) + product = result.scalar_one_or_none() + if not product: + raise HTTPException(404, detail="Product not found") + + for field_name, value in body.model_dump(exclude_unset=True).items(): + setattr(product, field_name, value) + await db.commit() + await db.refresh(product) + return _product_out(product) + + +@router.delete("/{product_id}", status_code=status.HTTP_204_NO_CONTENT) +async def delete_product( + product_id: uuid.UUID, + hard: bool = Query(False, description="Hard delete (permanent) instead of soft delete"), + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + result = await db.execute(select(Product).where(Product.id == product_id)) + product = result.scalar_one_or_none() + if not product: + raise HTTPException(404, detail="Product not found") + if hard: + from sqlalchemy import delete as sql_delete + # Delete order_lines referencing this product + await db.execute(sql_delete(OrderLine).where(OrderLine.product_id == product_id)) + await db.delete(product) + else: + product.is_active = False + await db.commit() + 
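+
+# Usage note (illustrative; paths are relative to wherever this router is mounted):
+#   DELETE /products/{product_id}            -> soft delete (sets is_active=False)
+#   DELETE /products/{product_id}?hard=true  -> permanent delete, also removes
+#                                               order_lines referencing the product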
+ +@router.post("/{product_id}/cad", status_code=status.HTTP_201_CREATED) +async def upload_product_cad( + product_id: uuid.UUID, + file: UploadFile = File(...), + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + """Upload or replace the STEP file for a product.""" + suffix = Path(file.filename or "").suffix.lower() + if suffix not in {".stp", ".step"}: + raise HTTPException(400, detail="Only .stp / .step files are accepted") + + result = await db.execute( + select(Product).options(selectinload(Product.cad_file)).where(Product.id == product_id) + ) + product = result.scalar_one_or_none() + if not product: + raise HTTPException(404, detail="Product not found") + + content = await file.read() + file_hash = hashlib.sha256(content).hexdigest() + + # Dedup by hash + existing_cad = await db.execute(select(CadFile).where(CadFile.file_hash == file_hash)) + cad_file = existing_cad.scalar_one_or_none() + + if cad_file is None: + step_dir = Path(settings.upload_dir) / "step_files" + step_dir.mkdir(parents=True, exist_ok=True) + stored_name = f"{uuid.uuid4()}{suffix}" + stored_path = step_dir / stored_name + stored_path.write_bytes(content) + + cad_file = CadFile( + original_name=file.filename, + stored_path=str(stored_path), + file_hash=file_hash, + file_size=len(content), + processing_status=ProcessingStatus.pending, + ) + db.add(cad_file) + await db.commit() + await db.refresh(cad_file) + + try: + from app.tasks.step_tasks import process_step_file + process_step_file.delay(str(cad_file.id)) + except Exception: + pass + + # Link to product + from app.services.product_service import link_cad_to_product + product = await link_cad_to_product(db, product_id, cad_file.id) + + return { + "cad_file_id": str(cad_file.id), + "original_name": cad_file.original_name, + "file_hash": file_hash, + "status": "uploaded" if cad_file.processing_status == ProcessingStatus.pending else "already_exists", + "product_id": str(product_id), + } + + +@router.post("/{product_id}/cad-materials", response_model=ProductOut) +async def save_product_cad_materials( + product_id: uuid.UUID, + body: dict, + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + """Save cad_part_materials and enqueue thumbnail regeneration.""" + result = await db.execute( + select(Product) + .options( + selectinload(Product.cad_file), + selectinload(Product.order_lines), + selectinload(Product.render_positions), + ) + .where(Product.id == product_id) + ) + product = result.scalar_one_or_none() + if not product: + raise HTTPException(404, detail="Product not found") + + parts = body.get("parts", []) + product.cad_part_materials = parts + + # Auto-add new material names to the materials library + material_names = {p["material"].strip() for p in parts if p.get("material", "").strip()} + if material_names: + existing = await db.execute( + select(Material).where( + or_(*[Material.name.ilike(name) for name in material_names]) + ) + ) + existing_names = {m.name.lower() for m in existing.scalars().all()} + for name in material_names: + if name.lower() not in existing_names: + db.add(Material(name=name, source="product_assign", created_by=user.id)) + + await db.commit() + + if product.cad_file_id: + try: + from app.services.step_processor import build_part_colors + from app.tasks.step_tasks import regenerate_thumbnail + parsed_objects = product.cad_parsed_objects or [] + part_colors = build_part_colors(parsed_objects, parts) + regenerate_thumbnail.delay(str(product.cad_file_id), part_colors) 
+ except Exception: + pass + + # Re-fetch with all relationships for _product_out + result2 = await db.execute( + select(Product) + .options( + selectinload(Product.cad_file), + selectinload(Product.order_lines), + selectinload(Product.render_positions), + ) + .where(Product.id == product_id) + ) + return _product_out(result2.scalar_one()) + + +@router.post("/{product_id}/regenerate", status_code=status.HTTP_202_ACCEPTED) +async def regenerate_product_thumbnail( + product_id: uuid.UUID, + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + """Re-queue thumbnail generation with current part_colors.""" + result = await db.execute( + select(Product).options(selectinload(Product.cad_file)).where(Product.id == product_id) + ) + product = result.scalar_one_or_none() + if not product: + raise HTTPException(404, detail="Product not found") + if not product.cad_file_id: + raise HTTPException(400, detail="Product has no CAD file") + + try: + from app.services.step_processor import build_part_colors + from app.tasks.step_tasks import regenerate_thumbnail + parsed_objects = product.cad_parsed_objects or [] + part_colors = build_part_colors(parsed_objects, product.cad_part_materials or []) + task = regenerate_thumbnail.delay(str(product.cad_file_id), part_colors) + return {"status": "queued", "task_id": str(task.id)} + except Exception as exc: + raise HTTPException(500, detail=f"Failed to enqueue: {exc}") + + +def _normalize_part_token_name(name: str) -> str: + """Lowercase, strip .prt extension, normalise separators to underscore.""" + import re as _re + name = name.lower().strip() + if name.endswith(".prt"): + name = name[:-4] + # Hyphens and dots → underscores for uniform token splitting + return _re.sub(r"[-.]", "_", name) + + +def _part_tokens(name: str) -> set[str]: + """Return significant tokens: length ≥ 2, not pure-numeric, contains a letter.""" + return { + t for t in name.split("_") + if len(t) >= 2 and not t.isdigit() and any(c.isalpha() for c in t) + } + + +def _jaccard(a: set, b: set) -> float: + if not a or not b: + return 0.0 + return len(a & b) / len(a | b) + + +def build_materials_from_excel( + cad_parts: list[str], + excel_components: list[dict], + similarity_threshold: float = 0.3, +) -> list[dict]: + """Match CAD part names to Excel components and return cad_part_materials list. + + Pure function — no DB access, sync-safe, callable from Celery tasks. + + Matching strategy per CAD part: + 1. Exact case-insensitive name match + 2. Token-based Jaccard similarity on normalised filenames + 3. 
Position-based fallback for low-confidence matches + """ + excel_entries: list[tuple[set[str], str, str]] = [] + for c in excel_components: + raw = (c.get("part_name") or "").lower().strip() + norm = _normalize_part_token_name(raw) + tokens = _part_tokens(norm) + excel_entries.append((tokens, raw, c.get("material") or "")) + + new_materials: list[dict] = [] + for i, cad_part in enumerate(cad_parts): + cad_raw_lower = cad_part.lower() + cad_norm = _normalize_part_token_name(cad_raw_lower) + cad_tokens = _part_tokens(cad_norm) + + best_mat = "" + best_score = 0.0 + + for tokens, raw, material in excel_entries: + if raw == cad_raw_lower: + best_mat = material + best_score = 1.0 + break + score = _jaccard(tokens, cad_tokens) + if score > best_score: + best_score = score + best_mat = material + + if best_score < similarity_threshold: + if i < len(excel_components): + best_mat = excel_components[i].get("material") or "" + + new_materials.append({"part_name": cad_part, "material": best_mat}) + + return new_materials + + +@router.post("/{product_id}/reassign-materials-from-excel", response_model=ProductOut) +async def reassign_materials_from_excel( + product_id: uuid.UUID, + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + """Populate cad_part_materials from Excel component data stored on this product. + + Matching strategy (applied per CAD part in order): + 1. Exact case-insensitive name match (for generic semantic names like "Außenring") + 2. Token-based Jaccard similarity on normalised part filenames: + Excel stores the .prt filename; OCC extracts assembly instance names derived + from the same file. Stripping extensions, separators and numeric-only tokens + lets them be compared reliably (e.g. "z-563681_krk_tr_jpb_dummy-90771.prt" + ↔ "Z-563681_KRK_JPB_DUMMY_1_AF0_1" → Jaccard ≈ 0.6). + 3. Position-based fallback for low-confidence matches. + + After this the Part Materials UI shows pre-filled materials that can be + reviewed/adjusted before saving. Thumbnail regeneration is queued automatically. + """ + result = await db.execute( + select(Product) + .options(selectinload(Product.cad_file), selectinload(Product.order_lines)) + .where(Product.id == product_id) + ) + product = result.scalar_one_or_none() + if not product: + raise HTTPException(404, detail="Product not found") + + cad_parts: list[str] = product.cad_parsed_objects or [] + if not cad_parts: + raise HTTPException( + 400, + detail="No parsed CAD parts found. Use 'Re-process STEP' first to extract part names.", + ) + + excel_components: list[dict] = product.components or [] + if not excel_components: + raise HTTPException( + 400, + detail="No Excel component data found on this product. 
Was it imported from an Excel file?", + ) + + new_materials = build_materials_from_excel(cad_parts, excel_components) + product.cad_part_materials = new_materials + await db.commit() + + if product.cad_file_id: + try: + from app.services.step_processor import build_part_colors + from app.tasks.step_tasks import regenerate_thumbnail + part_colors = build_part_colors(cad_parts, new_materials) + regenerate_thumbnail.delay(str(product.cad_file_id), part_colors) + except Exception: + pass + + result2 = await db.execute( + select(Product) + .options( + selectinload(Product.cad_file), + selectinload(Product.order_lines), + selectinload(Product.render_positions), + ) + .where(Product.id == product_id) + ) + return _product_out(result2.scalar_one()) + + +@router.post("/{product_id}/reprocess", status_code=status.HTTP_202_ACCEPTED) +async def reprocess_product_cad( + product_id: uuid.UUID, + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + """Re-queue full STEP processing (parse objects + generate thumbnail) for a product.""" + result = await db.execute( + select(Product).options(selectinload(Product.cad_file)).where(Product.id == product_id) + ) + product = result.scalar_one_or_none() + if not product: + raise HTTPException(404, detail="Product not found") + if not product.cad_file_id: + raise HTTPException(400, detail="Product has no CAD file") + + try: + from app.models.cad_file import ProcessingStatus as PS + from sqlalchemy import update as sql_update + await db.execute( + sql_update(CadFile) + .where(CadFile.id == product.cad_file_id) + .values(processing_status=PS.pending, parsed_objects=None) + ) + await db.commit() + + from app.tasks.step_tasks import process_step_file + task = process_step_file.delay(str(product.cad_file_id)) + return {"status": "queued", "task_id": str(task.id)} + except Exception as exc: + raise HTTPException(500, detail=f"Failed to enqueue: {exc}") + + +VIDEO_EXTENSIONS = {".mp4", ".webm", ".avi", ".mov"} + + +def _result_path_to_url(result_path: str) -> str | None: + """Convert an internal result_path to a servable static URL.""" + # Flamenco / shared renders: /shared/renders/X/file.jpg → /renders/X/file.jpg + if "/renders/" in result_path: + idx = result_path.index("/renders/") + return result_path[idx:] + # Celery renders stored as thumbnails: /app/uploads/thumbnails/X.png → /thumbnails/X.png + if "/thumbnails/" in result_path: + idx = result_path.index("/thumbnails/") + return result_path[idx:] + return None + + +def _resolve_disk_path(url: str) -> Path | None: + """Given a servable URL like /renders/X/file.jpg, resolve to disk path.""" + if url.startswith("/renders/"): + return Path(settings.upload_dir) / "renders" / url[len("/renders/"):] + if url.startswith("/thumbnails/"): + return Path(settings.upload_dir) / "thumbnails" / url[len("/thumbnails/"):] + return None + + +@router.get("/{product_id}/renders") +async def get_product_renders( + product_id: uuid.UUID, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """List completed render outputs for a product.""" + result = await db.execute( + select(OrderLine) + .options( + joinedload(OrderLine.output_type), + joinedload(OrderLine.order), + ) + .where( + OrderLine.product_id == product_id, + OrderLine.render_status == "completed", + OrderLine.result_path.is_not(None), + ) + .order_by(OrderLine.render_completed_at.desc()) + ) + lines = result.unique().scalars().all() + + renders = [] + for line in lines: + url = 
_result_path_to_url(line.result_path) + if url is None: + continue + disk = _resolve_disk_path(url) + if disk is None or not disk.exists(): + continue + ext = Path(url).suffix.lower() + renders.append({ + "order_line_id": str(line.id), + "order_number": line.order.order_number if line.order else None, + "output_type_name": line.output_type.name if line.output_type else None, + "render_url": url, + "is_video": ext in VIDEO_EXTENSIONS, + "render_backend": line.render_backend_used, + "completed_at": line.render_completed_at.isoformat() if line.render_completed_at else None, + }) + return renders + + +@router.delete("/{product_id}/renders/{order_line_id}", status_code=204) +async def delete_product_render( + product_id: uuid.UUID, + order_line_id: uuid.UUID, + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + """Delete a render output for a product. + + Removes the file from disk, clears result_path, and resets render_status to + 'pending' so the line can be re-dispatched if needed. + """ + from sqlalchemy import update as sql_update + + result = await db.execute( + select(OrderLine).where( + OrderLine.id == order_line_id, + OrderLine.product_id == product_id, + ) + ) + line = result.scalar_one_or_none() + if line is None: + raise HTTPException(404, detail="Render not found for this product") + + # Delete file from disk + if line.result_path: + url = _result_path_to_url(line.result_path) + if url: + disk = _resolve_disk_path(url) + if disk and disk.exists(): + try: + disk.unlink() + except OSError as exc: + # Log but don't fail — DB cleanup still proceeds + import logging + logging.getLogger(__name__).warning( + f"Could not delete render file {disk}: {exc}" + ) + + await db.execute( + sql_update(OrderLine) + .where(OrderLine.id == order_line_id) + .values(result_path=None, render_status="pending", render_completed_at=None) + ) + await db.commit() + + +class DownloadRendersRequest(BaseModel): + order_line_ids: list[uuid.UUID] + + +@router.post("/{product_id}/download-renders") +async def download_product_renders( + product_id: uuid.UUID, + body: DownloadRendersRequest, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Stream a ZIP of selected completed render files for a product.""" + prod_result = await db.execute(select(Product).where(Product.id == product_id)) + product = prod_result.scalar_one_or_none() + if not product: + raise HTTPException(404, detail="Product not found") + + lines_result = await db.execute( + select(OrderLine) + .options( + joinedload(OrderLine.output_type), + joinedload(OrderLine.order), + ) + .where( + OrderLine.id.in_(body.order_line_ids), + OrderLine.product_id == product_id, + OrderLine.render_status == "completed", + OrderLine.result_path.is_not(None), + ) + ) + lines = lines_result.unique().scalars().all() + + if not lines: + raise HTTPException(404, detail="No completed renders found for the selected lines") + + def _resolve_path(p: str) -> str: + if p.startswith("/shared/"): + return settings.upload_dir + p[len("/shared"):] + return p + + def _safe(s: str) -> str: + return re.sub(r"[^\w\-.]", "_", s).strip("_") + + buf = io.BytesIO() + name_counts: dict[str, int] = {} + + with zipfile.ZipFile(buf, mode="w", compression=zipfile.ZIP_DEFLATED) as zf: + for line in lines: + if not line.result_path: + continue + fs_path = _resolve_path(line.result_path) + if not os.path.isfile(fs_path): + continue + ot_name = (line.output_type.name if line.output_type else None) or "render" + order_num = 
(line.order.order_number if line.order else None) or "unknown" + ext = os.path.splitext(line.result_path)[1] or ".png" + base_name = f"{_safe(ot_name)}_{_safe(order_num)}{ext}" + if base_name in name_counts: + name_counts[base_name] += 1 + stem, suffix = os.path.splitext(base_name) + archive_name = f"{stem}_{name_counts[base_name]}{suffix}" + else: + name_counts[base_name] = 0 + archive_name = base_name + zf.write(fs_path, archive_name) + + if not zf.infolist(): + raise HTTPException(404, detail="No render files found on disk") + + buf.seek(0) + product_name = product.name or product.pim_id or "product" + safe_name = re.sub(r"[^\w\-]", "_", product_name) + filename = f"{safe_name}_renders.zip" + + return StreamingResponse( + buf, + media_type="application/zip", + headers={"Content-Disposition": f'attachment; filename="{filename}"'}, + ) + + +@router.get("/{product_id}/orders", response_model=list[OrderOut]) +async def get_product_orders( + product_id: uuid.UUID, + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + """List orders that reference this product via order_lines.""" + from app.models.order import Order + from sqlalchemy import func + + result = await db.execute( + select(Order) + .join(OrderLine, OrderLine.order_id == Order.id) + .where(OrderLine.product_id == product_id) + .distinct() + .order_by(Order.created_at.desc()) + ) + orders = result.scalars().all() + + out = [] + for order in orders: + d = OrderOut.model_validate(order) + cnt = await db.execute( + select(func.count(OrderLine.id)).where(OrderLine.order_id == order.id) + ) + d.line_count = cnt.scalar() or 0 + out.append(d) + return out + + +# ── Render Positions CRUD ──────────────────────────────────────────────────── + +@router.get("/{product_id}/render-positions", response_model=list[RenderPositionOut]) +async def list_render_positions( + product_id: uuid.UUID, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + result = await db.execute( + select(ProductRenderPosition) + .where(ProductRenderPosition.product_id == product_id) + .order_by(ProductRenderPosition.sort_order, ProductRenderPosition.name) + ) + return result.scalars().all() + + +@router.post( + "/{product_id}/render-positions", + response_model=RenderPositionOut, + status_code=status.HTTP_201_CREATED, +) +async def create_render_position( + product_id: uuid.UUID, + body: RenderPositionCreate, + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + prod = await db.execute(select(Product).where(Product.id == product_id)) + if not prod.scalar_one_or_none(): + raise HTTPException(404, detail="Product not found") + + pos = ProductRenderPosition(product_id=product_id, **body.model_dump()) + db.add(pos) + try: + await db.commit() + except Exception: + await db.rollback() + raise HTTPException(409, detail=f"Position named '{body.name}' already exists for this product") + await db.refresh(pos) + return pos + + +@router.patch("/{product_id}/render-positions/{pos_id}", response_model=RenderPositionOut) +async def update_render_position( + product_id: uuid.UUID, + pos_id: uuid.UUID, + body: RenderPositionPatch, + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + result = await db.execute( + select(ProductRenderPosition).where( + ProductRenderPosition.id == pos_id, + ProductRenderPosition.product_id == product_id, + ) + ) + pos = result.scalar_one_or_none() + if not pos: + raise HTTPException(404, detail="Render position not found") 
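+    # Note (added): only fields present in the request body are applied below
+    # (model_dump(exclude_unset=True)), so omitted fields keep their current values.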
+ + for field, value in body.model_dump(exclude_unset=True).items(): + setattr(pos, field, value) + try: + await db.commit() + except Exception: + await db.rollback() + raise HTTPException(409, detail="Name already exists for this product") + await db.refresh(pos) + return pos + + +@router.delete("/{product_id}/render-positions/{pos_id}", status_code=status.HTTP_204_NO_CONTENT) +async def delete_render_position( + product_id: uuid.UUID, + pos_id: uuid.UUID, + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + result = await db.execute( + select(ProductRenderPosition).where( + ProductRenderPosition.id == pos_id, + ProductRenderPosition.product_id == product_id, + ) + ) + pos = result.scalar_one_or_none() + if not pos: + raise HTTPException(404, detail="Render position not found") + await db.delete(pos) + await db.commit() + diff --git a/backend/app/api/routers/render_templates.py b/backend/app/api/routers/render_templates.py new file mode 100644 index 0000000..d173949 --- /dev/null +++ b/backend/app/api/routers/render_templates.py @@ -0,0 +1,360 @@ +"""Render Templates API — CRUD + .blend file upload/download + material library.""" +import uuid +import shutil +from datetime import datetime +from pathlib import Path + +from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form, status +from fastapi.responses import FileResponse +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select, update as sql_update, delete as sql_delete +from pydantic import BaseModel + +from app.database import get_db +from app.config import settings as app_settings +from app.models.user import User +from app.models.render_template import RenderTemplate +from app.models.output_type import OutputType +from app.models.system_setting import SystemSetting +from app.utils.auth import require_admin_or_pm + +router = APIRouter(tags=["render-templates"]) + +BLEND_DIR = "blend-templates" + + +def _blend_dir() -> Path: + d = Path(app_settings.upload_dir) / BLEND_DIR + d.mkdir(parents=True, exist_ok=True) + return d + + +# ── Schemas ────────────────────────────────────────────────────────────────── + +class RenderTemplateOut(BaseModel): + id: str + name: str + category_key: str | None + output_type_id: str | None + output_type_name: str | None + blend_file_path: str + original_filename: str + target_collection: str + material_replace_enabled: bool + lighting_only: bool + shadow_catcher_enabled: bool + camera_orbit: bool + is_active: bool + created_at: str + updated_at: str + + model_config = {"from_attributes": True} + + +class RenderTemplateUpdate(BaseModel): + name: str | None = None + category_key: str | None = None + output_type_id: str | None = None + target_collection: str | None = None + material_replace_enabled: bool | None = None + lighting_only: bool | None = None + shadow_catcher_enabled: bool | None = None + camera_orbit: bool | None = None + is_active: bool | None = None + + +class MaterialLibraryInfo(BaseModel): + exists: bool + filename: str | None = None + size_bytes: int | None = None + path: str | None = None + + +def _to_out(t: RenderTemplate) -> dict: + ot_name = None + if t.output_type: + ot_name = t.output_type.name + return { + "id": str(t.id), + "name": t.name, + "category_key": t.category_key, + "output_type_id": str(t.output_type_id) if t.output_type_id else None, + "output_type_name": ot_name, + "blend_file_path": t.blend_file_path, + "original_filename": t.original_filename, + "target_collection": t.target_collection, + 
"material_replace_enabled": t.material_replace_enabled, + "lighting_only": t.lighting_only, + "shadow_catcher_enabled": t.shadow_catcher_enabled, + "camera_orbit": t.camera_orbit, + "is_active": t.is_active, + "created_at": t.created_at.isoformat() if t.created_at else "", + "updated_at": t.updated_at.isoformat() if t.updated_at else "", + } + + +# ── CRUD Endpoints ─────────────────────────────────────────────────────────── + +@router.get("/render-templates", response_model=list[RenderTemplateOut]) +async def list_render_templates( + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + result = await db.execute( + select(RenderTemplate).order_by(RenderTemplate.created_at.desc()) + ) + return [_to_out(t) for t in result.scalars().all()] + + +@router.post("/render-templates", response_model=RenderTemplateOut, status_code=status.HTTP_201_CREATED) +async def create_render_template( + name: str = Form(...), + file: UploadFile = File(...), + category_key: str | None = Form(None), + output_type_id: str | None = Form(None), + target_collection: str = Form("Product"), + material_replace_enabled: bool = Form(False), + lighting_only: bool = Form(False), + shadow_catcher_enabled: bool = Form(False), + camera_orbit: bool = Form(True), + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + if not file.filename or not file.filename.endswith(".blend"): + raise HTTPException(400, detail="File must be a .blend file") + + # Normalise empty strings from form data to None + if category_key == "" or category_key == "null": + category_key = None + if output_type_id == "" or output_type_id == "null": + output_type_id = None + + template_id = uuid.uuid4() + blend_path = _blend_dir() / f"{template_id}.blend" + + with open(blend_path, "wb") as f: + shutil.copyfileobj(file.file, f) + + ot_uuid = uuid.UUID(output_type_id) if output_type_id else None + + tmpl = RenderTemplate( + id=template_id, + name=name, + category_key=category_key, + output_type_id=ot_uuid, + blend_file_path=str(blend_path), + original_filename=file.filename, + target_collection=target_collection, + material_replace_enabled=material_replace_enabled, + lighting_only=lighting_only, + shadow_catcher_enabled=shadow_catcher_enabled, + camera_orbit=camera_orbit, + ) + db.add(tmpl) + await db.commit() + await db.refresh(tmpl) + + # Eagerly load output_type for response + if ot_uuid: + ot = await db.get(OutputType, ot_uuid) + tmpl.output_type = ot + + return _to_out(tmpl) + + +@router.patch("/render-templates/{template_id}", response_model=RenderTemplateOut) +async def update_render_template( + template_id: uuid.UUID, + body: RenderTemplateUpdate, + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + result = await db.execute(select(RenderTemplate).where(RenderTemplate.id == template_id)) + tmpl = result.scalar_one_or_none() + if not tmpl: + raise HTTPException(404, detail="Render template not found") + + updates = body.model_dump(exclude_unset=True) + + # Normalise empty strings to None for nullable fields + if "category_key" in updates and updates["category_key"] in ("", "null"): + updates["category_key"] = None + if "output_type_id" in updates: + val = updates["output_type_id"] + if val in ("", "null", None): + updates["output_type_id"] = None + else: + updates["output_type_id"] = uuid.UUID(val) + + if updates: + updates["updated_at"] = datetime.utcnow() + await db.execute( + sql_update(RenderTemplate) + .where(RenderTemplate.id == template_id) + 
.values(**updates) + ) + await db.commit() + await db.refresh(tmpl) + + return _to_out(tmpl) + + +@router.delete("/render-templates/{template_id}", status_code=status.HTTP_204_NO_CONTENT) +async def delete_render_template( + template_id: uuid.UUID, + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + result = await db.execute(select(RenderTemplate).where(RenderTemplate.id == template_id)) + tmpl = result.scalar_one_or_none() + if not tmpl: + raise HTTPException(404, detail="Render template not found") + + # Delete .blend file + blend_path = Path(tmpl.blend_file_path) + if blend_path.exists(): + blend_path.unlink(missing_ok=True) + + await db.execute(sql_delete(RenderTemplate).where(RenderTemplate.id == template_id)) + await db.commit() + + +@router.post("/render-templates/{template_id}/upload", response_model=RenderTemplateOut) +async def upload_blend_file( + template_id: uuid.UUID, + file: UploadFile = File(...), + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + """Re-upload a .blend file for an existing template.""" + if not file.filename or not file.filename.endswith(".blend"): + raise HTTPException(400, detail="File must be a .blend file") + + result = await db.execute(select(RenderTemplate).where(RenderTemplate.id == template_id)) + tmpl = result.scalar_one_or_none() + if not tmpl: + raise HTTPException(404, detail="Render template not found") + + blend_path = _blend_dir() / f"{template_id}.blend" + + # Remove old file if path changed + old_path = Path(tmpl.blend_file_path) + if old_path.exists() and old_path != blend_path: + old_path.unlink(missing_ok=True) + + with open(blend_path, "wb") as f: + shutil.copyfileobj(file.file, f) + + await db.execute( + sql_update(RenderTemplate) + .where(RenderTemplate.id == template_id) + .values( + blend_file_path=str(blend_path), + original_filename=file.filename, + updated_at=datetime.utcnow(), + ) + ) + await db.commit() + await db.refresh(tmpl) + return _to_out(tmpl) + + +@router.get("/render-templates/{template_id}/download") +async def download_blend_file( + template_id: uuid.UUID, + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + result = await db.execute(select(RenderTemplate).where(RenderTemplate.id == template_id)) + tmpl = result.scalar_one_or_none() + if not tmpl: + raise HTTPException(404, detail="Render template not found") + + blend_path = Path(tmpl.blend_file_path) + if not blend_path.exists(): + raise HTTPException(404, detail=".blend file not found on disk") + + return FileResponse( + path=str(blend_path), + filename=tmpl.original_filename, + media_type="application/octet-stream", + ) + + +# ── Material Library ───────────────────────────────────────────────────────── + +MATERIAL_LIBRARY_FILENAME = "material_library.blend" + + +async def _save_setting(db: AsyncSession, key: str, value: str) -> None: + result = await db.execute( + sql_update(SystemSetting) + .where(SystemSetting.key == key) + .values(value=value, updated_at=datetime.utcnow()) + ) + if result.rowcount == 0: + db.add(SystemSetting(key=key, value=value, updated_at=datetime.utcnow())) + + +@router.post("/admin/settings/material-library", response_model=MaterialLibraryInfo) +async def upload_material_library( + file: UploadFile = File(...), + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + if not file.filename or not file.filename.endswith(".blend"): + raise HTTPException(400, detail="File must be a .blend file") + + 
lib_path = _blend_dir() / MATERIAL_LIBRARY_FILENAME + with open(lib_path, "wb") as f: + shutil.copyfileobj(file.file, f) + + await _save_setting(db, "material_library_path", str(lib_path)) + await db.commit() + + return MaterialLibraryInfo( + exists=True, + filename=file.filename, + size_bytes=lib_path.stat().st_size, + path=str(lib_path), + ) + + +@router.get("/admin/settings/material-library", response_model=MaterialLibraryInfo) +async def get_material_library( + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + result = await db.execute( + select(SystemSetting).where(SystemSetting.key == "material_library_path") + ) + row = result.scalar_one_or_none() + path_str = row.value if row else "" + + if path_str and Path(path_str).exists(): + p = Path(path_str) + return MaterialLibraryInfo( + exists=True, + filename=p.name, + size_bytes=p.stat().st_size, + path=path_str, + ) + return MaterialLibraryInfo(exists=False) + + +@router.delete("/admin/settings/material-library", status_code=status.HTTP_204_NO_CONTENT) +async def delete_material_library( + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + result = await db.execute( + select(SystemSetting).where(SystemSetting.key == "material_library_path") + ) + row = result.scalar_one_or_none() + if row and row.value: + p = Path(row.value) + if p.exists(): + p.unlink(missing_ok=True) + + await _save_setting(db, "material_library_path", "") + await db.commit() diff --git a/backend/app/api/routers/templates.py b/backend/app/api/routers/templates.py new file mode 100644 index 0000000..a63fefd --- /dev/null +++ b/backend/app/api/routers/templates.py @@ -0,0 +1,78 @@ +import uuid +from typing import Any +from fastapi import APIRouter, Depends, HTTPException, status +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select +from pydantic import BaseModel +from app.database import get_db +from app.models.template import Template +from app.utils.auth import get_current_user, require_admin +from app.models.user import User + +router = APIRouter(prefix="/templates", tags=["templates"]) + + +class TemplateOut(BaseModel): + id: uuid.UUID + name: str + category_key: str + standard_fields: Any + component_schema: Any + description: str | None + is_active: bool + + model_config = {"from_attributes": True} + + +class TemplateUpdate(BaseModel): + name: str | None = None + description: str | None = None + is_active: bool | None = None + standard_fields: Any = None + component_schema: Any = None + + +@router.get("", response_model=list[TemplateOut]) +async def list_templates( + include_inactive: bool = False, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + q = select(Template) + # Non-admins always see only active templates + if not include_inactive or user.role.value != "admin": + q = q.where(Template.is_active == True) + result = await db.execute(q) + return result.scalars().all() + + +@router.get("/{template_id}", response_model=TemplateOut) +async def get_template( + template_id: uuid.UUID, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + result = await db.execute(select(Template).where(Template.id == template_id)) + t = result.scalar_one_or_none() + if not t: + raise HTTPException(404, detail="Template not found") + return t + + +@router.patch("/{template_id}", response_model=TemplateOut) +async def update_template( + template_id: uuid.UUID, + body: TemplateUpdate, + user: User = 
Depends(require_admin), + db: AsyncSession = Depends(get_db), +): + result = await db.execute(select(Template).where(Template.id == template_id)) + t = result.scalar_one_or_none() + if not t: + raise HTTPException(404, detail="Template not found") + + for field, val in body.model_dump(exclude_unset=True).items(): + setattr(t, field, val) + await db.commit() + await db.refresh(t) + return t diff --git a/backend/app/api/routers/uploads.py b/backend/app/api/routers/uploads.py new file mode 100644 index 0000000..2b1776a --- /dev/null +++ b/backend/app/api/routers/uploads.py @@ -0,0 +1,411 @@ +import hashlib +import uuid +from pathlib import Path + +from fastapi import APIRouter, Depends, File, HTTPException, UploadFile, status +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select + +from pydantic import BaseModel + +from app.config import settings +from app.database import get_db +from app.models.cad_file import CadFile, ProcessingStatus +from app.models.order import Order +from app.models.order_item import OrderItem +from app.models.order_line import OrderLine +from app.models.user import User +from app.schemas.upload import ParsedExcelResponse, ParsedRow, ParsedComponent, StepUploadResponse +from app.schemas.order import OrderDetailOut +from app.services.excel_parser import parse_excel, parsed_excel_to_dict +from app.services.excel_import import import_excel_to_products, preview_excel_rows +from app.services.order_service import generate_order_number +from app.utils.auth import get_current_user + +router = APIRouter(prefix="/uploads", tags=["uploads"]) + + +# ── Preview response models ──────────────────────────────────────────── + +class ExcelPreviewRow(BaseModel): + row_index: int + pim_id: str | None = None + produkt_baureihe: str | None = None + gewaehltes_produkt: str | None = None + product_exists: bool = False + product_id: str | None = None + medias_rendering: bool | None = None + category_key: str | None = None + has_step: bool = False + is_duplicate: bool = False + duplicate_of_row: int | None = None + + +class ExcelPreviewResponse(BaseModel): + excel_path: str + filename: str + category_key: str | None + row_count: int + existing_product_count: int + new_product_count: int + no_pim_id_count: int + has_step_count: int = 0 + no_step_count: int = 0 + duplicate_count: int = 0 + warnings: list[str] + rows: list[ExcelPreviewRow] + column_headers: list[str] = [] + template_name: str | None = None + + +# ── Finalize request models ──────────────────────────────────────────── + +class OutputTypeSelection(BaseModel): + row_index: int + output_type_ids: list[uuid.UUID] + + +class ExcelFinalizeRequest(BaseModel): + excel_path: str + included_row_indices: list[int] + output_type_selections: list[OutputTypeSelection] = [] + notes: str | None = None + template_id: uuid.UUID | None = None + + +ALLOWED_EXCEL = {".xlsx", ".xls"} +ALLOWED_STEP = {".stp", ".step"} + + +@router.post("/excel", response_model=ExcelPreviewResponse) +async def upload_excel( + file: UploadFile = File(...), + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Parse Excel and return a read-only preview. 
No products are created.""" + suffix = Path(file.filename or "").suffix.lower() + if suffix not in ALLOWED_EXCEL: + raise HTTPException(400, detail="Only .xlsx / .xls files are accepted") + + # Save the file + upload_dir = Path(settings.upload_dir) / "excel_files" + upload_dir.mkdir(parents=True, exist_ok=True) + tmp_name = f"{uuid.uuid4()}{suffix}" + tmp_path = upload_dir / tmp_name + + content = await file.read() + tmp_path.write_bytes(content) + + try: + parsed = parse_excel(tmp_path) + except ValueError as exc: + tmp_path.unlink(missing_ok=True) + raise HTTPException(422, detail=str(exc)) + + parsed_dict = parsed_excel_to_dict(parsed) + parsed_dict["filename"] = file.filename + parsed_dict["excel_path"] = str(tmp_path) + + rows = parsed_dict.get("rows", []) + try: + preview = await preview_excel_rows( + db, rows, category_key=parsed_dict.get("category_key"), + ) + except Exception as exc: + try: + from app.services.notification_service import emit_notification + await emit_notification( + db, + actor_user_id=user.id, + target_user_id=user.id, + action="excel.import_error", + entity_type="upload", + entity_id=None, + details={ + "filename": file.filename or "", + "error": str(exc)[:500], + }, + ) + except Exception: + pass + raise HTTPException(500, detail=f"Preview failed: {str(exc)[:300]}") + + annotated_rows = [ + ExcelPreviewRow( + row_index=r.get("row_index", 0), + pim_id=r.get("pim_id"), + produkt_baureihe=r.get("produkt_baureihe"), + gewaehltes_produkt=r.get("gewaehltes_produkt"), + product_exists=r.get("product_exists", False), + product_id=r.get("product_id"), + medias_rendering=r.get("medias_rendering"), + category_key=r.get("category_key"), + has_step=r.get("has_step", False), + is_duplicate=r.get("is_duplicate", False), + duplicate_of_row=r.get("duplicate_of_row"), + ) + for r in preview.rows + ] + + all_warnings = preview.warnings + parsed_dict.get("warnings", []) + + if all_warnings: + from app.services.notification_service import emit_notification + await emit_notification( + db, + actor_user_id=user.id, + target_user_id=user.id, + action="excel.import_warnings", + entity_type="upload", + entity_id=None, + details={ + "filename": file.filename or "", + "warning_count": len(all_warnings), + "warnings": all_warnings[:10], + }, + ) + + return ExcelPreviewResponse( + excel_path=str(tmp_path), + filename=file.filename or "", + category_key=parsed_dict.get("category_key"), + row_count=parsed_dict.get("row_count", len(rows)), + existing_product_count=preview.existing_product_count, + new_product_count=preview.new_product_count, + no_pim_id_count=preview.no_pim_id_count, + has_step_count=preview.has_step_count, + no_step_count=preview.no_step_count, + duplicate_count=preview.duplicate_count, + warnings=all_warnings, + rows=annotated_rows, + column_headers=parsed_dict.get("column_headers", []), + template_name=parsed_dict.get("template_name"), + ) + + +@router.post("/excel/finalize", response_model=OrderDetailOut, status_code=status.HTTP_201_CREATED) +async def finalize_excel( + body: ExcelFinalizeRequest, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Create products + order + lines from a previously parsed Excel file. + + This is the second step: the user has reviewed the preview and confirmed + which rows to include and which output types to request. + """ + # 1. 
Validate Excel file still exists + excel_path = Path(body.excel_path) + if not excel_path.is_file(): + raise HTTPException(404, detail="Excel file not found — please re-upload") + + # 2. Re-parse the Excel + try: + parsed = parse_excel(excel_path) + except ValueError as exc: + raise HTTPException(422, detail=str(exc)) + + parsed_dict = parsed_excel_to_dict(parsed) + all_rows = parsed_dict.get("rows", []) + + # 3. Filter to included rows + included_set = set(body.included_row_indices) + included_rows = [r for r in all_rows if r.get("row_index") in included_set] + + if not included_rows: + raise HTTPException(400, detail="No rows selected") + + # 4. Import into product library (creates/updates products) + import_result = await import_excel_to_products( + db, + included_rows, + source_excel=str(excel_path), + category_key=parsed_dict.get("category_key"), + ) + + # 5. Seed material aliases + material_mappings = parsed_dict.get("material_mappings", []) + if material_mappings: + try: + from app.services.material_service import seed_material_aliases_from_mappings + await seed_material_aliases_from_mappings(db, material_mappings) + except Exception: + pass # non-critical + + # 6. Create Order + order_number = await generate_order_number(db) + order = Order( + order_number=order_number, + template_id=body.template_id, + created_by=user.id, + source_excel=str(excel_path), + notes=body.notes, + ) + db.add(order) + await db.flush() + + # 7. Create OrderItems (legacy compat — one per included row) + for row in import_result.rows: + # If the matched product already has a STEP file linked (from a + # previous order or direct product-library upload), inherit it so the + # submit validation passes without requiring a re-upload. + inherited_cad = ( + uuid.UUID(row["product_cad_file_id"]) + if row.get("product_cad_file_id") + else None + ) + item = OrderItem( + order_id=order.id, + row_index=row.get("row_index", 0), + ebene1=row.get("ebene1"), + ebene2=row.get("ebene2"), + baureihe=row.get("baureihe"), + pim_id=row.get("pim_id"), + produkt_baureihe=row.get("produkt_baureihe"), + gewaehltes_produkt=row.get("gewaehltes_produkt"), + name_cad_modell=row.get("name_cad_modell"), + gewuenschte_bildnummer=row.get("gewuenschte_bildnummer"), + lagertyp=row.get("lagertyp"), + medias_rendering=row.get("medias_rendering"), + components=[ + c if isinstance(c, dict) else c + for c in row.get("components", []) + ], + cad_file_id=inherited_cad, + ) + db.add(item) + + # 8. Build output type selections lookup: row_index → list[UUID] + ot_map: dict[int, list[uuid.UUID]] = {} + for sel in body.output_type_selections: + ot_map[sel.row_index] = sel.output_type_ids + + # 9. Create OrderLines + for row in import_result.rows: + product_id = row.get("product_id") + if not product_id: + continue + + row_idx = row.get("row_index", 0) + type_ids = ot_map.get(row_idx, []) + + if not type_ids: + # Tracking-only line (no output type) + line = OrderLine( + order_id=order.id, + product_id=uuid.UUID(product_id), + output_type_id=None, + gewuenschte_bildnummer=row.get("gewuenschte_bildnummer"), + ) + db.add(line) + else: + for type_id in type_ids: + line = OrderLine( + order_id=order.id, + product_id=uuid.UUID(product_id), + output_type_id=type_id, + gewuenschte_bildnummer=row.get("gewuenschte_bildnummer"), + ) + db.add(line) + + # 10. 
Commit, then snapshot prices into the new draft order + try: + await db.commit() + except Exception as exc: + await db.rollback() + # Emit error notification via its own connection (session is now invalid) + try: + from app.services.notification_service import emit_notification_sync + from sqlalchemy.exc import IntegrityError + if isinstance(exc, IntegrityError) and "order_number" in str(exc): + error_msg = "Duplicate order number — please try again" + else: + error_msg = str(exc)[:300] + emit_notification_sync( + actor_user_id=user.id, + target_user_id=str(user.id), + action="excel.finalize_error", + entity_type="upload", + entity_id=None, + details={ + "filename": Path(body.excel_path).name, + "error": error_msg, + }, + ) + except Exception: + pass + from sqlalchemy.exc import IntegrityError + if isinstance(exc, IntegrityError) and "order_number" in str(exc): + raise HTTPException(409, detail="Duplicate order number — please try again") + raise HTTPException(500, detail=f"Order creation failed: {str(exc)[:200]}") + + # Snapshot prices into the draft order so the estimate is visible immediately + try: + from app.services.pricing_service import refresh_order_price + await refresh_order_price(db, order.id) + except Exception: + pass # non-critical — estimate can be computed on first view + + # Load and return full order detail + from app.api.routers.orders import _load_order_detail, _order_detail_out + order_loaded = await _load_order_detail(db, order.id) + return _order_detail_out(order_loaded) + + +@router.post("/step", response_model=StepUploadResponse, status_code=status.HTTP_201_CREATED) +async def upload_step( + file: UploadFile = File(...), + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Upload a single STEP/STP CAD file.""" + suffix = Path(file.filename or "").suffix.lower() + if suffix not in ALLOWED_STEP: + raise HTTPException(400, detail="Only .stp / .step files are accepted") + + content = await file.read() + file_hash = hashlib.sha256(content).hexdigest() + + # Check dedup + result = await db.execute(select(CadFile).where(CadFile.file_hash == file_hash)) + existing = result.scalar_one_or_none() + if existing: + return StepUploadResponse( + cad_file_id=str(existing.id), + original_name=existing.original_name, + file_hash=file_hash, + status="already_exists", + ) + + # Save file + step_dir = Path(settings.upload_dir) / "step_files" + step_dir.mkdir(parents=True, exist_ok=True) + stored_name = f"{uuid.uuid4()}{suffix}" + stored_path = step_dir / stored_name + stored_path.write_bytes(content) + + cad_file = CadFile( + original_name=file.filename, + stored_path=str(stored_path), + file_hash=file_hash, + file_size=len(content), + processing_status=ProcessingStatus.pending, + ) + db.add(cad_file) + await db.commit() + await db.refresh(cad_file) + + # Enqueue background processing task (Phase 3) + try: + from app.tasks.step_tasks import process_step_file + process_step_file.delay(str(cad_file.id)) + except Exception: + pass # Worker not configured yet + + return StepUploadResponse( + cad_file_id=str(cad_file.id), + original_name=file.filename, + file_hash=file_hash, + status="uploaded", + ) diff --git a/backend/app/api/routers/worker.py b/backend/app/api/routers/worker.py new file mode 100644 index 0000000..4e62656 --- /dev/null +++ b/backend/app/api/routers/worker.py @@ -0,0 +1,356 @@ +"""Worker activity router — exposes recent background task status.""" +from datetime import datetime + +from fastapi import APIRouter, Depends +from 
sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select +from sqlalchemy.orm import selectinload +from pydantic import BaseModel + +from app.database import get_db +from app.models.cad_file import CadFile, ProcessingStatus +from app.models.order_item import OrderItem +from app.models.order import Order +from app.models.order_line import OrderLine +from app.models.product import Product +from app.models.user import User +from app.utils.auth import get_current_user, require_admin_or_pm + +router = APIRouter(prefix="/worker", tags=["worker"]) + + +class CadActivityEntry(BaseModel): + cad_file_id: str + original_name: str + file_size: int | None + processing_status: str + error_message: str | None + updated_at: str + created_at: str + order_numbers: list[str] + render_log: dict | None + + +class RenderJobEntry(BaseModel): + order_line_id: str + order_number: str | None + product_name: str | None + output_type_name: str | None + render_status: str + render_backend_used: str | None + flamenco_job_id: str | None + render_started_at: str | None + render_completed_at: str | None + updated_at: str + + +class WorkerActivity(BaseModel): + cad_processing: list[CadActivityEntry] + active_count: int # files currently in "processing" state + failed_count: int # files in "failed" state (recent 50) + render_jobs: list[RenderJobEntry] = [] + render_active_count: int = 0 + render_failed_count: int = 0 + + +@router.get("/activity", response_model=WorkerActivity) +async def get_worker_activity( + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """ + Return recent CAD file processing activity. + + Shows the last 30 processed/failed/processing CAD files so the user can + see what the worker is doing without needing Flower or Celery logs. 
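+    The active/failed counts are computed over this same window, not over the whole table.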
+ """ + # Recent CadFiles ordered by last update, with order_items to resolve order numbers + result = await db.execute( + select(CadFile) + .order_by(CadFile.updated_at.desc()) + .limit(30) + ) + cad_files = result.scalars().all() + + if not cad_files: + return WorkerActivity(cad_processing=[], active_count=0, failed_count=0) + + # Fetch order items referencing these CAD files in one query + cad_ids = [cf.id for cf in cad_files] + items_result = await db.execute( + select(OrderItem) + .options(selectinload(OrderItem.order)) + .where(OrderItem.cad_file_id.in_(cad_ids)) + ) + items = items_result.scalars().all() + + # Build cad_file_id → list[order_number] mapping + from collections import defaultdict + cad_to_orders: dict[str, list[str]] = defaultdict(list) + for item in items: + if item.order and item.order.order_number: + key = str(item.cad_file_id) + if item.order.order_number not in cad_to_orders[key]: + cad_to_orders[key].append(item.order.order_number) + + entries = [] + for cf in cad_files: + entries.append(CadActivityEntry( + cad_file_id=str(cf.id), + original_name=cf.original_name or "unknown", + file_size=getattr(cf, "file_size", None), + processing_status=cf.processing_status.value if cf.processing_status else "unknown", + error_message=getattr(cf, "error_message", None), + updated_at=cf.updated_at.isoformat() if cf.updated_at else datetime.utcnow().isoformat(), + created_at=cf.created_at.isoformat() if cf.created_at else datetime.utcnow().isoformat(), + order_numbers=cad_to_orders.get(str(cf.id), []), + render_log=getattr(cf, "render_log", None), + )) + + active_count = sum( + 1 for cf in cad_files + if cf.processing_status == ProcessingStatus.processing + ) + failed_count = sum( + 1 for cf in cad_files + if cf.processing_status == ProcessingStatus.failed + ) + + # ── Render job activity ────────────────────────────────────────────── + render_result = await db.execute( + select(OrderLine) + .options( + selectinload(OrderLine.product), + selectinload(OrderLine.output_type), + selectinload(OrderLine.order), + ) + .where(OrderLine.output_type_id.isnot(None)) + .where(OrderLine.render_status != "pending") + .order_by(OrderLine.updated_at.desc()) + .limit(30) + ) + render_lines = render_result.scalars().all() + + render_entries = [] + for rl in render_lines: + render_entries.append(RenderJobEntry( + order_line_id=str(rl.id), + order_number=rl.order.order_number if rl.order else None, + product_name=rl.product.name if rl.product else None, + output_type_name=rl.output_type.name if rl.output_type else None, + render_status=rl.render_status, + render_backend_used=rl.render_backend_used, + flamenco_job_id=rl.flamenco_job_id, + render_started_at=rl.render_started_at.isoformat() if rl.render_started_at else None, + render_completed_at=rl.render_completed_at.isoformat() if rl.render_completed_at else None, + updated_at=rl.updated_at.isoformat(), + )) + + render_active = sum(1 for rl in render_lines if rl.render_status == "processing") + render_failed = sum(1 for rl in render_lines if rl.render_status == "failed") + + return WorkerActivity( + cad_processing=entries, + active_count=active_count, + failed_count=failed_count, + render_jobs=render_entries, + render_active_count=render_active, + render_failed_count=render_failed, + ) + + +@router.get("/render-log/{order_line_id}") +async def get_render_log( + order_line_id: str, + after: int = 0, + user: User = Depends(get_current_user), +): + """Return render log entries for an order line (polling fallback).""" + from app.services.render_log 
import get_entries, count + entries = get_entries(order_line_id, after_index=after) + total = count(order_line_id) + return {"entries": entries, "total": total, "next_after": total} + + +@router.get("/render-log/{order_line_id}/stream") +async def stream_render_log( + order_line_id: str, + user: User = Depends(get_current_user), +): + """SSE stream of render log entries for an order line.""" + import asyncio + import json + from fastapi.responses import StreamingResponse + from app.services.render_log import get_entries, count + + async def event_generator(): + cursor = 0 + idle_ticks = 0 + max_idle = 120 # stop after 2 minutes of no new entries + while idle_ticks < max_idle: + entries = get_entries(order_line_id, after_index=cursor) + if entries: + idle_ticks = 0 + for entry in entries: + yield f"data: {json.dumps(entry)}\n\n" + cursor += len(entries) + else: + idle_ticks += 1 + await asyncio.sleep(1) + yield f"data: {json.dumps({'level': 'info', 'msg': 'Stream ended (idle timeout)', 't': ''})}\n\n" + + return StreamingResponse( + event_generator(), + media_type="text/event-stream", + headers={"Cache-Control": "no-cache", "X-Accel-Buffering": "no"}, + ) + + +from fastapi import status as http_status + + +@router.post("/activity/{cad_file_id}/reprocess", status_code=http_status.HTTP_202_ACCEPTED) +async def reprocess_cad_file( + cad_file_id: str, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Re-queue a CAD file for full processing (STEP extraction + thumbnail + glTF).""" + result = await db.execute(select(CadFile).where(CadFile.id == cad_file_id)) + cad_file = result.scalar_one_or_none() + if not cad_file: + from fastapi import HTTPException + raise HTTPException(404, detail="CAD file not found") + + cad_file.processing_status = ProcessingStatus.pending + await db.commit() + + from app.tasks.step_tasks import process_step_file + process_step_file.delay(cad_file_id) + return {"queued": cad_file_id, "task": "process_step_file"} + + +# --------------------------------------------------------------------------- +# Queue inspection + control +# --------------------------------------------------------------------------- + +MONITORED_QUEUES = ["step_processing", "thumbnail_rendering", "ai_validation"] + + +def _parse_redis_task(raw: str) -> dict | None: + """Parse a raw Redis Celery message into a simplified dict.""" + import json, base64 + try: + msg = json.loads(raw) + headers = msg.get("headers", {}) + task_name = headers.get("task", "unknown") + task_id = headers.get("id", "unknown") + argsrepr = headers.get("argsrepr", "") + args: list = [] + try: + body = json.loads(base64.b64decode(msg.get("body", ""))) + if isinstance(body, list) and body: + args = list(body[0]) + except Exception: + pass + return { + "task_id": task_id, + "task_name": task_name, + "args": args, + "argsrepr": argsrepr, + "status": "pending", + } + except Exception: + return None + + +@router.get("/queue") +async def get_queue_status(user: User = Depends(get_current_user)): + """Return Celery queue depths, pending tasks, and active/reserved tasks.""" + import asyncio + import redis as redis_lib + from app.config import settings as app_settings + from app.tasks.celery_app import celery_app + + r = redis_lib.from_url(app_settings.redis_url, decode_responses=True) + + # Pending tasks per queue from Redis + queue_depths: dict[str, int] = {} + pending: list[dict] = [] + for q in MONITORED_QUEUES: + depth = r.llen(q) or 0 + queue_depths[q] = depth + if depth > 0: + raw_items = 
r.lrange(q, 0, 99) + for raw in raw_items: + task = _parse_redis_task(raw) + if task: + task["queue"] = q + pending.append(task) + + # Active / reserved from Celery inspect (runs in thread, 1.5 s timeout) + active: list[dict] = [] + reserved: list[dict] = [] + + def _inspect() -> tuple[dict, dict]: + try: + insp = celery_app.control.inspect(timeout=1.5) + return (insp.active() or {}), (insp.reserved() or {}) + except Exception: + return {}, {} + + act_raw, rsv_raw = await asyncio.to_thread(_inspect) + + for worker, tasks in act_raw.items(): + for t in (tasks or []): + active.append({ + "task_id": t.get("id", ""), + "task_name": t.get("name", ""), + "args": list(t.get("args") or []), + "argsrepr": t.get("kwargs", {}).get("argsrepr", ""), + "status": "active", + "worker": worker, + }) + + for worker, tasks in rsv_raw.items(): + for t in (tasks or []): + reserved.append({ + "task_id": t.get("id", ""), + "task_name": t.get("name", ""), + "args": list(t.get("args") or []), + "argsrepr": "", + "status": "reserved", + "worker": worker, + }) + + return { + "queue_depths": queue_depths, + "pending_count": sum(queue_depths.values()), + "active": active, + "reserved": reserved, + "pending": pending, + } + + +@router.post("/queue/purge", status_code=http_status.HTTP_202_ACCEPTED) +async def purge_queue(user: User = Depends(require_admin_or_pm)): + """Delete all pending tasks from all monitored queues.""" + import redis as redis_lib + from app.config import settings as app_settings + + r = redis_lib.from_url(app_settings.redis_url, decode_responses=True) + total = 0 + for q in MONITORED_QUEUES: + count = r.llen(q) or 0 + if count: + r.delete(q) + total += count + return {"purged": total, "message": f"Removed {total} pending task(s) from queue"} + + +@router.post("/queue/cancel/{task_id}", status_code=http_status.HTTP_202_ACCEPTED) +async def cancel_task(task_id: str, user: User = Depends(require_admin_or_pm)): + """Revoke a task by ID. 
Terminates it if running, skips it if still pending.""" + from app.tasks.celery_app import celery_app + celery_app.control.revoke(task_id, terminate=True, signal="SIGTERM") + return {"revoked": task_id} diff --git a/backend/app/config.py b/backend/app/config.py new file mode 100644 index 0000000..7043f2b --- /dev/null +++ b/backend/app/config.py @@ -0,0 +1,50 @@ +from pydantic_settings import BaseSettings +from typing import Optional + + +class Settings(BaseSettings): + # Database + postgres_db: str = "schaeffler" + postgres_user: str = "schaeffler" + postgres_password: str = "schaeffler" + postgres_host: str = "localhost" + postgres_port: int = 5432 + + @property + def database_url(self) -> str: + return ( + f"postgresql+asyncpg://{self.postgres_user}:{self.postgres_password}" + f"@{self.postgres_host}:{self.postgres_port}/{self.postgres_db}" + ) + + @property + def database_url_sync(self) -> str: + return ( + f"postgresql://{self.postgres_user}:{self.postgres_password}" + f"@{self.postgres_host}:{self.postgres_port}/{self.postgres_db}" + ) + + # Redis / Celery + redis_url: str = "redis://localhost:6379/0" + + # JWT + jwt_secret_key: str = "changeme" + jwt_algorithm: str = "HS256" + jwt_access_token_expire_minutes: int = 480 + + # Azure OpenAI + azure_openai_api_key: Optional[str] = None + azure_openai_endpoint: Optional[str] = None + azure_openai_deployment: str = "gpt-4o" + azure_openai_api_version: str = "2024-02-01" + + # File Storage + upload_dir: str = "/app/uploads" + max_upload_size_mb: int = 500 + + class Config: + env_file = ".env" + case_sensitive = False + + +settings = Settings() diff --git a/backend/app/data/__init__.py b/backend/app/data/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/data/__pycache__/__init__.cpython-311.pyc b/backend/app/data/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..e93ae74 Binary files /dev/null and b/backend/app/data/__pycache__/__init__.cpython-311.pyc differ diff --git a/backend/app/data/__pycache__/material_alias_seeds.cpython-311.pyc b/backend/app/data/__pycache__/material_alias_seeds.cpython-311.pyc new file mode 100644 index 0000000..7ef10b7 Binary files /dev/null and b/backend/app/data/__pycache__/material_alias_seeds.cpython-311.pyc differ diff --git a/backend/app/data/__pycache__/schaeffler_materials.cpython-311.pyc b/backend/app/data/__pycache__/schaeffler_materials.cpython-311.pyc new file mode 100644 index 0000000..8d226f0 Binary files /dev/null and b/backend/app/data/__pycache__/schaeffler_materials.cpython-311.pyc differ diff --git a/backend/app/data/material_alias_seeds.py b/backend/app/data/material_alias_seeds.py new file mode 100644 index 0000000..ec3f620 --- /dev/null +++ b/backend/app/data/material_alias_seeds.py @@ -0,0 +1,298 @@ +"""Material alias seed data — derived from naming_scheme.xlsx Materialmapping sheet. + +Each entry maps a SCHAEFFLER library material name to its known aliases: +- German description (Col A from Materialmapping) +- Intermediate identifier (Col B, e.g. "Steel_black_oxided--Stahl_brueniert") +- Schaeffler code as string (e.g. 
"10102") +""" + +MATERIAL_ALIAS_SEEDS: list[dict] = [ + # --- 01 Metals --- + { + "material_name": "SCHAEFFLER_010101_Steel-Bare", + "aliases": [ + "Stahl", + "Stahl, glänzend", + "Stahl, konserviert", + "Steel--Stahl", + "Steel_bearings--Stahl_Lager", + "Steel", + "Stahl, gänzend", + "10101", + ], + }, + { + "material_name": "SCHAEFFLER_010102_Steel-Burnished", + "aliases": [ + "Stahl, brüniert", + "Steel_black_oxided--Stahl_brueniert", + "10102", + ], + }, + { + "material_name": "SCHAEFFLER_010103_Steel-Galvanized", + "aliases": [ + "Stahl, verzinkt", + "Steel_galvanized--Stahl_verzinkt", + "MU-Stahl, Zinnüberzug", + "MX-Stahl, Zinnüberzug", + "10103", + ], + }, + { + "material_name": "SCHAEFFLER_010104_Steel-Casted", + "aliases": [ + "Stahl Körnung", + "Guss", + "Steel_cast--Stahl_Guss", + "10104", + ], + }, + { + "material_name": "SCHAEFFLER_010105_Steel-Plate", + "aliases": [ + "Stahlblech", + "Steel_sheet--Stahlblech", + "10105", + ], + }, + { + "material_name": "SCHAEFFLER_010201_Niro", + "aliases": [ + "Niro", + "Steel_stainless--Niro", + "10201", + ], + }, + { + "material_name": "SCHAEFFLER_010301_Tin", + "aliases": [ + "Zinnüberzug", + "Tin--Zinn", + "10301", + ], + }, + { + "material_name": "SCHAEFFLER_010401_Aluminium", + "aliases": [ + "Aluminium", + "Aluminium--Aluminium", + "10401", + ], + }, + { + "material_name": "SCHAEFFLER_010501_Brass", + "aliases": [ + "Messing", + "Brass--Messing", + "10501", + ], + }, + { + "material_name": "SCHAEFFLER_010601_Bronze", + "aliases": [ + "MU-B; Bronze", + "Bronze", + "Bronze--Bronze", + "10601", + ], + }, + # --- 02 Coatings --- + { + "material_name": "SCHAEFFLER_020101_Durotect-Blue", + "aliases": [ + "Stahl, Durotect CMT", + "Durotect_CMT--Durotect_CMT", + "20101", + ], + }, + { + "material_name": "SCHAEFFLER_020102_Durotect-Black", + "aliases": [ + "Stahl, Durotect M", + "Stahl; Durotect M", + "Durotect_M--Durotect_M", + "20102", + ], + }, + { + "material_name": "SCHAEFFLER_020201_Coat-Black", + "aliases": [ + "Stahl, schwarz", + "Steel_coated_black--Stahl_beschichtet_schwarz", + "20201", + ], + }, + # --- 03 Non-metals --- + { + "material_name": "SCHAEFFLER_030101_Elastomer-Brown", + "aliases": [ + "Elastomer, braun", + "Elastomer_brown--Elastomer_braun", + "30101", + ], + }, + { + "material_name": "SCHAEFFLER_030102_Elastomer-Green", + "aliases": [ + "Elastomer, grün", + "Elastomer_green--Elastomer_gruen", + "30102", + ], + }, + { + "material_name": "SCHAEFFLER_030103_Elastomer-Black", + "aliases": [ + "Elastomer, schwarz", + "Eslastomer_black--Elastomer_schwarz", + "TPU, schwarz", + "NBR, schwarz", + "30103", + ], + }, + { + "material_name": "SCHAEFFLER_030201_Plastic-Brown", + "aliases": [ + "Kunststoff, braun", + "Plastic_brown--Kunststoff_braun", + "30201", + ], + }, + { + "material_name": "SCHAEFFLER_030202_Plastic-Green", + "aliases": [ + "Kunststoff, grün", + "Plastic_green--Kunststoff_gruen", + "30202", + ], + }, + { + "material_name": "SCHAEFFLER_030203_Plastic-Black", + "aliases": [ + "Kunststoff, schwarz", + "Plastic_black--Kunststoff_schwarz", + "30203", + ], + }, + { + "material_name": "SCHAEFFLER_030204_Plastic-Blue", + "aliases": [ + "Kunststoff, blau", + "Plastic_blue--Kunststoff_blau", + "30204", + ], + }, + { + "material_name": "SCHAEFFLER_030205_Plastic-White", + "aliases": [ + "Kunststoff, weiß", + "Plastic_white--Kunststoff_weiss", + "30205", + ], + }, + { + "material_name": "SCHAEFFLER_030301_Plastic-Clear", + "aliases": [ + "Kunststoff, durchsichtig", + "Plastic_clear--Kunststoff_durchsichtig", + "30301", + 
], + }, + { + "material_name": "SCHAEFFLER_030302_Plastic-Translucent-White", + "aliases": [ + "Plastic_translucent_white--Kunststoff_transluzent_weiss", + "30302", + ], + }, + { + "material_name": "SCHAEFFLER_030401_TPU-Blue", + "aliases": [ + "TPU, blau", + "Elastomer_blue--Elastomer_blau", + "30401", + ], + }, + { + "material_name": "SCHAEFFLER_030501_Ceramic-Black", + "aliases": [ + "Keramik, schwarz", + "Ceramics_black--Keramik_schwarz", + "30501", + ], + }, + # --- 04 Compounds --- + { + "material_name": "SCHAEFFLER_040101_E40", + "aliases": [ + "E40", + "E40--E40", + "40101", + ], + }, + { + "material_name": "SCHAEFFLER_040102_E50", + "aliases": [ + "E50", + "E50--E50", + "40102", + ], + }, + { + "material_name": "SCHAEFFLER_040201_Elgoglide", + "aliases": [ + "Elgoglide", + "Elgoglide--Elgoglide", + "40201", + ], + }, + { + "material_name": "SCHAEFFLER_040202_Elgotex", + "aliases": [ + "Elgotex, schwarz", + "ELGOTEX, schwarz", + "Elgotex--Elgotex", + "40202", + ], + }, + { + "material_name": "SCHAEFFLER_040301_PTFE-Niro-Compound", + "aliases": [ + "PTFE-Compound, Niro-Verbund", + "PTFE_compound_stainless_steel_composite--PTFE_Compound_Niro_Verbund", + "40301", + ], + }, + { + "material_name": "SCHAEFFLER_040302_PTFE-Foil", + "aliases": [ + "PTFE-Folie", + "PTFE_film--PTFE_Folie", + "40302", + ], + }, + { + "material_name": "SCHAEFFLER_040303_PTFE-Compound-Black", + "aliases": [ + "PTFE-Verbund, schwarz", + "PTFE_compound_black--PTFE_Verbund_schwarz", + "40303", + ], + }, + { + "material_name": "SCHAEFFLER_040304_PTFE-Compound-Orange", + "aliases": [ + "PTFE-Verbundwerkstoff", + "PTFE_composite_material_orange--PTFE_Verbundwerkstoff_orange", + "40304", + ], + }, + { + "material_name": "SCHAEFFLER_040305_GFK-PTFE-Compound", + "aliases": [ + "GFK+PTFE Verbundwerkstoff, schwarz", + "GFK_PTFE_compound--GFK_PTFE_Verbundwerkstoff", + "40305", + ], + }, +] diff --git a/backend/app/data/schaeffler_materials.py b/backend/app/data/schaeffler_materials.py new file mode 100644 index 0000000..1d36a69 --- /dev/null +++ b/backend/app/data/schaeffler_materials.py @@ -0,0 +1,48 @@ +"""Schaeffler standard materials — single source of truth. 
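+The schaeffler_code value is the numeric form of the six-digit code embedded in each name (e.g. SCHAEFFLER_010102_Steel-Burnished -> 10102).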
+ +Naming convention: SCHAEFFLER_[TypeCode(2)][SubType(2)][Consecutive(2)]_[Name-Parts-Dashed] +Type codes: 01=Metals, 02=Coatings, 03=Non-metals, 04=Compounds, 05=Misc +""" + +SCHAEFFLER_MATERIALS: list[dict] = [ + # --- 01 Metals --- + {"name": "SCHAEFFLER_010101_Steel-Bare", "description": "Stahl / Stahl, glänzend / Stahl, konserviert", "schaeffler_code": 10101, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_010102_Steel-Burnished", "description": "Stahl, brüniert", "schaeffler_code": 10102, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_010103_Steel-Galvanized", "description": "Stahl, verzinkt", "schaeffler_code": 10103, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_010104_Steel-Casted", "description": "Stahl Körnung", "schaeffler_code": 10104, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_010105_Steel-Plate", "description": "Stahlblech", "schaeffler_code": 10105, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_010201_Niro", "description": "Niro", "schaeffler_code": 10201, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_010301_Tin", "description": "MU-Stahl, Zinnüberzug / MX-Stahl, Zinnüberzug", "schaeffler_code": 10301, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_010401_Aluminium", "description": "Aluminium", "schaeffler_code": 10401, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_010501_Brass", "description": "Messing", "schaeffler_code": 10501, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_010601_Bronze", "description": "MU-B, Bronze", "schaeffler_code": 10601, "source": "schaeffler_standard"}, + # --- 02 Coatings --- + {"name": "SCHAEFFLER_020101_Durotect-Blue", "description": "Stahl, Durotect CMT", "schaeffler_code": 20101, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_020102_Durotect-Black", "description": "Stahl, Durotect M", "schaeffler_code": 20102, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_020201_Coat-Black", "description": "", "schaeffler_code": 20201, "source": "schaeffler_standard"}, + # --- 03 Non-metals --- + {"name": "SCHAEFFLER_030101_Elastomer-Brown", "description": "Elastomer, braun", "schaeffler_code": 30101, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_030102_Elastomer-Green", "description": "Elastomer, grün", "schaeffler_code": 30102, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_030103_Elastomer-Black", "description": "Elastomer, schwarz", "schaeffler_code": 30103, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_030201_Plastic-Brown", "description": "Kunststoff, braun", "schaeffler_code": 30201, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_030202_Plastic-Green", "description": "Kunststoff, grün", "schaeffler_code": 30202, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_030203_Plastic-Black", "description": "Kunststoff, schwarz", "schaeffler_code": 30203, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_030204_Plastic-Blue", "description": "Kunststoff, blau", "schaeffler_code": 30204, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_030205_Plastic-White", "description": "Kunststoff, weiß", "schaeffler_code": 30205, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_030301_Plastic-Clear", "description": "Kunststoff, durchsichtig", "schaeffler_code": 30301, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_030302_Plastic-Translucent-White", "description": "", "schaeffler_code": 30302, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_030401_TPU-Blue", 
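The docstring above fixes the naming convention: a six-digit code, two digits each for type, sub-type, and consecutive number, repeated in both the material name and `schaeffler_code`. A minimal sketch of decomposing such a code and resolving a raw alias string against `MATERIAL_ALIAS_SEEDS`; the helper names and the `app.data.material_aliases` import path are assumptions, not part of the commit:

```python
# Illustrative helpers only; the import path for the alias seeds is assumed.
from app.data.material_aliases import MATERIAL_ALIAS_SEEDS

TYPE_CODES = {1: "Metals", 2: "Coatings", 3: "Non-metals", 4: "Compounds", 5: "Misc"}


def split_schaeffler_code(code: int) -> tuple[int, int, int]:
    """Split e.g. 10102 into (type=1, sub_type=1, consecutive=2)."""
    return code // 10000, (code // 100) % 100, code % 100


def resolve_alias(raw: str) -> str | None:
    """Map a raw material string (Excel/CAD wording or numeric code) to a canonical name."""
    needle = raw.strip().lower()
    for seed in MATERIAL_ALIAS_SEEDS:
        if needle in (a.lower() for a in seed["aliases"]):
            return seed["material_name"]
    return None


if __name__ == "__main__":
    print(split_schaeffler_code(10102))      # (1, 1, 2): Metals / Steel / Burnished
    print(resolve_alias("Stahl, brüniert"))  # SCHAEFFLER_010102_Steel-Burnished
```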
"description": "TPU, blau", "schaeffler_code": 30401, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_030501_Ceramic-Black", "description": "Keramik, schwarz", "schaeffler_code": 30501, "source": "schaeffler_standard"}, + # --- 04 Compounds --- + {"name": "SCHAEFFLER_040101_E40", "description": "E40", "schaeffler_code": 40101, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_040102_E50", "description": "E50", "schaeffler_code": 40102, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_040201_Elgoglide", "description": "Elgoglide", "schaeffler_code": 40201, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_040202_Elgotex", "description": "Elgotex, schwarz", "schaeffler_code": 40202, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_040301_PTFE-Niro-Compound", "description": "PTFE-Compound, Niro-Verbund", "schaeffler_code": 40301, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_040302_PTFE-Foil", "description": "PTFE-Folie", "schaeffler_code": 40302, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_040303_PTFE-Compound-Black", "description": "PTFE-Verbund, schwarz", "schaeffler_code": 40303, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_040304_PTFE-Compound-Orange", "description": "PTFE-Verbundwerkstoff", "schaeffler_code": 40304, "source": "schaeffler_standard"}, + {"name": "SCHAEFFLER_040305_GFK-PTFE-Compound", "description": "GFK+PTFE Verbundwerkstoff, schwarz / TPU, schwarz", "schaeffler_code": 40305, "source": "schaeffler_standard"}, + # --- 05 Misc --- + {"name": "SCHAEFFLER_059999_FailedMaterial", "description": "", "schaeffler_code": 59999, "source": "schaeffler_standard"}, +] diff --git a/backend/app/database.py b/backend/app/database.py new file mode 100644 index 0000000..9d4f08f --- /dev/null +++ b/backend/app/database.py @@ -0,0 +1,29 @@ +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, async_sessionmaker +from sqlalchemy.orm import DeclarativeBase +from app.config import settings + +engine = create_async_engine( + settings.database_url, + echo=False, + pool_pre_ping=True, + pool_size=10, + max_overflow=20, +) + +AsyncSessionLocal = async_sessionmaker( + engine, + class_=AsyncSession, + expire_on_commit=False, +) + + +class Base(DeclarativeBase): + pass + + +async def get_db() -> AsyncSession: + async with AsyncSessionLocal() as session: + try: + yield session + finally: + await session.close() diff --git a/backend/app/main.py b/backend/app/main.py new file mode 100644 index 0000000..d91ce21 --- /dev/null +++ b/backend/app/main.py @@ -0,0 +1,71 @@ +from contextlib import asynccontextmanager +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from fastapi.staticfiles import StaticFiles +from pathlib import Path + +from app.config import settings +from app.database import engine, Base +from app.api.routers import auth, uploads, orders, templates, admin, order_items, cad, materials, worker, analytics, pricing, products, output_types, render_templates, notifications + + +@asynccontextmanager +async def lifespan(app: FastAPI): + # Create upload directories + for subdir in ("step_files", "excel_files", "thumbnails", "renders", "blend-templates"): + Path(settings.upload_dir, subdir).mkdir(parents=True, exist_ok=True) + yield + + +app = FastAPI( + title="Schaeffler Automat API", + version="0.1.0", + description="Media-creation pipeline for Schaeffler CAD/bearing product orders", + lifespan=lifespan, +) + +app.add_middleware( + CORSMiddleware, + 
allow_origins=["http://localhost:5173", "http://localhost:3000", "http://frontend:5173", "http://localhost:8888"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +# Mount static files for thumbnails (dir created in lifespan; skip if not writable) +thumbnails_dir = Path(settings.upload_dir) / "thumbnails" +try: + thumbnails_dir.mkdir(parents=True, exist_ok=True) + app.mount("/thumbnails", StaticFiles(directory=str(thumbnails_dir)), name="thumbnails") +except (PermissionError, OSError): + pass # Running outside Docker without upload dir — thumbnails won't be served statically + +# Mount static files for renders +renders_dir = Path(settings.upload_dir) / "renders" +try: + renders_dir.mkdir(parents=True, exist_ok=True) + app.mount("/renders", StaticFiles(directory=str(renders_dir)), name="renders") +except (PermissionError, OSError): + pass + +# Include routers +app.include_router(auth.router, prefix="/api") +app.include_router(uploads.router, prefix="/api") +app.include_router(orders.router, prefix="/api") +app.include_router(templates.router, prefix="/api") +app.include_router(admin.router, prefix="/api") +app.include_router(order_items.router, prefix="/api") +app.include_router(cad.router, prefix="/api") +app.include_router(materials.router, prefix="/api") +app.include_router(worker.router, prefix="/api") +app.include_router(analytics.router, prefix="/api") +app.include_router(pricing.router, prefix="/api") +app.include_router(products.router, prefix="/api") +app.include_router(output_types.router, prefix="/api") +app.include_router(render_templates.router, prefix="/api") +app.include_router(notifications.router, prefix="/api") + + +@app.get("/health") +async def health(): + return {"status": "ok", "service": "schaefflerautomat-backend"} diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py new file mode 100644 index 0000000..038c2a6 --- /dev/null +++ b/backend/app/models/__init__.py @@ -0,0 +1,20 @@ +from app.models.user import User +from app.models.template import Template +from app.models.cad_file import CadFile +from app.models.order import Order +from app.models.order_item import OrderItem +from app.models.audit_log import AuditLog +from app.models.pricing_tier import PricingTier +from app.models.product import Product +from app.models.output_type import OutputType +from app.models.order_line import OrderLine +from app.models.render_template import RenderTemplate +from app.models.material import Material +from app.models.material_alias import MaterialAlias +from app.models.render_position import ProductRenderPosition + +__all__ = [ + "User", "Template", "CadFile", "Order", "OrderItem", "AuditLog", + "PricingTier", "Product", "OutputType", "OrderLine", + "RenderTemplate", "Material", "MaterialAlias", "ProductRenderPosition", +] diff --git a/backend/app/models/__pycache__/__init__.cpython-311.pyc b/backend/app/models/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..8e3f6ab Binary files /dev/null and b/backend/app/models/__pycache__/__init__.cpython-311.pyc differ diff --git a/backend/app/models/__pycache__/__init__.cpython-312.pyc b/backend/app/models/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..65bf3b6 Binary files /dev/null and b/backend/app/models/__pycache__/__init__.cpython-312.pyc differ diff --git a/backend/app/models/__pycache__/audit_log.cpython-311.pyc b/backend/app/models/__pycache__/audit_log.cpython-311.pyc new file mode 100644 index 0000000..ac561e4 Binary files 
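Routers are mounted under `/api`, while `/thumbnails`, `/renders`, and `/health` sit at the root. A quick smoke-test sketch; the base URL, port, and the login path/payload are assumptions:

```python
# Smoke-test sketch; base URL, port, and the /api/auth/login path are assumptions.
import httpx

BASE = "http://localhost:8000"

with httpx.Client(base_url=BASE) as client:
    print(client.get("/health").json())  # expected: {"status": "ok", "service": "schaefflerautomat-backend"}
    resp = client.post("/api/auth/login", json={"email": "admin@example.com", "password": "..."})
    print(resp.status_code)
```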
/dev/null and b/backend/app/models/__pycache__/audit_log.cpython-311.pyc differ diff --git a/backend/app/models/__pycache__/audit_log.cpython-312.pyc b/backend/app/models/__pycache__/audit_log.cpython-312.pyc new file mode 100644 index 0000000..8ab7f0c Binary files /dev/null and b/backend/app/models/__pycache__/audit_log.cpython-312.pyc differ diff --git a/backend/app/models/__pycache__/cad_file.cpython-311.pyc b/backend/app/models/__pycache__/cad_file.cpython-311.pyc new file mode 100644 index 0000000..e739e34 Binary files /dev/null and b/backend/app/models/__pycache__/cad_file.cpython-311.pyc differ diff --git a/backend/app/models/__pycache__/cad_file.cpython-312.pyc b/backend/app/models/__pycache__/cad_file.cpython-312.pyc new file mode 100644 index 0000000..dbeb814 Binary files /dev/null and b/backend/app/models/__pycache__/cad_file.cpython-312.pyc differ diff --git a/backend/app/models/__pycache__/material.cpython-311.pyc b/backend/app/models/__pycache__/material.cpython-311.pyc new file mode 100644 index 0000000..ae26184 Binary files /dev/null and b/backend/app/models/__pycache__/material.cpython-311.pyc differ diff --git a/backend/app/models/__pycache__/material_alias.cpython-311.pyc b/backend/app/models/__pycache__/material_alias.cpython-311.pyc new file mode 100644 index 0000000..9a29c40 Binary files /dev/null and b/backend/app/models/__pycache__/material_alias.cpython-311.pyc differ diff --git a/backend/app/models/__pycache__/order.cpython-311.pyc b/backend/app/models/__pycache__/order.cpython-311.pyc new file mode 100644 index 0000000..0a5ab6c Binary files /dev/null and b/backend/app/models/__pycache__/order.cpython-311.pyc differ diff --git a/backend/app/models/__pycache__/order.cpython-312.pyc b/backend/app/models/__pycache__/order.cpython-312.pyc new file mode 100644 index 0000000..21cb0df Binary files /dev/null and b/backend/app/models/__pycache__/order.cpython-312.pyc differ diff --git a/backend/app/models/__pycache__/order_item.cpython-311.pyc b/backend/app/models/__pycache__/order_item.cpython-311.pyc new file mode 100644 index 0000000..f51d638 Binary files /dev/null and b/backend/app/models/__pycache__/order_item.cpython-311.pyc differ diff --git a/backend/app/models/__pycache__/order_item.cpython-312.pyc b/backend/app/models/__pycache__/order_item.cpython-312.pyc new file mode 100644 index 0000000..42a49c5 Binary files /dev/null and b/backend/app/models/__pycache__/order_item.cpython-312.pyc differ diff --git a/backend/app/models/__pycache__/order_line.cpython-311.pyc b/backend/app/models/__pycache__/order_line.cpython-311.pyc new file mode 100644 index 0000000..35b589e Binary files /dev/null and b/backend/app/models/__pycache__/order_line.cpython-311.pyc differ diff --git a/backend/app/models/__pycache__/output_type.cpython-311.pyc b/backend/app/models/__pycache__/output_type.cpython-311.pyc new file mode 100644 index 0000000..c47ae70 Binary files /dev/null and b/backend/app/models/__pycache__/output_type.cpython-311.pyc differ diff --git a/backend/app/models/__pycache__/pricing_tier.cpython-311.pyc b/backend/app/models/__pycache__/pricing_tier.cpython-311.pyc new file mode 100644 index 0000000..fddd162 Binary files /dev/null and b/backend/app/models/__pycache__/pricing_tier.cpython-311.pyc differ diff --git a/backend/app/models/__pycache__/product.cpython-311.pyc b/backend/app/models/__pycache__/product.cpython-311.pyc new file mode 100644 index 0000000..06860a2 Binary files /dev/null and b/backend/app/models/__pycache__/product.cpython-311.pyc differ diff --git 
a/backend/app/models/__pycache__/product_variant.cpython-311.pyc b/backend/app/models/__pycache__/product_variant.cpython-311.pyc new file mode 100644 index 0000000..d317463 Binary files /dev/null and b/backend/app/models/__pycache__/product_variant.cpython-311.pyc differ diff --git a/backend/app/models/__pycache__/render_position.cpython-311.pyc b/backend/app/models/__pycache__/render_position.cpython-311.pyc new file mode 100644 index 0000000..a602e54 Binary files /dev/null and b/backend/app/models/__pycache__/render_position.cpython-311.pyc differ diff --git a/backend/app/models/__pycache__/render_template.cpython-311.pyc b/backend/app/models/__pycache__/render_template.cpython-311.pyc new file mode 100644 index 0000000..ef9f725 Binary files /dev/null and b/backend/app/models/__pycache__/render_template.cpython-311.pyc differ diff --git a/backend/app/models/__pycache__/system_setting.cpython-311.pyc b/backend/app/models/__pycache__/system_setting.cpython-311.pyc new file mode 100644 index 0000000..7e71d5e Binary files /dev/null and b/backend/app/models/__pycache__/system_setting.cpython-311.pyc differ diff --git a/backend/app/models/__pycache__/template.cpython-311.pyc b/backend/app/models/__pycache__/template.cpython-311.pyc new file mode 100644 index 0000000..ea11839 Binary files /dev/null and b/backend/app/models/__pycache__/template.cpython-311.pyc differ diff --git a/backend/app/models/__pycache__/template.cpython-312.pyc b/backend/app/models/__pycache__/template.cpython-312.pyc new file mode 100644 index 0000000..40fcd88 Binary files /dev/null and b/backend/app/models/__pycache__/template.cpython-312.pyc differ diff --git a/backend/app/models/__pycache__/user.cpython-311.pyc b/backend/app/models/__pycache__/user.cpython-311.pyc new file mode 100644 index 0000000..00b0314 Binary files /dev/null and b/backend/app/models/__pycache__/user.cpython-311.pyc differ diff --git a/backend/app/models/__pycache__/user.cpython-312.pyc b/backend/app/models/__pycache__/user.cpython-312.pyc new file mode 100644 index 0000000..fc1379d Binary files /dev/null and b/backend/app/models/__pycache__/user.cpython-312.pyc differ diff --git a/backend/app/models/audit_log.py b/backend/app/models/audit_log.py new file mode 100644 index 0000000..fcc6e11 --- /dev/null +++ b/backend/app/models/audit_log.py @@ -0,0 +1,28 @@ +import uuid +from datetime import datetime +from sqlalchemy import String, Boolean, DateTime, ForeignKey +from sqlalchemy.orm import Mapped, mapped_column, relationship +from sqlalchemy.dialects.postgresql import UUID, JSONB +from app.database import Base + + +class AuditLog(Base): + __tablename__ = "audit_log" + + id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + user_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=True) + action: Mapped[str] = mapped_column(String(100), nullable=False) + entity_type: Mapped[str] = mapped_column(String(100), nullable=True) + entity_id: Mapped[str] = mapped_column(String(255), nullable=True) + details: Mapped[dict] = mapped_column(JSONB, nullable=True) + timestamp: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False) + + # Notification center columns + target_user_id: Mapped[uuid.UUID | None] = mapped_column( + UUID(as_uuid=True), ForeignKey("users.id", ondelete="SET NULL"), nullable=True, + ) + read_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) + notification: Mapped[bool] = mapped_column(Boolean, default=False, 
nullable=False) + + user: Mapped["User"] = relationship("User", back_populates="audit_logs", foreign_keys=[user_id]) + target_user: Mapped["User"] = relationship("User", foreign_keys=[target_user_id]) diff --git a/backend/app/models/cad_file.py b/backend/app/models/cad_file.py new file mode 100644 index 0000000..14bd24c --- /dev/null +++ b/backend/app/models/cad_file.py @@ -0,0 +1,37 @@ +import uuid +from datetime import datetime +from sqlalchemy import String, DateTime, Enum as SAEnum, BigInteger +from sqlalchemy.orm import Mapped, mapped_column, relationship +from sqlalchemy.dialects.postgresql import UUID, JSONB +from app.database import Base +import enum + + +class ProcessingStatus(str, enum.Enum): + pending = "pending" + processing = "processing" + completed = "completed" + failed = "failed" + + +class CadFile(Base): + __tablename__ = "cad_files" + + id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + original_name: Mapped[str] = mapped_column(String(500), nullable=False) + stored_path: Mapped[str] = mapped_column(String(1000), nullable=False) + file_hash: Mapped[str] = mapped_column(String(64), unique=True, nullable=False, index=True) + file_size: Mapped[int] = mapped_column(BigInteger, nullable=True) + parsed_objects: Mapped[dict] = mapped_column(JSONB, nullable=True) + thumbnail_path: Mapped[str] = mapped_column(String(1000), nullable=True) + gltf_path: Mapped[str] = mapped_column(String(1000), nullable=True) + processing_status: Mapped[ProcessingStatus] = mapped_column( + SAEnum(ProcessingStatus), default=ProcessingStatus.pending, nullable=False + ) + error_message: Mapped[str] = mapped_column(String(2000), nullable=True) + render_log: Mapped[dict] = mapped_column(JSONB, nullable=True) + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False) + updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + order_items: Mapped[list["OrderItem"]] = relationship("OrderItem", back_populates="cad_file") + products: Mapped[list["Product"]] = relationship("Product", back_populates="cad_file") diff --git a/backend/app/models/material.py b/backend/app/models/material.py new file mode 100644 index 0000000..43ecd94 --- /dev/null +++ b/backend/app/models/material.py @@ -0,0 +1,24 @@ +import uuid +from datetime import datetime +from sqlalchemy import String, DateTime, Text, ForeignKey, Integer +from sqlalchemy.orm import Mapped, mapped_column, relationship +from sqlalchemy.dialects.postgresql import UUID +from app.database import Base + + +class Material(Base): + __tablename__ = "materials" + + id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + name: Mapped[str] = mapped_column(String(200), nullable=False, unique=True) + description: Mapped[str] = mapped_column(Text, nullable=True) + source: Mapped[str] = mapped_column(String(20), nullable=False, default="manual") + schaeffler_code: Mapped[int | None] = mapped_column(Integer, nullable=True) + created_by: Mapped[uuid.UUID | None] = mapped_column( + UUID(as_uuid=True), ForeignKey("users.id", ondelete="SET NULL"), nullable=True + ) + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False) + updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + creator: Mapped["User"] = relationship("User", foreign_keys=[created_by], lazy="select") # type: 
ignore[name-defined] + aliases = relationship("MaterialAlias", back_populates="material", cascade="all, delete-orphan") diff --git a/backend/app/models/material_alias.py b/backend/app/models/material_alias.py new file mode 100644 index 0000000..5da3b26 --- /dev/null +++ b/backend/app/models/material_alias.py @@ -0,0 +1,19 @@ +import uuid +from datetime import datetime +from sqlalchemy import String, DateTime, ForeignKey +from sqlalchemy.orm import Mapped, mapped_column, relationship +from sqlalchemy.dialects.postgresql import UUID +from app.database import Base + + +class MaterialAlias(Base): + __tablename__ = "material_aliases" + + id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + material_id: Mapped[uuid.UUID] = mapped_column( + UUID(as_uuid=True), ForeignKey("materials.id", ondelete="CASCADE"), nullable=False + ) + alias: Mapped[str] = mapped_column(String(300), nullable=False) + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False) + + material = relationship("Material", back_populates="aliases") diff --git a/backend/app/models/order.py b/backend/app/models/order.py new file mode 100644 index 0000000..e07a66a --- /dev/null +++ b/backend/app/models/order.py @@ -0,0 +1,42 @@ +import uuid +from datetime import datetime +from decimal import Decimal +from sqlalchemy import String, DateTime, Enum as SAEnum, ForeignKey, Text, Integer, Numeric +from sqlalchemy.orm import Mapped, mapped_column, relationship +from sqlalchemy.dialects.postgresql import UUID +from app.database import Base +import enum + + +class OrderStatus(str, enum.Enum): + draft = "draft" + submitted = "submitted" + processing = "processing" + completed = "completed" + rejected = "rejected" + + +class Order(Base): + __tablename__ = "orders" + + id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + order_number: Mapped[str] = mapped_column(String(50), unique=True, nullable=False, index=True) + template_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("templates.id"), nullable=True) + status: Mapped[OrderStatus] = mapped_column(SAEnum(OrderStatus), default=OrderStatus.draft, nullable=False) + created_by: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=False) + source_excel: Mapped[str] = mapped_column(String(1000), nullable=True) + notes: Mapped[str] = mapped_column(Text, nullable=True) + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False) + updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + submitted_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) + processing_started_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) + completed_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) + rejected_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) + estimated_price: Mapped[Decimal | None] = mapped_column(Numeric(12, 2), nullable=True) + + template: Mapped["Template"] = relationship("Template", back_populates="orders") + created_by_user: Mapped["User"] = relationship("User", back_populates="orders", foreign_keys=[created_by]) + items: Mapped[list["OrderItem"]] = relationship("OrderItem", back_populates="order", cascade="all, delete-orphan") + lines: Mapped[list["OrderLine"]] = relationship( + "OrderLine", back_populates="order", cascade="all, delete-orphan" + 
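`Material` and `MaterialAlias` mirror the seed constants from the data files above, so some startup routine presumably upserts them. A simplified seeding sketch under that assumption; the actual seeder, if any, lives elsewhere in the commit and may differ, and this version has no duplicate-alias protection on re-runs:

```python
# Simplified seeding sketch; import paths for the seed constants are assumed.
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from app.data.material_aliases import MATERIAL_ALIAS_SEEDS
from app.data.schaeffler_materials import SCHAEFFLER_MATERIALS
from app.models import Material, MaterialAlias


async def seed_materials(db: AsyncSession) -> None:
    # 1. Canonical materials (skip ones that already exist by name).
    for entry in SCHAEFFLER_MATERIALS:
        existing = (await db.execute(
            select(Material).where(Material.name == entry["name"])
        )).scalar_one_or_none()
        if existing is None:
            db.add(Material(**entry))
    await db.flush()

    # 2. Alias rows pointing at their canonical material.
    for seed in MATERIAL_ALIAS_SEEDS:
        material = (await db.execute(
            select(Material).where(Material.name == seed["material_name"])
        )).scalar_one_or_none()
        if material is None:
            continue
        for alias in seed["aliases"]:
            db.add(MaterialAlias(material_id=material.id, alias=alias))
    await db.commit()
```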
) diff --git a/backend/app/models/order_item.py b/backend/app/models/order_item.py new file mode 100644 index 0000000..9845fa6 --- /dev/null +++ b/backend/app/models/order_item.py @@ -0,0 +1,71 @@ +import uuid +from datetime import datetime +from sqlalchemy import String, DateTime, Enum as SAEnum, ForeignKey, Integer, Boolean, Text +from sqlalchemy.orm import Mapped, mapped_column, relationship +from sqlalchemy.dialects.postgresql import UUID, JSONB +from app.database import Base +import enum + + +class ItemStatus(str, enum.Enum): + pending = "pending" + approved = "approved" + rejected = "rejected" + + +class AIValidationStatus(str, enum.Enum): + not_started = "not_started" + pending = "pending" + completed = "completed" + failed = "failed" + + +class OrderItem(Base): + __tablename__ = "order_items" + + id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + order_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("orders.id"), nullable=False) + row_index: Mapped[int] = mapped_column(Integer, nullable=False) + + # 11 Standard fields (columns 0-10, skip col 5) + ebene1: Mapped[str] = mapped_column(String(500), nullable=True) + ebene2: Mapped[str] = mapped_column(String(500), nullable=True) + baureihe: Mapped[str] = mapped_column(String(500), nullable=True) + pim_id: Mapped[str] = mapped_column(String(500), nullable=True) + produkt_baureihe: Mapped[str] = mapped_column(String(500), nullable=True) + # col 5 is skipped (separator) + gewaehltes_produkt: Mapped[str] = mapped_column(String(500), nullable=True) + name_cad_modell: Mapped[str] = mapped_column(String(500), nullable=True) + gewuenschte_bildnummer: Mapped[str] = mapped_column(String(500), nullable=True) + lagertyp: Mapped[str] = mapped_column(String(500), nullable=True) + medias_rendering: Mapped[bool] = mapped_column(Boolean, nullable=True) + + # Component pairs (cols 11+): [{part_name, material, component_type, column_index}] + components: Mapped[list] = mapped_column(JSONB, nullable=False, default=list) + + # CAD linkage + cad_file_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("cad_files.id"), nullable=True) + thumbnail_path: Mapped[str] = mapped_column(String(1000), nullable=True) + # Material assignments per CAD part: [{part_name, material}] + cad_part_materials: Mapped[list] = mapped_column(JSONB, nullable=False, default=list) + + # AI validation + ai_validation_status: Mapped[AIValidationStatus] = mapped_column( + SAEnum(AIValidationStatus), default=AIValidationStatus.not_started, nullable=False + ) + ai_validation_result: Mapped[dict] = mapped_column(JSONB, nullable=True) + + item_status: Mapped[ItemStatus] = mapped_column(SAEnum(ItemStatus), default=ItemStatus.pending, nullable=False) + notes: Mapped[str] = mapped_column(Text, nullable=True) + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False) + updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + order: Mapped["Order"] = relationship("Order", back_populates="items") + cad_file: Mapped["CadFile"] = relationship("CadFile", back_populates="order_items") + + @property + def cad_parsed_objects(self) -> list[str] | None: + """Part names extracted from the linked STEP file, for Pydantic serialization.""" + if self.cad_file and self.cad_file.parsed_objects: + return self.cad_file.parsed_objects.get("objects") or [] + return None diff --git a/backend/app/models/order_line.py 
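The JSONB columns on `OrderItem` are documented only by their inline comments: `components` holds the part/material pairs read from Excel columns 11+, and `cad_part_materials` the per-CAD-part material assignments. An illustrative payload for each; the concrete part names and component types are invented:

```python
# Invented example payloads matching the documented JSONB shapes.
example_components = [
    {"part_name": "Innenring", "material": "Stahl", "component_type": "ring", "column_index": 11},
    {"part_name": "Dichtung", "material": "Elastomer, schwarz", "component_type": "seal", "column_index": 13},
]

example_cad_part_materials = [
    {"part_name": "Innenring", "material": "SCHAEFFLER_010101_Steel-Bare"},
    {"part_name": "Dichtung", "material": "SCHAEFFLER_030103_Elastomer-Black"},
]
```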
b/backend/app/models/order_line.py new file mode 100644 index 0000000..e7d2364 --- /dev/null +++ b/backend/app/models/order_line.py @@ -0,0 +1,52 @@ +import uuid +import enum +from datetime import datetime +from decimal import Decimal +from sqlalchemy import String, DateTime, Text, ForeignKey, Numeric +from sqlalchemy.orm import Mapped, mapped_column, relationship +from sqlalchemy.dialects.postgresql import UUID, JSONB +from app.database import Base + + +class OrderLine(Base): + __tablename__ = "order_lines" + + id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + order_id: Mapped[uuid.UUID] = mapped_column( + UUID(as_uuid=True), ForeignKey("orders.id", ondelete="CASCADE"), nullable=False, index=True + ) + product_id: Mapped[uuid.UUID] = mapped_column( + UUID(as_uuid=True), ForeignKey("products.id"), nullable=False, index=True + ) + output_type_id: Mapped[uuid.UUID | None] = mapped_column( + UUID(as_uuid=True), ForeignKey("output_types.id"), nullable=True + ) + gewuenschte_bildnummer: Mapped[str | None] = mapped_column(String(500), nullable=True) + item_status: Mapped[str] = mapped_column(String(20), nullable=False, default="pending") + render_status: Mapped[str] = mapped_column(String(20), nullable=False, default="pending") + result_path: Mapped[str | None] = mapped_column(String(1000), nullable=True) + render_log: Mapped[dict | None] = mapped_column(JSONB, nullable=True) + ai_validation_status: Mapped[str] = mapped_column(String(20), nullable=False, default="not_started") + ai_validation_result: Mapped[dict | None] = mapped_column(JSONB, nullable=True) + flamenco_job_id: Mapped[str | None] = mapped_column(String(100), nullable=True) + render_backend_used: Mapped[str | None] = mapped_column(String(20), nullable=True) + render_started_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) + render_completed_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) + unit_price: Mapped[Decimal | None] = mapped_column(Numeric(10, 2), nullable=True) + render_position_id: Mapped[uuid.UUID | None] = mapped_column( + UUID(as_uuid=True), + ForeignKey("product_render_positions.id", ondelete="SET NULL"), + nullable=True, + ) + notes: Mapped[str | None] = mapped_column(Text, nullable=True) + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False) + updated_at: Mapped[datetime] = mapped_column( + DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False + ) + + order: Mapped["Order"] = relationship("Order", back_populates="lines") + product: Mapped["Product"] = relationship("Product", back_populates="order_lines") + output_type: Mapped["OutputType | None"] = relationship("OutputType", back_populates="order_lines") + render_position: Mapped["ProductRenderPosition | None"] = relationship( + "ProductRenderPosition", back_populates="order_lines" + ) diff --git a/backend/app/models/output_type.py b/backend/app/models/output_type.py new file mode 100644 index 0000000..b91e03c --- /dev/null +++ b/backend/app/models/output_type.py @@ -0,0 +1,36 @@ +import uuid +from datetime import datetime +from sqlalchemy import String, DateTime, Boolean, Text, Integer, ForeignKey +from sqlalchemy.orm import Mapped, mapped_column, relationship +from sqlalchemy.dialects.postgresql import UUID, JSONB + +VALID_RENDER_BACKENDS = {"celery", "flamenco", "auto"} +from app.database import Base + + +class OutputType(Base): + __tablename__ = "output_types" + + id: Mapped[uuid.UUID] = 
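`OrderLine` carries the render bookkeeping (render_status, flamenco_job_id, render_backend_used, start/finish timestamps, result_path). A sketch of how a worker might stamp these fields; the status strings other than the "pending" defaults are assumptions, since only the defaults appear in this commit:

```python
# Worker-side sketch; "processing"/"completed" status values are assumed.
from datetime import datetime

from sqlalchemy.ext.asyncio import AsyncSession

from app.models import OrderLine


async def mark_render_started(db: AsyncSession, line: OrderLine, backend: str, job_id: str | None = None) -> None:
    line.render_status = "processing"      # assumed value
    line.render_backend_used = backend     # e.g. "celery" or "flamenco"
    line.flamenco_job_id = job_id
    line.render_started_at = datetime.utcnow()
    await db.commit()


async def mark_render_completed(db: AsyncSession, line: OrderLine, result_path: str) -> None:
    line.render_status = "completed"       # assumed value
    line.result_path = result_path
    line.render_completed_at = datetime.utcnow()
    await db.commit()
```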
mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + name: Mapped[str] = mapped_column(String(200), unique=True, nullable=False) + description: Mapped[str | None] = mapped_column(Text, nullable=True) + renderer: Mapped[str] = mapped_column(String(50), nullable=False, default="threejs") + render_settings: Mapped[dict] = mapped_column(JSONB, nullable=False, default=dict) + output_format: Mapped[str] = mapped_column(String(20), nullable=False, default="png") + sort_order: Mapped[int] = mapped_column(Integer, nullable=False, default=0) + compatible_categories: Mapped[list] = mapped_column(JSONB, default=list, server_default="[]") + render_backend: Mapped[str] = mapped_column(String(20), nullable=False, default="auto", server_default="auto") + is_animation: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false") + transparent_bg: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false") + cycles_device: Mapped[str | None] = mapped_column(String(10), nullable=True, default=None) + pricing_tier_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("pricing_tiers.id", ondelete="SET NULL"), nullable=True, index=True + ) + is_active: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True) + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False) + updated_at: Mapped[datetime] = mapped_column( + DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False + ) + + order_lines: Mapped[list["OrderLine"]] = relationship("OrderLine", back_populates="output_type") + pricing_tier: Mapped["PricingTier | None"] = relationship("PricingTier", back_populates="output_types") diff --git a/backend/app/models/pricing_tier.py b/backend/app/models/pricing_tier.py new file mode 100644 index 0000000..ec93090 --- /dev/null +++ b/backend/app/models/pricing_tier.py @@ -0,0 +1,25 @@ +from datetime import datetime +from decimal import Decimal +from sqlalchemy import String, Boolean, DateTime, Text, Numeric, Integer, UniqueConstraint, Index +from sqlalchemy.orm import Mapped, mapped_column, relationship +from app.database import Base + + +class PricingTier(Base): + __tablename__ = "pricing_tiers" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) + category_key: Mapped[str] = mapped_column(String(100), nullable=False) + quality_level: Mapped[str] = mapped_column(String(50), nullable=False, default="Normal") + price_per_item: Mapped[Decimal] = mapped_column(Numeric(10, 2), nullable=False) + description: Mapped[str | None] = mapped_column(Text, nullable=True) + is_active: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False) + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False) + updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + output_types: Mapped[list["OutputType"]] = relationship("OutputType", back_populates="pricing_tier") + + __table_args__ = ( + UniqueConstraint("category_key", "quality_level", name="uq_pricing_tier"), + Index("ix_pricing_tiers_category_key", "category_key"), + ) diff --git a/backend/app/models/product.py b/backend/app/models/product.py new file mode 100644 index 0000000..7739c2c --- /dev/null +++ b/backend/app/models/product.py @@ -0,0 +1,66 @@ +import uuid +from datetime import datetime +from sqlalchemy import String, DateTime, Boolean, Text, ForeignKey +from sqlalchemy.orm 
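`VALID_RENDER_BACKENDS` sits next to `OutputType` but is not enforced by the model itself; presumably a router or schema checks it. One way that check could look, sketched as a Pydantic v2 validator (the schema class and validator are assumptions; only `VALID_RENDER_BACKENDS` and the field name come from the commit):

```python
# Sketch of enforcing VALID_RENDER_BACKENDS at the schema boundary (assumed, not in the commit).
from pydantic import BaseModel, field_validator

from app.models.output_type import VALID_RENDER_BACKENDS


class OutputTypeBackendCheck(BaseModel):
    render_backend: str = "auto"

    @field_validator("render_backend")
    @classmethod
    def check_backend(cls, v: str) -> str:
        if v not in VALID_RENDER_BACKENDS:
            raise ValueError(f"render_backend must be one of {sorted(VALID_RENDER_BACKENDS)}")
        return v
```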
import Mapped, mapped_column, relationship +from sqlalchemy.dialects.postgresql import UUID, JSONB +from app.database import Base + + +class Product(Base): + __tablename__ = "products" + + id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + pim_id: Mapped[str] = mapped_column(String(500), nullable=False) + name: Mapped[str | None] = mapped_column(String(500), nullable=True) + category_key: Mapped[str | None] = mapped_column(String(100), nullable=True, index=True) + ebene1: Mapped[str | None] = mapped_column(String(500), nullable=True) + ebene2: Mapped[str | None] = mapped_column(String(500), nullable=True) + baureihe: Mapped[str | None] = mapped_column(String(500), nullable=True) + produkt_baureihe: Mapped[str | None] = mapped_column(String(500), nullable=True) + lagertyp: Mapped[str | None] = mapped_column(String(500), nullable=True) + name_cad_modell: Mapped[str | None] = mapped_column(String(500), nullable=True, index=True) + gewuenschte_bildnummer: Mapped[str | None] = mapped_column(String(500), nullable=True) + medias_rendering: Mapped[bool | None] = mapped_column(Boolean, nullable=True) + components: Mapped[list] = mapped_column(JSONB, nullable=False, default=list) + cad_part_materials: Mapped[list] = mapped_column(JSONB, nullable=False, default=list) + cad_file_id: Mapped[uuid.UUID | None] = mapped_column( + UUID(as_uuid=True), ForeignKey("cad_files.id", ondelete="SET NULL"), nullable=True + ) + notes: Mapped[str | None] = mapped_column(Text, nullable=True) + is_active: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True) + arbeitspaket: Mapped[str | None] = mapped_column(String(500), nullable=True) + source_excel: Mapped[str | None] = mapped_column(String(1000), nullable=True) + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False) + updated_at: Mapped[datetime] = mapped_column( + DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False + ) + + cad_file: Mapped["CadFile | None"] = relationship("CadFile", back_populates="products") + order_lines: Mapped[list["OrderLine"]] = relationship( + "OrderLine", back_populates="product", cascade="all, delete-orphan" + ) + render_positions: Mapped[list["ProductRenderPosition"]] = relationship( + "ProductRenderPosition", back_populates="product", + cascade="all, delete-orphan", order_by="ProductRenderPosition.sort_order" + ) + + @property + def thumbnail_url(self) -> str | None: + if self.cad_file and self.cad_file.thumbnail_path: + from pathlib import Path + return f"/thumbnails/{Path(self.cad_file.thumbnail_path).name}" + return None + + @property + def processing_status(self) -> str | None: + if self.cad_file: + return self.cad_file.processing_status.value if hasattr( + self.cad_file.processing_status, 'value' + ) else str(self.cad_file.processing_status) + return None + + @property + def cad_parsed_objects(self) -> list[str] | None: + if self.cad_file and self.cad_file.parsed_objects: + return self.cad_file.parsed_objects.get("objects") or [] + return None diff --git a/backend/app/models/render_position.py b/backend/app/models/render_position.py new file mode 100644 index 0000000..c535e0c --- /dev/null +++ b/backend/app/models/render_position.py @@ -0,0 +1,28 @@ +import uuid +from datetime import datetime +from sqlalchemy import String, DateTime, Boolean, Integer, Float, ForeignKey +from sqlalchemy.orm import Mapped, mapped_column, relationship +from sqlalchemy.dialects.postgresql import UUID +from app.database import 
Base + + +class ProductRenderPosition(Base): + __tablename__ = "product_render_positions" + + id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + product_id: Mapped[uuid.UUID] = mapped_column( + UUID(as_uuid=True), ForeignKey("products.id", ondelete="CASCADE"), nullable=False, index=True + ) + name: Mapped[str] = mapped_column(String(200), nullable=False) + rotation_x: Mapped[float] = mapped_column(Float, nullable=False, default=0.0) + rotation_y: Mapped[float] = mapped_column(Float, nullable=False, default=0.0) + rotation_z: Mapped[float] = mapped_column(Float, nullable=False, default=0.0) + is_default: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False) + sort_order: Mapped[int] = mapped_column(Integer, nullable=False, default=0) + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False) + updated_at: Mapped[datetime] = mapped_column( + DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False + ) + + product: Mapped["Product"] = relationship("Product", back_populates="render_positions") + order_lines: Mapped[list["OrderLine"]] = relationship("OrderLine", back_populates="render_position") diff --git a/backend/app/models/render_template.py b/backend/app/models/render_template.py new file mode 100644 index 0000000..d8ec613 --- /dev/null +++ b/backend/app/models/render_template.py @@ -0,0 +1,30 @@ +import uuid +from datetime import datetime +from sqlalchemy import String, DateTime, Boolean, Text, ForeignKey +from sqlalchemy.orm import Mapped, mapped_column, relationship +from sqlalchemy.dialects.postgresql import UUID + +from app.database import Base + + +class RenderTemplate(Base): + __tablename__ = "render_templates" + + id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + name: Mapped[str] = mapped_column(String(300), nullable=False) + category_key: Mapped[str | None] = mapped_column(String(100), nullable=True) + output_type_id: Mapped[uuid.UUID | None] = mapped_column( + UUID(as_uuid=True), ForeignKey("output_types.id", ondelete="SET NULL"), nullable=True + ) + blend_file_path: Mapped[str] = mapped_column(Text, nullable=False) + original_filename: Mapped[str] = mapped_column(String(500), nullable=False) + target_collection: Mapped[str] = mapped_column(String(200), nullable=False, default="Product", server_default="Product") + material_replace_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false") + lighting_only: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false") + shadow_catcher_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false") + camera_orbit: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, server_default="true") + is_active: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, server_default="true") + created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default="now()") + updated_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default="now()", onupdate=datetime.utcnow) + + output_type = relationship("OutputType", lazy="joined") diff --git a/backend/app/models/system_setting.py b/backend/app/models/system_setting.py new file mode 100644 index 0000000..9c71987 --- /dev/null +++ b/backend/app/models/system_setting.py @@ -0,0 +1,11 @@ +from datetime import datetime +from sqlalchemy import Column, String, 
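`ProductRenderPosition` stores named object orientations per product, with one row flaggable as default and `sort_order` controlling ordering. A small helper sketch for picking the effective position; the helper is illustrative, not part of the commit:

```python
# Illustrative helper: prefer the is_default position, else the first by sort_order.
from app.models import Product, ProductRenderPosition


def pick_render_position(product: Product) -> ProductRenderPosition | None:
    positions = sorted(product.render_positions, key=lambda p: p.sort_order)
    return next((p for p in positions if p.is_default), positions[0] if positions else None)
```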
Text, DateTime +from app.database import Base + + +class SystemSetting(Base): + __tablename__ = "system_settings" + + key = Column(String(100), primary_key=True) + value = Column(Text, nullable=True) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow) diff --git a/backend/app/models/template.py b/backend/app/models/template.py new file mode 100644 index 0000000..b6abd8a --- /dev/null +++ b/backend/app/models/template.py @@ -0,0 +1,24 @@ +import uuid +from datetime import datetime +from sqlalchemy import String, Boolean, DateTime, Text +from sqlalchemy.orm import Mapped, mapped_column, relationship +from sqlalchemy.dialects.postgresql import UUID, JSONB +from app.database import Base + + +class Template(Base): + __tablename__ = "templates" + + id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + name: Mapped[str] = mapped_column(String(255), nullable=False) + category_key: Mapped[str] = mapped_column(String(100), unique=True, nullable=False, index=True) + # JSONB config for each of the 11 standard columns: {col_index: {label, required, optional}} + standard_fields: Mapped[dict] = mapped_column(JSONB, nullable=False, default=dict) + # JSONB schema for expected component pairs + component_schema: Mapped[dict] = mapped_column(JSONB, nullable=False, default=dict) + description: Mapped[str] = mapped_column(Text, nullable=True) + is_active: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False) + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False) + updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + orders: Mapped[list["Order"]] = relationship("Order", back_populates="template") diff --git a/backend/app/models/user.py b/backend/app/models/user.py new file mode 100644 index 0000000..8d49826 --- /dev/null +++ b/backend/app/models/user.py @@ -0,0 +1,29 @@ +import uuid +from datetime import datetime +from sqlalchemy import String, Boolean, DateTime, Enum as SAEnum +from sqlalchemy.orm import Mapped, mapped_column, relationship +from sqlalchemy.dialects.postgresql import UUID +from app.database import Base +import enum + + +class UserRole(str, enum.Enum): + admin = "admin" + project_manager = "project_manager" + client = "client" + + +class User(Base): + __tablename__ = "users" + + id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + email: Mapped[str] = mapped_column(String(255), unique=True, nullable=False, index=True) + password_hash: Mapped[str] = mapped_column(String(255), nullable=False) + full_name: Mapped[str] = mapped_column(String(255), nullable=False) + role: Mapped[UserRole] = mapped_column(SAEnum(UserRole), default=UserRole.client, nullable=False) + is_active: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False) + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False) + updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + orders: Mapped[list["Order"]] = relationship("Order", back_populates="created_by_user", foreign_keys="Order.created_by") + audit_logs: Mapped[list["AuditLog"]] = relationship("AuditLog", back_populates="user", foreign_keys="AuditLog.user_id") diff --git a/backend/app/schemas/__init__.py b/backend/app/schemas/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
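`Template.standard_fields` and `component_schema` are described only by their comments (per-column config keyed by column index, plus an expected-components schema). An invented example of what one category's configuration might look like; the concrete labels and any keys beyond `label`/`required` are illustrative:

```python
# Invented example of Template.standard_fields / component_schema contents.
example_standard_fields = {
    "0": {"label": "Ebene 1", "required": True},
    "3": {"label": "PIM ID", "required": True},
    "5": {"label": "Trenner", "required": False},   # col 5 is the skipped separator column
    "9": {"label": "Lagertyp", "required": False},
}

example_component_schema = {
    "pairs_start_column": 11,
    "expected_pairs": [
        {"component_type": "ring", "required": True},
        {"component_type": "seal", "required": False},
    ],
}
```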
a/backend/app/schemas/__pycache__/__init__.cpython-311.pyc b/backend/app/schemas/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..473bb02 Binary files /dev/null and b/backend/app/schemas/__pycache__/__init__.cpython-311.pyc differ diff --git a/backend/app/schemas/__pycache__/__init__.cpython-312.pyc b/backend/app/schemas/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..e07baee Binary files /dev/null and b/backend/app/schemas/__pycache__/__init__.cpython-312.pyc differ diff --git a/backend/app/schemas/__pycache__/order.cpython-311.pyc b/backend/app/schemas/__pycache__/order.cpython-311.pyc new file mode 100644 index 0000000..1840b41 Binary files /dev/null and b/backend/app/schemas/__pycache__/order.cpython-311.pyc differ diff --git a/backend/app/schemas/__pycache__/order.cpython-312.pyc b/backend/app/schemas/__pycache__/order.cpython-312.pyc new file mode 100644 index 0000000..deabbec Binary files /dev/null and b/backend/app/schemas/__pycache__/order.cpython-312.pyc differ diff --git a/backend/app/schemas/__pycache__/order_line.cpython-311.pyc b/backend/app/schemas/__pycache__/order_line.cpython-311.pyc new file mode 100644 index 0000000..22ba215 Binary files /dev/null and b/backend/app/schemas/__pycache__/order_line.cpython-311.pyc differ diff --git a/backend/app/schemas/__pycache__/output_type.cpython-311.pyc b/backend/app/schemas/__pycache__/output_type.cpython-311.pyc new file mode 100644 index 0000000..1242685 Binary files /dev/null and b/backend/app/schemas/__pycache__/output_type.cpython-311.pyc differ diff --git a/backend/app/schemas/__pycache__/product.cpython-311.pyc b/backend/app/schemas/__pycache__/product.cpython-311.pyc new file mode 100644 index 0000000..aeff9f4 Binary files /dev/null and b/backend/app/schemas/__pycache__/product.cpython-311.pyc differ diff --git a/backend/app/schemas/__pycache__/product_variant.cpython-311.pyc b/backend/app/schemas/__pycache__/product_variant.cpython-311.pyc new file mode 100644 index 0000000..83f6a2b Binary files /dev/null and b/backend/app/schemas/__pycache__/product_variant.cpython-311.pyc differ diff --git a/backend/app/schemas/__pycache__/render_position.cpython-311.pyc b/backend/app/schemas/__pycache__/render_position.cpython-311.pyc new file mode 100644 index 0000000..00e0d19 Binary files /dev/null and b/backend/app/schemas/__pycache__/render_position.cpython-311.pyc differ diff --git a/backend/app/schemas/__pycache__/upload.cpython-311.pyc b/backend/app/schemas/__pycache__/upload.cpython-311.pyc new file mode 100644 index 0000000..4e8dfbd Binary files /dev/null and b/backend/app/schemas/__pycache__/upload.cpython-311.pyc differ diff --git a/backend/app/schemas/__pycache__/upload.cpython-312.pyc b/backend/app/schemas/__pycache__/upload.cpython-312.pyc new file mode 100644 index 0000000..2065065 Binary files /dev/null and b/backend/app/schemas/__pycache__/upload.cpython-312.pyc differ diff --git a/backend/app/schemas/__pycache__/user.cpython-311.pyc b/backend/app/schemas/__pycache__/user.cpython-311.pyc new file mode 100644 index 0000000..98ee015 Binary files /dev/null and b/backend/app/schemas/__pycache__/user.cpython-311.pyc differ diff --git a/backend/app/schemas/__pycache__/user.cpython-312.pyc b/backend/app/schemas/__pycache__/user.cpython-312.pyc new file mode 100644 index 0000000..2ff0087 Binary files /dev/null and b/backend/app/schemas/__pycache__/user.cpython-312.pyc differ diff --git a/backend/app/schemas/order.py b/backend/app/schemas/order.py new file mode 100644 index 
0000000..ad30607 --- /dev/null +++ b/backend/app/schemas/order.py @@ -0,0 +1,92 @@ +import uuid +from datetime import datetime +from typing import Any +from pydantic import BaseModel +from app.models.order import OrderStatus +from app.models.order_item import ItemStatus, AIValidationStatus +from app.schemas.order_line import OrderLineOut, OrderLineCreate # noqa: F401 + + +class ComponentData(BaseModel): + part_name: str | None = None + material: str | None = None + component_type: str | None = None + column_index: int | None = None + + +class OrderItemCreate(BaseModel): + row_index: int + ebene1: str | None = None + ebene2: str | None = None + baureihe: str | None = None + pim_id: str | None = None + produkt_baureihe: str | None = None + gewaehltes_produkt: str | None = None + name_cad_modell: str | None = None + gewuenschte_bildnummer: str | None = None + lagertyp: str | None = None + medias_rendering: bool | None = None + components: list[ComponentData] = [] + + +class OrderItemOut(BaseModel): + id: uuid.UUID + order_id: uuid.UUID + row_index: int + ebene1: str | None + ebene2: str | None + baureihe: str | None + pim_id: str | None + produkt_baureihe: str | None + gewaehltes_produkt: str | None + name_cad_modell: str | None + gewuenschte_bildnummer: str | None + lagertyp: str | None + medias_rendering: bool | None + components: list[dict] + cad_file_id: uuid.UUID | None + thumbnail_path: str | None + cad_parsed_objects: list[str] | None = None + cad_part_materials: list[dict] = [] + ai_validation_status: AIValidationStatus + ai_validation_result: dict | None + item_status: ItemStatus + notes: str | None + created_at: datetime + + model_config = {"from_attributes": True} + + +class OrderCreate(BaseModel): + template_id: uuid.UUID | None = None + source_excel: str | None = None + notes: str | None = None + items: list[OrderItemCreate] = [] + lines: list[OrderLineCreate] = [] + + +class OrderOut(BaseModel): + id: uuid.UUID + order_number: str + template_id: uuid.UUID | None + status: OrderStatus + created_by: uuid.UUID + source_excel: str | None + notes: str | None + created_at: datetime + updated_at: datetime + submitted_at: datetime | None = None + processing_started_at: datetime | None = None + completed_at: datetime | None = None + rejected_at: datetime | None = None + estimated_price: float | None = None + item_count: int = 0 + line_count: int = 0 + render_progress: dict | None = None + + model_config = {"from_attributes": True} + + +class OrderDetailOut(OrderOut): + items: list[OrderItemOut] = [] + lines: list[OrderLineOut] = [] diff --git a/backend/app/schemas/order_line.py b/backend/app/schemas/order_line.py new file mode 100644 index 0000000..7657686 --- /dev/null +++ b/backend/app/schemas/order_line.py @@ -0,0 +1,39 @@ +import uuid +from datetime import datetime +from pydantic import BaseModel +from app.schemas.product import ProductOut +from app.schemas.output_type import OutputTypeOut + + +class OrderLineCreate(BaseModel): + product_id: uuid.UUID + output_type_id: uuid.UUID | None = None + render_position_id: uuid.UUID | None = None + gewuenschte_bildnummer: str | None = None + notes: str | None = None + + +class OrderLineOut(BaseModel): + id: uuid.UUID + order_id: uuid.UUID + product_id: uuid.UUID + product: ProductOut + output_type_id: uuid.UUID | None + output_type: OutputTypeOut | None + gewuenschte_bildnummer: str | None + item_status: str + render_status: str + result_path: str | None + thumbnail_url: str | None = None + ai_validation_status: str + ai_validation_result: 
dict | None + render_backend_used: str | None = None + flamenco_job_id: str | None = None + unit_price: float | None = None + render_position_id: uuid.UUID | None = None + render_position_name: str | None = None + notes: str | None + created_at: datetime + updated_at: datetime + + model_config = {"from_attributes": True} diff --git a/backend/app/schemas/output_type.py b/backend/app/schemas/output_type.py new file mode 100644 index 0000000..9d5b594 --- /dev/null +++ b/backend/app/schemas/output_type.py @@ -0,0 +1,58 @@ +import uuid +from datetime import datetime +from pydantic import BaseModel + + +class OutputTypeCreate(BaseModel): + name: str + description: str | None = None + renderer: str = "threejs" + render_settings: dict = {} + output_format: str = "png" + sort_order: int = 0 + is_active: bool = True + compatible_categories: list[str] = [] + render_backend: str = "auto" + is_animation: bool = False + transparent_bg: bool = False + pricing_tier_id: int | None = None + cycles_device: str | None = None + + +class OutputTypePatch(BaseModel): + name: str | None = None + description: str | None = None + renderer: str | None = None + render_settings: dict | None = None + output_format: str | None = None + sort_order: int | None = None + is_active: bool | None = None + compatible_categories: list[str] | None = None + render_backend: str | None = None + is_animation: bool | None = None + transparent_bg: bool | None = None + pricing_tier_id: int | None = None + cycles_device: str | None = None + + +class OutputTypeOut(BaseModel): + id: uuid.UUID + name: str + description: str | None + renderer: str + render_settings: dict + output_format: str + sort_order: int + compatible_categories: list[str] + render_backend: str + is_animation: bool + transparent_bg: bool + cycles_device: str | None = None + pricing_tier_id: int | None = None + pricing_tier_name: str | None = None + price_per_item: float | None = None + is_active: bool + created_at: datetime + updated_at: datetime + + model_config = {"from_attributes": True} diff --git a/backend/app/schemas/product.py b/backend/app/schemas/product.py new file mode 100644 index 0000000..ecd5255 --- /dev/null +++ b/backend/app/schemas/product.py @@ -0,0 +1,72 @@ +import uuid +from datetime import datetime +from pydantic import BaseModel +from app.schemas.render_position import RenderPositionOut + + +class ProductCreate(BaseModel): + pim_id: str + name: str | None = None + category_key: str | None = None + ebene1: str | None = None + ebene2: str | None = None + baureihe: str | None = None + produkt_baureihe: str | None = None + lagertyp: str | None = None + name_cad_modell: str | None = None + gewuenschte_bildnummer: str | None = None + medias_rendering: bool | None = None + components: list[dict] = [] + cad_part_materials: list[dict] = [] + notes: str | None = None + is_active: bool = True + source_excel: str | None = None + + +class ProductPatch(BaseModel): + name: str | None = None + category_key: str | None = None + ebene1: str | None = None + ebene2: str | None = None + baureihe: str | None = None + produkt_baureihe: str | None = None + lagertyp: str | None = None + name_cad_modell: str | None = None + gewuenschte_bildnummer: str | None = None + medias_rendering: bool | None = None + components: list[dict] | None = None + cad_part_materials: list[dict] | None = None + notes: str | None = None + is_active: bool | None = None + + +class ProductOut(BaseModel): + id: uuid.UUID + pim_id: str + name: str | None + category_key: str | None + ebene1: str | None + 
ebene2: str | None + baureihe: str | None + produkt_baureihe: str | None + lagertyp: str | None + name_cad_modell: str | None + gewuenschte_bildnummer: str | None + medias_rendering: bool | None + components: list[dict] + cad_part_materials: list[dict] + cad_file_id: uuid.UUID | None + thumbnail_url: str | None = None + render_image_url: str | None = None + processing_status: str | None = None + stl_cached: list[str] = [] + cad_parsed_objects: list[str] | None = None + arbeitspaket: str | None = None + notes: str | None + is_active: bool + source_excel: str | None + render_positions: list[RenderPositionOut] = [] + created_at: datetime + updated_at: datetime + + model_config = {"from_attributes": True} diff --git a/backend/app/schemas/render_position.py b/backend/app/schemas/render_position.py new file mode 100644 index 0000000..4780b7a --- /dev/null +++ b/backend/app/schemas/render_position.py @@ -0,0 +1,36 @@ +import uuid +from datetime import datetime +from pydantic import BaseModel + + +class RenderPositionCreate(BaseModel): + name: str + rotation_x: float = 0.0 + rotation_y: float = 0.0 + rotation_z: float = 0.0 + is_default: bool = False + sort_order: int = 0 + + +class RenderPositionPatch(BaseModel): + name: str | None = None + rotation_x: float | None = None + rotation_y: float | None = None + rotation_z: float | None = None + is_default: bool | None = None + sort_order: int | None = None + + +class RenderPositionOut(BaseModel): + id: uuid.UUID + product_id: uuid.UUID + name: str + rotation_x: float + rotation_y: float + rotation_z: float + is_default: bool + sort_order: int + created_at: datetime + updated_at: datetime + + model_config = {"from_attributes": True} diff --git a/backend/app/schemas/upload.py b/backend/app/schemas/upload.py new file mode 100644 index 0000000..3ba1537 --- /dev/null +++ b/backend/app/schemas/upload.py @@ -0,0 +1,43 @@ +from pydantic import BaseModel +from typing import Any + + +class ParsedComponent(BaseModel): + part_name: str | None = None + material: str | None = None + component_type: str | None = None + column_index: int + + +class ParsedRow(BaseModel): + row_index: int + ebene1: str | None = None + ebene2: str | None = None + baureihe: str | None = None + pim_id: str | None = None + produkt_baureihe: str | None = None + gewaehltes_produkt: str | None = None + name_cad_modell: str | None = None + gewuenschte_bildnummer: str | None = None + lagertyp: str | None = None + medias_rendering: bool | None = None + components: list[ParsedComponent] = [] + + +class ParsedExcelResponse(BaseModel): + filename: str + excel_path: str | None = None # server-side path of the saved file + category_key: str | None = None + template_name: str | None = None + row_count: int + column_headers: list[str] + rows: list[ParsedRow] + warnings: list[str] = [] + + +class StepUploadResponse(BaseModel): + cad_file_id: str + original_name: str + file_hash: str + status: str + matched_items: list[str] = [] diff --git a/backend/app/schemas/user.py b/backend/app/schemas/user.py new file mode 100644 index 0000000..25cd04b --- /dev/null +++ b/backend/app/schemas/user.py @@ -0,0 +1,39 @@ +import uuid +from datetime import datetime +from pydantic import BaseModel, EmailStr +from app.models.user import UserRole + + +class UserCreate(BaseModel): + email: EmailStr + password: str + full_name: str + role: UserRole = UserRole.client + + +class UserUpdate(BaseModel): + full_name: str | None = None + is_active: bool | None = None + role: UserRole | None = None + + +class UserOut(BaseModel): + 
id: uuid.UUID + email: str + full_name: str + role: UserRole + is_active: bool + created_at: datetime + + model_config = {"from_attributes": True} + + +class TokenResponse(BaseModel): + access_token: str + token_type: str = "bearer" + user: UserOut + + +class LoginRequest(BaseModel): + email: EmailStr + password: str diff --git a/backend/app/services/__init__.py b/backend/app/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/services/__pycache__/__init__.cpython-311.pyc b/backend/app/services/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..644a1f2 Binary files /dev/null and b/backend/app/services/__pycache__/__init__.cpython-311.pyc differ diff --git a/backend/app/services/__pycache__/__init__.cpython-312.pyc b/backend/app/services/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..2a8c50d Binary files /dev/null and b/backend/app/services/__pycache__/__init__.cpython-312.pyc differ diff --git a/backend/app/services/__pycache__/azure_ai.cpython-312.pyc b/backend/app/services/__pycache__/azure_ai.cpython-312.pyc new file mode 100644 index 0000000..95ee823 Binary files /dev/null and b/backend/app/services/__pycache__/azure_ai.cpython-312.pyc differ diff --git a/backend/app/services/__pycache__/docker_scaler.cpython-311.pyc b/backend/app/services/__pycache__/docker_scaler.cpython-311.pyc new file mode 100644 index 0000000..5db6eb1 Binary files /dev/null and b/backend/app/services/__pycache__/docker_scaler.cpython-311.pyc differ diff --git a/backend/app/services/__pycache__/excel_import.cpython-311.pyc b/backend/app/services/__pycache__/excel_import.cpython-311.pyc new file mode 100644 index 0000000..611e160 Binary files /dev/null and b/backend/app/services/__pycache__/excel_import.cpython-311.pyc differ diff --git a/backend/app/services/__pycache__/excel_parser.cpython-311.pyc b/backend/app/services/__pycache__/excel_parser.cpython-311.pyc new file mode 100644 index 0000000..5af2365 Binary files /dev/null and b/backend/app/services/__pycache__/excel_parser.cpython-311.pyc differ diff --git a/backend/app/services/__pycache__/excel_parser.cpython-312.pyc b/backend/app/services/__pycache__/excel_parser.cpython-312.pyc new file mode 100644 index 0000000..986da90 Binary files /dev/null and b/backend/app/services/__pycache__/excel_parser.cpython-312.pyc differ diff --git a/backend/app/services/__pycache__/flamenco_client.cpython-311.pyc b/backend/app/services/__pycache__/flamenco_client.cpython-311.pyc new file mode 100644 index 0000000..20d3200 Binary files /dev/null and b/backend/app/services/__pycache__/flamenco_client.cpython-311.pyc differ diff --git a/backend/app/services/__pycache__/kpi_service.cpython-311.pyc b/backend/app/services/__pycache__/kpi_service.cpython-311.pyc new file mode 100644 index 0000000..02ee436 Binary files /dev/null and b/backend/app/services/__pycache__/kpi_service.cpython-311.pyc differ diff --git a/backend/app/services/__pycache__/material_service.cpython-311.pyc b/backend/app/services/__pycache__/material_service.cpython-311.pyc new file mode 100644 index 0000000..fb353e1 Binary files /dev/null and b/backend/app/services/__pycache__/material_service.cpython-311.pyc differ diff --git a/backend/app/services/__pycache__/notification_service.cpython-311.pyc b/backend/app/services/__pycache__/notification_service.cpython-311.pyc new file mode 100644 index 0000000..4b1f132 Binary files /dev/null and b/backend/app/services/__pycache__/notification_service.cpython-311.pyc differ diff --git 
a/backend/app/services/__pycache__/order_service.cpython-311.pyc b/backend/app/services/__pycache__/order_service.cpython-311.pyc new file mode 100644 index 0000000..c3889eb Binary files /dev/null and b/backend/app/services/__pycache__/order_service.cpython-311.pyc differ diff --git a/backend/app/services/__pycache__/order_service.cpython-312.pyc b/backend/app/services/__pycache__/order_service.cpython-312.pyc new file mode 100644 index 0000000..743832a Binary files /dev/null and b/backend/app/services/__pycache__/order_service.cpython-312.pyc differ diff --git a/backend/app/services/__pycache__/order_status_service.cpython-311.pyc b/backend/app/services/__pycache__/order_status_service.cpython-311.pyc new file mode 100644 index 0000000..0f1f187 Binary files /dev/null and b/backend/app/services/__pycache__/order_status_service.cpython-311.pyc differ diff --git a/backend/app/services/__pycache__/pricing_service.cpython-311.pyc b/backend/app/services/__pycache__/pricing_service.cpython-311.pyc new file mode 100644 index 0000000..cbaefe8 Binary files /dev/null and b/backend/app/services/__pycache__/pricing_service.cpython-311.pyc differ diff --git a/backend/app/services/__pycache__/product_service.cpython-311.pyc b/backend/app/services/__pycache__/product_service.cpython-311.pyc new file mode 100644 index 0000000..eb05153 Binary files /dev/null and b/backend/app/services/__pycache__/product_service.cpython-311.pyc differ diff --git a/backend/app/services/__pycache__/render_dispatcher.cpython-311.pyc b/backend/app/services/__pycache__/render_dispatcher.cpython-311.pyc new file mode 100644 index 0000000..7de7aa9 Binary files /dev/null and b/backend/app/services/__pycache__/render_dispatcher.cpython-311.pyc differ diff --git a/backend/app/services/__pycache__/render_log.cpython-311.pyc b/backend/app/services/__pycache__/render_log.cpython-311.pyc new file mode 100644 index 0000000..2802dd9 Binary files /dev/null and b/backend/app/services/__pycache__/render_log.cpython-311.pyc differ diff --git a/backend/app/services/__pycache__/step_processor.cpython-311.pyc b/backend/app/services/__pycache__/step_processor.cpython-311.pyc new file mode 100644 index 0000000..80214d6 Binary files /dev/null and b/backend/app/services/__pycache__/step_processor.cpython-311.pyc differ diff --git a/backend/app/services/__pycache__/step_processor.cpython-312.pyc b/backend/app/services/__pycache__/step_processor.cpython-312.pyc new file mode 100644 index 0000000..310e4a3 Binary files /dev/null and b/backend/app/services/__pycache__/step_processor.cpython-312.pyc differ diff --git a/backend/app/services/__pycache__/template_service.cpython-311.pyc b/backend/app/services/__pycache__/template_service.cpython-311.pyc new file mode 100644 index 0000000..8a0c9c7 Binary files /dev/null and b/backend/app/services/__pycache__/template_service.cpython-311.pyc differ diff --git a/backend/app/services/azure_ai.py b/backend/app/services/azure_ai.py new file mode 100644 index 0000000..18652fb --- /dev/null +++ b/backend/app/services/azure_ai.py @@ -0,0 +1,110 @@ +""" +Azure OpenAI GPT-4o Vision validator for thumbnail orientation. +""" +import base64 +import logging +import uuid +from pathlib import Path + +logger = logging.getLogger(__name__) + +VALIDATION_PROMPT = """You are a quality control expert for Schaeffler bearing product catalog images. + +Analyze this thumbnail of a bearing/mechanical component and evaluate: +1. Is the component orientation correct for a standard product catalog? 
(typically isometric view, 30° elevation, 45° rotation) +2. Are the key features visible? (rolling elements, rings, cage if present) +3. Does it match standard Schaeffler catalog angle conventions? + +Respond in JSON with exactly these fields: +{ + "passed": true/false, + "confidence": 0.0-1.0, + "feedback": "Brief explanation", + "suggested_rotation": "Description of recommended adjustment if needed" +}""" + + +def validate_thumbnail(order_item_id: str) -> dict: + """ + Validate thumbnail orientation using Azure GPT-4o Vision. + Updates the order_item AI validation fields in DB. + """ + from app.config import settings + from sqlalchemy import create_engine + from sqlalchemy.orm import Session + from app.models.order_item import OrderItem, AIValidationStatus + + engine = create_engine(settings.database_url_sync) + with Session(engine) as session: + item = session.get(OrderItem, uuid.UUID(order_item_id)) + if not item: + logger.error(f"OrderItem not found: {order_item_id}") + return {} + + item.ai_validation_status = AIValidationStatus.pending + session.commit() + + try: + result = _call_azure_vision(item.thumbnail_path, settings) + item.ai_validation_status = AIValidationStatus.completed + item.ai_validation_result = result + except Exception as exc: + logger.error(f"AI validation failed for {order_item_id}: {exc}") + item.ai_validation_status = AIValidationStatus.failed + item.ai_validation_result = {"error": str(exc)} + result = {} + + session.commit() + return result + + +def _call_azure_vision(thumbnail_path: str | None, settings) -> dict: + """Call Azure OpenAI GPT-4o with a base64-encoded thumbnail.""" + import json + + if not settings.azure_openai_api_key or not settings.azure_openai_endpoint: + raise ValueError("Azure OpenAI credentials not configured") + + if not thumbnail_path or not Path(thumbnail_path).exists(): + raise FileNotFoundError(f"Thumbnail not found: {thumbnail_path}") + + try: + from openai import AzureOpenAI + + client = AzureOpenAI( + api_key=settings.azure_openai_api_key, + azure_endpoint=settings.azure_openai_endpoint, + api_version=settings.azure_openai_api_version, + ) + + with open(thumbnail_path, "rb") as f: + image_b64 = base64.b64encode(f.read()).decode("utf-8") + + response = client.chat.completions.create( + model=settings.azure_openai_deployment, + messages=[ + { + "role": "user", + "content": [ + {"type": "text", "text": VALIDATION_PROMPT}, + { + "type": "image_url", + "image_url": {"url": f"data:image/png;base64,{image_b64}"}, + }, + ], + } + ], + max_tokens=500, + temperature=0.1, + ) + + content = response.choices[0].message.content or "" + # Extract JSON from response + start = content.find("{") + end = content.rfind("}") + 1 + if start >= 0 and end > start: + return json.loads(content[start:end]) + return {"passed": False, "confidence": 0.0, "feedback": content, "suggested_rotation": ""} + + except Exception as exc: + raise RuntimeError(f"Azure OpenAI call failed: {exc}") from exc diff --git a/backend/app/services/docker_scaler.py b/backend/app/services/docker_scaler.py new file mode 100644 index 0000000..ca0a17e --- /dev/null +++ b/backend/app/services/docker_scaler.py @@ -0,0 +1,177 @@ +"""Scale Flamenco worker containers via the Docker socket. + +Uses the Docker Python SDK (docker>=6.1.0) to list, start, and stop containers. +Requires /var/run/docker.sock to be mounted into the backend container. 
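Because every scaling call goes straight through that socket, a cheap pre-flight check helps distinguish "socket not mounted" from real Docker errors. A minimal sketch, assuming the docker SDK named above; the helper name is illustrative and not part of this commit:

```python
# Minimal pre-flight sketch — helper name and placement are assumptions, not
# part of this commit. Returns True only if the daemon answers via the socket.
import docker


def docker_socket_available() -> bool:
    """Check that /var/run/docker.sock is mounted and the Docker daemon responds."""
    try:
        return bool(docker.from_env().ping())
    except Exception:
        return False
```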
+""" +import os +import logging + +log = logging.getLogger(__name__) + +COMPOSE_PROJECT = os.getenv("COMPOSE_PROJECT_NAME", "schaefflerautomat") +SERVICE_NAME = "flamenco-worker" + + +def _get_client(): + import docker + return docker.from_env() + + +def get_worker_containers(client=None): + """Return all flamenco-worker containers (running + stopped) sorted by name.""" + if client is None: + client = _get_client() + return sorted( + client.containers.list( + all=True, + filters={ + "label": [ + f"com.docker.compose.project={COMPOSE_PROJECT}", + f"com.docker.compose.service={SERVICE_NAME}", + ] + }, + ), + key=lambda c: c.name, + ) + + +def get_running_worker_count(client=None) -> int: + """Return how many flamenco-worker containers are currently running.""" + try: + if client is None: + client = _get_client() + containers = get_worker_containers(client) + return sum(1 for c in containers if c.status == "running") + except Exception as exc: + log.warning("docker_scaler: could not read worker count: %s", exc) + return -1 + + +def scale_workers(target: int) -> dict: + """Scale flamenco-worker containers to *target* count. + + Returns a dict with keys: + previous – containers running before + current – containers running after + delta – change (negative = stopped, positive = started) + message – human-readable summary + """ + import docker + from docker.types import Mount + + client = _get_client() + + all_workers = get_worker_containers(client) + running = [c for c in all_workers if c.status == "running"] + previous = len(running) + + if target == previous: + return {"previous": previous, "current": previous, "delta": 0, + "message": f"Already at {previous} worker(s) — no change"} + + # ── Scale down ──────────────────────────────────────────────────────────── + if target < previous: + # Stop highest-numbered containers first to minimise disruption + to_stop = sorted(running, key=lambda c: c.name, reverse=True)[: previous - target] + for c in to_stop: + log.info("docker_scaler: stopping %s", c.name) + c.stop(timeout=20) + c.remove() + return { + "previous": previous, + "current": target, + "delta": target - previous, + "message": f"Stopped {len(to_stop)} worker(s): {[c.name for c in to_stop]}", + } + + # ── Scale up ────────────────────────────────────────────────────────────── + template = running[0] if running else (all_workers[0] if all_workers else None) + if template is None: + raise RuntimeError( + "No existing flamenco-worker container found to clone configuration from. " + "Ensure at least one worker container exists (even if stopped)." 
+ ) + + attrs = template.attrs + image = attrs["Config"]["Image"] + env = attrs["Config"].get("Env") or [] + + # Reconstruct mounts from the template container + mounts = [] + for m in (attrs.get("Mounts") or []): + mount_type = m.get("Type", "bind") + source = m.get("Name", "") if mount_type == "volume" else m.get("Source", "") + mounts.append( + Mount( + target=m["Destination"], + source=source, + type=mount_type, + read_only=not m.get("RW", True), + ) + ) + + # Reconstruct GPU device requests (nvidia) + device_requests = None + raw_dr = (attrs.get("HostConfig") or {}).get("DeviceRequests") or [] + if raw_dr: + device_requests = [] + for dr in raw_dr: + device_requests.append( + docker.types.DeviceRequest( + driver=dr.get("Driver", ""), + count=dr.get("Count", -1), + device_ids=dr.get("DeviceIDs") or [], + capabilities=dr.get("Capabilities") or [], + options=dr.get("Options") or {}, + ) + ) + + # Network(s) the template is connected to + network_names = list( + (attrs.get("NetworkSettings") or {}).get("Networks", {}).keys() + ) + + restart_policy_name = ( + (attrs.get("HostConfig") or {}) + .get("RestartPolicy", {}) + .get("Name", "unless-stopped") + ) or "unless-stopped" + + started = [] + for i in range(previous + 1, target + 1): + new_name = f"{COMPOSE_PROJECT}-{SERVICE_NAME}-{i}" + labels = { + "com.docker.compose.project": COMPOSE_PROJECT, + "com.docker.compose.service": SERVICE_NAME, + "com.docker.compose.container-number": str(i), + } + + log.info("docker_scaler: creating %s from image %s", new_name, image) + container = client.containers.create( + image=image, + name=new_name, + environment=env, + labels=labels, + mounts=mounts, + restart_policy={"Name": restart_policy_name}, + device_requests=device_requests, + ) + + for net_name in network_names: + try: + net = client.networks.get(net_name) + net.connect(container) + log.info("docker_scaler: connected %s to network %s", new_name, net_name) + except Exception as exc: + log.warning("docker_scaler: could not connect to network %s: %s", net_name, exc) + + container.start() + started.append(new_name) + log.info("docker_scaler: started %s", new_name) + + return { + "previous": previous, + "current": target, + "delta": target - previous, + "message": f"Started {len(started)} new worker(s): {started}", + } diff --git a/backend/app/services/excel_import.py b/backend/app/services/excel_import.py new file mode 100644 index 0000000..ce2a5ad --- /dev/null +++ b/backend/app/services/excel_import.py @@ -0,0 +1,178 @@ +"""Excel import service — maps parsed rows to Product library.""" +from dataclasses import dataclass, field +from sqlalchemy.ext.asyncio import AsyncSession + +from app.services.product_service import ( + lookup_or_create_product, + lookup_product, +) + + +@dataclass +class PreviewResult: + """Read-only preview: annotates rows without creating anything.""" + rows: list[dict] = field(default_factory=list) + existing_product_count: int = 0 + new_product_count: int = 0 + no_pim_id_count: int = 0 + has_step_count: int = 0 + no_step_count: int = 0 + duplicate_count: int = 0 + warnings: list[str] = field(default_factory=list) + + +@dataclass +class ImportResult: + rows: list[dict] = field(default_factory=list) + matched_count: int = 0 + created_count: int = 0 + no_pim_id_count: int = 0 + duplicate_baureihe_count: int = 0 + warnings: list[str] = field(default_factory=list) + + +async def import_excel_to_products( + db: AsyncSession, + parsed_rows: list[dict], + source_excel: str, + category_key: str | None = None, +) -> ImportResult: + 
"""For each row, look up or create a Product. + + Grouping strategy: + 1. Primary key: produkt_baureihe (lowercased) + 2. Fallback: pim_id (backward compat) + + Annotates each row dict with product_id, product_created. + """ + result = ImportResult() + + # Track seen produkt_baureihe values to skip duplicates + seen_baureihe: dict[str, str] = {} # lower(baureihe) → first product_id + + for row in parsed_rows: + pim_id = row.get("pim_id") + produkt_baureihe = row.get("produkt_baureihe") + row_category = row.get("category_key") or category_key + + # Need at least one identifier + if not pim_id and not produkt_baureihe: + row["product_id"] = None + row["product_created"] = False + result.no_pim_id_count += 1 + continue + + fields = { + "name": produkt_baureihe or row.get("gewaehltes_produkt"), + "category_key": row_category, + "ebene1": row.get("ebene1"), + "ebene2": row.get("ebene2"), + "baureihe": row.get("baureihe"), + "produkt_baureihe": produkt_baureihe, + "lagertyp": row.get("lagertyp"), + "name_cad_modell": row.get("name_cad_modell"), + "gewuenschte_bildnummer": row.get("gewuenschte_bildnummer"), + "medias_rendering": row.get("medias_rendering"), + "components": row.get("components", []), + "arbeitspaket": row.get("arbeitspaket"), + "source_excel": source_excel, + } + + product, was_created = await lookup_or_create_product(db, pim_id, fields) + row["product_id"] = str(product.id) + row["product_created"] = was_created + # Carry forward any STEP file already linked to this product + row["product_cad_file_id"] = str(product.cad_file_id) if product.cad_file_id else None + + if was_created: + result.created_count += 1 + else: + result.matched_count += 1 + + # Track duplicate baureihe + if produkt_baureihe: + bkey = produkt_baureihe.lower() + if bkey in seen_baureihe: + result.duplicate_baureihe_count += 1 + else: + seen_baureihe[bkey] = str(product.id) + + result.rows = parsed_rows + # NOTE: caller is responsible for db.commit() — keeps the transaction + # composable with order + line creation in the finalize endpoint. + return result + + +async def preview_excel_rows( + db: AsyncSession, + parsed_rows: list[dict], + category_key: str | None = None, +) -> PreviewResult: + """Read-only preview: annotates rows with product_exists / product_id / duplicate flags. + + Uses lookup_product (read-only) to check what already exists in the DB. + New-vs-existing is determined per unique produkt_baureihe (or pim_id fallback). + Duplicate rows (same produkt_baureihe seen more than once in this batch) are + annotated with is_duplicate=True and duplicate_of_row=. 
+ """ + result = PreviewResult() + # Track unique identifiers we've already resolved in this batch + # key = lower(baureihe) or pim_id → (product_exists, product_id_str | None, has_step, first_row_index) + seen: dict[str, tuple[bool, str | None, bool, int]] = {} + + for row in parsed_rows: + pim_id = row.get("pim_id") + produkt_baureihe = row.get("produkt_baureihe") + row_index = row.get("row_index", 0) + row["category_key"] = row.get("category_key") or category_key + + # Must have at least one identifier + if not pim_id and not produkt_baureihe: + row["product_exists"] = False + row["product_id"] = None + row["has_step"] = False + row["is_duplicate"] = False + result.no_pim_id_count += 1 + continue + + # Build a cache key + cache_key = (produkt_baureihe or "").lower() or pim_id or "" + + if cache_key in seen: + exists, pid, has_step, first_row = seen[cache_key] + row["product_exists"] = exists + row["product_id"] = pid + row["has_step"] = has_step + row["is_duplicate"] = True + row["duplicate_of_row"] = first_row + result.duplicate_count += 1 + continue + + product = await lookup_product(db, pim_id, produkt_baureihe) + row["is_duplicate"] = False + if product is not None: + has_step = product.cad_file_id is not None + row["product_exists"] = True + row["product_id"] = str(product.id) + row["has_step"] = has_step + seen[cache_key] = (True, str(product.id), has_step, row_index) + result.existing_product_count += 1 + if has_step: + result.has_step_count += 1 + else: + result.no_step_count += 1 + else: + row["product_exists"] = False + row["product_id"] = None + row["has_step"] = False + seen[cache_key] = (False, None, False, row_index) + result.new_product_count += 1 + result.no_step_count += 1 + + result.rows = parsed_rows + if result.duplicate_count > 0: + result.warnings.append( + f"{result.duplicate_count} duplicate Produkt-Baureihe row(s) detected — " + "these are pre-unchecked. Only one row per product will be imported." + ) + return result diff --git a/backend/app/services/excel_parser.py b/backend/app/services/excel_parser.py new file mode 100644 index 0000000..6f2a3fe --- /dev/null +++ b/backend/app/services/excel_parser.py @@ -0,0 +1,505 @@ +""" +Excel parser for Schaeffler CAD order lists. + +Supports two formats: + +Old format (per-category files): + Row 1-2: Instruction text (skip) + Row N: Column headers — detected as the first row containing "Ebene1" + Col 0 (A): Ebene1 + Col 1 (B): Ebene2 + ... + Col 11+ : Component pairs – alternating (part_name, material) + +New format (unified file — TestScope_final layout): + Row 1: Column headers (no instruction rows) + Col 0 (A): Arbeitspaket + Col 1 (B): Ebene1 + Col 2 (C): Ebene2 + ... + Col 12+ : Component pairs + +Detection is header-driven: we find "Ebene1" in any column within the first 5 rows +and build a dynamic column_map from that header row. +""" + +from __future__ import annotations + +import logging +import re +from collections import Counter +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any + +import openpyxl + +logger = logging.getLogger(__name__) + +# --------------------------------------------------------------------------- +# Category detection map: substring in col0 or col2 → category_key +# Priority order matters – more specific first. 
+# --------------------------------------------------------------------------- +CATEGORY_MAP: dict[str, str] = { + # Linear / Anschlagplatten (check Ebene1 = "Linearsysteme") + "endplatten": "Anschlagplatten", + "anschlagplatten": "Anschlagplatten", + "laufrollenführungen": "Anschlagplatten", + "linearsysteme": "Linear_schiene", # Ebene1 value + "profilschienenführungen": "Linear_schiene", + "rollenumlaufeinheit": "Linear_schiene", + "kugelumlaufeinheit": "Linear_schiene", + # Bearings – most specific first + "zylinderrollenlager": "CRB", + "axial-zylinderrollenlager": "CRB", + "axial-schrägrollenlager": "CRB", + "axiallagerscheiben": "CRB", + "torb": "SRB_TORB", + "radial srb": "SRB_TORB", + "pendelrollenlager": "SRB_TORB", + "kegelrollenlager": "TRB", + "kugellager": "Kugellager", + "axial-rillenkugellager": "Kugellager", + "rillenkugellager": "Kugellager", + "schrägkugellager": "Kugellager", + "gleitlager": "Gleitlager", + "gelenklager": "Gleitlager", + "gleitbuchsen": "Gleitlager", + # Fallback for generic Rollenlager → TRB (only if nothing else matched) + "rollenlager": "TRB", +} + +# --------------------------------------------------------------------------- +# Header name normalization map: normalized header text → field name +# Supports multiple alternative column header texts for each field. +# --------------------------------------------------------------------------- +HEADER_FIELD_MAP: dict[str, str] = { + "arbeitspaket": "arbeitspaket", + "ebene1": "ebene1", + "ebene2": "ebene2", + "baureihe": "baureihe", + "pim-id": "pim_id", + "pim-id (klasse)": "pim_id", + "produkt (baureihe)": "produkt_baureihe", + "produkt": "produkt_baureihe", + "gewähltes produkt": "gewaehltes_produkt", + "gewaehltes produkt": "gewaehltes_produkt", + "name cad-modell": "name_cad_modell", + "name cad modell": "name_cad_modell", + "gewünschte bildnummer": "gewuenschte_bildnummer", + "gewuenschte bildnummer": "gewuenschte_bildnummer", + "lagertyp": "lagertyp", + "medias-rendering": "medias_rendering", + "medias": "medias_rendering", +} + + +@dataclass +class ParsedComponent: + part_name: str | None + material: str | None + component_type: str | None + column_index: int + + +@dataclass +class ParsedRow: + row_index: int + ebene1: str | None = None + ebene2: str | None = None + baureihe: str | None = None + pim_id: str | None = None + produkt_baureihe: str | None = None + gewaehltes_produkt: str | None = None + name_cad_modell: str | None = None + gewuenschte_bildnummer: str | None = None + lagertyp: str | None = None + medias_rendering: bool | None = None + components: list[ParsedComponent] = field(default_factory=list) + category_key: str | None = None + arbeitspaket: str | None = None + + +@dataclass +class ParsedExcel: + filename: str + category_key: str | None + template_name: str | None + column_headers: list[str] + rows: list[ParsedRow] + warnings: list[str] = field(default_factory=list) + material_mappings: list[dict] = field(default_factory=list) + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + +def _clean(value: Any) -> str | None: + """Strip whitespace, return None for empty values.""" + if value is None: + return None + s = str(value).strip() + return s if s else None + + +def _normalize_filename(name: str | None) -> str | None: + """Lowercase and strip trailing spaces from filenames. 
Returns None for empty strings.""" + if name is None: + return None + stripped = name.strip() + if not stripped: + return None + return stripped.lower() + + +def _to_bool(value: Any) -> bool | None: + """Convert Excel 1/0, 'ja'/'nein', True/False to Python bool.""" + if value is None: + return None + if isinstance(value, bool): + return value + s = str(value).strip().lower() + if s in ("1", "true", "ja", "yes", "x"): + return True + if s in ("0", "false", "nein", "no", ""): + return False + return None + + +def _normalize_header(text: str) -> str: + """Normalize a header cell value for matching.""" + return text.strip().lower().replace("_", " ").replace("–", "-").replace("—", "-") + + +def _detect_row_category(ebene1: str | None, ebene2: str | None, baureihe: str | None) -> str | None: + """Detect category for a single row from its Ebene1, Ebene2, Baureihe values.""" + candidates = [] + for val in (ebene1, ebene2, baureihe): + if val: + candidates.append(val.lower()) + for keyword, cat in CATEGORY_MAP.items(): + for cand in candidates: + if keyword in cand: + return cat + return None + + +def _detect_category(rows: list[list[Any]], column_map: dict[str, int]) -> str | None: + """ + Detect category by scanning Ebene1, Ebene2, and Baureihe columns + across all data rows. Priority: more specific keywords first (as ordered in map). + """ + ebene1_col = column_map.get("ebene1") + ebene2_col = column_map.get("ebene2") + baureihe_col = column_map.get("baureihe") + + candidates: list[str] = [] + for row in rows: + for col in (ebene1_col, ebene2_col, baureihe_col): + if col is not None and col < len(row): + val = _clean(row[col]) + if val: + candidates.append(val.lower()) + + for keyword, cat in CATEGORY_MAP.items(): + for cand in candidates: + if keyword in cand: + return cat + return None + + +def _build_column_map(headers: list[str]) -> dict[str, int]: + """Build field_name → column_index mapping from header row.""" + column_map: dict[str, int] = {} + for idx, raw_header in enumerate(headers): + if not raw_header: + continue + normalized = _normalize_header(raw_header) + field_name = HEADER_FIELD_MAP.get(normalized) + if field_name and field_name not in column_map: + column_map[field_name] = idx + return column_map + + +def _find_component_start(column_map: dict[str, int]) -> int: + """Find the first column after medias_rendering for component pairs.""" + medias_col = column_map.get("medias_rendering") + if medias_col is not None: + return medias_col + 1 + # Fallback: find the highest mapped column and start after it + if column_map: + return max(column_map.values()) + 1 + return 11 # Legacy default + + +def _get_cell(row: list[Any], col: int | None) -> Any: + """Safely get a cell value by column index.""" + if col is None or col >= len(row): + return None + return row[col] + + +# --------------------------------------------------------------------------- +# Material mapping sheet parser +# --------------------------------------------------------------------------- + +def _parse_material_mapping(wb) -> list[dict]: + """Parse 'materialmapping' sheet if it exists. + + Expected columns: display_name (col A), render_name (col B). + Returns list of {"display_name": str, "render_name": str}. 
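As a concrete picture of the return value, a two-column materialmapping sheet comes back as a list of dicts like the following; the first pair mirrors an alias example quoted in material_service.py, the second pair is invented:

```python
# Illustrative return value of _parse_material_mapping(); the second pair is made up.
example_mappings = [
    {"display_name": "Steel--Stahl", "render_name": "SCHAEFFLER_010101_Steel-Bare"},
    {"display_name": "Messing", "render_name": "SCHAEFFLER_Brass-Example"},
]
```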
+ """ + # Case-insensitive sheet name search + target_name = None + for name in wb.sheetnames: + if name.lower().replace(" ", "").replace("_", "") == "materialmapping": + target_name = name + break + if target_name is None: + return [] + + ws = wb[target_name] + mappings = [] + + rows = list(ws.iter_rows(values_only=True)) + if not rows: + return [] + + # Detect header row — look for "display" or "anzeige" in first few rows + data_start = 0 + for i, row in enumerate(rows[:3]): + if row and any( + _clean(cell) and ("display" in str(cell).lower() or "anzeige" in str(cell).lower() or "material" in str(cell).lower()) + for cell in row[:3] + if cell is not None + ): + data_start = i + 1 + break + + for row in rows[data_start:]: + if len(row) < 2: + continue + display = _clean(row[0]) + render = _clean(row[1]) + if display and render: + mappings.append({"display_name": display, "render_name": render}) + + return mappings + + +# --------------------------------------------------------------------------- +# Main parser +# --------------------------------------------------------------------------- + +def parse_excel(file_path: str | Path) -> ParsedExcel: + """ + Parse a Schaeffler order list Excel file. + + Returns a ParsedExcel with all data rows extracted. + Header-driven: finds "Ebene1" in any column within first 5 rows, + then builds column map dynamically. + """ + file_path = Path(file_path) + warnings: list[str] = [] + + try: + wb = openpyxl.load_workbook(file_path, data_only=True) + except Exception as exc: + raise ValueError(f"Cannot open Excel file: {exc}") from exc + + ws = wb.active + + # Collect all rows as raw values + all_rows: list[list[Any]] = [] + for row in ws.iter_rows(values_only=True): + all_rows.append(list(row)) + + if len(all_rows) < 2: + raise ValueError("Excel file has fewer than 2 rows – cannot find header row") + + # Auto-detect header row: first row (within first 5) where ANY column == "Ebene1" + header_idx: int | None = None + for i, row in enumerate(all_rows[:5]): + for col_idx, cell in enumerate(row): + val = _clean(cell) + if val and val.lower() == "ebene1": + header_idx = i + break + if header_idx is not None: + break + + if header_idx is None: + # Fallback: assume row 3 (index 2) is headers + header_idx = 2 + warnings.append( + "Could not auto-detect header row (expected 'Ebene1' in any column); " + "falling back to row 3 as headers" + ) + + if len(all_rows) <= header_idx: + raise ValueError("Excel file has no data rows after the detected header row") + + headers_raw = list(all_rows[header_idx]) + # Remove trailing None from headers + while headers_raw and headers_raw[-1] is None: + headers_raw.pop() + + max_col = len(headers_raw) + column_headers = [_clean(h) or "" for h in headers_raw] + + # Build dynamic column map from headers + column_map = _build_column_map(column_headers) + + # Data rows start immediately after the header row + data_rows_raw = all_rows[header_idx + 1:] + + # Detect file-level category (backward compat) + category_key = _detect_category(data_rows_raw, column_map) + template_name = _category_to_template_name(category_key) + + # Determine component column start + comp_start = _find_component_start(column_map) + + # Build component header info (paired columns from comp_start) + component_col_info: list[tuple[int, int, str]] = [] # (part_col, material_col, component_type) + col = comp_start + while col < max_col: + part_type = column_headers[col] if col < len(column_headers) else f"part_{col}" + mat_col = col + 1 + 
component_col_info.append((col, mat_col, part_type)) + col += 2 + + # Parse data rows + parsed_rows: list[ParsedRow] = [] + for row_idx, raw_row in enumerate(data_rows_raw): + # Pad row to max_col + while len(raw_row) < max_col: + raw_row.append(None) + + # Check if the row is completely empty (check all mapped columns) + check_end = min(comp_start, max_col) + if all(v is None or str(v).strip() == "" for v in raw_row[:check_end]): + continue + + ebene1 = _clean(_get_cell(raw_row, column_map.get("ebene1"))) + ebene2 = _clean(_get_cell(raw_row, column_map.get("ebene2"))) + baureihe = _clean(_get_cell(raw_row, column_map.get("baureihe"))) + + pr = ParsedRow( + row_index=row_idx + header_idx + 2, # 1-based Excel row number + ebene1=ebene1, + ebene2=ebene2, + baureihe=baureihe, + pim_id=_clean(_get_cell(raw_row, column_map.get("pim_id"))), + produkt_baureihe=_clean(_get_cell(raw_row, column_map.get("produkt_baureihe"))), + gewaehltes_produkt=_clean(_get_cell(raw_row, column_map.get("gewaehltes_produkt"))), + name_cad_modell=_normalize_filename( + _clean(_get_cell(raw_row, column_map.get("name_cad_modell"))) + ), + gewuenschte_bildnummer=_clean( + _get_cell(raw_row, column_map.get("gewuenschte_bildnummer")) + ), + lagertyp=_clean(_get_cell(raw_row, column_map.get("lagertyp"))), + medias_rendering=_to_bool(_get_cell(raw_row, column_map.get("medias_rendering"))), + arbeitspaket=_clean(_get_cell(raw_row, column_map.get("arbeitspaket"))), + category_key=_detect_row_category(ebene1, ebene2, baureihe), + ) + + # Parse component pairs + for part_col, mat_col, comp_type in component_col_info: + part_name = _normalize_filename(_clean(raw_row[part_col] if part_col < len(raw_row) else None)) + material = _clean(raw_row[mat_col] if mat_col < len(raw_row) else None) + + if part_name or material: + pr.components.append( + ParsedComponent( + part_name=part_name, + material=material, + component_type=comp_type, + column_index=part_col, + ) + ) + + parsed_rows.append(pr) + + if not parsed_rows: + warnings.append("No data rows found (all rows empty after header)") + + # Determine file-level category from most common row category + if parsed_rows: + row_cats = [r.category_key for r in parsed_rows if r.category_key] + if row_cats: + most_common = Counter(row_cats).most_common(1)[0][0] + category_key = most_common + template_name = _category_to_template_name(category_key) + + # Parse material mapping sheet if present + material_mappings = _parse_material_mapping(wb) + + return ParsedExcel( + filename=file_path.name, + category_key=category_key, + template_name=template_name, + column_headers=column_headers, + rows=parsed_rows, + warnings=warnings, + material_mappings=material_mappings, + ) + + +def _category_to_template_name(category_key: str | None) -> str | None: + names = { + "TRB": "Tapered Roller Bearings (TRB)", + "Kugellager": "Kugellager (Ball Bearings)", + "Gleitlager": "Gleitlager (Plain Bearings)", + "CRB": "Cylindrical Roller Bearings (CRB)", + "Linear_schiene": "Linear Guide Rails", + "Anschlagplatten": "End Plates (Anschlagplatten)", + "SRB_TORB": "Spherical / Toroidal Roller Bearings (SRB/TORB)", + } + return names.get(category_key) if category_key else None + + +# --------------------------------------------------------------------------- +# Serialisation helpers (convert dataclasses → plain dicts for API) +# --------------------------------------------------------------------------- + +def parsed_row_to_dict(pr: ParsedRow) -> dict: + return { + "row_index": pr.row_index, + "ebene1": pr.ebene1, + 
"ebene2": pr.ebene2, + "baureihe": pr.baureihe, + "pim_id": pr.pim_id, + "produkt_baureihe": pr.produkt_baureihe, + "gewaehltes_produkt": pr.gewaehltes_produkt, + "name_cad_modell": pr.name_cad_modell, + "gewuenschte_bildnummer": pr.gewuenschte_bildnummer, + "lagertyp": pr.lagertyp, + "medias_rendering": pr.medias_rendering, + "category_key": pr.category_key, + "arbeitspaket": pr.arbeitspaket, + "components": [ + { + "part_name": c.part_name, + "material": c.material, + "component_type": c.component_type, + "column_index": c.column_index, + } + for c in pr.components + ], + } + + +def parsed_excel_to_dict(pe: ParsedExcel) -> dict: + return { + "filename": pe.filename, + "category_key": pe.category_key, + "template_name": pe.template_name, + "row_count": len(pe.rows), + "column_headers": pe.column_headers, + "rows": [parsed_row_to_dict(r) for r in pe.rows], + "warnings": pe.warnings, + "material_mappings": pe.material_mappings, + } diff --git a/backend/app/services/flamenco_client.py b/backend/app/services/flamenco_client.py new file mode 100644 index 0000000..4807897 --- /dev/null +++ b/backend/app/services/flamenco_client.py @@ -0,0 +1,121 @@ +"""Flamenco Manager REST API client. + +Uses httpx (sync) for compatibility with Celery tasks and FastAPI endpoints. +""" +import logging +from typing import Any + +import httpx + +logger = logging.getLogger(__name__) + +DEFAULT_TIMEOUT = 10.0 + + +class FlamencoClient: + """Thin wrapper around the Flamenco Manager v3 REST API.""" + + def __init__(self, manager_url: str): + self.base_url = manager_url.rstrip("/") + + def _url(self, path: str) -> str: + return f"{self.base_url}{path}" + + # ── Job management ────────────────────────────────────────────────────── + + def submit_job( + self, + name: str, + job_type: str, + settings: dict[str, Any], + metadata: dict[str, str] | None = None, + priority: int = 50, + ) -> dict: + """Submit a new render job to Flamenco Manager. + + Returns the created job dict (includes 'id'). 
+ """ + payload = { + "name": name, + "type": job_type, + "submitter_platform": "linux", + "settings": settings, + "metadata": metadata or {}, + "priority": priority, + } + resp = httpx.post( + self._url("/api/v3/jobs"), + json=payload, + timeout=DEFAULT_TIMEOUT, + ) + resp.raise_for_status() + return resp.json() + + def get_job(self, job_id: str) -> dict: + """Get job details by ID.""" + resp = httpx.get( + self._url(f"/api/v3/jobs/{job_id}"), + timeout=DEFAULT_TIMEOUT, + ) + resp.raise_for_status() + return resp.json() + + def cancel_job(self, job_id: str) -> None: + """Request cancellation of a job.""" + resp = httpx.post( + self._url(f"/api/v3/jobs/{job_id}/setstatus"), + json={"status": "cancel-requested"}, + timeout=DEFAULT_TIMEOUT, + ) + resp.raise_for_status() + + # ── Workers ───────────────────────────────────────────────────────────── + + def list_workers(self) -> list[dict]: + """List all registered workers.""" + resp = httpx.get( + self._url("/api/v3/worker-mgt/workers"), + timeout=DEFAULT_TIMEOUT, + ) + resp.raise_for_status() + data = resp.json() + return data.get("workers", data) if isinstance(data, dict) else data + + # ── Farm status ───────────────────────────────────────────────────────── + + def get_farm_status(self) -> dict: + """Get overall farm status from the Manager.""" + resp = httpx.get( + self._url("/api/v3/configuration"), + timeout=DEFAULT_TIMEOUT, + ) + resp.raise_for_status() + return resp.json() + + def health_check(self) -> dict: + """Check if the Flamenco Manager is reachable and return version info.""" + try: + resp = httpx.get( + self._url("/api/v3/version"), + timeout=5.0, + ) + resp.raise_for_status() + data = resp.json() + return { + "available": True, + "version": data.get("version", "unknown"), + "name": data.get("name", "Flamenco"), + } + except Exception as exc: + logger.warning(f"Flamenco health check failed: {exc}") + return { + "available": False, + "version": None, + "name": None, + "error": str(exc)[:200], + } + + +def get_flamenco_client(manager_url: str) -> FlamencoClient: + """Factory that creates a FlamencoClient from a manager URL.""" + return FlamencoClient(manager_url) diff --git a/backend/app/services/kpi_service.py b/backend/app/services/kpi_service.py new file mode 100644 index 0000000..dcd9062 --- /dev/null +++ b/backend/app/services/kpi_service.py @@ -0,0 +1,471 @@ +"""KPI / analytics query functions. + +All functions return plain dicts or lists of dicts. +Uses text() for raw SQL to avoid ORM lazy-loading surprises. +Every function accepts date_from / date_to ISO-date strings to scope metrics. 
+""" +from datetime import date as _date +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession + + +def _parse_date(s: str) -> _date: + """Convert ISO date string to datetime.date for asyncpg compatibility.""" + return _date.fromisoformat(s) + + +async def order_throughput_by_week( + db: AsyncSession, date_from: str, date_to: str, +) -> list[dict]: + """Weekly order creation + completion counts within the date range.""" + sql = text( + """ + SELECT + TO_CHAR(DATE_TRUNC('week', created_at), 'IYYY-"W"IW') AS week, + COUNT(*) AS count, + COUNT(*) FILTER (WHERE status = 'completed') AS completed + FROM orders + WHERE created_at >= CAST(:date_from AS date) + AND created_at < CAST(:date_to AS date) + INTERVAL '1 day' + GROUP BY DATE_TRUNC('week', created_at) + ORDER BY DATE_TRUNC('week', created_at) + """ + ) + result = await db.execute(sql, {"date_from": _parse_date(date_from), "date_to": _parse_date(date_to)}) + rows = result.fetchall() + return [{"week": r[0], "count": r[1], "completed": r[2]} for r in rows] + + +async def processing_time_stats( + db: AsyncSession, date_from: str, date_to: str, +) -> dict: + """Average and percentile processing times for completed orders in range.""" + sql = text( + """ + SELECT + EXTRACT(EPOCH FROM AVG(completed_at - submitted_at))::FLOAT + AS avg_submit_to_complete_s, + EXTRACT(EPOCH FROM AVG(processing_started_at - submitted_at))::FLOAT + AS avg_submit_to_processing_s, + EXTRACT(EPOCH FROM PERCENTILE_CONT(0.5) WITHIN GROUP ( + ORDER BY completed_at - submitted_at + ))::FLOAT AS p50_s, + EXTRACT(EPOCH FROM PERCENTILE_CONT(0.95) WITHIN GROUP ( + ORDER BY completed_at - submitted_at + ))::FLOAT AS p95_s + FROM orders + WHERE status = 'completed' + AND submitted_at IS NOT NULL + AND completed_at IS NOT NULL + AND submitted_at >= CAST(:date_from AS date) + AND submitted_at < CAST(:date_to AS date) + INTERVAL '1 day' + """ + ) + result = await db.execute(sql, {"date_from": _parse_date(date_from), "date_to": _parse_date(date_to)}) + row = result.fetchone() + if row is None: + return { + "avg_submit_to_complete_s": None, + "avg_submit_to_processing_s": None, + "p50_s": None, + "p95_s": None, + } + return { + "avg_submit_to_complete_s": row[0], + "avg_submit_to_processing_s": row[1], + "p50_s": row[2], + "p95_s": row[3], + } + + +async def revenue_overview( + db: AsyncSession, date_from: str, date_to: str, +) -> list[dict]: + """Monthly revenue (sum of estimated_price for completed orders) in range.""" + sql = text( + """ + SELECT + TO_CHAR(DATE_TRUNC('month', completed_at), 'YYYY-MM') AS month, + COALESCE(SUM(estimated_price), 0)::FLOAT AS revenue, + COUNT(*) AS order_count + FROM orders + WHERE status = 'completed' + AND completed_at >= CAST(:date_from AS date) + AND completed_at < CAST(:date_to AS date) + INTERVAL '1 day' + GROUP BY DATE_TRUNC('month', completed_at) + ORDER BY DATE_TRUNC('month', completed_at) + """ + ) + result = await db.execute(sql, {"date_from": _parse_date(date_from), "date_to": _parse_date(date_to)}) + rows = result.fetchall() + return [{"month": r[0], "revenue": r[1], "order_count": r[2]} for r in rows] + + +async def item_status_breakdown( + db: AsyncSession, date_from: str, date_to: str, +) -> dict: + """Count of order lines grouped by item_status, scoped to orders in range.""" + sql = text( + """ + SELECT ol.item_status, COUNT(*) AS cnt + FROM order_lines ol + JOIN orders o ON o.id = ol.order_id + WHERE o.created_at >= CAST(:date_from AS date) + AND o.created_at < CAST(:date_to AS date) + INTERVAL '1 day' + 
GROUP BY ol.item_status + """ + ) + result = await db.execute(sql, {"date_from": _parse_date(date_from), "date_to": _parse_date(date_to)}) + rows = result.fetchall() + out: dict = {"pending": 0, "approved": 0, "rejected": 0} + for row in rows: + key = str(row[0]) + out[key] = int(row[1]) + return out + + +async def render_time_breakdown( + db: AsyncSession, date_from: str, date_to: str, +) -> dict: + """Average render duration from completed order lines, scoped to date range. + + Uses render_started_at / render_completed_at on order_lines (added in migration 015). + avg_stl_s is not tracked at order-line level, so only avg_render_s and sample_count + are meaningful here; avg_stl_s is left None for UI compatibility. + """ + sql = text( + """ + SELECT + AVG(EXTRACT(EPOCH FROM (render_completed_at - render_started_at))) AS avg_render_s, + COUNT(*) AS sample_count + FROM order_lines + WHERE render_status = 'completed' + AND render_started_at IS NOT NULL + AND render_completed_at IS NOT NULL + AND render_completed_at >= CAST(:date_from AS date) + AND render_completed_at < CAST(:date_to AS date) + INTERVAL '1 day' + """ + ) + result = await db.execute(sql, {"date_from": _parse_date(date_from), "date_to": _parse_date(date_to)}) + row = result.fetchone() + if row is None or row[1] == 0: + return {"avg_stl_s": None, "avg_render_s": None, "avg_total_s": None, "sample_count": 0} + return { + "avg_stl_s": None, + "avg_render_s": float(row[0]) if row[0] is not None else None, + "avg_total_s": float(row[0]) if row[0] is not None else None, + "sample_count": int(row[1]), + } + + +async def render_time_by_output_type( + db: AsyncSession, date_from: str, date_to: str, +) -> list[dict]: + """Render time statistics per output type for completed order lines.""" + sql = text( + """ + SELECT + COALESCE(ot.name, 'Unknown') AS output_type, + COUNT(*) AS job_count, + AVG(EXTRACT(EPOCH FROM (ol.render_completed_at - ol.render_started_at))) AS avg_render_s, + MIN(EXTRACT(EPOCH FROM (ol.render_completed_at - ol.render_started_at))) AS min_render_s, + MAX(EXTRACT(EPOCH FROM (ol.render_completed_at - ol.render_started_at))) AS max_render_s, + PERCENTILE_CONT(0.5) WITHIN GROUP ( + ORDER BY EXTRACT(EPOCH FROM (ol.render_completed_at - ol.render_started_at)) + ) AS p50_render_s + FROM order_lines ol + LEFT JOIN output_types ot ON ot.id = ol.output_type_id + WHERE ol.render_status = 'completed' + AND ol.render_started_at IS NOT NULL + AND ol.render_completed_at IS NOT NULL + AND ol.render_completed_at >= CAST(:date_from AS date) + AND ol.render_completed_at < CAST(:date_to AS date) + INTERVAL '1 day' + GROUP BY ot.id, ot.name + ORDER BY avg_render_s DESC NULLS LAST + """ + ) + result = await db.execute(sql, {"date_from": _parse_date(date_from), "date_to": _parse_date(date_to)}) + return [ + { + "output_type": r[0], + "job_count": int(r[1]), + "avg_render_s": float(r[2]) if r[2] is not None else None, + "min_render_s": float(r[3]) if r[3] is not None else None, + "max_render_s": float(r[4]) if r[4] is not None else None, + "p50_render_s": float(r[5]) if r[5] is not None else None, + } + for r in result.fetchall() + ] + + +async def top_level_summary( + db: AsyncSession, date_from: str, date_to: str, +) -> dict: + """High-level summary counts and totals within the date range.""" + sql = text( + """ + SELECT + COUNT(*) AS total_orders, + COUNT(*) FILTER (WHERE status = 'completed') AS completed_orders, + COALESCE(SUM(estimated_price) FILTER (WHERE status = 'completed'), 0)::FLOAT + AS total_revenue + FROM orders + WHERE 
created_at >= CAST(:date_from AS date) + AND created_at < CAST(:date_to AS date) + INTERVAL '1 day' + """ + ) + result = await db.execute(sql, {"date_from": _parse_date(date_from), "date_to": _parse_date(date_to)}) + row = result.fetchone() + + items_sql = text( + """ + SELECT COUNT(*) + FROM order_lines ol + JOIN orders o ON o.id = ol.order_id + WHERE ol.output_type_id IS NOT NULL + AND o.created_at >= CAST(:date_from AS date) + AND o.created_at < CAST(:date_to AS date) + INTERVAL '1 day' + """ + ) + items_result = await db.execute(items_sql, {"date_from": _parse_date(date_from), "date_to": _parse_date(date_to)}) + items_count = items_result.scalar() or 0 + + return { + "total_orders": int(row[0]) if row else 0, + "completed_orders": int(row[1]) if row else 0, + "total_revenue": float(row[2]) if row else 0.0, + "total_rendering_items": int(items_count), + } + + +async def product_and_category_stats( + db: AsyncSession, date_from: str, date_to: str, +) -> dict: + """Product-level stats: unique rendered, total products, CAD coverage, by category.""" + params = {"date_from": _parse_date(date_from), "date_to": _parse_date(date_to)} + + rendered_sql = text( + """ + SELECT COUNT(DISTINCT ol.product_id) + FROM order_lines ol + JOIN orders o ON o.id = ol.order_id + WHERE ol.render_status = 'completed' + AND o.created_at >= CAST(:date_from AS date) + AND o.created_at < CAST(:date_to AS date) + INTERVAL '1 day' + """ + ) + rendered = (await db.execute(rendered_sql, params)).scalar() or 0 + + totals_sql = text( + """ + SELECT COUNT(*) AS total, COUNT(cad_file_id) AS with_cad + FROM products + """ + ) + totals_row = (await db.execute(totals_sql)).fetchone() + total_products = int(totals_row[0]) if totals_row else 0 + products_with_cad = int(totals_row[1]) if totals_row else 0 + + cat_sql = text( + """ + SELECT COALESCE(category_key, 'unknown') AS category, COUNT(*) AS cnt + FROM products + GROUP BY category_key + ORDER BY cnt DESC + """ + ) + cat_rows = (await db.execute(cat_sql)).fetchall() + + return { + "unique_products_rendered": int(rendered), + "total_products": total_products, + "products_with_cad": products_with_cad, + "products_by_category": [ + {"category": r[0], "count": int(r[1])} for r in cat_rows + ], + } + + +async def output_type_usage( + db: AsyncSession, date_from: str, date_to: str, +) -> list[dict]: + """Order lines grouped by output type name.""" + sql = text( + """ + SELECT ot.name AS output_type, COUNT(*) AS cnt + FROM order_lines ol + JOIN output_types ot ON ot.id = ol.output_type_id + JOIN orders o ON o.id = ol.order_id + WHERE o.created_at >= CAST(:date_from AS date) + AND o.created_at < CAST(:date_to AS date) + INTERVAL '1 day' + GROUP BY ot.name + ORDER BY cnt DESC + """ + ) + rows = (await db.execute(sql, {"date_from": _parse_date(date_from), "date_to": _parse_date(date_to)})).fetchall() + return [{"output_type": r[0], "count": int(r[1])} for r in rows] + + +async def render_status_distribution( + db: AsyncSession, date_from: str, date_to: str, +) -> tuple[dict, list[dict]]: + """(a) order_lines by render_status, (b) cad_files by renderer from render_log.""" + params = {"date_from": _parse_date(date_from), "date_to": _parse_date(date_to)} + + status_sql = text( + """ + SELECT ol.render_status, COUNT(*) AS cnt + FROM order_lines ol + JOIN orders o ON o.id = ol.order_id + WHERE o.created_at >= CAST(:date_from AS date) + AND o.created_at < CAST(:date_to AS date) + INTERVAL '1 day' + GROUP BY ol.render_status + """ + ) + status_rows = (await db.execute(status_sql, 
params)).fetchall() + status_map: dict = {"pending": 0, "processing": 0, "completed": 0, "failed": 0} + for row in status_rows: + key = str(row[0]) + status_map[key] = int(row[1]) + + renderer_sql = text( + """ + SELECT render_log->>'renderer' AS renderer, COUNT(*) AS cnt + FROM cad_files + WHERE render_log IS NOT NULL + AND render_log->>'renderer' IS NOT NULL + AND created_at >= CAST(:date_from AS date) + AND created_at < CAST(:date_to AS date) + INTERVAL '1 day' + GROUP BY render_log->>'renderer' + ORDER BY cnt DESC + """ + ) + renderer_rows = (await db.execute(renderer_sql, params)).fetchall() + renderer_usage = [{"renderer": r[0], "count": int(r[1])} for r in renderer_rows] + + return status_map, renderer_usage + + +async def top_products( + db: AsyncSession, date_from: str, date_to: str, + limit: int = 10, +) -> list[dict]: + """Top N most-ordered products by order line count.""" + sql = text( + """ + SELECT p.pim_id, p.name AS product_name, + COALESCE(p.category_key, 'unknown') AS category, + COUNT(*) AS order_count + FROM order_lines ol + JOIN products p ON p.id = ol.product_id + JOIN orders o ON o.id = ol.order_id + WHERE o.created_at >= CAST(:date_from AS date) + AND o.created_at < CAST(:date_to AS date) + INTERVAL '1 day' + GROUP BY p.id, p.pim_id, p.name, p.category_key + ORDER BY order_count DESC + LIMIT :lim + """ + ) + rows = (await db.execute(sql, { + "date_from": _parse_date(date_from), + "date_to": _parse_date(date_to), + "lim": limit, + })).fetchall() + return [ + {"pim_id": r[0], "product_name": r[1], "category": r[2], "order_count": int(r[3])} + for r in rows + ] + + +async def category_revenue( + db: AsyncSession, date_from: str, date_to: str, +) -> list[dict]: + """Proportional revenue by category: order price / line count, summed per category.""" + sql = text( + """ + WITH order_share AS ( + SELECT o.id AS order_id, + COALESCE(o.estimated_price, 0) / GREATEST(COUNT(ol.id), 1) AS per_line_price, + COALESCE(p.category_key, 'unknown') AS category + FROM orders o + JOIN order_lines ol ON ol.order_id = o.id + JOIN products p ON p.id = ol.product_id + WHERE o.status = 'completed' + AND o.completed_at >= CAST(:date_from AS date) + AND o.completed_at < CAST(:date_to AS date) + INTERVAL '1 day' + GROUP BY o.id, o.estimated_price, p.category_key + ) + SELECT category, + COUNT(DISTINCT order_id) AS order_count, + COALESCE(SUM(per_line_price), 0)::FLOAT AS revenue + FROM order_share + GROUP BY category + ORDER BY revenue DESC + """ + ) + rows = (await db.execute(sql, {"date_from": _parse_date(date_from), "date_to": _parse_date(date_to)})).fetchall() + return [{"category": r[0], "order_count": int(r[1]), "revenue": r[2]} for r in rows] + + +async def render_backend_stats( + db: AsyncSession, date_from: str, date_to: str, +) -> list[dict]: + """Render time + count by backend (celery vs flamenco).""" + sql = text( + """ + SELECT + COALESCE(render_backend_used, 'unknown') AS backend, + COUNT(*) AS total, + COUNT(*) FILTER (WHERE render_status = 'completed') AS completed, + COUNT(*) FILTER (WHERE render_status = 'failed') AS failed, + EXTRACT(EPOCH FROM AVG( + render_completed_at - render_started_at + ) FILTER (WHERE render_status = 'completed'))::FLOAT AS avg_render_s, + EXTRACT(EPOCH FROM PERCENTILE_CONT(0.5) WITHIN GROUP ( + ORDER BY render_completed_at - render_started_at + ) FILTER (WHERE render_status = 'completed'))::FLOAT AS p50_render_s + FROM order_lines ol + JOIN orders o ON o.id = ol.order_id + WHERE render_backend_used IS NOT NULL + AND o.created_at >= CAST(:date_from AS 
date) + AND o.created_at < CAST(:date_to AS date) + INTERVAL '1 day' + GROUP BY render_backend_used + ORDER BY total DESC + """ + ) + rows = (await db.execute(sql, {"date_from": _parse_date(date_from), "date_to": _parse_date(date_to)})).fetchall() + return [ + { + "backend": r[0], + "total": int(r[1]), + "completed": int(r[2]), + "failed": int(r[3]), + "avg_render_s": r[4], + "p50_render_s": r[5], + } + for r in rows + ] + + +async def orders_by_user( + db: AsyncSession, date_from: str, date_to: str, +) -> list[dict]: + """Orders grouped by user with counts and revenue.""" + sql = text( + """ + SELECT u.full_name, u.email, u.role, + COUNT(*) AS order_count, + COALESCE(SUM(o.estimated_price) FILTER (WHERE o.status = 'completed'), 0)::FLOAT AS revenue + FROM orders o + JOIN users u ON u.id = o.created_by + WHERE o.created_at >= CAST(:date_from AS date) + AND o.created_at < CAST(:date_to AS date) + INTERVAL '1 day' + GROUP BY u.id, u.full_name, u.email, u.role + ORDER BY order_count DESC + """ + ) + rows = (await db.execute(sql, {"date_from": _parse_date(date_from), "date_to": _parse_date(date_to)})).fetchall() + return [ + {"full_name": r[0], "email": r[1], "role": r[2], "order_count": int(r[3]), "revenue": r[4]} + for r in rows + ] diff --git a/backend/app/services/material_service.py b/backend/app/services/material_service.py new file mode 100644 index 0000000..ccd8bc9 --- /dev/null +++ b/backend/app/services/material_service.py @@ -0,0 +1,143 @@ +"""Material alias resolution service. + +Used from Celery tasks (sync context) to resolve raw material names +(from Excel / user input) to SCHAEFFLER library material names via aliases. + +Resolution chain: +1. Exact Material.name match (case-insensitive) → use it +2. MaterialAlias lookup (case-insensitive) → use alias.material.name +3. Pass through unchanged → Blender will show FailedMaterial magenta +""" +import logging + +from sqlalchemy import create_engine, select, func +from sqlalchemy.orm import Session, selectinload +from sqlalchemy.ext.asyncio import AsyncSession + +from app.models.material import Material +from app.models.material_alias import MaterialAlias + +logger = logging.getLogger(__name__) + +_engine = None + + +def _get_engine(): + global _engine + if _engine is None: + from app.config import settings as app_settings + _engine = create_engine(app_settings.database_url_sync) + return _engine + + +def resolve_material_map(raw_map: dict[str, str]) -> dict[str, str]: + """Resolve raw material names to SCHAEFFLER library names via aliases. + + For each value in raw_map: + 1. If it already matches a Material.name (case-insensitive) → keep as-is (use canonical name) + 2. Else look up MaterialAlias.alias (case-insensitive) → return alias.material.name + 3. Else keep original (Blender will use FailedMaterial fallback) + + Returns a new dict with the same keys but resolved material names. 
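Illustratively — this needs a reachable database with seeded aliases; the part names and the second material are invented, while the alias pair mirrors the comment in the code below:

```python
from app.services.material_service import resolve_material_map

# Requires a configured database with Material / MaterialAlias rows.
raw = {
    "innenring.stl": "Steel--Stahl",       # resolved via alias (see comment below)
    "kaefig.stl": "Sonderkunststoff-XYZ",  # no match → passed through unchanged
}
resolved = resolve_material_map(raw)
# resolved["innenring.stl"] == "SCHAEFFLER_010101_Steel-Bare"
# resolved["kaefig.stl"]    == "Sonderkunststoff-XYZ"  (Blender shows FailedMaterial)
```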
+ """ + if not raw_map: + return raw_map + + engine = _get_engine() + with Session(engine) as session: + # Load all materials + materials = session.execute( + select(Material).options(selectinload(Material.aliases)) + ).scalars().all() + + # Build lookup dicts (case-insensitive) + # material name (lower) → canonical Material.name + name_lookup: dict[str, str] = {} + # alias (lower) → Material.name + alias_lookup: dict[str, str] = {} + + for mat in materials: + name_lookup[mat.name.lower()] = mat.name + for a in mat.aliases: + alias_lookup[a.alias.lower()] = mat.name + + resolved = {} + for part_name, raw_material in raw_map.items(): + raw_lower = raw_material.lower() + + # 1. Alias lookup first — aliases explicitly map intermediate/display names + # to the canonical SCHAEFFLER library names (e.g. "Steel--Stahl" → + # "SCHAEFFLER_010101_Steel-Bare"). This must take priority over the + # direct name match so that intermediate names are properly redirected. + if raw_lower in alias_lookup: + target = alias_lookup[raw_lower] + logger.info("resolved '%s' → '%s' (alias match)", raw_material, target) + resolved[part_name] = target + continue + + # 2. Exact material name match (canonical name used as-is) + if raw_lower in name_lookup: + canonical = name_lookup[raw_lower] + if canonical != raw_material: + logger.info("resolved '%s' → '%s' (exact name match)", raw_material, canonical) + resolved[part_name] = canonical + continue + + # 3. Pass through unchanged + logger.warning("no material match for '%s' — will use FailedMaterial fallback", raw_material) + resolved[part_name] = raw_material + + return resolved + + +async def seed_material_aliases_from_mappings( + db: AsyncSession, mappings: list[dict] +) -> dict: + """Seed material aliases from Excel materialmapping sheet. + + For each {display_name, render_name}: + - Find or create Material by render_name + - Add display_name as alias if not already present + + Returns {"created": N, "skipped": N}. + """ + created = 0 + skipped = 0 + + for mapping in mappings: + display_name = mapping.get("display_name", "").strip() + render_name = mapping.get("render_name", "").strip() + if not display_name or not render_name: + skipped += 1 + continue + + # Find or create Material by render_name + result = await db.execute( + select(Material).where(func.lower(Material.name) == render_name.lower()) + ) + material = result.scalar_one_or_none() + if material is None: + material = Material(name=render_name, source="excel_mapping") + db.add(material) + await db.flush() + + # Check if alias already exists + alias_result = await db.execute( + select(MaterialAlias).where( + func.lower(MaterialAlias.alias) == display_name.lower() + ) + ) + existing_alias = alias_result.scalar_one_or_none() + if existing_alias: + skipped += 1 + continue + + # Create alias + alias = MaterialAlias(material_id=material.id, alias=display_name) + db.add(alias) + created += 1 + + if created > 0: + await db.flush() + + return {"created": created, "skipped": skipped} diff --git a/backend/app/services/notification_service.py b/backend/app/services/notification_service.py new file mode 100644 index 0000000..21a26d0 --- /dev/null +++ b/backend/app/services/notification_service.py @@ -0,0 +1,84 @@ +"""Notification emission helpers. + +Provides async (for routers) and sync (for Celery tasks) entry points +to create notification rows in the audit_log table. 
+""" +import logging +import uuid +from datetime import datetime + +from sqlalchemy import create_engine +from sqlalchemy.orm import Session +from sqlalchemy.ext.asyncio import AsyncSession + +from app.models.audit_log import AuditLog + +logger = logging.getLogger(__name__) + +_engine = None + + +def _get_engine(): + global _engine + if _engine is None: + from app.config import settings as app_settings + _engine = create_engine(app_settings.database_url_sync) + return _engine + + +async def emit_notification( + db: AsyncSession, + *, + actor_user_id: str | uuid.UUID | None = None, + target_user_id: str | uuid.UUID | None = None, + action: str, + entity_type: str | None = None, + entity_id: str | None = None, + details: dict | None = None, +) -> None: + """Create a notification (async — for use inside FastAPI routers).""" + try: + entry = AuditLog( + user_id=str(actor_user_id) if actor_user_id else None, + target_user_id=str(target_user_id) if target_user_id else None, + action=action, + entity_type=entity_type, + entity_id=str(entity_id) if entity_id else None, + details=details, + notification=True, + timestamp=datetime.utcnow(), + ) + db.add(entry) + await db.commit() + except Exception: + logger.exception("Failed to emit notification (async)") + await db.rollback() + + +def emit_notification_sync( + *, + actor_user_id: str | uuid.UUID | None = None, + target_user_id: str | uuid.UUID | None = None, + action: str, + entity_type: str | None = None, + entity_id: str | None = None, + details: dict | None = None, +) -> None: + """Create a notification (sync — for use inside Celery tasks).""" + engine = _get_engine() + try: + with Session(engine) as session: + entry = AuditLog( + user_id=str(actor_user_id) if actor_user_id else None, + target_user_id=str(target_user_id) if target_user_id else None, + action=action, + entity_type=entity_type, + entity_id=str(entity_id) if entity_id else None, + details=details, + notification=True, + timestamp=datetime.utcnow(), + ) + session.add(entry) + session.commit() + except Exception: + logger.exception("Failed to emit notification (sync)") diff --git a/backend/app/services/order_service.py b/backend/app/services/order_service.py new file mode 100644 index 0000000..3808cb4 --- /dev/null +++ b/backend/app/services/order_service.py @@ -0,0 +1,22 @@ +"""Order number generation and business logic.""" +from datetime import datetime +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select, func +from app.models.order import Order + + +async def generate_order_number(db: AsyncSession) -> str: + """Generate next sequential order number: SA-2026-XXXXX.""" + year = datetime.utcnow().year + prefix = f"SA-{year}-" + + # Use MAX to find the highest existing sequence number this year. + # COUNT-based approach breaks when orders are deleted (produces duplicates). 
+ result = await db.execute( + select(func.max(Order.order_number)).where(Order.order_number.like(f"{prefix}%")) + ) + max_num = result.scalar() + if max_num: + last_seq = int(max_num.split("-")[-1]) + return f"{prefix}{last_seq + 1:05d}" + return f"{prefix}00001" diff --git a/backend/app/services/order_status_service.py b/backend/app/services/order_status_service.py new file mode 100644 index 0000000..ea3f2be --- /dev/null +++ b/backend/app/services/order_status_service.py @@ -0,0 +1,86 @@ +"""Service to auto-advance order status when all renders complete.""" +import logging +from datetime import datetime + +from sqlalchemy import create_engine, select, update as sql_update +from sqlalchemy.orm import Session + +from app.models.order import Order, OrderStatus +from app.models.order_line import OrderLine + +logger = logging.getLogger(__name__) + + +def check_order_completion(order_id: str) -> bool: + """If all renderable lines are done, auto-advance order to completed. + + Called from Celery tasks (sync context). + Returns True if the order was advanced to completed. + """ + from app.config import settings as app_settings + + sync_url = app_settings.database_url.replace("+asyncpg", "") + engine = create_engine(sync_url) + + try: + with Session(engine) as session: + # Get all lines that have an output type (i.e. renderable) + lines = session.execute( + select(OrderLine).where( + OrderLine.order_id == order_id, + OrderLine.output_type_id.isnot(None), + ) + ).scalars().all() + + if not lines: + return False + + # Check if all renderable lines are in a terminal state + all_terminal = all( + line.render_status in ("completed", "failed", "cancelled") + for line in lines + ) + + if not all_terminal: + return False + + # Check order is still in processing state + order = session.execute( + select(Order).where(Order.id == order_id) + ).scalar_one_or_none() + + if order is None or order.status != OrderStatus.processing: + return False + + # Auto-advance to completed + now = datetime.utcnow() + session.execute( + sql_update(Order) + .where(Order.id == order_id) + .values( + status=OrderStatus.completed, + completed_at=now, + updated_at=now, + ) + ) + session.commit() + logger.info(f"Order {order_id} auto-advanced to completed (all {len(lines)} lines done)") + + # Notify order creator + try: + from app.services.notification_service import emit_notification_sync + emit_notification_sync( + actor_user_id=None, + target_user_id=str(order.created_by), + action="order.completed", + entity_type="order", + entity_id=str(order_id), + details={"order_number": order.order_number}, + ) + except Exception: + logger.exception("Failed to emit order.completed notification") + + return True + + finally: + engine.dispose() diff --git a/backend/app/services/pricing_service.py b/backend/app/services/pricing_service.py new file mode 100644 index 0000000..53a30ac --- /dev/null +++ b/backend/app/services/pricing_service.py @@ -0,0 +1,232 @@ +"""Pricing service — price lookup and order price computation. + +Price resolution cascade for order lines: + 1. OutputType's linked pricing_tier (if active) → use its price_per_item + 2. Product's category_key → look up PricingTier by category + 3. "default" category tier → global fallback + 4. 
None if nothing configured +""" +from decimal import Decimal +from typing import Any + +from sqlalchemy import select, update as sql_update +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload + +from app.models.pricing_tier import PricingTier + + +async def get_price_for( + db: AsyncSession, + category_key: str, + quality_level: str = "Normal", +) -> Decimal | None: + """Return price_per_item for the given category + quality level. + + Falls back to category_key='default' if no exact match is found. + Returns None if nothing is configured. + """ + # 1. Exact match + result = await db.execute( + select(PricingTier).where( + PricingTier.category_key == category_key, + PricingTier.quality_level == quality_level, + PricingTier.is_active.is_(True), + ) + ) + tier = result.scalar_one_or_none() + if tier is not None: + return tier.price_per_item + + if category_key == "default": + return None + + # 2. Fallback: default category + result = await db.execute( + select(PricingTier).where( + PricingTier.category_key == "default", + PricingTier.quality_level == quality_level, + PricingTier.is_active.is_(True), + ) + ) + tier = result.scalar_one_or_none() + return tier.price_per_item if tier is not None else None + + +async def resolve_line_price( + db: AsyncSession, + output_type_id: str | None, + product_category_key: str | None, +) -> Decimal | None: + """Resolve the unit price for a single order line using the cascade. + + 1. OutputType's linked pricing_tier (if active) + 2. Product's category_key → PricingTier by category + 3. "default" category tier → global fallback + 4. None + """ + if output_type_id is not None: + from app.models.output_type import OutputType + result = await db.execute( + select(OutputType) + .options(selectinload(OutputType.pricing_tier)) + .where(OutputType.id == output_type_id) + ) + ot = result.scalar_one_or_none() + if ot and ot.pricing_tier and ot.pricing_tier.is_active: + return ot.pricing_tier.price_per_item + + # Step 2+3: category lookup with default fallback + cat = product_category_key or "default" + return await get_price_for(db, cat) + + +async def estimate_order_price( + db: AsyncSession, + lines: list[dict[str, Any]], +) -> dict: + """Estimate price for a list of prospective order lines. + + Each line dict should have: product_id, output_type_id. + Returns {total, line_count, breakdown: [{output_type_id, product_id, unit_price}], has_unpriced}. 
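A sketch of a caller (the UUIDs and prices are invented); `has_unpriced` flags lines where the whole cascade produced no price:

```python
from app.services.pricing_service import estimate_order_price

lines = [
    {"product_id": product_a_id, "output_type_id": still_png_type_id},   # placeholder UUIDs
    {"product_id": product_b_id, "output_type_id": turntable_type_id},
]
estimate = await estimate_order_price(db, lines)
# e.g. {"total": 38.0,
#       "line_count": 2,
#       "breakdown": [
#           {"output_type_id": "...", "product_id": "...", "unit_price": 19.0},
#           {"output_type_id": "...", "product_id": "...", "unit_price": 19.0},
#       ],
#       "has_unpriced": False}
```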
+ """ + from app.models.product import Product + + breakdown: list[dict] = [] + total = Decimal("0.00") + has_unpriced = False + + for line in lines: + product_id = line.get("product_id") + output_type_id = line.get("output_type_id") + + # Get product category + cat = None + if product_id: + prod_result = await db.execute( + select(Product).where(Product.id == product_id) + ) + prod = prod_result.scalar_one_or_none() + if prod: + cat = prod.category_key + + price = await resolve_line_price(db, output_type_id, cat) + + breakdown.append({ + "output_type_id": str(output_type_id) if output_type_id else None, + "product_id": str(product_id) if product_id else None, + "unit_price": float(price) if price is not None else None, + }) + + if price is not None: + total += price + else: + has_unpriced = True + + return { + "total": float(total), + "line_count": len(lines), + "breakdown": breakdown, + "has_unpriced": has_unpriced, + } + + +async def compute_order_estimated_price( + db: AsyncSession, + order, + items, + quality_level: str = "Normal", +) -> Decimal | None: + """Compute estimated price for an order based on rendering items. + + Returns None if no pricing is configured, or Decimal('0.00') if there + are no rendering items. + """ + rendering_count = sum(1 for i in items if i.medias_rendering) + if rendering_count == 0: + return Decimal("0.00") + + # Resolve category from template + category_key = "default" + if order.template_id is not None: + from app.models.template import Template + tmpl_result = await db.execute( + select(Template).where(Template.id == order.template_id) + ) + tmpl = tmpl_result.scalar_one_or_none() + if tmpl and tmpl.category_key: + category_key = tmpl.category_key + + unit_price = await get_price_for(db, category_key, quality_level) + if unit_price is None: + return None + + return unit_price * rendering_count + + +async def refresh_order_price(db: AsyncSession, order_id) -> Decimal | None: + """Re-fetch order + lines, resolve per-line prices, snapshot to unit_price, update order total.""" + from app.models.order import Order + from app.models.order_line import OrderLine + from app.models.output_type import OutputType + from app.models.product import Product + + order_result = await db.execute(select(Order).where(Order.id == order_id)) + order = order_result.scalar_one_or_none() + if order is None: + return None + + lines_result = await db.execute( + select(OrderLine) + .options( + selectinload(OrderLine.output_type).selectinload(OutputType.pricing_tier), + selectinload(OrderLine.product), + ) + .where( + OrderLine.order_id == order_id, + OrderLine.output_type_id.is_not(None), + ) + ) + lines = lines_result.scalars().all() + + if not lines: + await db.execute( + sql_update(Order) + .where(Order.id == order_id) + .values(estimated_price=Decimal("0.00")) + ) + await db.commit() + return Decimal("0.00") + + total = Decimal("0.00") + any_priced = False + + for line in lines: + # Cascade: 1) OT pricing tier, 2) product category, 3) default + price = None + if line.output_type and line.output_type.pricing_tier and line.output_type.pricing_tier.is_active: + price = line.output_type.pricing_tier.price_per_item + else: + cat = line.product.category_key if line.product else None + price = await get_price_for(db, cat or "default") + + # Snapshot to line + await db.execute( + sql_update(OrderLine) + .where(OrderLine.id == line.id) + .values(unit_price=price) + ) + + if price is not None: + total += price + any_priced = True + + new_price = total if any_priced else None + + await 
db.execute( + sql_update(Order) + .where(Order.id == order_id) + .values(estimated_price=new_price) + ) + await db.commit() + return new_price diff --git a/backend/app/services/product_service.py b/backend/app/services/product_service.py new file mode 100644 index 0000000..860e7f1 --- /dev/null +++ b/backend/app/services/product_service.py @@ -0,0 +1,143 @@ +"""Product service — lookup/create products, link CAD files.""" +import uuid +from sqlalchemy import select, func, update as sql_update +from sqlalchemy.ext.asyncio import AsyncSession + +from app.models.product import Product + +# Default render positions added to every newly created product. +DEFAULT_RENDER_POSITIONS = [ + {"name": "3/4 Front", "rotation_x": -15.0, "rotation_y": 45.0, "rotation_z": 0.0, "is_default": True, "sort_order": 0}, + {"name": "3/4 Rear", "rotation_x": -15.0, "rotation_y": -135.0, "rotation_z": 0.0, "is_default": False, "sort_order": 1}, + {"name": "Default", "rotation_x": 0.0, "rotation_y": 0.0, "rotation_z": 0.0, "is_default": False, "sort_order": 2}, +] + + +async def create_default_positions(db: AsyncSession, product_id: uuid.UUID) -> None: + """Insert the default render positions for a newly created product.""" + from app.models.render_position import ProductRenderPosition + for pos_data in DEFAULT_RENDER_POSITIONS: + db.add(ProductRenderPosition(product_id=product_id, **pos_data)) + await db.flush() + + +def _fill_missing_fields(product: Product, pim_id: str | None, fields: dict) -> None: + """Fill in null/empty fields on an existing product without overwriting manual edits.""" + if pim_id and not product.pim_id: + product.pim_id = pim_id + for attr in ( + "name", "category_key", "ebene1", "ebene2", "baureihe", + "lagertyp", "name_cad_modell", "arbeitspaket", + ): + if fields.get(attr) and not getattr(product, attr, None): + setattr(product, attr, fields[attr]) + # Update medias_rendering if not set + if fields.get("medias_rendering") is not None and product.medias_rendering is None: + product.medias_rendering = fields["medias_rendering"] + # Always update components from the latest Excel import (needed for auto-reassign) + if fields.get("components"): + product.components = fields["components"] + + +async def lookup_product( + db: AsyncSession, pim_id: str | None, produkt_baureihe: str | None +) -> Product | None: + """Read-only lookup: produkt_baureihe (primary), then pim_id (fallback). + + Same cascade as lookup_or_create_product but never creates or mutates. + """ + if produkt_baureihe: + result = await db.execute( + select(Product).where( + func.lower(Product.produkt_baureihe) == produkt_baureihe.lower(), + Product.is_active.is_(True), + ) + ) + product = result.scalar_one_or_none() + if product is not None: + return product + # baureihe provided but not found — skip pim_id fallback (same logic) + return None + + if pim_id: + result = await db.execute( + select(Product).where(Product.pim_id == pim_id, Product.is_active.is_(True)) + ) + return result.scalar_one_or_none() + + return None + + + +async def lookup_or_create_product( + db: AsyncSession, pim_id: str | None, fields: dict +) -> tuple[Product, bool]: + """Look up by produkt_baureihe (primary), then pim_id (fallback). Create if not found. + + Returns (product, was_created). + Does NOT overwrite existing fields — preserves manual edits. 
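An illustrative import-time call (all field values invented; the keys match what the implementation reads from `fields`):

```python
from app.services.product_service import lookup_or_create_product

product, was_created = await lookup_or_create_product(
    db,
    pim_id="PIM-123456",                          # placeholder
    fields={
        "produkt_baureihe": "81113-L",            # primary, case-insensitive lookup key
        "name": "Axial-Zylinderrollenlager",      # placeholder
        "category_key": "axial",                  # placeholder
        "medias_rendering": True,
        "components": [],
        "source_excel": "produkte_2026-03.xlsx",  # placeholder
    },
)
# was_created is True when no matching produkt_baureihe / pim_id existed;
# on a match, only missing fields are filled in and manual edits are preserved.
```

Newly created products also receive the three `DEFAULT_RENDER_POSITIONS` before the function returns.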
+ """ + produkt_baureihe = fields.get("produkt_baureihe") + + # Primary lookup: by produkt_baureihe (case-insensitive) + if produkt_baureihe: + result = await db.execute( + select(Product).where( + func.lower(Product.produkt_baureihe) == produkt_baureihe.lower(), + Product.is_active.is_(True), + ) + ) + product = result.scalar_one_or_none() + if product is not None: + _fill_missing_fields(product, pim_id, fields) + await db.flush() + return product, False + # produkt_baureihe was provided but not found — each baureihe is a + # distinct product, so skip the pim_id fallback and create a new one. + + # Fallback lookup: by pim_id (only when produkt_baureihe is absent, + # e.g. old per-category Excel files that don't have a Baureihe column). + if not produkt_baureihe and pim_id: + result = await db.execute( + select(Product).where(Product.pim_id == pim_id, Product.is_active.is_(True)) + ) + product = result.scalar_one_or_none() + if product is not None: + _fill_missing_fields(product, pim_id, fields) + await db.flush() + return product, False + + product = Product( + pim_id=pim_id or f"auto-{uuid.uuid4().hex[:8]}", + name=fields.get("name"), + category_key=fields.get("category_key"), + ebene1=fields.get("ebene1"), + ebene2=fields.get("ebene2"), + baureihe=fields.get("baureihe"), + produkt_baureihe=produkt_baureihe, + lagertyp=fields.get("lagertyp"), + name_cad_modell=fields.get("name_cad_modell"), + arbeitspaket=fields.get("arbeitspaket"), + components=fields.get("components", []), + cad_part_materials=fields.get("cad_part_materials", []), + source_excel=fields.get("source_excel"), + ) + db.add(product) + await db.flush() + await create_default_positions(db, product.id) + return product, True + + + +async def link_cad_to_product( + db: AsyncSession, product_id: uuid.UUID, cad_file_id: uuid.UUID +) -> Product: + """Set product.cad_file_id via direct SQL UPDATE.""" + await db.execute( + sql_update(Product) + .where(Product.id == product_id) + .values(cad_file_id=cad_file_id) + ) + await db.commit() + result = await db.execute(select(Product).where(Product.id == product_id)) + return result.scalar_one() diff --git a/backend/app/services/render_dispatcher.py b/backend/app/services/render_dispatcher.py new file mode 100644 index 0000000..de7da2d --- /dev/null +++ b/backend/app/services/render_dispatcher.py @@ -0,0 +1,374 @@ +"""Render dispatcher — routes render jobs to Celery or Flamenco. + +Backend selection priority: +1. OutputType.render_backend per-type override ("celery" / "flamenco") +2. OutputType.is_animation — animations default to Flamenco +3. System setting render_backend — global default ("celery" / "flamenco" / "auto") +4. "auto" mode: stills → Celery, animations → Flamenco +""" +import json +import logging +from datetime import datetime + +from sqlalchemy import select, update as sql_update +from sqlalchemy.orm import Session, joinedload + +from app.models.order_line import OrderLine +from app.models.output_type import OutputType +from app.models.product import Product +from app.models.system_setting import SystemSetting + +logger = logging.getLogger(__name__) + + +def _load_setting(session: Session, key: str, default: str = "") -> str: + """Load a single system setting (sync).""" + row = session.execute( + select(SystemSetting).where(SystemSetting.key == key) + ).scalar_one_or_none() + return row.value if row else default + + +def resolve_backend(output_type: OutputType | None, system_backend: str) -> str: + """Determine which backend to use for a given output type. 
+ + Returns "celery" or "flamenco". + """ + if output_type is None: + return "celery" + + # Priority 1: explicit per-type override + ot_backend = output_type.render_backend + if ot_backend in ("celery", "flamenco"): + return ot_backend + + # Priority 2+3: is_animation + system setting + if system_backend in ("celery", "flamenco"): + return system_backend + + # Priority 4: auto mode — animations → Flamenco, stills → Celery + if output_type.is_animation: + return "flamenco" + return "celery" + + +def build_flamenco_job_settings( + output_type: OutputType, + product: Product, + step_path: str, + output_dir: str, + system_settings: dict[str, str], + lighting_only: bool = False, + shadow_catcher: bool = False, + camera_orbit: bool = True, + cycles_device: str = "auto", + rotation_x: float = 0.0, + rotation_y: float = 0.0, + rotation_z: float = 0.0, +) -> dict: + """Build Flamenco job settings from output type and product metadata.""" + render_settings = output_type.render_settings or {} + engine = render_settings.get("engine", system_settings.get("blender_engine", "cycles")) + samples_key = f"blender_{engine}_samples" + samples = render_settings.get("samples", int(system_settings.get(samples_key, "256"))) + stl_quality = render_settings.get("stl_quality", system_settings.get("stl_quality", "low")) + width = render_settings.get("width", 1920 if output_type.is_animation else 1024) + height = render_settings.get("height", 1080 if output_type.is_animation else 1024) + + part_colors = {} + part_names_ordered = [] + if product.cad_file and product.cad_file.parsed_objects: + part_names_ordered = product.cad_file.parsed_objects.get("objects", []) + materials_source = product.cad_part_materials + if materials_source: + from app.services.step_processor import build_part_colors + part_colors = build_part_colors(part_names_ordered, materials_source) + + transparent_bg = bool(output_type.transparent_bg) if hasattr(output_type, 'transparent_bg') else False + + settings = { + "step_path": step_path, + "engine": engine, + "samples": samples, + "stl_quality": stl_quality, + "width": width, + "height": height, + "part_colors_json": json.dumps(part_colors), + "transparent_bg": transparent_bg, + "template_path": "", + "target_collection": "Product", + "material_library_path": "", + "material_map_json": "{}", + "part_names_ordered_json": json.dumps(part_names_ordered), + "lighting_only": lighting_only, + "shadow_catcher": shadow_catcher, + "cycles_device": cycles_device, + "rotation_x": rotation_x, + "rotation_y": rotation_y, + "rotation_z": rotation_z, + } + + for dk in ('noise_threshold', 'denoiser', 'denoising_input_passes', + 'denoising_prefilter', 'denoising_quality', 'denoising_use_gpu'): + settings[dk] = str(render_settings.get(dk, "")) + + if output_type.is_animation: + # Turntable-specific settings + output_name = render_settings.get("output_name", "turntable") + settings["output_dir"] = output_dir + settings["output_name"] = output_name + settings["frame_count"] = render_settings.get("frame_count", 120) + settings["fps"] = render_settings.get("fps", 30) + settings["turntable_degrees"] = render_settings.get("turntable_degrees", 360) + settings["turntable_axis"] = render_settings.get("turntable_axis", "world_z") + settings["bg_color"] = render_settings.get("bg_color", "") + settings["camera_orbit"] = camera_orbit + else: + # Still-specific settings + ext = output_type.output_format or "png" + settings["output_path"] = f"{output_dir}/render.{ext}" + + return settings + + +def dispatch_render(order_line_id: 
str) -> dict: + """Route a render job to Celery or Flamenco based on configuration. + + Must be called from a sync context (Celery task or sync wrapper). + Returns {"backend": "celery"|"flamenco", "job_ref": str}. + """ + from app.config import settings as app_settings + from app.services.render_log import emit, clear + + clear(order_line_id) + emit(order_line_id, "Dispatch started — loading order line data") + + sync_url = app_settings.database_url.replace("+asyncpg", "") + from sqlalchemy import create_engine + engine_db = create_engine(sync_url) + + with Session(engine_db) as session: + line = session.execute( + select(OrderLine) + .where(OrderLine.id == order_line_id) + .options( + joinedload(OrderLine.product).joinedload(Product.cad_file), + joinedload(OrderLine.output_type), + ) + ).scalar_one_or_none() + + if line is None: + emit(order_line_id, "Order line not found", "error") + logger.error(f"OrderLine {order_line_id} not found") + return {"backend": "none", "job_ref": "", "error": "not_found"} + + product_name = line.product.name or line.product.pim_id or "unknown" + output_name = line.output_type.name if line.output_type else "default" + emit(order_line_id, f"Product: {product_name} | Output: {output_name}") + + if line.product.cad_file_id is None: + emit(order_line_id, "Product has no CAD file — marking as failed", "error") + logger.warning(f"OrderLine {order_line_id}: product has no CAD file") + session.execute( + sql_update(OrderLine) + .where(OrderLine.id == line.id) + .values(render_status="failed") + ) + session.commit() + return {"backend": "none", "job_ref": "", "error": "no_cad_file"} + + cad_name = line.product.cad_file.original_name if line.product.cad_file else "?" + emit(order_line_id, f"CAD file: {cad_name}") + + # Load system settings + system_backend = _load_setting(session, "render_backend", "celery") + flamenco_url = _load_setting(session, "flamenco_manager_url", "http://flamenco-manager:8080") + + backend = resolve_backend(line.output_type, system_backend) + emit(order_line_id, f"Resolved backend: {backend}") + + # Mark as processing + now = datetime.utcnow() + session.execute( + sql_update(OrderLine) + .where(OrderLine.id == line.id) + .values( + render_status="processing", + render_backend_used=backend, + render_started_at=now, + ) + ) + session.commit() + + if backend == "flamenco": + emit(order_line_id, f"Submitting job to Flamenco Manager ({flamenco_url})") + result = _dispatch_flamenco(session, line, flamenco_url) + if result.get("error"): + emit(order_line_id, f"Flamenco submit failed: {result['error']}", "error") + else: + emit(order_line_id, f"Flamenco job submitted: {result.get('job_ref', '?')}") + return result + else: + emit(order_line_id, "Dispatching to Celery render worker") + return _dispatch_celery(order_line_id) + + engine_db.dispose() + + +def _dispatch_celery(order_line_id: str) -> dict: + """Dispatch to the existing Celery render task.""" + from app.tasks.step_tasks import render_order_line_task + result = render_order_line_task.delay(order_line_id) + return {"backend": "celery", "job_ref": result.id} + + +def _dispatch_flamenco(session: Session, line: OrderLine, flamenco_url: str) -> dict: + """Submit a job to Flamenco Manager.""" + import re + from app.services.flamenco_client import get_flamenco_client + + # Load all needed system settings + all_keys = ["blender_engine", "blender_cycles_samples", "blender_eevee_samples", "stl_quality", "cycles_device"] + sys_settings = {} + for key in all_keys: + sys_settings[key] = 
_load_setting(session, key, "") + + output_type = line.output_type + product = line.product + cad_file = product.cad_file + + # Load render_position for rotation values + rotation_x = rotation_y = rotation_z = 0.0 + if line.render_position_id: + from app.models.render_position import ProductRenderPosition + rp = session.get(ProductRenderPosition, line.render_position_id) + if rp: + rotation_x, rotation_y, rotation_z = rp.rotation_x, rp.rotation_y, rp.rotation_z + + # Flamenco mounts the uploads volume at /shared, backend uses /app/uploads + raw_path = cad_file.stored_path if cad_file else "" + step_path = raw_path.replace("/app/uploads/", "/shared/") if raw_path else "" + output_dir = f"/shared/renders/{line.id}" + + job_type = "schaeffler-turntable" if (output_type and output_type.is_animation) else "schaeffler-still" + + # Resolve render template + material library BEFORE building job settings + # (template.lighting_only is needed by build_flamenco_job_settings) + from app.services.template_service import resolve_template, get_material_library_path + + category_key = product.category_key if product else None + ot_id = str(line.output_type_id) if line.output_type_id else None + template = resolve_template(category_key=category_key, output_type_id=ot_id) + material_library = get_material_library_path() + + # Resolve cycles_device: per-output-type override wins, fall back to system setting + ot_cycles_device = output_type.cycles_device if output_type else None + effective_cycles_device = ot_cycles_device or sys_settings.get("cycles_device", "gpu") or "gpu" + + settings = build_flamenco_job_settings( + output_type=output_type, + product=product, + step_path=step_path, + output_dir=output_dir, + system_settings=sys_settings, + lighting_only=bool(template.lighting_only) if template else False, + shadow_catcher=bool(template.shadow_catcher_enabled) if template else False, + camera_orbit=bool(template.camera_orbit) if template else True, + cycles_device=effective_cycles_device, + rotation_x=rotation_x, + rotation_y=rotation_y, + rotation_z=rotation_z, + ) + + if template: + # Remap path for Flamenco shared volume + tmpl_path = template.blend_file_path.replace("/app/uploads/", "/shared/") + settings["template_path"] = tmpl_path + settings["target_collection"] = template.target_collection + logger.info( + f"Flamenco job: using render template '{template.name}' " + f"(id={template.id}, path={tmpl_path}, collection={template.target_collection})" + ) + else: + logger.info( + f"Flamenco job: no render template found for " + f"category_key={category_key!r}, output_type_id={ot_id!r} — using factory settings" + ) + + # Material library + material map: send whenever library exists and product + # has material assignments — works with or without a render template. + # When a template is present, only apply if material_replace_enabled is set. 
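+    # Illustrative outcomes of the gating below:
+    #   library configured + cad_part_materials present + no template                  -> materials sent
+    #   library configured + cad_part_materials present + material_replace_enabled=True -> materials sent
+    #   template present but material_replace_enabled=False -> material_library_path stays ""
+    #     and material_map_json stays "{}" (no replacement requested)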
+ materials_source = product.cad_part_materials + use_materials = bool(material_library and materials_source) + if template and not template.material_replace_enabled: + use_materials = False + + if use_materials: + mat_lib_path = material_library.replace("/app/uploads/", "/shared/") + settings["material_library_path"] = mat_lib_path + mat_map = { + m["part_name"]: m["material"] + for m in materials_source + if m.get("part_name") and m.get("material") + } + # Resolve raw material names to SCHAEFFLER library names via aliases + from app.services.material_service import resolve_material_map + mat_map = resolve_material_map(mat_map) + settings["material_map_json"] = json.dumps(mat_map) + + # Output naming: meaningful filename instead of generic render.ext + def _sanitize(s: str) -> str: + return re.sub(r'[^\w\-.]', '_', s.strip())[:100] + + product_name = product.name or product.pim_id or "product" + ot_name = output_type.name if output_type else "render" + + if not (output_type and output_type.is_animation): + ext = output_type.output_format or "png" if output_type else "png" + filename = f"{_sanitize(product_name)}_{_sanitize(ot_name)}.{ext}" + settings["output_path"] = f"{output_dir}/{filename}" + + metadata = { + "order_line_id": str(line.id), + "order_id": str(line.order_id), + "product_name": product.name or "", + "output_type": output_type.name if output_type else "", + "category": product.category_key or "", + } + + job_name = f"{product.name or product.pim_id} - {output_type.name if output_type else 'render'}" + + try: + client = get_flamenco_client(flamenco_url) + job = client.submit_job( + name=job_name[:200], + job_type=job_type, + settings=settings, + metadata=metadata, + ) + job_id = job.get("id", "") + + # Save flamenco_job_id + session.execute( + sql_update(OrderLine) + .where(OrderLine.id == line.id) + .values(flamenco_job_id=job_id) + ) + session.commit() + + logger.info(f"Flamenco job submitted: {job_id} for OrderLine {line.id}") + return {"backend": "flamenco", "job_ref": job_id} + + except Exception as exc: + logger.error(f"Flamenco submit failed for OrderLine {line.id}: {exc}") + session.execute( + sql_update(OrderLine) + .where(OrderLine.id == line.id) + .values( + render_status="failed", + render_completed_at=datetime.utcnow(), + render_log={"error": f"Flamenco submit failed: {str(exc)[:500]}"}, + ) + ) + session.commit() + return {"backend": "flamenco", "job_ref": "", "error": str(exc)} diff --git a/backend/app/services/render_log.py b/backend/app/services/render_log.py new file mode 100644 index 0000000..37a792e --- /dev/null +++ b/backend/app/services/render_log.py @@ -0,0 +1,72 @@ +"""Redis-backed live render log for streaming task progress. + +Each order line gets a Redis list keyed by render:log:{order_line_id}. +Entries are JSON objects with timestamp, level, and message. +Lists auto-expire after 1 hour. 
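A sketch of the producer/consumer flow (the order-line ID and the consumer loop are placeholders; the producer calls mirror how `dispatch_render` uses the helpers):

```python
from app.services.render_log import clear, emit, get_entries, count

order_line_id = "some-order-line-uuid"    # placeholder

# Producer side (Celery task / dispatcher):
clear(order_line_id)
emit(order_line_id, "Dispatch started")
emit(order_line_id, "Flamenco submit failed: timeout", level="error")

# Consumer side (e.g. a polling endpoint, hypothetical):
last_seen = 0
entries = get_entries(order_line_id, after_index=last_seen)
for e in entries:
    print(e["t"], e["level"], e["msg"])   # entries come back as already-decoded dicts
last_seen = count(order_line_id)          # remember the read position for the next poll
```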
+""" +import json +import time +import logging + +import redis + +from app.config import settings + +logger = logging.getLogger(__name__) + +_LOG_TTL = 3600 # 1 hour +_MAX_ENTRIES = 500 + + +def _redis() -> redis.Redis: + return redis.from_url(settings.redis_url, decode_responses=True) + + +def _key(order_line_id: str) -> str: + return f"render:log:{order_line_id}" + + +def emit(order_line_id: str, message: str, level: str = "info") -> None: + """Push a log entry for a render job.""" + entry = json.dumps({ + "ts": time.time(), + "t": time.strftime("%H:%M:%S", time.gmtime()), + "level": level, + "msg": message, + }) + try: + r = _redis() + key = _key(order_line_id) + r.rpush(key, entry) + r.ltrim(key, -_MAX_ENTRIES, -1) + r.expire(key, _LOG_TTL) + except Exception as exc: + logger.debug(f"render_log emit failed: {exc}") + + +def get_entries(order_line_id: str, after_index: int = 0) -> list[dict]: + """Get log entries starting from after_index.""" + try: + r = _redis() + raw = r.lrange(_key(order_line_id), after_index, -1) + return [json.loads(e) for e in raw] + except Exception: + return [] + + +def count(order_line_id: str) -> int: + """Get the number of log entries.""" + try: + r = _redis() + return r.llen(_key(order_line_id)) + except Exception: + return 0 + + +def clear(order_line_id: str) -> None: + """Clear log entries for a render job.""" + try: + r = _redis() + r.delete(_key(order_line_id)) + except Exception: + pass diff --git a/backend/app/services/step_processor.py b/backend/app/services/step_processor.py new file mode 100644 index 0000000..e54f98e --- /dev/null +++ b/backend/app/services/step_processor.py @@ -0,0 +1,726 @@ +""" +STEP file processor — Phase 3 implementation. + +Extracts object names from STEP files using pythonocc-core (OCC), +generates thumbnails using trimesh + pyrender, and converts to glTF. + +This module is invoked from the Celery worker (step_tasks.py). +""" +import logging +import uuid +from pathlib import Path +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from app.models.cad_file import CadFile + +logger = logging.getLogger(__name__) + +MATERIAL_PALETTE = [ + "#4C9BE8", "#E85B4C", "#4CBE72", "#E8A84C", "#A04CE8", + "#4CD4E8", "#E84CA8", "#7EC850", "#E86B30", "#5088C8", +] + + +def _material_to_color(material_name: str | None, index: int) -> str: + """Return a deterministic hex color: hash material name, or use palette by index.""" + if material_name and material_name.strip(): + i = abs(hash(material_name.strip().lower())) % len(MATERIAL_PALETTE) + return MATERIAL_PALETTE[i] + return MATERIAL_PALETTE[index % len(MATERIAL_PALETTE)] + + +def build_part_colors( + cad_parsed_objects: list[str], + cad_part_materials: list[dict], +) -> dict[str, str]: + """ + Build {part_name: hex_color} for thumbnail rendering. + + Args: + cad_parsed_objects: List of part names from cad_file.parsed_objects["objects"]. + cad_part_materials: List of {part_name, material} dicts from order_item.cad_part_materials. 
+ """ + mat_map = { + m["part_name"].lower(): m.get("material") + for m in cad_part_materials + if m.get("part_name") + } + return { + name: _material_to_color(mat_map.get(name.lower()), i) + for i, name in enumerate(cad_parsed_objects) + } + + +def _normalize_stem(name: str) -> str: + """Normalize a filename stem for comparison: lowercase, strip .stp/.step extension.""" + stem = name.strip() + for ext in (".step", ".stp"): + if stem.lower().endswith(ext): + stem = stem[: -len(ext)] + break + return stem.lower() + + +def match_cad_to_items( + cad_file: "CadFile", + item_names: list[str], +) -> list[str]: + """ + Match a CadFile to a list of OrderItem name_cad_modell values. + + Matching is case-insensitive and normalizes .stp/.step extensions so that + a file named '81113-L_cut.stp' matches an item named '81113-l_cut' or + '81113-L_cut.step'. + + Args: + cad_file: A CadFile ORM object (needs .original_name). + item_names: List of name_cad_modell strings from OrderItems. + + Returns: + List of matched item names (subset of item_names). + """ + cad_stem = _normalize_stem(cad_file.original_name or "") + matched = [] + for name in item_names: + if not name: + continue + if _normalize_stem(name) == cad_stem: + matched.append(name) + return matched + + +def extract_cad_metadata(cad_file_id: str) -> None: + """ + Fast metadata extraction for a CAD file (no thumbnail generation). + + Does everything process_cad_file() does EXCEPT thumbnail rendering: + - Sets status to processing + - Extracts STEP object names + - Converts to glTF + - Leaves status as processing (render_step_thumbnail task will complete it) + - On exception: sets status to failed + """ + from app.config import settings + from sqlalchemy import create_engine + from sqlalchemy.orm import Session + from app.models.cad_file import CadFile, ProcessingStatus + + engine = create_engine(settings.database_url_sync) + with Session(engine) as session: + cad_file = session.get(CadFile, uuid.UUID(cad_file_id)) + if not cad_file: + logger.error(f"CAD file not found: {cad_file_id}") + return + + cad_file.processing_status = ProcessingStatus.processing + session.commit() + + try: + step_path = Path(cad_file.stored_path) + if not step_path.exists(): + raise FileNotFoundError(f"STEP file not found: {step_path}") + + objects = _extract_step_objects(step_path) + cad_file.parsed_objects = {"objects": objects} + + gltf_path = _convert_to_gltf(step_path, cad_file_id, settings.upload_dir) + if gltf_path: + cad_file.gltf_path = str(gltf_path) + + # Leave status as processing — render_step_thumbnail will complete it + logger.info(f"CAD metadata extracted: {cad_file_id} ({len(objects)} objects)") + + except Exception as exc: + logger.error(f"CAD metadata extraction failed for {cad_file_id}: {exc}") + cad_file.processing_status = ProcessingStatus.failed + cad_file.error_message = str(exc)[:2000] + + session.commit() + + +def process_cad_file(cad_file_id: str) -> None: + """ + Full processing pipeline for a CAD file: + 1. Load STEP file with pythonocc + 2. Extract part/object names + 3. Generate thumbnail PNG + 4. Convert to glTF for browser viewer + 5. 
Update DB record + """ + from app.config import settings + # Synchronous DB access for Celery worker + from sqlalchemy import create_engine + from sqlalchemy.orm import Session + from app.models.cad_file import CadFile, ProcessingStatus + + engine = create_engine(settings.database_url_sync) + with Session(engine) as session: + cad_file = session.get(CadFile, uuid.UUID(cad_file_id)) + if not cad_file: + logger.error(f"CAD file not found: {cad_file_id}") + return + + cad_file.processing_status = ProcessingStatus.processing + session.commit() + + try: + step_path = Path(cad_file.stored_path) + if not step_path.exists(): + raise FileNotFoundError(f"STEP file not found: {step_path}") + + # Step 1: Extract object names + objects = _extract_step_objects(step_path) + cad_file.parsed_objects = {"objects": objects} + + # Step 2: Generate thumbnail — pass empty part_colors so the Three.js + # renderer extracts named parts and auto-assigns palette colours. + # Other renderers (Blender, Pillow) ignore the part_colors argument. + thumb_path, render_log = _generate_thumbnail(step_path, cad_file_id, settings.upload_dir, part_colors={}) + if thumb_path: + cad_file.thumbnail_path = str(thumb_path) + cad_file.render_log = render_log + + # Step 3: Convert to glTF + gltf_path = _convert_to_gltf(step_path, cad_file_id, settings.upload_dir) + if gltf_path: + cad_file.gltf_path = str(gltf_path) + + cad_file.processing_status = ProcessingStatus.completed + logger.info(f"CAD file processed successfully: {cad_file_id}") + + except Exception as exc: + logger.error(f"CAD processing failed for {cad_file_id}: {exc}") + cad_file.processing_status = ProcessingStatus.failed + cad_file.error_message = str(exc)[:2000] + + session.commit() + + +def _extract_step_objects(step_path: Path) -> list[str]: + """Extract part names from STEP file using pythonocc.""" + try: + from OCC.Core.STEPCAFControl import STEPCAFControl_Reader + from OCC.Core.XCAFDoc import XCAFDoc_DocumentTool + from OCC.Core.TDocStd import TDocStd_Document + from OCC.Core.TDataStd import TDataStd_Name + from OCC.Core.TCollection import TCollection_ExtendedString + + doc = TDocStd_Document(TCollection_ExtendedString("MDTV-CAF")) + reader = STEPCAFControl_Reader() + reader.SetColorMode(True) + reader.SetNameMode(True) + status = reader.ReadFile(str(step_path)) + + if not reader.Transfer(doc): + return [] + + shape_tool = XCAFDoc_DocumentTool.ShapeTool(doc.Main()) + labels = [] + shape_tool.GetFreeShapes(labels) + + names = [] + for label in labels: + name_attr = TDataStd_Name() + if label.FindAttribute(TDataStd_Name.GetID(), name_attr): + names.append(name_attr.Get().ToExtString()) + return names + + except ImportError: + logger.warning("pythonocc-core not available; skipping object extraction") + return _extract_step_objects_fallback(step_path) + except Exception as exc: + logger.warning(f"OCC extraction failed: {exc}") + return _extract_step_objects_fallback(step_path) + + +def _extract_step_objects_fallback(step_path: Path) -> list[str]: + """Simple text-based extraction of part names from STEP file.""" + names = [] + try: + with open(step_path, "r", encoding="utf-8", errors="replace") as f: + for line in f: + # STEP format: PRODUCT('name','description',... 
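+                # e.g.  #42=PRODUCT('81113-L_cut','81113-L_cut','',(#38));
+                #       -> the first quoted token, '81113-L_cut', is taken as the part name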
+ if "PRODUCT(" in line: + parts = line.split("PRODUCT(") + for part in parts[1:]: + if "'" in part: + name = part.split("'")[1] + if name and name not in names: + names.append(name) + except Exception: + pass + return names + + +def _get_all_settings() -> dict[str, str]: + """Read all system settings from the database.""" + defaults = { + "thumbnail_renderer": "pillow", + "blender_engine": "cycles", + "blender_cycles_samples": "256", + "blender_eevee_samples": "64", + "threejs_render_size": "1024", + "thumbnail_format": "jpg", + "stl_quality": "low", + "blender_smooth_angle": "30", + "cycles_device": "auto", + } + try: + from app.config import settings as app_settings + from sqlalchemy import create_engine, text + from sqlalchemy.orm import Session + + engine = create_engine(app_settings.database_url_sync) + with Session(engine) as session: + result = session.execute(text("SELECT key, value FROM system_settings")) + stored = {row[0]: row[1] for row in result.fetchall()} + return {k: stored.get(k, v) for k, v in defaults.items()} + except Exception as exc: + logger.warning(f"Could not read settings: {exc}; using defaults") + return defaults + + +def _generate_thumbnail( + step_path: Path, + cad_file_id: str, + upload_dir: str, + part_colors: dict[str, str] | None = None, +) -> tuple[Path | None, dict]: + """Generate thumbnail using the configured renderer. + + Returns (thumb_path, render_log_dict). + render_log_dict contains all settings + timing + blender output. + """ + import time + out_dir = Path(upload_dir) / "thumbnails" + out_dir.mkdir(parents=True, exist_ok=True) + + settings = _get_all_settings() + renderer = settings["thumbnail_renderer"] + fmt = settings["thumbnail_format"] # "jpg" or "png" + ext = "jpg" if fmt == "jpg" else "png" + + # Clean up any existing thumbnail for this cad_file_id (either extension) + for old_ext in ("png", "jpg"): + old = out_dir / f"{cad_file_id}.{old_ext}" + if old.exists(): + old.unlink(missing_ok=True) + + final_path = out_dir / f"{cad_file_id}.{ext}" + # Intermediate PNG used when a service renderer produces PNG before conversion + tmp_png = out_dir / f"{cad_file_id}_tmp.png" + + # Build the base render_log with the settings snapshot + render_log: dict = { + "renderer": renderer, + "format": fmt, + "started_at": time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()), + } + if renderer == "blender": + engine = settings["blender_engine"] + render_log.update({ + "engine": engine, + "samples": int(settings[f"blender_{engine}_samples"]), + "stl_quality": settings["stl_quality"], + "smooth_angle": int(settings["blender_smooth_angle"]), + "cycles_device": settings["cycles_device"], + "width": 512, + "height": 512, + }) + elif renderer == "threejs": + size = int(settings["threejs_render_size"]) + render_log.update({"width": size, "height": size}) + + logger.info(f"Thumbnail renderer={renderer}, format={fmt}") + + rendered_png: Path | None = None + service_data: dict = {} + + if renderer == "blender": + engine = settings["blender_engine"] + samples = int(settings[f"blender_{engine}_samples"]) + extra = { + "engine": engine, + "samples": samples, + "stl_quality": settings["stl_quality"], + "smooth_angle": int(settings["blender_smooth_angle"]), + "cycles_device": settings["cycles_device"], + } + rendered_png, service_data = _render_via_service( + "http://blender-renderer:8100/render", step_path, tmp_png, extra + ) + if not rendered_png: + logger.warning("Blender renderer failed; falling back to Pillow placeholder") + + elif renderer == "threejs": + size = 
int(settings["threejs_render_size"]) + extra2: dict = {"width": size, "height": size} + if part_colors is not None: + extra2["part_colors"] = part_colors + rendered_png, service_data = _render_via_service( + "http://threejs-renderer:8101/render", step_path, tmp_png, extra2 + ) + if not rendered_png: + logger.warning("Three.js renderer failed; falling back to Pillow placeholder") + + # Merge rich service response data into render_log + if service_data: + for key in ("total_duration_s", "stl_duration_s", "render_duration_s", + "stl_size_bytes", "output_size_bytes", "parts_count", + "engine_used", "log_lines"): + if key in service_data: + render_log[key] = service_data[key] + render_log["completed_at"] = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()) + + if rendered_png: + result = _finalise_image(rendered_png, final_path, fmt) + tmp_png.unlink(missing_ok=True) + render_log["fallback"] = False + return result, render_log + + # Pillow placeholder + render_log["fallback"] = True + return _generate_thumbnail_placeholder(step_path, final_path, fmt), render_log + + +def _finalise_image(src: Path, dst: Path, fmt: str) -> Path | None: + """Convert src image to dst using the requested format (jpg or png).""" + if fmt == "jpg": + try: + from PIL import Image + img = Image.open(src).convert("RGB") + img.save(str(dst), "JPEG", quality=92, optimize=True) + return dst + except Exception as exc: + logger.warning(f"JPG conversion failed: {exc}; keeping PNG") + src.rename(dst.with_suffix(".png")) + return dst.with_suffix(".png") + else: + src.rename(dst) + return dst + + +def _render_via_service( + url: str, step_path: Path, out_path: Path, extra: dict | None = None, + job_id: str | None = None, +) -> tuple[Path | None, dict]: + """Call an external renderer microservice to generate a thumbnail. + + Returns (path_or_None, response_data_dict). + job_id, when provided, is forwarded to the renderer so the render process + can be cancelled via the renderer's /cancel/{job_id} endpoint. 
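An illustrative call against the Blender renderer service (paths and the job ID are placeholders; the `extra` keys mirror ones the real callers pass):

```python
from pathlib import Path

png, data = _render_via_service(
    "http://blender-renderer:8100/render",
    step_path=Path("/app/uploads/example.stp"),          # placeholder
    out_path=Path("/app/uploads/thumbnails/tmp.png"),    # placeholder
    extra={"engine": "cycles", "samples": 256, "stl_quality": "low", "width": 512, "height": 512},
    job_id="order-line-1234",   # forwarded so the render can be cancelled via /cancel/{job_id}
)
if png is None:
    ...  # caller falls back to the Pillow placeholder thumbnail
```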
+ """ + try: + import httpx + payload = { + "step_path": str(step_path), + "output_path": str(out_path), + "width": 512, + "height": 512, + **(extra or {}), + } + if job_id: + payload["job_id"] = job_id + resp = httpx.post(url, json=payload, timeout=300.0) + data = {} + try: + data = resp.json() + except Exception: + pass + if resp.status_code == 200 and out_path.exists(): + return out_path, data + logger.warning(f"Renderer service {url} returned {resp.status_code}: {resp.text[:500]}") + except Exception as exc: + logger.warning(f"Renderer service {url} unreachable: {exc}") + return None, {} + + +def _generate_thumbnail_placeholder(step_path: Path, out_path: Path, fmt: str = "png") -> Path | None: + """Generate a simple placeholder thumbnail using Pillow.""" + try: + from PIL import Image, ImageDraw, ImageFont + + W, H = 512, 512 + img = Image.new("RGB", (W, H), color=(245, 246, 248)) + draw = ImageDraw.Draw(img) + + # Subtle grid + for i in range(0, W, 32): + draw.line([(i, 0), (i, H)], fill=(228, 230, 235), width=1) + draw.line([(0, i), (W, i)], fill=(228, 230, 235), width=1) + + # Isometric box (front / top / right faces) + cx, cy = 256, 260 + s = 110 # half-size + # Front face + draw.polygon( + [(cx - s, cy), (cx, cy + s // 2), (cx + s, cy), (cx, cy - s // 2)], + fill=(195, 208, 220), outline=(90, 110, 130), width=2, + ) + # Top face + draw.polygon( + [(cx - s, cy - s), (cx, cy - s - s // 2), (cx + s, cy - s), (cx, cy - s + s // 2)], + fill=(220, 230, 240), outline=(90, 110, 130), width=2, + ) + # Right pillar + draw.polygon( + [(cx + s, cy - s), (cx + s, cy), (cx, cy + s // 2), (cx, cy - s + s // 2)], + fill=(160, 178, 196), outline=(90, 110, 130), width=2, + ) + + # Schaeffler green top bar + draw.rectangle([0, 0, W, 10], fill=(0, 137, 61)) + + # Model name strip at bottom + name = step_path.stem + draw.rectangle([0, H - 52, W, H], fill=(30, 50, 70)) + try: + font = ImageFont.load_default(size=15) + draw.text((W // 2, H - 26), name, fill=(255, 255, 255), anchor="mm", font=font) + except Exception: + draw.text((10, H - 38), name, fill=(255, 255, 255)) + + if fmt == "jpg": + img = img.convert("RGB") + img.save(str(out_path), "JPEG", quality=92, optimize=True) + else: + img.save(str(out_path), "PNG") + return out_path + except Exception as exc: + logger.warning(f"Pillow placeholder thumbnail failed: {exc}") + return None + + +def regenerate_cad_thumbnail(cad_file_id: str, part_colors: dict[str, str]) -> bool: + """ + Regenerate a thumbnail with per-part colours for an existing CAD file. + + Called from the `regenerate_thumbnail` Celery task. + Returns True on success. 
+ """ + from app.config import settings as app_settings + from sqlalchemy import create_engine + from sqlalchemy.orm import Session + from app.models.cad_file import CadFile, ProcessingStatus + + db_engine = create_engine(app_settings.database_url_sync) + with Session(db_engine) as session: + cad_file = session.get(CadFile, uuid.UUID(cad_file_id)) + if not cad_file: + logger.error(f"CAD file not found: {cad_file_id}") + return False + + step_path = Path(cad_file.stored_path) + if not step_path.exists(): + logger.error(f"STEP file not found: {step_path}") + return False + + # Mark as processing so the activity page shows it as active + cad_file.processing_status = ProcessingStatus.processing + session.commit() + + try: + thumb_path, render_log = _generate_thumbnail( + step_path, cad_file_id, app_settings.upload_dir, part_colors=part_colors + ) + if thumb_path: + cad_file.thumbnail_path = str(thumb_path) + cad_file.render_log = render_log + cad_file.processing_status = ProcessingStatus.completed + session.commit() + logger.info(f"Thumbnail regenerated for CAD file {cad_file_id}") + return True + except Exception as exc: + logger.error(f"Thumbnail regeneration failed for {cad_file_id}: {exc}") + cad_file.processing_status = ProcessingStatus.failed + cad_file.error_message = str(exc)[:2000] + session.commit() + return False + + +def render_to_file( + step_path: str, + output_path: str, + part_colors: dict[str, str] | None = None, + width: int | None = None, + height: int | None = None, + transparent_bg: bool = False, + engine: str | None = None, + samples: int | None = None, + template_path: str | None = None, + target_collection: str = "Product", + material_library_path: str | None = None, + material_map: dict | None = None, + part_names_ordered: list | None = None, + lighting_only: bool = False, + shadow_catcher: bool = False, + cycles_device: str | None = None, + rotation_x: float = 0.0, + rotation_y: float = 0.0, + rotation_z: float = 0.0, + job_id: str | None = None, + noise_threshold: str = "", + denoiser: str = "", + denoising_input_passes: str = "", + denoising_prefilter: str = "", + denoising_quality: str = "", + denoising_use_gpu: str = "", +) -> tuple[bool, dict]: + """Render a STEP file to a specific output path using current system settings. + + Unlike regenerate_cad_thumbnail, this does NOT modify the shared CadFile record. + Used by render_order_line_task for per-order-line render outputs. + + Args: + step_path: Absolute path to the STEP file on disk. + output_path: Absolute path for the rendered output file. + part_colors: Optional {part_name: hex_color} map. + width: Optional render width (overrides system default). + height: Optional render height (overrides system default). + transparent_bg: If True and renderer=blender+PNG, render with transparent background. + engine: Optional per-OT engine override ("cycles" | "eevee"), or None for system default. + samples: Optional per-OT samples override, or None for system default. + template_path: Optional path to a .blend template file. + target_collection: Blender collection name to import geometry into. + material_library_path: Optional path to material library .blend file. + material_map: Optional {part_name: material_name} for material replacement. 
+ + Returns: + (success: bool, render_log: dict) + """ + import time + + step = Path(step_path) + out = Path(output_path) + out.parent.mkdir(parents=True, exist_ok=True) + + settings = _get_all_settings() + renderer = settings["thumbnail_renderer"] + fmt = out.suffix.lstrip(".") or settings.get("thumbnail_format", "jpg") + if fmt not in ("jpg", "png"): + fmt = "jpg" + + # Temporary PNG for service renderers + tmp_png = out.parent / f"_tmp_{out.stem}.png" + + render_log: dict = { + "renderer": renderer, + "format": fmt, + "started_at": time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()), + } + + rendered_png: Path | None = None + service_data: dict = {} + + if renderer == "blender": + actual_engine = engine or settings["blender_engine"] + actual_samples = samples or int(settings[f"blender_{actual_engine}_samples"]) + actual_cycles_device = cycles_device or settings["cycles_device"] + w = width or 512 + h = height or 512 + render_log.update({ + "engine": actual_engine, "samples": actual_samples, + "stl_quality": settings["stl_quality"], + "smooth_angle": int(settings["blender_smooth_angle"]), + "cycles_device": actual_cycles_device, + "width": w, "height": h, + }) + extra = { + "engine": actual_engine, "samples": actual_samples, + "stl_quality": settings["stl_quality"], + "smooth_angle": int(settings["blender_smooth_angle"]), + "cycles_device": actual_cycles_device, + "width": w, "height": h, + "transparent_bg": transparent_bg, + } + if part_colors is not None: + extra["part_colors"] = part_colors + if template_path: + extra["template_path"] = template_path + extra["target_collection"] = target_collection + extra["lighting_only"] = lighting_only + extra["shadow_catcher"] = shadow_catcher + render_log["template"] = template_path + render_log["target_collection"] = target_collection + if lighting_only: + render_log["lighting_only"] = True + if shadow_catcher: + render_log["shadow_catcher"] = True + if material_library_path and material_map: + extra["material_library_path"] = material_library_path + extra["material_map"] = material_map + render_log["material_replace"] = True + if part_names_ordered: + extra["part_names_ordered"] = part_names_ordered + if rotation_x or rotation_y or rotation_z: + extra["rotation_x"] = rotation_x + extra["rotation_y"] = rotation_y + extra["rotation_z"] = rotation_z + if noise_threshold: + extra["noise_threshold"] = noise_threshold + if denoiser: + extra["denoiser"] = denoiser + if denoising_input_passes: + extra["denoising_input_passes"] = denoising_input_passes + if denoising_prefilter: + extra["denoising_prefilter"] = denoising_prefilter + if denoising_quality: + extra["denoising_quality"] = denoising_quality + if denoising_use_gpu: + extra["denoising_use_gpu"] = denoising_use_gpu + rendered_png, service_data = _render_via_service( + "http://blender-renderer:8100/render", step, tmp_png, extra, job_id=job_id + ) + elif renderer == "threejs": + default_size = int(settings["threejs_render_size"]) + w = width or default_size + h = height or default_size + render_log.update({"width": w, "height": h}) + extra2: dict = {"width": w, "height": h} + if part_colors is not None: + extra2["part_colors"] = part_colors + rendered_png, service_data = _render_via_service( + "http://threejs-renderer:8101/render", step, tmp_png, extra2 + ) + + if service_data: + for key in ("total_duration_s", "stl_duration_s", "render_duration_s", + "stl_size_bytes", "output_size_bytes", "parts_count", + "engine_used", "log_lines"): + if key in service_data: + render_log[key] = service_data[key] 
+ render_log["completed_at"] = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()) + + if rendered_png: + result = _finalise_image(rendered_png, out, fmt) + tmp_png.unlink(missing_ok=True) + render_log["fallback"] = False + return result is not None, render_log + + # Pillow placeholder fallback + render_log["fallback"] = True + result = _generate_thumbnail_placeholder(step, out, fmt) + return result is not None, render_log + + +def _convert_to_gltf(step_path: Path, cad_file_id: str, upload_dir: str) -> Path | None: + """Convert STEP to glTF for browser 3D viewer.""" + out_dir = Path(upload_dir) / "gltf" + out_dir.mkdir(parents=True, exist_ok=True) + out_path = out_dir / f"{cad_file_id}.gltf" + + try: + import trimesh + mesh = trimesh.load(str(step_path)) + if isinstance(mesh, trimesh.Scene): + exported = mesh.export(str(out_path)) + else: + scene = trimesh.Scene(mesh) + exported = scene.export(str(out_path)) + return out_path if out_path.exists() else None + except ImportError: + logger.warning("trimesh not available; skipping glTF conversion") + except Exception as exc: + logger.warning(f"glTF conversion failed: {exc}") + return None diff --git a/backend/app/services/template_service.py b/backend/app/services/template_service.py new file mode 100644 index 0000000..2646496 --- /dev/null +++ b/backend/app/services/template_service.py @@ -0,0 +1,102 @@ +"""Render template resolution service. + +Used from Celery tasks (sync context) to find the best matching .blend template +for a given category + output type combination. + +Cascade priority (first active match wins): +1. Exact: category_key + output_type_id +2. Category only: category_key + output_type_id IS NULL +3. OT only: category_key IS NULL + output_type_id +4. Global: both NULL +5. No template → caller falls back to factory-settings behavior +""" +import logging + +from sqlalchemy import create_engine, select, and_ +from sqlalchemy.orm import Session + +from app.models.render_template import RenderTemplate +from app.models.system_setting import SystemSetting + +logger = logging.getLogger(__name__) + +_engine = None + + +def _get_engine(): + global _engine + if _engine is None: + from app.config import settings as app_settings + _engine = create_engine(app_settings.database_url_sync) + return _engine + + +def resolve_template( + category_key: str | None = None, + output_type_id: str | None = None, +) -> RenderTemplate | None: + """Find the best matching active render template. + + Uses sync SQLAlchemy — safe for Celery tasks. + """ + engine = _get_engine() + with Session(engine) as session: + active = RenderTemplate.is_active == True # noqa: E712 + + # 1. Exact match + if category_key and output_type_id: + row = session.execute( + select(RenderTemplate).where(and_( + active, + RenderTemplate.category_key == category_key, + RenderTemplate.output_type_id == output_type_id, + )) + ).scalar_one_or_none() + if row: + return row + + # 2. Category only + if category_key: + row = session.execute( + select(RenderTemplate).where(and_( + active, + RenderTemplate.category_key == category_key, + RenderTemplate.output_type_id.is_(None), + )) + ).scalar_one_or_none() + if row: + return row + + # 3. OT only + if output_type_id: + row = session.execute( + select(RenderTemplate).where(and_( + active, + RenderTemplate.category_key.is_(None), + RenderTemplate.output_type_id == output_type_id, + )) + ).scalar_one_or_none() + if row: + return row + + # 4. 
Global fallback (both NULL) + row = session.execute( + select(RenderTemplate).where(and_( + active, + RenderTemplate.category_key.is_(None), + RenderTemplate.output_type_id.is_(None), + )) + ).scalar_one_or_none() + return row + + +def get_material_library_path() -> str | None: + """Read material_library_path from system_settings. Returns None if empty.""" + engine = _get_engine() + with Session(engine) as session: + row = session.execute( + select(SystemSetting).where(SystemSetting.key == "material_library_path") + ).scalar_one_or_none() + if row and row.value and row.value.strip(): + return row.value.strip() + return None diff --git a/backend/app/tasks/__init__.py b/backend/app/tasks/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/tasks/__pycache__/__init__.cpython-311.pyc b/backend/app/tasks/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..de47f22 Binary files /dev/null and b/backend/app/tasks/__pycache__/__init__.cpython-311.pyc differ diff --git a/backend/app/tasks/__pycache__/ai_tasks.cpython-311.pyc b/backend/app/tasks/__pycache__/ai_tasks.cpython-311.pyc new file mode 100644 index 0000000..e458457 Binary files /dev/null and b/backend/app/tasks/__pycache__/ai_tasks.cpython-311.pyc differ diff --git a/backend/app/tasks/__pycache__/celery_app.cpython-311.pyc b/backend/app/tasks/__pycache__/celery_app.cpython-311.pyc new file mode 100644 index 0000000..da59b8b Binary files /dev/null and b/backend/app/tasks/__pycache__/celery_app.cpython-311.pyc differ diff --git a/backend/app/tasks/__pycache__/flamenco_tasks.cpython-311.pyc b/backend/app/tasks/__pycache__/flamenco_tasks.cpython-311.pyc new file mode 100644 index 0000000..471089c Binary files /dev/null and b/backend/app/tasks/__pycache__/flamenco_tasks.cpython-311.pyc differ diff --git a/backend/app/tasks/__pycache__/step_tasks.cpython-311.pyc b/backend/app/tasks/__pycache__/step_tasks.cpython-311.pyc new file mode 100644 index 0000000..acc39ac Binary files /dev/null and b/backend/app/tasks/__pycache__/step_tasks.cpython-311.pyc differ diff --git a/backend/app/tasks/ai_tasks.py b/backend/app/tasks/ai_tasks.py new file mode 100644 index 0000000..54b0ac7 --- /dev/null +++ b/backend/app/tasks/ai_tasks.py @@ -0,0 +1,17 @@ +"""Celery tasks for Azure AI validation.""" +import logging +from app.tasks.celery_app import celery_app + +logger = logging.getLogger(__name__) + + +@celery_app.task(bind=True, name="app.tasks.ai_tasks.validate_item", queue="ai_validation") +def validate_item(self, order_item_id: str): + """Validate orientation of a rendered thumbnail via Azure GPT-4o Vision.""" + logger.info(f"AI validation for item: {order_item_id}") + try: + from app.services.azure_ai import validate_thumbnail + validate_thumbnail(order_item_id) + except Exception as exc: + logger.error(f"AI validation failed for {order_item_id}: {exc}") + raise self.retry(exc=exc, countdown=30, max_retries=3) diff --git a/backend/app/tasks/celery_app.py b/backend/app/tasks/celery_app.py new file mode 100644 index 0000000..52cab0a --- /dev/null +++ b/backend/app/tasks/celery_app.py @@ -0,0 +1,36 @@ +from celery import Celery +from app.config import settings + +celery_app = Celery( + "schaefflerautomat", + broker=settings.redis_url, + backend=settings.redis_url, + include=["app.tasks.step_tasks", "app.tasks.ai_tasks", "app.tasks.flamenco_tasks"], +) + +celery_app.conf.update( + task_serializer="json", + result_serializer="json", + accept_content=["json"], + timezone="UTC", + enable_utc=True, + task_routes={ + 
"app.tasks.step_tasks.*": {"queue": "step_processing"}, + "app.tasks.ai_tasks.*": {"queue": "ai_validation"}, + "app.tasks.flamenco_tasks.*": {"queue": "step_processing"}, + }, + beat_schedule={ + "poll-flamenco-jobs": { + "task": "app.tasks.flamenco_tasks.poll_flamenco_jobs", + "schedule": 10.0, # every 10 seconds + # Discard if not consumed before the next run; prevents queue build-up + # when workers are busy with long-running STEP/render tasks. + "options": {"expires": 9}, + }, + "check-stalled-renders": { + "task": "app.tasks.flamenco_tasks.check_stalled_renders", + "schedule": 300.0, # every 5 minutes + "options": {"expires": 290}, + }, + }, +) diff --git a/backend/app/tasks/flamenco_tasks.py b/backend/app/tasks/flamenco_tasks.py new file mode 100644 index 0000000..d240b38 --- /dev/null +++ b/backend/app/tasks/flamenco_tasks.py @@ -0,0 +1,335 @@ +"""Celery tasks for polling Flamenco job status and watchdog recovery.""" +import logging +from datetime import datetime, timedelta + +from app.tasks.celery_app import celery_app + +logger = logging.getLogger(__name__) + +# Flamenco status → our render_status mapping +FLAMENCO_STATUS_MAP = { + "queued": "processing", + "active": "processing", + "completed": "completed", + "failed": "failed", + "canceled": "failed", + "cancel-requested": "processing", + "paused": "processing", +} + + +@celery_app.task(name="app.tasks.flamenco_tasks.poll_flamenco_jobs", queue="step_processing") +def poll_flamenco_jobs(): + """Poll Flamenco Manager for active render jobs and update OrderLine status. + + Runs on a Celery Beat schedule (every 10 seconds). + + Uses a Redis lock (TTL=9s) to ensure at most one poll executes per 10-second + window. When the queue backs up with many duplicates (e.g. all workers are + busy with long STEP/render tasks), duplicates acquire the lock, find it taken, + and return immediately — draining the queue without doing redundant work. 
+ """ + import redis as redis_lib + from app.config import settings as app_settings + + # Deduplicate: skip if a poll ran within the last 9 seconds + try: + r = redis_lib.from_url(app_settings.redis_url) + acquired = r.set("flamenco_poll_lock", "1", nx=True, ex=9) + if not acquired: + return {"skipped": "deduplicated"} + except Exception: + pass # Redis unavailable — proceed anyway + + from sqlalchemy import create_engine, select, update as sql_update + from sqlalchemy.orm import Session + from app.models.order_line import OrderLine + from app.models.system_setting import SystemSetting + from app.services.flamenco_client import get_flamenco_client + + sync_url = app_settings.database_url.replace("+asyncpg", "") + engine = create_engine(sync_url) + + # Track orders whose lines transitioned to a terminal state + completed_order_ids = set() + + with Session(engine) as session: + # Load Flamenco Manager URL + row = session.execute( + select(SystemSetting).where(SystemSetting.key == "flamenco_manager_url") + ).scalar_one_or_none() + manager_url = row.value if row else "http://flamenco-manager:8080" + + # Find all OrderLines dispatched to Flamenco that are still processing + lines = session.execute( + select(OrderLine).where( + OrderLine.render_backend_used == "flamenco", + OrderLine.render_status == "processing", + OrderLine.flamenco_job_id.isnot(None), + ) + ).scalars().all() + + if not lines: + engine.dispose() + return {"polled": 0} + + client = get_flamenco_client(manager_url) + updated = 0 + + for line in lines: + try: + job = client.get_job(line.flamenco_job_id) + flamenco_status = job.get("status", "") + our_status = FLAMENCO_STATUS_MAP.get(flamenco_status, "processing") + + if our_status == line.render_status: + continue # No change + + updates = {"render_status": our_status} + + if our_status == "completed": + updates["render_completed_at"] = datetime.utcnow() + # Try to extract result path from job activity + activity = job.get("activity", "") + if activity: + updates["render_log"] = { + "flamenco_job_id": line.flamenco_job_id, + "flamenco_status": flamenco_status, + "activity": activity, + } + # Set result path based on job type + job_type = job.get("type", "") + metadata = job.get("metadata", {}) + if job_type == "schaeffler-turntable": + output_dir = job.get("settings", {}).get("output_dir", "") + output_name = job.get("settings", {}).get("output_name", "turntable") + updates["result_path"] = f"{output_dir}/{output_name}.mp4" + elif job_type == "schaeffler-still": + updates["result_path"] = job.get("settings", {}).get("output_path", "") + + elif our_status == "failed": + updates["render_completed_at"] = datetime.utcnow() + updates["render_log"] = { + "flamenco_job_id": line.flamenco_job_id, + "flamenco_status": flamenco_status, + "error": job.get("activity", "Job failed"), + } + + session.execute( + sql_update(OrderLine) + .where(OrderLine.id == line.id) + .values(**updates) + ) + updated += 1 + logger.info( + f"Flamenco job {line.flamenco_job_id}: " + f"{flamenco_status} → render_status={our_status}" + ) + + # Track orders with lines that reached a terminal state + if our_status in ("completed", "failed"): + completed_order_ids.add(str(line.order_id)) + + except Exception as exc: + logger.warning( + f"Failed to poll Flamenco job {line.flamenco_job_id}: {exc}" + ) + + if updated: + session.commit() + + engine.dispose() + + # Auto-advance orders if all renderable lines are done + if completed_order_ids: + from app.services.order_status_service import check_order_completion + for oid 
in completed_order_ids: + check_order_completion(oid) + + return {"polled": len(lines), "updated": updated} + + +# --------------------------------------------------------------------------- +# Stalled-render watchdog +# --------------------------------------------------------------------------- + +@celery_app.task(name="app.tasks.flamenco_tasks.check_stalled_renders", queue="step_processing") +def check_stalled_renders(): + """Watchdog: detect and re-dispatch render jobs stuck in 'processing'. + + Runs on a Celery Beat schedule (every 5 minutes). + + After a docker restart, Celery workers lose in-flight tasks — the DB still + shows render_status='processing' indefinitely. This task: + + * For **Celery** lines: uses Celery inspect to check whether any worker is + still actively executing the task. If not (e.g. after a restart), and + the job has been stuck longer than ``render_stall_timeout_minutes`` + (default: 120 min), it is reset to 'pending' and re-dispatched. + + * For **Flamenco** lines: queries the Flamenco Manager. If the manager + reports the job as still active the line is left alone; if the job is + gone or in a terminal/error state it is re-dispatched. + """ + from sqlalchemy import create_engine, select, update as sql_update + from sqlalchemy.orm import Session + from app.config import settings as app_settings + from app.models.order_line import OrderLine + from app.models.system_setting import SystemSetting + + sync_url = app_settings.database_url.replace("+asyncpg", "") + engine = create_engine(sync_url) + + with Session(engine) as session: + # ── Read timeout from system settings ──────────────────────────────── + row = session.execute( + select(SystemSetting).where(SystemSetting.key == "render_stall_timeout_minutes") + ).scalar_one_or_none() + try: + timeout_minutes = int(row.value) if row else 120 + except (ValueError, TypeError): + timeout_minutes = 120 + + cutoff = datetime.utcnow() - timedelta(minutes=timeout_minutes) + + stalled_lines = session.execute( + select(OrderLine).where( + OrderLine.render_status == "processing", + OrderLine.render_started_at.isnot(None), + OrderLine.render_started_at < cutoff, + ) + ).scalars().all() + + if not stalled_lines: + engine.dispose() + return {"checked": 0, "restarted": 0, "timeout_minutes": timeout_minutes} + + logger.info( + "[watchdog] Found %d stalled render(s) older than %d minutes", + len(stalled_lines), timeout_minutes, + ) + + # ── Build set of order_line_ids actively running on Celery workers ─── + active_celery_line_ids: set[str] = set() + inspect_ok = False + try: + inspect = celery_app.control.inspect(timeout=2) + active_tasks = inspect.active() or {} + for worker_tasks in active_tasks.values(): + for task_info in (worker_tasks or []): + args = task_info.get("args", []) + if args: + active_celery_line_ids.add(str(args[0])) + inspect_ok = True + except Exception as exc: + logger.warning( + "[watchdog] Celery inspect failed (%s) — will re-dispatch all timed-out Celery jobs", + exc, + ) + + # ── Load Flamenco Manager URL ───────────────────────────────────────── + manager_url = "http://flamenco-manager:8080" + try: + url_row = session.execute( + select(SystemSetting).where(SystemSetting.key == "flamenco_manager_url") + ).scalar_one_or_none() + if url_row: + manager_url = url_row.value + except Exception: + pass + + # ── Decide which lines to restart ──────────────────────────────────── + to_restart: list[OrderLine] = [] + + for line in stalled_lines: + line_id = str(line.id) + + if line.flamenco_job_id: + # Flamenco job: 
verify with manager before re-dispatching + try: + from app.services.flamenco_client import get_flamenco_client + client = get_flamenco_client(manager_url) + job = client.get_job(line.flamenco_job_id) + flamenco_status = job.get("status", "") + if flamenco_status in ( + "active", "queued", "paused", + "pause-requested", "cancel-requested", + ): + logger.info( + "[watchdog] Flamenco job %s is still %s — skipping line %s", + line.flamenco_job_id, flamenco_status, line_id, + ) + continue + logger.info( + "[watchdog] Flamenco job %s status=%r → re-dispatching line %s", + line.flamenco_job_id, flamenco_status, line_id, + ) + except Exception as exc: + # Manager unreachable — skip to avoid false restarts + logger.warning( + "[watchdog] Cannot reach Flamenco for job %s (%s) — skipping line %s", + line.flamenco_job_id, exc, line_id, + ) + continue + else: + # Celery job: skip if still actively running on a worker + if inspect_ok and line_id in active_celery_line_ids: + logger.info( + "[watchdog] Celery render for line %s still active — skipping", line_id + ) + continue + logger.info( + "[watchdog] Celery render for line %s not found in active tasks — re-dispatching", + line_id, + ) + + to_restart.append(line) + + if not to_restart: + engine.dispose() + return { + "checked": len(stalled_lines), + "restarted": 0, + "timeout_minutes": timeout_minutes, + } + + # ── Reset stalled lines to pending ─────────────────────────────────── + for line in to_restart: + session.execute( + sql_update(OrderLine) + .where(OrderLine.id == line.id) + .values( + render_status="pending", + render_started_at=None, + render_backend_used=None, + flamenco_job_id=None, + render_log={ + "watchdog": ( + f"Auto-restarted after {timeout_minutes} min stall " + f"(previous backend: {line.render_backend_used or 'unknown'})" + ) + }, + ) + ) + session.commit() + + engine.dispose() + + # ── Re-dispatch outside DB session ─────────────────────────────────────── + from app.services.render_dispatcher import dispatch_render + restarted = 0 + for line in to_restart: + try: + dispatch_render(str(line.id)) + restarted += 1 + logger.info("[watchdog] Re-dispatched render for order line %s", line.id) + except Exception as exc: + logger.error( + "[watchdog] Failed to re-dispatch line %s: %s — left as pending", line.id, exc + ) + + return { + "checked": len(stalled_lines), + "restarted": restarted, + "timeout_minutes": timeout_minutes, + } diff --git a/backend/app/tasks/step_tasks.py b/backend/app/tasks/step_tasks.py new file mode 100644 index 0000000..febe3d2 --- /dev/null +++ b/backend/app/tasks/step_tasks.py @@ -0,0 +1,581 @@ +"""Celery tasks for STEP file processing and thumbnail generation.""" +import logging +from app.tasks.celery_app import celery_app + +logger = logging.getLogger(__name__) + + +@celery_app.task(bind=True, name="app.tasks.step_tasks.process_step_file", queue="step_processing") +def process_step_file(self, cad_file_id: str): + """Process a STEP file: extract objects, generate thumbnail, convert to glTF. + + After processing completes, auto-populate cad_part_materials from Excel + component data for any linked products that don't yet have materials assigned. + + A per-file Redis lock (TTL = 10 min) prevents duplicate tasks from processing + the same file concurrently — e.g. when 'Process Unprocessed' is clicked while + a file is already being processed. 
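+
+    Lock layout as used below: key ``step_processing_lock:<cad_file_id>``, set via
+    Redis ``SET NX EX 600`` (10-minute TTL) and released in a ``finally`` block.
+    On success the file is handed to ``render_step_thumbnail.delay(cad_file_id)``
+    on the dedicated thumbnail_rendering queue.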
+ """ + import redis as redis_lib + from app.config import settings as app_settings + + lock_key = f"step_processing_lock:{cad_file_id}" + r = redis_lib.from_url(app_settings.redis_url) + acquired = r.set(lock_key, "1", nx=True, ex=600) # 10-minute TTL + if not acquired: + logger.warning(f"STEP file {cad_file_id} is already being processed — skipping duplicate task") + return + + try: + logger.info(f"Processing STEP file (metadata only): {cad_file_id}") + try: + from app.services.step_processor import extract_cad_metadata + extract_cad_metadata(cad_file_id) + except Exception as exc: + logger.error(f"STEP metadata extraction failed for {cad_file_id}: {exc}") + r.delete(lock_key) # release lock so a retry can proceed + raise self.retry(exc=exc, countdown=60, max_retries=3) + finally: + r.delete(lock_key) # always release on completion or unhandled error + + # Queue thumbnail rendering on the dedicated single-concurrency worker + render_step_thumbnail.delay(cad_file_id) + + +def _auto_populate_materials_for_cad(cad_file_id: str) -> None: + """Sync helper: auto-populate cad_part_materials from Excel for newly-processed CAD files. + + Only fills products where cad_part_materials is empty or all-blank, + preventing overwrites of manually assigned materials. + """ + from sqlalchemy import create_engine, select as sql_select, update as sql_update + from sqlalchemy.orm import Session + from app.config import settings as app_settings + from app.models.cad_file import CadFile + from app.models.product import Product + from app.api.routers.products import build_materials_from_excel + from app.services.step_processor import build_part_colors + + sync_url = app_settings.database_url.replace("+asyncpg", "") + eng = create_engine(sync_url) + with Session(eng) as session: + # Load the CAD file to get parsed objects + cad_file = session.execute( + sql_select(CadFile).where(CadFile.id == cad_file_id) + ).scalar_one_or_none() + if cad_file is None: + return + + parsed_objects = cad_file.parsed_objects or {} + cad_parts: list[str] = parsed_objects.get("objects", []) + if not cad_parts: + return + + # Find products linked to this CAD file that have Excel components + products = session.execute( + sql_select(Product).where( + Product.cad_file_id == cad_file.id, + Product.is_active.is_(True), + ) + ).scalars().all() + + final_part_colors = None + for product in products: + excel_components: list[dict] = product.components or [] + if not excel_components: + continue + + # Only auto-fill when cad_part_materials is empty or all-blank + existing = product.cad_part_materials or [] + if existing and any(m.get("material", "").strip() for m in existing): + continue # has at least one real material — don't overwrite + + new_materials = build_materials_from_excel(cad_parts, excel_components) + session.execute( + sql_update(Product) + .where(Product.id == product.id) + .values(cad_part_materials=new_materials) + ) + session.flush() + + # Compute part colors; thumbnail queued once after the loop + try: + final_part_colors = build_part_colors(cad_parts, new_materials) + except Exception: + logger.exception(f"Part colors build failed for product {product.id}") + + logger.info( + f"Auto-populated {len(new_materials)} materials for product {product.id} " + f"from {len(excel_components)} Excel components" + ) + + session.commit() + + # Queue exactly ONE thumbnail regeneration per CAD file regardless of how many + # products were auto-populated. 
Queuing once-per-product multiplies the task + # count needlessly and causes the Redis queue depth to grow instead of shrink. + if final_part_colors is not None: + try: + regenerate_thumbnail.delay(str(cad_file_id), final_part_colors) + except Exception: + logger.exception(f"Thumbnail regen queue failed for cad_file {cad_file_id}") + + eng.dispose() + + +@celery_app.task(bind=True, name="app.tasks.step_tasks.render_step_thumbnail", queue="thumbnail_rendering") +def render_step_thumbnail(self, cad_file_id: str): + """Render the thumbnail for a freshly-processed STEP file. + + Runs on the dedicated thumbnail_rendering queue (concurrency=1) so the + blender-renderer service is never overwhelmed by concurrent requests. + On success, also auto-populates materials and marks the CadFile as completed. + """ + logger.info(f"Rendering thumbnail for CAD file: {cad_file_id}") + try: + from app.services.step_processor import regenerate_cad_thumbnail + success = regenerate_cad_thumbnail(cad_file_id, part_colors={}) + if not success: + raise RuntimeError("regenerate_cad_thumbnail returned False") + except Exception as exc: + logger.error(f"Thumbnail render failed for {cad_file_id}: {exc}") + raise self.retry(exc=exc, countdown=30, max_retries=2) + + # Auto-populate materials now that parsed_objects are available + try: + _auto_populate_materials_for_cad(cad_file_id) + except Exception: + logger.exception( + f"Auto material population failed for cad_file {cad_file_id} (non-fatal)" + ) + + +@celery_app.task(bind=True, name="app.tasks.step_tasks.generate_stl_cache", queue="thumbnail_rendering") +def generate_stl_cache(self, cad_file_id: str, quality: str): + """Generate and cache STL for a CAD file without triggering a full render.""" + from sqlalchemy import create_engine + from sqlalchemy.orm import Session + from app.config import settings as app_settings + from app.models.cad_file import CadFile + import httpx + + logger.info(f"Generating {quality}-quality STL for CAD file: {cad_file_id}") + + sync_url = app_settings.database_url.replace("+asyncpg", "") + eng = create_engine(sync_url) + with Session(eng) as session: + cad_file = session.get(CadFile, cad_file_id) + if not cad_file or not cad_file.stored_path: + logger.error(f"CAD file not found or no stored_path: {cad_file_id}") + return + step_path = cad_file.stored_path + eng.dispose() + + try: + resp = httpx.post( + "http://blender-renderer:8100/convert-stl", + json={"step_path": step_path, "quality": quality}, + timeout=600.0, + ) + if resp.status_code == 200: + data = resp.json() + logger.info(f"STL cached: {data['stl_path']} ({data['size_bytes']} bytes) in {data['duration_s']}s") + else: + raise RuntimeError(f"blender-renderer returned {resp.status_code}: {resp.text[:300]}") + except Exception as exc: + logger.error(f"STL generation failed for {cad_file_id} quality={quality}: {exc}") + raise self.retry(exc=exc, countdown=30, max_retries=2) + + +@celery_app.task(bind=True, name="app.tasks.step_tasks.regenerate_thumbnail", queue="thumbnail_rendering") +def regenerate_thumbnail(self, cad_file_id: str, part_colors: dict): + """Regenerate thumbnail with per-part colours.""" + logger.info(f"Regenerating thumbnail for CAD file: {cad_file_id}") + try: + from app.services.step_processor import regenerate_cad_thumbnail + success = regenerate_cad_thumbnail(cad_file_id, part_colors) + if not success: + raise RuntimeError("regenerate_cad_thumbnail returned False") + except Exception as exc: + logger.error(f"Thumbnail regeneration failed for {cad_file_id}: 
{exc}") + raise self.retry(exc=exc, countdown=30, max_retries=2) + + +@celery_app.task(name="app.tasks.step_tasks.dispatch_order_line_render", queue="step_processing") +def dispatch_order_line_render(order_line_id: str): + """Thin wrapper that calls render_dispatcher.dispatch_render().""" + logger.info(f"Dispatching render for order line: {order_line_id}") + try: + from app.services.render_dispatcher import dispatch_render + result = dispatch_render(order_line_id) + logger.info(f"Dispatch result for {order_line_id}: {result}") + return result + except Exception as exc: + logger.error(f"dispatch_order_line_render failed for {order_line_id}: {exc}") + # Mark line as failed so it doesn't stay stuck in "processing" + try: + from sqlalchemy import create_engine, update as sql_update + from sqlalchemy.orm import Session + from app.config import settings as app_settings + from app.models.order_line import OrderLine + from datetime import datetime + sync_url = app_settings.database_url.replace("+asyncpg", "") + eng = create_engine(sync_url) + with Session(eng) as s: + s.execute( + sql_update(OrderLine) + .where(OrderLine.id == order_line_id) + .values( + render_status="failed", + render_completed_at=datetime.utcnow(), + render_log={"error": f"Dispatch failed: {str(exc)[:500]}"}, + ) + ) + s.commit() + eng.dispose() + except Exception: + logger.exception(f"Failed to mark {order_line_id} as failed after dispatch error") + raise + + +@celery_app.task(bind=True, name="app.tasks.step_tasks.render_order_line_task", queue="step_processing", max_retries=3) +def render_order_line_task(self, order_line_id: str): + """Render a specific output type for an order line. + + Loads OrderLine → Product → CadFile → OutputType.render_settings. + Merges with system render settings. Stores result at order_line.result_path. 
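+
+    Rough flow (summarising the body below): load the line with product, CAD file
+    and output type; mark it processing; resolve render template and material
+    library; build part_colors / material_map; call ``render_to_file`` against the
+    renderer service; persist render_status, render_log and result_path; notify
+    the order creator; finally run ``check_order_completion`` for the parent order.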
+ """ + logger.info(f"Rendering order line: {order_line_id}") + from app.services.render_log import emit + + emit(order_line_id, "Celery render task started") + try: + from sqlalchemy import create_engine, select, update as sql_update + from sqlalchemy.orm import Session, joinedload + from app.config import settings as app_settings + + # Use sync session for Celery (no async event loop) + sync_url = app_settings.database_url.replace("+asyncpg", "") + engine = create_engine(sync_url) + + with Session(engine) as session: + from app.models.order_line import OrderLine + from app.models.product import Product + + emit(order_line_id, "Loading order line from database") + line = session.execute( + select(OrderLine) + .where(OrderLine.id == order_line_id) + .options( + joinedload(OrderLine.product).joinedload(Product.cad_file), + joinedload(OrderLine.output_type), + ) + ).scalar_one_or_none() + + if line is None: + emit(order_line_id, "Order line not found in database", "error") + logger.error(f"OrderLine {order_line_id} not found") + return + + if line.product.cad_file_id is None: + emit(order_line_id, "Product has no CAD file — marking as failed", "error") + logger.warning(f"OrderLine {order_line_id}: product has no CAD file") + session.execute( + sql_update(OrderLine) + .where(OrderLine.id == line.id) + .values(render_status="failed") + ) + session.commit() + return + + # Mark as processing with timing + from datetime import datetime + render_start = datetime.utcnow() + session.execute( + sql_update(OrderLine) + .where(OrderLine.id == line.id) + .values( + render_status="processing", + render_backend_used="celery", + render_started_at=render_start, + ) + ) + session.commit() + + cad_file = line.product.cad_file + materials_source = line.product.cad_part_materials + + part_colors = {} + if cad_file and cad_file.parsed_objects: + parsed_names = cad_file.parsed_objects.get("objects", []) + if materials_source: + from app.services.step_processor import build_part_colors + part_colors = build_part_colors(parsed_names, materials_source) + + # Resolve render template + material library + from app.services.template_service import resolve_template, get_material_library_path + + category_key = line.product.category_key if line.product else None + ot_id = str(line.output_type_id) if line.output_type_id else None + template = resolve_template(category_key=category_key, output_type_id=ot_id) + material_library = get_material_library_path() + + # Build material_map (part_name → material_name) for material replacement. + # Works with or without a render template — only suppressed if a + # template explicitly has material_replace_enabled=False. 
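+            # Illustrative material_map shape (example names only, not a real
+            # library mapping): {"innenring": "Stahl v2", "aussenring": "Stahl v2"}.
+            # resolve_material_map() below then maps such raw names onto the
+            # SCHAEFFLER library material names via aliases.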
+ material_map = None + use_materials = bool(material_library and materials_source) + if template and not template.material_replace_enabled: + use_materials = False + if use_materials: + material_map = { + m["part_name"]: m["material"] + for m in materials_source + if m.get("part_name") and m.get("material") + } + # Resolve raw material names to SCHAEFFLER library names via aliases + from app.services.material_service import resolve_material_map + material_map = resolve_material_map(material_map) + + if template: + emit(order_line_id, f"Using render template: {template.name} (collection={template.target_collection}, material_replace={template.material_replace_enabled}, lighting_only={template.lighting_only})") + logger.info(f"Render template resolved: '{template.name}' path={template.blend_file_path}, lighting_only={template.lighting_only}") + else: + emit(order_line_id, "No render template found — using factory settings (Mode A)") + logger.info(f"No render template for category_key={category_key!r}, output_type_id={ot_id!r}") + + cad_name = cad_file.original_name if cad_file else "?" + # Load render_position for rotation values + rotation_x = rotation_y = rotation_z = 0.0 + if line.render_position_id: + from app.models.render_position import ProductRenderPosition + rp = session.get(ProductRenderPosition, line.render_position_id) + if rp: + rotation_x, rotation_y, rotation_z = rp.rotation_x, rp.rotation_y, rp.rotation_z + emit(order_line_id, f"Render position: '{rp.name}' ({rotation_x}°, {rotation_y}°, {rotation_z}°)") + + emit(order_line_id, f"Starting render for {cad_name} ({len(part_colors)} coloured parts)") + + # Determine output format from output_type (default jpg) + out_ext = "jpg" + if line.output_type and line.output_type.output_format: + fmt = line.output_type.output_format.lower() + if fmt in ("png", "jpg", "jpeg"): + out_ext = "png" if fmt == "png" else "jpg" + + # Build meaningful output filename + import re + def _sanitize(s: str) -> str: + return re.sub(r'[^\w\-.]', '_', s.strip())[:100] + + product_name = line.product.name or line.product.pim_id or "product" + ot_name = line.output_type.name if line.output_type else "render" + filename = f"{_sanitize(product_name)}_{_sanitize(ot_name)}.{out_ext}" + + # Render to per-line output directory (not the shared CadFile thumbnail) + from pathlib import Path as _Path + render_dir = _Path(app_settings.upload_dir) / "renders" / order_line_id + render_dir.mkdir(parents=True, exist_ok=True) + output_path = str(render_dir / filename) + + # Extract per-output-type resolution from render_settings + render_width = None + render_height = None + if line.output_type and line.output_type.render_settings: + rs = line.output_type.render_settings + if rs.get("width"): + render_width = int(rs["width"]) + if rs.get("height"): + render_height = int(rs["height"]) + + # Check if transparent background is requested + transparent_bg = False + if line.output_type and line.output_type.transparent_bg: + transparent_bg = True + + # Extract per-OT engine and samples overrides + render_engine = None + render_samples = None + noise_threshold = "" + denoiser = "" + denoising_input_passes = "" + denoising_prefilter = "" + denoising_quality = "" + denoising_use_gpu = "" + if line.output_type and line.output_type.render_settings: + rs = line.output_type.render_settings + if rs.get("engine"): + render_engine = rs["engine"] + if rs.get("samples"): + render_samples = int(rs["samples"]) + noise_threshold = str(rs.get("noise_threshold", "")) + denoiser = 
str(rs.get("denoiser", "")) + denoising_input_passes = str(rs.get("denoising_input_passes", "")) + denoising_prefilter = str(rs.get("denoising_prefilter", "")) + denoising_quality = str(rs.get("denoising_quality", "")) + denoising_use_gpu = str(rs.get("denoising_use_gpu", "")) + + tmpl_info = f" template={template.name}" if template else "" + emit(order_line_id, f"Calling renderer (STEP → STL → render) {render_width or 'default'}x{render_height or 'default'}{' [transparent]' if transparent_bg else ''}{f' engine={render_engine}' if render_engine else ''}{f' samples={render_samples}' if render_samples else ''}{tmpl_info}") + from app.services.step_processor import render_to_file + # Build ordered part names list for index-based Blender matching + part_names_ordered = None + if cad_file and cad_file.parsed_objects: + part_names_ordered = cad_file.parsed_objects.get("objects", []) or None + + success, render_log = render_to_file( + step_path=cad_file.stored_path, + output_path=output_path, + part_colors=part_colors, + width=render_width, + height=render_height, + transparent_bg=transparent_bg, + engine=render_engine, + samples=render_samples, + template_path=template.blend_file_path if template else None, + target_collection=template.target_collection if template else "Product", + material_library_path=material_library if use_materials else None, + material_map=material_map, + part_names_ordered=part_names_ordered, + lighting_only=bool(template.lighting_only) if template else False, + shadow_catcher=bool(template.shadow_catcher_enabled) if template else False, + cycles_device=line.output_type.cycles_device if line.output_type else None, + rotation_x=rotation_x, + rotation_y=rotation_y, + rotation_z=rotation_z, + job_id=order_line_id, + noise_threshold=noise_threshold, + denoiser=denoiser, + denoising_input_passes=denoising_input_passes, + denoising_prefilter=denoising_prefilter, + denoising_quality=denoising_quality, + denoising_use_gpu=denoising_use_gpu, + ) + + new_status = "completed" if success else "failed" + render_end = datetime.utcnow() + elapsed = (render_end - render_start).total_seconds() + + update_values = dict( + render_status=new_status, + render_completed_at=render_end, + render_log=render_log, + ) + if success: + update_values["result_path"] = output_path + + session.execute( + sql_update(OrderLine) + .where(OrderLine.id == line.id) + .values(**update_values) + ) + session.commit() + + if success: + emit(order_line_id, f"Render completed in {elapsed:.1f}s", "success") + else: + emit(order_line_id, f"Render failed after {elapsed:.1f}s", "error") + + # Notify order creator about render result + try: + from app.models.order import Order as OrderModel + order_row = session.execute( + select(OrderModel.created_by, OrderModel.order_number) + .where(OrderModel.id == line.order_id) + ).one_or_none() + if order_row: + from app.services.notification_service import emit_notification_sync + details: dict = { + "order_number": order_row[1], + "product_name": product_name, + "output_type": ot_name, + } + if not success and isinstance(render_log, dict): + err = render_log.get("error") or render_log.get("stderr", "") + if err: + details["error"] = str(err)[:300] + emit_notification_sync( + actor_user_id=None, + target_user_id=str(order_row[0]), + action="render.completed" if success else "render.failed", + entity_type="order", + entity_id=str(line.order_id), + details=details, + ) + except Exception: + logger.exception("Failed to emit render notification") + + # Check if all lines for this 
order are done → auto-advance + order_id_str = str(line.order_id) + + engine.dispose() + + from app.services.order_status_service import check_order_completion + check_order_completion(order_id_str) + + except Exception as exc: + logger.error(f"render_order_line_task failed for {order_line_id}: {exc}") + # If retries exhausted, mark as failed so the line doesn't stay stuck + if self.request.retries >= self.max_retries: + logger.error(f"Max retries reached for {order_line_id}, marking as failed") + try: + from sqlalchemy import create_engine, update as sql_update2 + from sqlalchemy.orm import Session as SyncSession + from app.config import settings as app_settings + from app.models.order_line import OrderLine as OL2 + sync_url2 = app_settings.database_url.replace("+asyncpg", "") + eng2 = create_engine(sync_url2) + with SyncSession(eng2) as s2: + from datetime import datetime as dt2 + s2.execute( + sql_update2(OL2).where(OL2.id == order_line_id) + .values( + render_status="failed", + render_completed_at=dt2.utcnow(), + render_log={"error": str(exc)[:500]}, + ) + ) + s2.commit() + eng2.dispose() + from app.services.order_status_service import check_order_completion + # Try to get order_id from DB + eng3 = create_engine(sync_url2) + with SyncSession(eng3) as s3: + from sqlalchemy import select as sel + row = s3.execute(sel(OL2.order_id).where(OL2.id == order_line_id)).scalar_one_or_none() + if row: + check_order_completion(str(row)) + eng3.dispose() + # Notify the order creator about the failure + try: + from sqlalchemy import select as sel2 + from app.models.order import Order as OrderModel2 + eng4 = create_engine(sync_url2) + with SyncSession(eng4) as s4: + order_row2 = s4.execute( + sel2(OrderModel2.created_by, OrderModel2.order_number) + .join(OL2, OL2.order_id == OrderModel2.id) + .where(OL2.id == order_line_id) + ).one_or_none() + eng4.dispose() + if order_row2: + from app.services.notification_service import emit_notification_sync + emit_notification_sync( + actor_user_id=None, + target_user_id=str(order_row2[0]), + action="render.failed", + entity_type="order", + entity_id=None, + details={ + "order_number": order_row2[1], + "product_name": "unknown", + "output_type": "unknown", + "error": str(exc)[:300], + }, + ) + except Exception: + logger.exception("Failed to emit render failure notification") + except Exception: + logger.exception(f"Failed to mark {order_line_id} as failed in DB") + raise + raise self.retry(exc=exc, countdown=60) diff --git a/backend/app/utils/__init__.py b/backend/app/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/utils/__pycache__/__init__.cpython-311.pyc b/backend/app/utils/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..48c59e8 Binary files /dev/null and b/backend/app/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/backend/app/utils/__pycache__/__init__.cpython-312.pyc b/backend/app/utils/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..3720ee5 Binary files /dev/null and b/backend/app/utils/__pycache__/__init__.cpython-312.pyc differ diff --git a/backend/app/utils/__pycache__/auth.cpython-311.pyc b/backend/app/utils/__pycache__/auth.cpython-311.pyc new file mode 100644 index 0000000..f08b3dc Binary files /dev/null and b/backend/app/utils/__pycache__/auth.cpython-311.pyc differ diff --git a/backend/app/utils/__pycache__/auth.cpython-312.pyc b/backend/app/utils/__pycache__/auth.cpython-312.pyc new file mode 100644 index 0000000..5b71221 Binary files /dev/null 
and b/backend/app/utils/__pycache__/auth.cpython-312.pyc differ diff --git a/backend/app/utils/__pycache__/seed_templates.cpython-311.pyc b/backend/app/utils/__pycache__/seed_templates.cpython-311.pyc new file mode 100644 index 0000000..064b70d Binary files /dev/null and b/backend/app/utils/__pycache__/seed_templates.cpython-311.pyc differ diff --git a/backend/app/utils/__pycache__/seed_templates.cpython-312.pyc b/backend/app/utils/__pycache__/seed_templates.cpython-312.pyc new file mode 100644 index 0000000..80672c3 Binary files /dev/null and b/backend/app/utils/__pycache__/seed_templates.cpython-312.pyc differ diff --git a/backend/app/utils/auth.py b/backend/app/utils/auth.py new file mode 100644 index 0000000..1e26aa2 --- /dev/null +++ b/backend/app/utils/auth.py @@ -0,0 +1,70 @@ +"""JWT authentication utilities.""" +import uuid +from datetime import datetime, timedelta +from typing import Optional + +from fastapi import Depends, HTTPException, status +from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials +from jose import JWTError, jwt +from passlib.context import CryptContext +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select + +from app.config import settings +from app.database import get_db +from app.models.user import User + +pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") +bearer_scheme = HTTPBearer() + + +def hash_password(password: str) -> str: + return pwd_context.hash(password) + + +def verify_password(plain: str, hashed: str) -> bool: + return pwd_context.verify(plain, hashed) + + +def create_access_token(user_id: str, role: str) -> str: + expires = datetime.utcnow() + timedelta(minutes=settings.jwt_access_token_expire_minutes) + payload = {"sub": user_id, "role": role, "exp": expires} + return jwt.encode(payload, settings.jwt_secret_key, algorithm=settings.jwt_algorithm) + + +def decode_token(token: str) -> dict: + try: + return jwt.decode(token, settings.jwt_secret_key, algorithms=[settings.jwt_algorithm]) + except JWTError as exc: + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token") from exc + + +async def get_current_user( + credentials: HTTPAuthorizationCredentials = Depends(bearer_scheme), + db: AsyncSession = Depends(get_db), +) -> User: + payload = decode_token(credentials.credentials) + user_id = payload.get("sub") + if not user_id: + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token") + + result = await db.execute(select(User).where(User.id == uuid.UUID(user_id))) + user = result.scalar_one_or_none() + if not user or not user.is_active: + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="User not found or inactive") + return user + + +async def require_admin(user: User = Depends(get_current_user)) -> User: + if user.role.value != "admin": + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Admin access required") + return user + + +async def require_admin_or_pm(user: User = Depends(get_current_user)) -> User: + if user.role.value not in ("admin", "project_manager"): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Admin or Project Manager access required", + ) + return user diff --git a/backend/app/utils/seed_templates.py b/backend/app/utils/seed_templates.py new file mode 100644 index 0000000..74f01e9 --- /dev/null +++ b/backend/app/utils/seed_templates.py @@ -0,0 +1,184 @@ +"""Seed database with 7 Schaeffler product category templates.""" +import asyncio +import uuid +from 
sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, async_sessionmaker +from sqlalchemy import select + +STANDARD_FIELDS = { + "0": {"label": "Ebene1", "required": True}, + "1": {"label": "Ebene2", "required": True}, + "2": {"label": "Baureihe", "required": True}, + "3": {"label": "PIM-ID (Klasse)", "required": False}, + "4": {"label": "Produkt (Baureihe)", "required": False}, + "5": {"label": "[Separator]", "required": False, "skip": True}, + "6": {"label": "Gewähltes Produkt", "required": True}, + "7": {"label": "Name CAD-Modell", "required": True}, + "8": {"label": "Gewünschte Bildnummer", "required": False}, + "9": {"label": "Lagertyp", "required": False}, + "10": {"label": "Medias-Rendering", "required": False}, +} + +TEMPLATES = [ + { + "name": "Tapered Roller Bearings (TRB)", + "category_key": "TRB", + "description": "Kegelrollenlager – Tapered roller bearings", + "standard_fields": STANDARD_FIELDS, + "component_schema": { + "pairs": [ + {"component_type": "Innenring / Inner ring", "required": False}, + {"component_type": "Innenring / Inner ring 2", "required": False}, + {"component_type": "Innenring / Inner ring 3", "required": False}, + {"component_type": "Außenring / Outer ring", "required": False}, + {"component_type": "Außenring / Outer ring 2", "required": False}, + {"component_type": "Außenring / Outer ring 3", "required": False}, + {"component_type": "Außenring / Outer ring 4", "required": False}, + {"component_type": "Käfig / Cage", "required": False}, + {"component_type": "Wälzkörper / Rolling Element", "required": False}, + {"component_type": "Dichtungskern/Dichtungsträger", "required": False}, + {"component_type": "Dichtung Außen / Dichtlippe", "required": False}, + ] + }, + }, + { + "name": "Kugellager (Ball Bearings)", + "category_key": "Kugellager", + "description": "Kugellager – Ball bearings", + "standard_fields": STANDARD_FIELDS, + "component_schema": { + "pairs": [ + {"component_type": "Innenring / Inner ring", "required": False}, + {"component_type": "Außenring / Outer ring", "required": False}, + {"component_type": "Wälzkörper / Rolling Element", "required": True}, + {"component_type": "Käfig / Cage", "required": False}, + {"component_type": "Dichtungskern/Dichtungsträger", "required": False}, + {"component_type": "Axial - WS", "required": False}, + {"component_type": "Axial - GS", "required": False}, + ] + }, + }, + { + "name": "Gleitlager (Plain Bearings)", + "category_key": "Gleitlager", + "description": "Gleitlager – Plain / sliding bearings", + "standard_fields": STANDARD_FIELDS, + "component_schema": { + "pairs": [ + {"component_type": "Innenring / Inner ring", "required": False}, + {"component_type": "Außenring / Outer ring", "required": False}, + {"component_type": "Gehause / Housing", "required": False}, + {"component_type": "Sliding Layer", "required": False}, + {"component_type": "Dichtungsträger / Sealing carrier", "required": False}, + {"component_type": "Dichtlippe / Sealing lip", "required": False}, + ] + }, + }, + { + "name": "Spherical / Toroidal Roller Bearings (SRB/TORB)", + "category_key": "SRB_TORB", + "description": "Pendelrollenlager / Toroidalrollenlager – SRB and TORB bearings", + "standard_fields": STANDARD_FIELDS, + "component_schema": { + "pairs": [ + {"component_type": "Innenring / Inner ring", "required": False}, + {"component_type": "Außenring / Outer ring", "required": False}, + {"component_type": "Käfig / Cage", "required": False}, + {"component_type": "Wälzkörper / Rolling element", "required": False}, + 
{"component_type": "Bordscheibe IR / Loose Lip IR", "required": False}, + {"component_type": "Dichtungsträger / Sealing carrier", "required": False}, + ] + }, + }, + { + "name": "Cylindrical Roller Bearings (CRB)", + "category_key": "CRB", + "description": "Zylinderrollenlager – Cylindrical roller bearings", + "standard_fields": STANDARD_FIELDS, + "component_schema": { + "pairs": [ + {"component_type": "Innenring", "required": False}, + {"component_type": "Außenring", "required": False}, + {"component_type": "Rollen", "required": False}, + {"component_type": "Käfig", "required": False}, + {"component_type": "Dichtung", "required": False}, + {"component_type": "Halteringe", "required": False}, + {"component_type": "Bordscheibe", "required": False}, + ] + }, + }, + { + "name": "Linear Guide Rails", + "category_key": "Linear_schiene", + "description": "Linearsysteme – Linear guide rail systems", + "standard_fields": STANDARD_FIELDS, + "component_schema": { + "pairs": [ + {"component_type": "Rail", "required": True}, + ] + }, + }, + { + "name": "End Plates (Anschlagplatten)", + "category_key": "Anschlagplatten", + "description": "Anschlagplatten – End plates for guide rails", + "standard_fields": STANDARD_FIELDS, + "component_schema": { + "pairs": [ + {"component_type": "Platte / Plate", "required": True}, + {"component_type": "Schraube / Screw", "required": False}, + {"component_type": "Nut BZ", "required": False}, + ] + }, + }, +] + + +async def seed(db_url: str, admin_email: str = "admin@schaeffler.com", admin_password: str = "Admin1234!"): + from app.models.template import Template + from app.models.user import User, UserRole + from app.utils.auth import hash_password + + engine = create_async_engine(db_url, echo=False) + session_factory = async_sessionmaker(engine, expire_on_commit=False) + + async with session_factory() as session: + # Seed templates + for tpl_data in TEMPLATES: + result = await session.execute( + select(Template).where(Template.category_key == tpl_data["category_key"]) + ) + existing = result.scalar_one_or_none() + if not existing: + tpl = Template(**tpl_data) + session.add(tpl) + print(f" + Template: {tpl_data['category_key']}") + else: + print(f" ~ Template already exists: {tpl_data['category_key']}") + + # Seed admin user + result = await session.execute(select(User).where(User.email == admin_email)) + if not result.scalar_one_or_none(): + admin = User( + email=admin_email, + password_hash=hash_password(admin_password), + full_name="Schaeffler Admin", + role=UserRole.admin, + ) + session.add(admin) + print(f" + Admin user: {admin_email}") + else: + print(f" ~ Admin user already exists: {admin_email}") + + await session.commit() + await engine.dispose() + print("Seed complete.") + + +if __name__ == "__main__": + import sys + import os + + sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(__file__)))) + from app.config import settings + + asyncio.run(seed(settings.database_url)) diff --git a/backend/celerybeat-schedule b/backend/celerybeat-schedule new file mode 100644 index 0000000..43744d4 Binary files /dev/null and b/backend/celerybeat-schedule differ diff --git a/backend/pyproject.toml b/backend/pyproject.toml new file mode 100644 index 0000000..2609a2f --- /dev/null +++ b/backend/pyproject.toml @@ -0,0 +1,48 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["app"] + +[project] +name = "schaefflerautomat-backend" +version = "0.1.0" +requires-python = ">=3.11" 
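+# Note: glTF conversion for the browser viewer imports trimesh at runtime and
+# only logs a warning when it is missing; install the optional "cad" extra
+# below (e.g. `pip install .[cad]`) to enable it.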
+dependencies = [ + "fastapi>=0.110.0", + "uvicorn[standard]>=0.27.0", + "sqlalchemy>=2.0.0", + "alembic>=1.13.0", + "asyncpg>=0.29.0", + "psycopg2-binary>=2.9.9", + "pydantic[email]>=2.6.0", + "pydantic-settings>=2.2.0", + "python-jose[cryptography]>=3.3.0", + "passlib[bcrypt]>=1.7.4", + "bcrypt>=3.0.0,<4.0.0", + "python-multipart>=0.0.9", + "openpyxl>=3.1.2", + "celery[redis]>=5.3.6", + "redis>=5.0.1", + "openai>=1.14.0", + "pillow>=10.2.0", + "httpx>=0.27.0", + "python-dotenv>=1.0.1", + "aiofiles>=23.2.1", + "docker>=6.1.0", +] + +[project.optional-dependencies] +dev = [ + "pytest>=8.0.0", + "pytest-asyncio>=0.23.5", + "httpx>=0.27.0", +] +cad = [ + "trimesh>=4.2.0", + "pygltflib>=1.16.1", +] + +[tool.pytest.ini_options] +testpaths = ["tests"] diff --git a/backend/seed.py b/backend/seed.py new file mode 100644 index 0000000..5347f47 --- /dev/null +++ b/backend/seed.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python3 +"""Run migrations and seed templates. Called at container startup.""" +import asyncio +import subprocess +import sys + + +def run_migrations(): + result = subprocess.run( + ["alembic", "upgrade", "head"], + capture_output=True, + text=True, + ) + if result.returncode != 0: + print(f"Migration failed:\n{result.stderr}", file=sys.stderr) + sys.exit(1) + print(result.stdout) + + +async def main(): + run_migrations() + from app.config import settings + from app.utils.seed_templates import seed + await seed(settings.database_url) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/backend/start.sh b/backend/start.sh new file mode 100644 index 0000000..c2e6edd --- /dev/null +++ b/backend/start.sh @@ -0,0 +1,10 @@ +#!/bin/bash +set -e +echo "Running migrations..." +alembic upgrade head + +echo "Seeding templates and admin user..." +python seed.py + +echo "Starting API server..." +exec uvicorn app.main:app --host 0.0.0.0 --port 8888 --reload diff --git a/backend/tests/__init__.py b/backend/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/tests/__pycache__/__init__.cpython-312.pyc b/backend/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..1eca76c Binary files /dev/null and b/backend/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/backend/tests/__pycache__/conftest.cpython-312-pytest-9.0.2.pyc b/backend/tests/__pycache__/conftest.cpython-312-pytest-9.0.2.pyc new file mode 100644 index 0000000..6c9b3e5 Binary files /dev/null and b/backend/tests/__pycache__/conftest.cpython-312-pytest-9.0.2.pyc differ diff --git a/backend/tests/__pycache__/test_excel_parser.cpython-312-pytest-9.0.2.pyc b/backend/tests/__pycache__/test_excel_parser.cpython-312-pytest-9.0.2.pyc new file mode 100644 index 0000000..debbf09 Binary files /dev/null and b/backend/tests/__pycache__/test_excel_parser.cpython-312-pytest-9.0.2.pyc differ diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py new file mode 100644 index 0000000..417535f --- /dev/null +++ b/backend/tests/conftest.py @@ -0,0 +1,111 @@ +""" +Pytest fixtures for the Schaeffler Automat backend test suite. + +The tests in this suite are divided into: + - Unit tests (no DB / network required): excel_parser, models, schemas + - Integration tests (require running Postgres + Redis): API endpoints, tasks + +Unit tests run offline; integration tests are gated by the 'integration' +pytest mark so they can be skipped in CI without infrastructure. 
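+
+Typical invocations (assuming the 'integration' mark is registered in the
+pytest configuration, which is not shown here):
+
+    pytest -m "not integration"   # offline unit tests only
+    pytest -m integration         # requires running Postgres + Redis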
+""" +from __future__ import annotations + +import os +import sys +from pathlib import Path + +import pytest + +# --------------------------------------------------------------------------- +# Make sure the backend package is importable when tests are run from the +# repo root or from the backend/ directory. +# --------------------------------------------------------------------------- +BACKEND_DIR = Path(__file__).resolve().parent.parent # …/backend +if str(BACKEND_DIR) not in sys.path: + sys.path.insert(0, str(BACKEND_DIR)) + +# --------------------------------------------------------------------------- +# Paths +# --------------------------------------------------------------------------- +EXCEL_DIR = Path(__file__).resolve().parent.parent.parent / "Excel-Order-Lists" + +EXCEL_FILES: dict[str, Path] = { + "TRB": EXCEL_DIR / "TRB_Testscope_20260128.xlsx", + "Kugellager": EXCEL_DIR / "Kugellager_Testscope_20260128.xlsx", + "CRB": EXCEL_DIR / "CRB_Testscope_20260128.xlsx", + "Gleitlager": EXCEL_DIR / "Gleitlager_Testscope_20260128.xlsx", + "SRB_TORB": EXCEL_DIR / "SRB_TORB_Testscope_20260128.xlsx", + "Linear_schiene": EXCEL_DIR / "Linear_schiene_Testscope_20260128.xlsx", + "Anschlagplatten": EXCEL_DIR / "Anschlagplatten_Testscope_20260128.xlsx", +} + + +# --------------------------------------------------------------------------- +# Fixtures – Excel file paths +# --------------------------------------------------------------------------- + +@pytest.fixture(scope="session") +def excel_dir() -> Path: + """Return the directory that contains all sample Excel order lists.""" + assert EXCEL_DIR.is_dir(), f"Excel sample directory not found: {EXCEL_DIR}" + return EXCEL_DIR + + +@pytest.fixture(scope="session") +def excel_paths() -> dict[str, Path]: + """Return a mapping of category key → absolute path for each sample file.""" + missing = [k for k, p in EXCEL_FILES.items() if not p.exists()] + if missing: + pytest.skip(f"Sample Excel files missing: {missing}") + return EXCEL_FILES + + +# --------------------------------------------------------------------------- +# Fixtures – parsed Excel results (cached per test session) +# --------------------------------------------------------------------------- + +@pytest.fixture(scope="session") +def parsed_excel_all(excel_paths: dict[str, Path]) -> dict: + """Parse all 7 sample Excel files and return {category_key: ParsedExcel}.""" + from app.services.excel_parser import parse_excel + + return {cat: parse_excel(path) for cat, path in excel_paths.items()} + + +# --------------------------------------------------------------------------- +# Helpers exposed as fixtures +# --------------------------------------------------------------------------- + +@pytest.fixture(scope="session") +def parsed_trb(parsed_excel_all): + return parsed_excel_all["TRB"] + + +@pytest.fixture(scope="session") +def parsed_kugellager(parsed_excel_all): + return parsed_excel_all["Kugellager"] + + +@pytest.fixture(scope="session") +def parsed_crb(parsed_excel_all): + return parsed_excel_all["CRB"] + + +@pytest.fixture(scope="session") +def parsed_gleitlager(parsed_excel_all): + return parsed_excel_all["Gleitlager"] + + +@pytest.fixture(scope="session") +def parsed_srb_torb(parsed_excel_all): + return parsed_excel_all["SRB_TORB"] + + +@pytest.fixture(scope="session") +def parsed_linear_schiene(parsed_excel_all): + return parsed_excel_all["Linear_schiene"] + + +@pytest.fixture(scope="session") +def parsed_anschlagplatten(parsed_excel_all): + return parsed_excel_all["Anschlagplatten"] diff 
--git a/backend/tests/test_excel_parser.py b/backend/tests/test_excel_parser.py new file mode 100644 index 0000000..874593f --- /dev/null +++ b/backend/tests/test_excel_parser.py @@ -0,0 +1,653 @@ +""" +Unit tests for app.services.excel_parser.parse_excel + +Covers all 7 sample Excel order files: + TRB, Kugellager, CRB, Gleitlager, SRB_TORB, Linear_schiene, Anschlagplatten + +Each category class verifies: + - Correct category_key detected + - Correct template_name resolved + - Expected number of data rows (non-empty rows) + - Row indices (first data row is Excel row 4) + - medias_rendering values parsed correctly + - First row standard fields match expected values + - Component count (both per-row and total) + - Component fields (part_name lowercased, material, component_type) + - No unexpected warnings + - parsed_excel_to_dict / parsed_row_to_dict serialisation is correct + +The cross-file suite (TestAllFilesStructural) re-runs key invariants +against every file to catch regressions quickly. + +The TestParseExcelErrors suite tests ValueError / warning paths without +touching the real Excel files. +""" +from __future__ import annotations + +import sys +from pathlib import Path + +import pytest + +# Ensure backend package is importable when running from any directory. +BACKEND_DIR = Path(__file__).resolve().parent.parent +if str(BACKEND_DIR) not in sys.path: + sys.path.insert(0, str(BACKEND_DIR)) + +from app.services.excel_parser import ( + ParsedExcel, + ParsedRow, + ParsedComponent, + parse_excel, + parsed_excel_to_dict, + parsed_row_to_dict, + _normalize_filename, + _to_bool, +) + + +# --------------------------------------------------------------------------- +# Internal helpers +# --------------------------------------------------------------------------- + +def _all_components(parsed: ParsedExcel) -> list[ParsedComponent]: + """Flatten all components across all rows of a ParsedExcel.""" + return [c for row in parsed.rows for c in row.components] + + +# --------------------------------------------------------------------------- +# TRB — Tapered Roller Bearings +# --------------------------------------------------------------------------- + +class TestTRBParser: + """All assertions derived from TRB_Testscope_20260128.xlsx.""" + + def test_category_detected(self, parsed_trb: ParsedExcel): + assert parsed_trb.category_key == "TRB" + + def test_template_name(self, parsed_trb: ParsedExcel): + assert parsed_trb.template_name == "Tapered Roller Bearings (TRB)" + + def test_row_count(self, parsed_trb: ParsedExcel): + assert len(parsed_trb.rows) == 4 + + def test_row_indices(self, parsed_trb: ParsedExcel): + assert [r.row_index for r in parsed_trb.rows] == [4, 5, 6, 7] + + def test_no_warnings(self, parsed_trb: ParsedExcel): + assert parsed_trb.warnings == [] + + def test_first_row_ebene1(self, parsed_trb: ParsedExcel): + assert parsed_trb.rows[0].ebene1 == "Wälz- und Gleitlager" + + def test_first_row_baureihe(self, parsed_trb: ParsedExcel): + assert parsed_trb.rows[0].baureihe == "Kegelrollenlager" + + def test_first_row_pim_id(self, parsed_trb: ParsedExcel): + assert parsed_trb.rows[0].pim_id == "2305091021" + + def test_first_row_gewaehltes_produkt(self, parsed_trb: ParsedExcel): + assert parsed_trb.rows[0].gewaehltes_produkt == "F-802070.TR4-AM" + + def test_all_medias_rendering_true(self, parsed_trb: ParsedExcel): + assert all(r.medias_rendering is True for r in parsed_trb.rows) + + def test_first_row_component_count(self, parsed_trb: ParsedExcel): + assert 
len(parsed_trb.rows[0].components) == 20 + + def test_total_component_count(self, parsed_trb: ParsedExcel): + assert sum(len(r.components) for r in parsed_trb.rows) == 31 + + def test_first_component_material(self, parsed_trb: ParsedExcel): + assert parsed_trb.rows[0].components[0].material == "Stahl v2" + + def test_part_names_lowercase(self, parsed_trb: ParsedExcel): + for comp in _all_components(parsed_trb): + if comp.part_name: + assert comp.part_name == comp.part_name.lower() + + def test_component_column_indices_gte_11(self, parsed_trb: ParsedExcel): + for comp in _all_components(parsed_trb): + assert comp.column_index >= 11 + + def test_serialisation_keys(self, parsed_trb: ParsedExcel): + d = parsed_excel_to_dict(parsed_trb) + assert d["category_key"] == "TRB" + assert d["row_count"] == 4 + assert len(d["rows"]) == 4 + + def test_serialised_row_has_components_list(self, parsed_trb: ParsedExcel): + d = parsed_excel_to_dict(parsed_trb) + assert isinstance(d["rows"][0]["components"], list) + + +# --------------------------------------------------------------------------- +# Kugellager — Ball Bearings +# --------------------------------------------------------------------------- + +class TestKugellagerParser: + """All assertions derived from Kugellager_Testscope_20260128.xlsx.""" + + def test_category_detected(self, parsed_kugellager: ParsedExcel): + assert parsed_kugellager.category_key == "Kugellager" + + def test_template_name(self, parsed_kugellager: ParsedExcel): + assert parsed_kugellager.template_name == "Kugellager (Ball Bearings)" + + def test_row_count(self, parsed_kugellager: ParsedExcel): + assert len(parsed_kugellager.rows) == 9 + + def test_row_indices(self, parsed_kugellager: ParsedExcel): + assert parsed_kugellager.rows[0].row_index == 4 + assert parsed_kugellager.rows[-1].row_index == 12 + + def test_no_warnings(self, parsed_kugellager: ParsedExcel): + assert parsed_kugellager.warnings == [] + + def test_first_row_ebene1(self, parsed_kugellager: ParsedExcel): + assert parsed_kugellager.rows[0].ebene1 == "Wälz- und Gleitlager" + + def test_first_row_baureihe(self, parsed_kugellager: ParsedExcel): + assert parsed_kugellager.rows[0].baureihe == "Axial-Rillenkugellager" + + def test_first_row_pim_id(self, parsed_kugellager: ParsedExcel): + assert parsed_kugellager.rows[0].pim_id == "2305100101" + + def test_first_row_gewaehltes_produkt(self, parsed_kugellager: ParsedExcel): + assert parsed_kugellager.rows[0].gewaehltes_produkt == "51413-MP" + + def test_all_medias_rendering_true(self, parsed_kugellager: ParsedExcel): + assert all(r.medias_rendering is True for r in parsed_kugellager.rows) + + def test_total_component_count(self, parsed_kugellager: ParsedExcel): + assert sum(len(r.components) for r in parsed_kugellager.rows) == 55 + + def test_first_component_material(self, parsed_kugellager: ParsedExcel): + assert parsed_kugellager.rows[0].components[0].material == "Stahl v2" + + def test_part_names_lowercase(self, parsed_kugellager: ParsedExcel): + for comp in _all_components(parsed_kugellager): + if comp.part_name: + assert comp.part_name == comp.part_name.lower() + + def test_serialisation_row_count(self, parsed_kugellager: ParsedExcel): + assert parsed_excel_to_dict(parsed_kugellager)["row_count"] == 9 + + +# --------------------------------------------------------------------------- +# CRB — Cylindrical Roller Bearings +# --------------------------------------------------------------------------- + +class TestCRBParser: + """All assertions derived from 
CRB_Testscope_20260128.xlsx.""" + + def test_category_detected(self, parsed_crb: ParsedExcel): + assert parsed_crb.category_key == "CRB" + + def test_template_name(self, parsed_crb: ParsedExcel): + assert parsed_crb.template_name == "Cylindrical Roller Bearings (CRB)" + + def test_row_count(self, parsed_crb: ParsedExcel): + assert len(parsed_crb.rows) == 4 + + def test_row_indices(self, parsed_crb: ParsedExcel): + assert [r.row_index for r in parsed_crb.rows] == [4, 5, 6, 7] + + def test_no_warnings(self, parsed_crb: ParsedExcel): + assert parsed_crb.warnings == [] + + def test_first_row_baureihe(self, parsed_crb: ParsedExcel): + assert parsed_crb.rows[0].baureihe == "Axial-Zylinderrollenlager" + + def test_first_row_pim_id(self, parsed_crb: ParsedExcel): + assert parsed_crb.rows[0].pim_id == "2305110102" + + def test_first_row_gewaehltes_produkt(self, parsed_crb: ParsedExcel): + assert parsed_crb.rows[0].gewaehltes_produkt == "893..-M" + + def test_all_medias_rendering_true(self, parsed_crb: ParsedExcel): + assert all(r.medias_rendering is True for r in parsed_crb.rows) + + def test_first_row_component_count(self, parsed_crb: ParsedExcel): + assert len(parsed_crb.rows[0].components) == 4 + + def test_total_component_count(self, parsed_crb: ParsedExcel): + assert sum(len(r.components) for r in parsed_crb.rows) == 13 + + def test_first_component_material(self, parsed_crb: ParsedExcel): + assert parsed_crb.rows[0].components[0].material == "Stahl v2" + + def test_cad_model_names_lowercase(self, parsed_crb: ParsedExcel): + for row in parsed_crb.rows: + if row.name_cad_modell: + assert row.name_cad_modell == row.name_cad_modell.lower() + + def test_serialisation(self, parsed_crb: ParsedExcel): + d = parsed_excel_to_dict(parsed_crb) + assert d["category_key"] == "CRB" + assert d["row_count"] == 4 + + +# --------------------------------------------------------------------------- +# Gleitlager — Plain Bearings +# --------------------------------------------------------------------------- + +class TestGleitlagerParser: + """All assertions derived from Gleitlager_Testscope_20260128.xlsx.""" + + def test_category_detected(self, parsed_gleitlager: ParsedExcel): + assert parsed_gleitlager.category_key == "Gleitlager" + + def test_template_name(self, parsed_gleitlager: ParsedExcel): + assert parsed_gleitlager.template_name == "Gleitlager (Plain Bearings)" + + def test_row_count(self, parsed_gleitlager: ParsedExcel): + assert len(parsed_gleitlager.rows) == 3 + + def test_row_indices(self, parsed_gleitlager: ParsedExcel): + assert [r.row_index for r in parsed_gleitlager.rows] == [4, 5, 6] + + def test_no_warnings(self, parsed_gleitlager: ParsedExcel): + assert parsed_gleitlager.warnings == [] + + def test_first_row_baureihe(self, parsed_gleitlager: ParsedExcel): + assert parsed_gleitlager.rows[0].baureihe == "Gelenklager" + + def test_first_row_pim_id_is_none(self, parsed_gleitlager: ParsedExcel): + # Gleitlager first row has no PIM-ID + assert parsed_gleitlager.rows[0].pim_id is None + + def test_first_row_gewaehltes_produkt(self, parsed_gleitlager: ParsedExcel): + assert parsed_gleitlager.rows[0].gewaehltes_produkt == "GE..-HF" + + def test_all_medias_rendering_true(self, parsed_gleitlager: ParsedExcel): + assert all(r.medias_rendering is True for r in parsed_gleitlager.rows) + + def test_total_component_count(self, parsed_gleitlager: ParsedExcel): + assert sum(len(r.components) for r in parsed_gleitlager.rows) == 6 + + def test_first_component_material(self, parsed_gleitlager: ParsedExcel): + assert 
parsed_gleitlager.rows[0].components[0].material == "Durotect CMT" + + def test_serialisation(self, parsed_gleitlager: ParsedExcel): + d = parsed_excel_to_dict(parsed_gleitlager) + assert d["category_key"] == "Gleitlager" + assert d["row_count"] == 3 + + +# --------------------------------------------------------------------------- +# SRB_TORB — Spherical / Toroidal Roller Bearings +# --------------------------------------------------------------------------- + +class TestSRBTORBParser: + """All assertions derived from SRB_TORB_Testscope_20260128.xlsx.""" + + def test_category_detected(self, parsed_srb_torb: ParsedExcel): + assert parsed_srb_torb.category_key == "SRB_TORB" + + def test_template_name(self, parsed_srb_torb: ParsedExcel): + assert parsed_srb_torb.template_name == "Spherical / Toroidal Roller Bearings (SRB/TORB)" + + def test_row_count(self, parsed_srb_torb: ParsedExcel): + assert len(parsed_srb_torb.rows) == 2 + + def test_row_indices(self, parsed_srb_torb: ParsedExcel): + assert [r.row_index for r in parsed_srb_torb.rows] == [4, 5] + + def test_no_warnings(self, parsed_srb_torb: ParsedExcel): + assert parsed_srb_torb.warnings == [] + + def test_first_row_baureihe(self, parsed_srb_torb: ParsedExcel): + assert parsed_srb_torb.rows[0].baureihe == "Radial SRB" + + def test_first_row_pim_id(self, parsed_srb_torb: ParsedExcel): + assert parsed_srb_torb.rows[0].pim_id == "2305091102" + + def test_first_row_gewaehltes_produkt(self, parsed_srb_torb: ParsedExcel): + assert parsed_srb_torb.rows[0].gewaehltes_produkt == "241..-BE-XL-K30-H40" + + def test_all_medias_rendering_true(self, parsed_srb_torb: ParsedExcel): + assert all(r.medias_rendering is True for r in parsed_srb_torb.rows) + + def test_first_row_component_count(self, parsed_srb_torb: ParsedExcel): + assert len(parsed_srb_torb.rows[0].components) == 4 + + def test_total_component_count(self, parsed_srb_torb: ParsedExcel): + assert sum(len(r.components) for r in parsed_srb_torb.rows) == 8 + + def test_first_component_material(self, parsed_srb_torb: ParsedExcel): + assert parsed_srb_torb.rows[0].components[0].material == "Stahl v2" + + def test_serialisation(self, parsed_srb_torb: ParsedExcel): + d = parsed_excel_to_dict(parsed_srb_torb) + assert d["category_key"] == "SRB_TORB" + assert d["row_count"] == 2 + + +# --------------------------------------------------------------------------- +# Linear_schiene — Linear Guide Rails +# --------------------------------------------------------------------------- + +class TestLinearSchieneParser: + """All assertions derived from Linear_schiene_Testscope_20260128.xlsx.""" + + def test_category_detected(self, parsed_linear_schiene: ParsedExcel): + assert parsed_linear_schiene.category_key == "Linear_schiene" + + def test_template_name(self, parsed_linear_schiene: ParsedExcel): + assert parsed_linear_schiene.template_name == "Linear Guide Rails" + + def test_row_count(self, parsed_linear_schiene: ParsedExcel): + assert len(parsed_linear_schiene.rows) == 1 + + def test_row_index_starts_at_4(self, parsed_linear_schiene: ParsedExcel): + assert parsed_linear_schiene.rows[0].row_index == 4 + + def test_no_warnings(self, parsed_linear_schiene: ParsedExcel): + assert parsed_linear_schiene.warnings == [] + + def test_first_row_ebene1(self, parsed_linear_schiene: ParsedExcel): + assert parsed_linear_schiene.rows[0].ebene1 == "Linearsysteme" + + def test_first_row_baureihe(self, parsed_linear_schiene: ParsedExcel): + assert parsed_linear_schiene.rows[0].baureihe == "Rollenumlaufeinheiten" + + def 
test_first_row_pim_id(self, parsed_linear_schiene: ParsedExcel): + assert parsed_linear_schiene.rows[0].pim_id == "233092AB21" + + def test_first_row_gewaehltes_produkt(self, parsed_linear_schiene: ParsedExcel): + assert parsed_linear_schiene.rows[0].gewaehltes_produkt == "TSX..-D" + + def test_medias_rendering(self, parsed_linear_schiene: ParsedExcel): + assert parsed_linear_schiene.rows[0].medias_rendering is True + + def test_component_count(self, parsed_linear_schiene: ParsedExcel): + assert len(parsed_linear_schiene.rows[0].components) == 1 + + def test_first_component_part_name(self, parsed_linear_schiene: ParsedExcel): + comp = parsed_linear_schiene.rows[0].components[0] + assert comp.part_name == "tsx25d-g1-hj-gen.prt" + + def test_first_component_material(self, parsed_linear_schiene: ParsedExcel): + comp = parsed_linear_schiene.rows[0].components[0] + assert comp.material == "Stahl v2" + + def test_serialisation(self, parsed_linear_schiene: ParsedExcel): + d = parsed_excel_to_dict(parsed_linear_schiene) + assert d["category_key"] == "Linear_schiene" + assert d["row_count"] == 1 + + +# --------------------------------------------------------------------------- +# Anschlagplatten — End Plates +# --------------------------------------------------------------------------- + +class TestAnschlagplattenParser: + """All assertions derived from Anschlagplatten_Testscope_20260128.xlsx.""" + + def test_category_detected(self, parsed_anschlagplatten: ParsedExcel): + assert parsed_anschlagplatten.category_key == "Anschlagplatten" + + def test_template_name(self, parsed_anschlagplatten: ParsedExcel): + assert parsed_anschlagplatten.template_name == "End Plates (Anschlagplatten)" + + def test_row_count(self, parsed_anschlagplatten: ParsedExcel): + assert len(parsed_anschlagplatten.rows) == 2 + + def test_row_indices(self, parsed_anschlagplatten: ParsedExcel): + assert [r.row_index for r in parsed_anschlagplatten.rows] == [4, 5] + + def test_no_warnings(self, parsed_anschlagplatten: ParsedExcel): + assert parsed_anschlagplatten.warnings == [] + + def test_first_row_ebene1(self, parsed_anschlagplatten: ParsedExcel): + assert parsed_anschlagplatten.rows[0].ebene1 == "Linearsysteme" + + def test_first_row_baureihe(self, parsed_anschlagplatten: ParsedExcel): + assert parsed_anschlagplatten.rows[0].baureihe == "Endplatten für Führungsschiene LFS" + + def test_first_row_pim_id(self, parsed_anschlagplatten: ParsedExcel): + assert parsed_anschlagplatten.rows[0].pim_id == "233092AM41" + + def test_first_row_gewaehltes_produkt(self, parsed_anschlagplatten: ParsedExcel): + assert parsed_anschlagplatten.rows[0].gewaehltes_produkt == "ANS.LFS52-FH" + + def test_all_medias_rendering_true(self, parsed_anschlagplatten: ParsedExcel): + assert all(r.medias_rendering is True for r in parsed_anschlagplatten.rows) + + def test_total_component_count(self, parsed_anschlagplatten: ParsedExcel): + assert sum(len(r.components) for r in parsed_anschlagplatten.rows) == 3 + + def test_first_component_part_name(self, parsed_anschlagplatten: ParsedExcel): + comp = parsed_anschlagplatten.rows[0].components[0] + assert comp.part_name == "ans_lfs52-fh-0011_p.prt" + + def test_first_component_material(self, parsed_anschlagplatten: ParsedExcel): + comp = parsed_anschlagplatten.rows[0].components[0] + assert comp.material == "Stahl brüniert" + + def test_serialisation(self, parsed_anschlagplatten: ParsedExcel): + d = parsed_excel_to_dict(parsed_anschlagplatten) + assert d["category_key"] == "Anschlagplatten" + assert d["row_count"] 
== 2 + + +# --------------------------------------------------------------------------- +# Cross-file structural invariants +# --------------------------------------------------------------------------- + +class TestAllFilesStructural: + """Invariants that must hold for every one of the 7 sample files.""" + + ALL_CATEGORIES = [ + "TRB", "Kugellager", "CRB", "Gleitlager", + "SRB_TORB", "Linear_schiene", "Anschlagplatten", + ] + + def test_all_categories_detected(self, parsed_excel_all: dict): + for cat in self.ALL_CATEGORIES: + assert parsed_excel_all[cat].category_key == cat + + def test_all_have_template_names(self, parsed_excel_all: dict): + for cat, parsed in parsed_excel_all.items(): + assert parsed.template_name is not None, f"{cat}: template_name is None" + + def test_all_have_at_least_one_row(self, parsed_excel_all: dict): + for cat, parsed in parsed_excel_all.items(): + assert len(parsed.rows) > 0, f"{cat}: no data rows parsed" + + def test_all_rows_start_at_index_4(self, parsed_excel_all: dict): + for cat, parsed in parsed_excel_all.items(): + assert parsed.rows[0].row_index == 4, ( + f"{cat}: first row_index is {parsed.rows[0].row_index}, expected 4" + ) + + def test_row_indices_monotonically_increasing(self, parsed_excel_all: dict): + for cat, parsed in parsed_excel_all.items(): + indices = [r.row_index for r in parsed.rows] + assert indices == sorted(indices), f"{cat}: row indices not ascending: {indices}" + + def test_all_medias_rendering_true(self, parsed_excel_all: dict): + for cat, parsed in parsed_excel_all.items(): + for row in parsed.rows: + assert row.medias_rendering is True, ( + f"{cat} row {row.row_index}: medias_rendering={row.medias_rendering}" + ) + + def test_all_files_have_no_warnings(self, parsed_excel_all: dict): + for cat, parsed in parsed_excel_all.items(): + assert parsed.warnings == [], f"{cat} produced warnings: {parsed.warnings}" + + def test_all_component_column_indices_gte_11(self, parsed_excel_all: dict): + for cat, parsed in parsed_excel_all.items(): + for row in parsed.rows: + for comp in row.components: + assert comp.column_index >= 11, ( + f"{cat} row {row.row_index}: component column_index={comp.column_index}" + ) + + def test_all_part_names_lowercase(self, parsed_excel_all: dict): + """The parser normalises filenames to lowercase.""" + for cat, parsed in parsed_excel_all.items(): + for row in parsed.rows: + for comp in row.components: + if comp.part_name: + assert comp.part_name == comp.part_name.lower(), ( + f"{cat} row {row.row_index}: part_name not lowercase: {comp.part_name!r}" + ) + + def test_all_cad_model_names_lowercase(self, parsed_excel_all: dict): + for cat, parsed in parsed_excel_all.items(): + for row in parsed.rows: + if row.name_cad_modell: + assert row.name_cad_modell == row.name_cad_modell.lower(), ( + f"{cat} row {row.row_index}: name_cad_modell not lowercase: {row.name_cad_modell!r}" + ) + + def test_all_have_at_least_11_column_headers(self, parsed_excel_all: dict): + for cat, parsed in parsed_excel_all.items(): + assert len(parsed.column_headers) >= 11, ( + f"{cat}: only {len(parsed.column_headers)} column headers (expected >= 11)" + ) + + def test_serialised_dict_required_keys(self, parsed_excel_all: dict): + required = { + "filename", "category_key", "template_name", + "row_count", "column_headers", "rows", "warnings", + } + for cat, parsed in parsed_excel_all.items(): + d = parsed_excel_to_dict(parsed) + missing = required - d.keys() + assert not missing, f"{cat}: serialised dict missing keys: {missing}" + + def 
test_serialised_row_required_keys(self, parsed_excel_all: dict): + required = { + "row_index", "ebene1", "ebene2", "baureihe", "pim_id", + "produkt_baureihe", "gewaehltes_produkt", "name_cad_modell", + "gewuenschte_bildnummer", "lagertyp", "medias_rendering", "components", + } + for cat, parsed in parsed_excel_all.items(): + for row in parsed.rows: + d = parsed_row_to_dict(row) + missing = required - d.keys() + assert not missing, ( + f"{cat} row {row.row_index}: serialised row missing keys: {missing}" + ) + + def test_serialised_component_required_keys(self, parsed_excel_all: dict): + required = {"part_name", "material", "component_type", "column_index"} + for cat, parsed in parsed_excel_all.items(): + for row in parsed.rows: + for comp_d in parsed_row_to_dict(row)["components"]: + missing = required - comp_d.keys() + assert not missing, ( + f"{cat} row {row.row_index}: component dict missing keys: {missing}" + ) + + def test_serialised_row_count_matches(self, parsed_excel_all: dict): + for cat, parsed in parsed_excel_all.items(): + d = parsed_excel_to_dict(parsed) + assert d["row_count"] == len(d["rows"]) == len(parsed.rows) + + +# --------------------------------------------------------------------------- +# Internal helper unit tests +# --------------------------------------------------------------------------- + +class TestNormalizeFilename: + def test_lowercases_extension(self): + assert _normalize_filename("TEST.PRT") == "test.prt" + + def test_strips_leading_trailing_spaces(self): + assert _normalize_filename(" 81113-L_cut.stp ") == "81113-l_cut.stp" + + def test_none_returns_none(self): + assert _normalize_filename(None) is None + + def test_empty_string_returns_none(self): + # _normalize_filename("") returns "" which the _clean wrapper converts to None + # In the parser _normalize_filename wraps _clean, so empty → None + result = _normalize_filename("") + # The function strips and lowercases; empty string stays empty (falsy) + assert result == "" or result is None + + +class TestToBool: + @pytest.mark.parametrize("val,expected", [ + (1, True), + (0, False), + (True, True), + (False, False), + ("1", True), + ("0", False), + ("ja", True), + ("Ja", True), + ("nein", False), + ("Nein", False), + ("yes", True), + ("no", False), + ("x", True), + ("", False), + (None, None), + ]) + def test_to_bool_parametrize(self, val, expected): + assert _to_bool(val) == expected + + def test_medias_rendering_is_bool_or_none(self, parsed_excel_all: dict): + for cat, parsed in parsed_excel_all.items(): + for row in parsed.rows: + assert row.medias_rendering in (True, False, None), ( + f"{cat} row {row.row_index}: unexpected medias_rendering={row.medias_rendering!r}" + ) + + +# --------------------------------------------------------------------------- +# Error handling +# --------------------------------------------------------------------------- + +class TestParseExcelErrors: + def test_nonexistent_file_raises(self, tmp_path: Path): + with pytest.raises(ValueError, match="Cannot open Excel file"): + parse_excel(tmp_path / "does_not_exist.xlsx") + + def test_too_few_rows_raises(self, tmp_path: Path): + import openpyxl + wb = openpyxl.Workbook() + ws = wb.active + ws.append(["only one row"]) + ws.append(["only two rows"]) + path = tmp_path / "short.xlsx" + wb.save(path) + with pytest.raises(ValueError, match="fewer than 3 rows"): + parse_excel(path) + + def test_empty_data_rows_produces_warning(self, tmp_path: Path): + """A file with valid headers but zero data rows should warn, not raise.""" + import 
openpyxl + wb = openpyxl.Workbook() + ws = wb.active + ws.append(["Instructions row 1"]) + ws.append(["Instructions row 2"]) + ws.append([ + "Ebene1", "Ebene2", "Baureihe", "PIM", "Produkt", "SEP", + "Produkt", "Name", "Bildnr", "Lagertyp", "Medias", + ]) + # Intentionally no data rows + path = tmp_path / "no_data.xlsx" + wb.save(path) + + result = parse_excel(path) + assert result.rows == [] + assert len(result.warnings) > 0 + + def test_parse_accepts_pathlib_path(self, excel_paths: dict): + """parse_excel should accept a Path object, not just a string.""" + path = excel_paths["TRB"] + assert isinstance(path, Path) + result = parse_excel(path) + assert result.category_key == "TRB" + + def test_parse_accepts_string_path(self, excel_paths: dict): + """parse_excel should also accept a plain string path.""" + result = parse_excel(str(excel_paths["CRB"])) + assert result.category_key == "CRB" diff --git a/blender-renderer/Dockerfile b/blender-renderer/Dockerfile new file mode 100644 index 0000000..6b52a56 --- /dev/null +++ b/blender-renderer/Dockerfile @@ -0,0 +1,47 @@ +FROM ubuntu:22.04 + +ENV DEBIAN_FRONTEND=noninteractive +ENV PYTHONUNBUFFERED=1 +# OSMesa for headless cadquery/VTK (no display needed) +ENV PYOPENGL_PLATFORM=osmesa +ENV VTK_DEFAULT_EGL=0 + +# Runtime libraries for cadquery/VTK + Blender 5.x +RUN apt-get update && apt-get install -y \ + python3-pip \ + python3-dev \ + libxrender1 \ + libxi6 \ + libxkbcommon-x11-0 \ + libsm6 \ + libglib2.0-0 \ + libgl1-mesa-glx \ + libosmesa6 \ + libgomp1 \ + libxfixes3 \ + libxrandr2 \ + libxcursor1 \ + libxinerama1 \ + libwayland-client0 \ + libwayland-cursor0 \ + libwayland-egl1 \ + libvulkan1 \ + mesa-vulkan-drivers \ + libegl1 \ + libegl-mesa0 \ + libgbm1 \ + && rm -rf /var/lib/apt/lists/* + +# Blender 5.0.1 is mounted from the host at /opt/blender (see docker-compose.yml) +ENV BLENDER_BIN=/opt/blender/blender + +WORKDIR /app + +COPY requirements.txt . +RUN pip3 install --no-cache-dir -r requirements.txt + +COPY . . + +EXPOSE 8100 + +CMD ["python3", "-m", "uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8100"] diff --git a/blender-renderer/__pycache__/app.cpython-310.pyc b/blender-renderer/__pycache__/app.cpython-310.pyc new file mode 100644 index 0000000..e69e510 Binary files /dev/null and b/blender-renderer/__pycache__/app.cpython-310.pyc differ diff --git a/blender-renderer/app.py b/blender-renderer/app.py new file mode 100644 index 0000000..9325740 --- /dev/null +++ b/blender-renderer/app.py @@ -0,0 +1,581 @@ +""" +Blender renderer service — FastAPI microservice. + +Accepts a STEP file path (on shared uploads volume) and renders a thumbnail PNG +using the pipeline: STEP → STL (via cadquery) → PNG (via Blender headless). +""" +import asyncio +import json as _json_mod +import logging +import os +import signal +import shutil +import subprocess +import tempfile +import threading +import time +from pathlib import Path + +from fastapi import FastAPI, HTTPException +from pydantic import BaseModel + +logger = logging.getLogger(__name__) + +app = FastAPI(title="Blender Renderer", version="1.0.0") + +# Active render subprocesses keyed by job_id for cancellation support +_active_procs: dict[str, subprocess.Popen] = {} +_procs_lock = threading.Lock() + +# Limit concurrent Blender renders to avoid memory exhaustion from parallel threads +# (each thread loads cadquery/OCC, ~300-500 MB each). +# Resizable at runtime via POST /configure without restart. 
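+# Note: resizing swaps in a fresh Semaphore (see _set_max_concurrent below); renders
+# already in flight release the old semaphore, so the effective concurrency can
+# briefly exceed the new limit until they finish.
+# Illustrative resize call (host/port assumed from this service's Dockerfile, EXPOSE 8100):
+#   curl -X POST "http://localhost:8100/configure?max_concurrent=2"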
+_max_concurrent: int = 3 +_render_semaphore = threading.Semaphore(_max_concurrent) +_config_lock = threading.Lock() + + +def _set_max_concurrent(n: int) -> None: + """Replace the global semaphore with a new one sized to n. + + In-flight renders hold a reference to the old semaphore and will release it + normally; new renders pick up the new one. + """ + global _render_semaphore, _max_concurrent + with _config_lock: + _max_concurrent = n + _render_semaphore = threading.Semaphore(n) + + +class RenderRequest(BaseModel): + step_path: str + output_path: str + width: int = 512 + height: int = 512 + engine: str = "cycles" # "cycles" or "eevee" + samples: int = 256 + stl_quality: str = "low" # "low" or "high" + smooth_angle: int = 30 # degrees; 0 = shade_flat, >0 = shade_smooth_by_angle + cycles_device: str = "auto" # "auto", "gpu", or "cpu" + transparent_bg: bool = False # render with transparent background (PNG only) + part_colors: dict | None = None # optional {part_name: hex_color} + template_path: str | None = None # Path to .blend template file + target_collection: str = "Product" # Collection to import geometry into + material_library_path: str | None = None # Path to material library .blend + material_map: dict | None = None # {part_name: material_name} from Excel + part_names_ordered: list | None = None # ordered STEP part names for index matching + lighting_only: bool = False # use template World/HDRI only; force auto-camera + shadow_catcher: bool = False # enable Shadowcatcher collection + position plane at bbox min Z + rotation_x: float = 0.0 # Euler X rotation in degrees (applied to imported STL) + rotation_y: float = 0.0 # Euler Y rotation in degrees + rotation_z: float = 0.0 # Euler Z rotation in degrees + job_id: str | None = None # Optional ID for cancellation tracking + noise_threshold: str = "" # Adaptive sampling noise threshold (empty = Blender default) + denoiser: str = "" # "OPTIX" | "OPENIMAGEDENOISE" (empty = auto) + denoising_input_passes: str = "" # "RGB" | "RGB_ALBEDO" | "RGB_ALBEDO_NORMAL" + denoising_prefilter: str = "" # "NONE" | "FAST" | "ACCURATE" + denoising_quality: str = "" # "HIGH" | "BALANCED" | "FAST" (Blender 4.2+) + denoising_use_gpu: str = "" # "1" = GPU, "0" = CPU, "" = auto + + +def _find_blender() -> str: + """Locate the Blender binary: prefer $BLENDER_BIN, then PATH.""" + import os, shutil + env_bin = os.environ.get("BLENDER_BIN", "") + if env_bin and Path(env_bin).exists(): + return env_bin + return shutil.which("blender") or "blender" + + +@app.get("/health") +async def health(): + blender_bin = _find_blender() + version = "unknown" + try: + result = subprocess.run( + [blender_bin, "--version"], capture_output=True, text=True, timeout=10 + ) + first_line = (result.stdout or result.stderr or "").splitlines() + version = first_line[0].strip() if first_line else "unknown" + except Exception: + pass + return { + "status": "ok", + "renderer": "blender", + "blender_path": blender_bin, + "blender_version": version, + } + + +class ConvertStlRequest(BaseModel): + step_path: str + quality: str = "low" # "low" or "high" + + +@app.post("/convert-stl") +async def convert_stl(req: ConvertStlRequest): + """Convert a STEP file to STL and cache it — no Blender render.""" + if req.quality not in ("low", "high"): + raise HTTPException(400, detail="quality must be 'low' or 'high'") + + step_path = Path(req.step_path) + if not step_path.exists(): + raise HTTPException(404, detail=f"STEP file not found: {step_path}") + + stl_path = step_path.parent / 
f"{step_path.stem}_{req.quality}.stl" + parts_dir = step_path.parent / f"{step_path.stem}_{req.quality}_parts" + + t0 = time.monotonic() + try: + if not stl_path.exists() or stl_path.stat().st_size == 0: + await asyncio.to_thread(_convert_step_to_stl, step_path, stl_path, req.quality) + logger.info("STL generated: %s (%d KB)", stl_path.name, stl_path.stat().st_size // 1024) + else: + logger.info("STL cache hit: %s (%d KB)", stl_path.name, stl_path.stat().st_size // 1024) + except Exception as e: + logger.error("STEP→STL conversion failed: %s", e) + raise HTTPException(500, detail=f"STEP conversion failed: {e}") + + try: + if not (parts_dir / "manifest.json").exists(): + await asyncio.to_thread(_export_per_part_stls, step_path, parts_dir, req.quality) + except Exception as e: + logger.warning("per-part STL export failed (non-fatal): %s", e) + + return { + "stl_path": str(stl_path), + "size_bytes": stl_path.stat().st_size if stl_path.exists() else 0, + "duration_s": round(time.monotonic() - t0, 2), + } + + +@app.post("/cancel/{job_id}") +async def cancel_render(job_id: str): + """Kill the Blender subprocess for a running job (best-effort).""" + with _procs_lock: + proc = _active_procs.pop(job_id, None) + if proc is None: + return {"status": "not_found", "job_id": job_id} + try: + pgid = os.getpgid(proc.pid) + os.killpg(pgid, signal.SIGTERM) + logger.info("Sent SIGTERM to process group %d for job %s", pgid, job_id) + except (ProcessLookupError, OSError): + pass # process already finished + return {"status": "cancelled", "job_id": job_id} + + +@app.get("/status") +async def status(): + """Return current render queue depth and concurrency setting.""" + with _procs_lock: + active = len(_active_procs) + with _config_lock: + current_max = _max_concurrent + return {"active_jobs": active, "max_concurrent": current_max} + + +@app.post("/configure") +async def configure(max_concurrent: int): + """Dynamically update the maximum number of concurrent Blender renders.""" + if not (1 <= max_concurrent <= 16): + from fastapi import HTTPException + raise HTTPException(400, detail="max_concurrent must be between 1 and 16") + _set_max_concurrent(max_concurrent) + logger.info("max_concurrent_renders updated to %d", max_concurrent) + return {"max_concurrent": max_concurrent} + + +@app.post("/render") +async def render(req: RenderRequest): + step_path = Path(req.step_path) + output_path = Path(req.output_path) + + if not step_path.exists(): + raise HTTPException(404, detail=f"STEP file not found: {step_path}") + + output_path.parent.mkdir(parents=True, exist_ok=True) + + t_start = time.monotonic() + + # Acquire render slot — blocks if 3 renders are already running. + # asyncio.to_thread is used so the semaphore acquire doesn't block the event loop. + acquired = await asyncio.to_thread(_render_semaphore.acquire) + + # 1. 
Get/create STL cache — persistent next to STEP file so re-renders skip conversion + stl_path = step_path.parent / f"{step_path.stem}_{req.stl_quality}.stl" + parts_dir = step_path.parent / f"{step_path.stem}_{req.stl_quality}_parts" + stl_size_bytes = 0 + t_stl_start = time.monotonic() + try: + if not stl_path.exists() or stl_path.stat().st_size == 0: + logger.info("STL cache miss — converting: %s", step_path.name) + _convert_step_to_stl(step_path, stl_path, req.stl_quality) + else: + logger.info("STL cache hit: %s (%d KB)", stl_path.name, stl_path.stat().st_size // 1024) + stl_size_bytes = stl_path.stat().st_size if stl_path.exists() else 0 + except Exception as e: + _render_semaphore.release() + logger.error(f"STEP→STL conversion failed: {e}") + raise HTTPException(500, detail=f"STEP conversion failed: {e}") + + # Per-part export (non-fatal — Blender falls back to combined STL) + try: + if not (parts_dir / "manifest.json").exists(): + _export_per_part_stls(step_path, parts_dir, req.stl_quality) + except Exception as e: + logger.warning("per-part STL export failed (non-fatal): %s", e) + stl_duration_s = round(time.monotonic() - t_stl_start, 2) + + # 2. Render STL → PNG via Blender + render_log_lines: list[str] = [] + parts_count = 0 + engine_used = req.engine + t_render_start = time.monotonic() + try: + render_log_lines, parts_count, engine_used = _render_stl_with_blender( + stl_path, output_path, req.width, req.height, + req.engine, req.samples, req.smooth_angle, req.cycles_device, + req.transparent_bg, + template_path=req.template_path, + target_collection=req.target_collection, + material_library_path=req.material_library_path, + material_map=req.material_map, + part_names_ordered=req.part_names_ordered, + lighting_only=req.lighting_only, + shadow_catcher=req.shadow_catcher, + rotation_x=req.rotation_x, + rotation_y=req.rotation_y, + rotation_z=req.rotation_z, + job_id=req.job_id, + noise_threshold=req.noise_threshold, + denoiser=req.denoiser, + denoising_input_passes=req.denoising_input_passes, + denoising_prefilter=req.denoising_prefilter, + denoising_quality=req.denoising_quality, + denoising_use_gpu=req.denoising_use_gpu, + ) + except Exception as e: + logger.error(f"Blender render failed: {e}") + raise HTTPException(500, detail=f"Blender render failed: {e}") + finally: + _render_semaphore.release() + # STL cache is persistent — do NOT delete stl_path or parts_dir + render_duration_s = round(time.monotonic() - t_render_start, 2) + + if not output_path.exists(): + raise HTTPException(500, detail="Render produced no output file") + + total_duration_s = round(time.monotonic() - t_start, 2) + output_size_bytes = output_path.stat().st_size + + return { + "output_path": str(output_path), + "status": "ok", + "renderer": "blender", + # Timing + "total_duration_s": total_duration_s, + "stl_duration_s": stl_duration_s, + "render_duration_s": render_duration_s, + # Mesh info + "stl_size_bytes": stl_size_bytes, + "output_size_bytes": output_size_bytes, + "parts_count": parts_count, + # Effective settings (engine may differ from requested if EEVEE fell back) + "engine_used": engine_used, + # Blender log lines (filtered to [blender_render] prefix lines) + "log_lines": render_log_lines, + } + + +def _convert_step_to_stl(step_path: Path, stl_path: Path, quality: str = "low") -> None: + """Convert STEP file to STL using cadquery. 
+ + quality="low" → tolerance=0.3, angularTolerance=0.3 (fast, coarser mesh) + quality="high" → tolerance=0.01, angularTolerance=0.02 (slower, finer mesh) + """ + import cadquery as cq + shape = cq.importers.importStep(str(step_path)) + if quality == "high": + cq.exporters.export(shape, str(stl_path), tolerance=0.01, angularTolerance=0.02) + else: + cq.exporters.export(shape, str(stl_path), tolerance=0.3, angularTolerance=0.3) + if not stl_path.exists() or stl_path.stat().st_size == 0: + raise RuntimeError("cadquery produced empty STL") + + +def _export_per_part_stls(step_path: Path, parts_dir: Path, quality: str = "low") -> list: + """Export one STL per named STEP leaf shape using OCP XCAF. + + Creates parts_dir with individual STL files and a manifest.json. + Returns the manifest list, or empty list on failure. + """ + tol = 0.01 if quality == "high" else 0.3 + angular_tol = 0.05 if quality == "high" else 0.3 + + try: + from OCP.STEPCAFControl import STEPCAFControl_Reader + from OCP.XCAFDoc import XCAFDoc_DocumentTool, XCAFDoc_ShapeTool + from OCP.TDataStd import TDataStd_Name + from OCP.TDF import TDF_Label as TDF_Label_cls, TDF_LabelSequence + from OCP.XCAFApp import XCAFApp_Application + from OCP.TDocStd import TDocStd_Document + from OCP.TCollection import TCollection_ExtendedString + from OCP.IFSelect import IFSelect_RetDone + import cadquery as cq + except ImportError as e: + logger.warning("per-part export skipped (import error): %s", e) + return [] + + app = XCAFApp_Application.GetApplication_s() + doc = TDocStd_Document(TCollection_ExtendedString("XmlOcaf")) + app.InitDocument(doc) + + reader = STEPCAFControl_Reader() + reader.SetNameMode(True) + status = reader.ReadFile(str(step_path)) + if status != IFSelect_RetDone: + logger.warning("XCAF reader failed with status %s", status) + return [] + + if not reader.Transfer(doc): + logger.warning("XCAF transfer failed") + return [] + + shape_tool = XCAFDoc_DocumentTool.ShapeTool_s(doc.Main()) + name_id = TDataStd_Name.GetID_s() + + leaves = [] + + def _get_label_name(label): + name_attr = TDataStd_Name() + if label.FindAttribute(name_id, name_attr): + return name_attr.Get().ToExtString() + return "" + + def _collect_leaves(label): + if XCAFDoc_ShapeTool.IsAssembly_s(label): + components = TDF_LabelSequence() + XCAFDoc_ShapeTool.GetComponents_s(label, components) + for i in range(1, components.Length() + 1): + comp_label = components.Value(i) + if XCAFDoc_ShapeTool.IsReference_s(comp_label): + ref_label = TDF_Label_cls() + XCAFDoc_ShapeTool.GetReferredShape_s(comp_label, ref_label) + comp_name = _get_label_name(comp_label) + ref_name = _get_label_name(ref_label) + # Prefer referred shape name — matches material_map keys + name = ref_name or comp_name + if XCAFDoc_ShapeTool.IsAssembly_s(ref_label): + _collect_leaves(ref_label) + elif XCAFDoc_ShapeTool.IsSimpleShape_s(ref_label): + # Use comp_label shape — includes instance transform (position) + shape = XCAFDoc_ShapeTool.GetShape_s(comp_label) + leaves.append((name or f"unnamed_{len(leaves)}", shape)) + else: + _collect_leaves(comp_label) + elif XCAFDoc_ShapeTool.IsSimpleShape_s(label): + name = _get_label_name(label) + shape = XCAFDoc_ShapeTool.GetShape_s(label) + leaves.append((name or f"unnamed_{len(leaves)}", shape)) + + top_labels = TDF_LabelSequence() + shape_tool.GetFreeShapes(top_labels) + for i in range(1, top_labels.Length() + 1): + _collect_leaves(top_labels.Value(i)) + + if not leaves: + logger.warning("no leaf shapes found via XCAF") + return [] + + 
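+    # Write one STL per leaf shape plus a manifest describing them. The Blender-side
+    # importer (_import_stl in blender_render.py) reads the "parts" list and each
+    # entry's "name" and "file". Illustrative manifest content (part names made up):
+    #   {"parts": [{"index": 0, "name": "inner_ring", "file": "00_inner_ring.stl"}]}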
parts_dir.mkdir(parents=True, exist_ok=True) + manifest = [] + + for idx, (name, shape) in enumerate(leaves): + safe_name = name.replace("/", "_").replace("\\", "_").replace(" ", "_") + filename = f"{idx:02d}_{safe_name}.stl" + filepath = str(parts_dir / filename) + + try: + import cadquery as cq + cq_shape = cq.Shape(shape) + cq_shape.exportStl(filepath, tolerance=tol, angularTolerance=angular_tol) + manifest.append({"index": idx, "name": name, "file": filename}) + except Exception as e: + logger.warning("failed to export part '%s': %s", name, e) + + manifest_path = parts_dir / "manifest.json" + with open(manifest_path, "w") as f: + _json_mod.dump({"parts": manifest}, f, indent=2) + + total_size = sum( + os.path.getsize(str(parts_dir / p["file"])) + for p in manifest + if (parts_dir / p["file"]).exists() + ) + logger.info("exported %d per-part STLs (%d KB) to %s", len(manifest), total_size // 1024, parts_dir) + return manifest + + +def _parse_blender_log(stdout: str) -> tuple[list[str], int]: + """Extract [blender_render] lines and parts count from Blender stdout.""" + lines = [] + parts_count = 0 + for line in (stdout or "").splitlines(): + stripped = line.strip() + if "[blender_render]" in stripped or "[blender_render" in stripped: + lines.append(stripped) + if "separated into" in stripped: + try: + parts_count = int(stripped.split("separated into")[1].split("part")[0].strip()) + except Exception: + pass + elif "imported" in stripped and "named parts" in stripped: + try: + parts_count = int(stripped.split("imported")[1].split("named")[0].strip()) + except Exception: + pass + elif stripped.startswith("Saved:") or stripped.startswith("Fra:"): + lines.append(stripped) + return lines, parts_count + + +def _render_stl_with_blender( + stl_path: Path, output_path: Path, width: int, height: int, + engine: str = "cycles", samples: int = 256, smooth_angle: int = 30, + cycles_device: str = "auto", transparent_bg: bool = False, + template_path: str | None = None, target_collection: str = "Product", + material_library_path: str | None = None, material_map: dict | None = None, + part_names_ordered: list | None = None, lighting_only: bool = False, + shadow_catcher: bool = False, + rotation_x: float = 0.0, rotation_y: float = 0.0, rotation_z: float = 0.0, + job_id: str | None = None, + noise_threshold: str = "", + denoiser: str = "", + denoising_input_passes: str = "", + denoising_prefilter: str = "", + denoising_quality: str = "", + denoising_use_gpu: str = "", +) -> tuple[list[str], int, str]: + """Render STL to PNG using Blender in background mode. + + Returns (log_lines, parts_count, engine_used). + Blender is launched in its own process group (start_new_session=True) so + that SIGTERM from a cancel request kills the entire Blender tree. 
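+
+    The positional argument list built by _build_cmd() is consumed index-by-index
+    after the '--' separator in blender_render.py, so both sides must stay in the
+    same order. Each Blender run is capped at 300 s via communicate(timeout=300);
+    on timeout the whole process group receives SIGTERM and the render fails.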
+ """ + import json as _json + blender_bin = _find_blender() + script_path = Path(__file__).parent / "blender_render.py" + + env = dict(os.environ) + if engine == "eevee": + env.update({ + "VK_ICD_FILENAMES": "/usr/share/vulkan/icd.d/lvp_icd.x86_64.json", + "LIBGL_ALWAYS_SOFTWARE": "1", + "MESA_GL_VERSION_OVERRIDE": "4.5", + "EGL_PLATFORM": "surfaceless", + }) + else: + env.update({ + "EGL_PLATFORM": "surfaceless", + }) + + def _build_cmd(eng: str) -> list: + return [ + blender_bin, + "--background", + "--python", str(script_path), + "--", + str(stl_path), + str(output_path), + str(width), + str(height), + eng, + str(samples), + str(smooth_angle), + cycles_device, + "1" if transparent_bg else "0", + template_path or "", + target_collection, + material_library_path or "", + _json.dumps(material_map) if material_map else "{}", + _json.dumps(part_names_ordered) if part_names_ordered else "[]", + "1" if lighting_only else "0", + "1" if shadow_catcher else "0", + str(rotation_x), + str(rotation_y), + str(rotation_z), + noise_threshold or "", + denoiser or "", + denoising_input_passes or "", + denoising_prefilter or "", + denoising_quality or "", + denoising_use_gpu or "", + ] + + def _run_blender(eng: str) -> subprocess.CompletedProcess: + """Launch Blender in an isolated process group and wait for completion.""" + cmd = _build_cmd(eng) + proc = subprocess.Popen( + cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + env=env, + start_new_session=True, # new process group → SIGTERM kills entire tree + ) + if job_id: + with _procs_lock: + _active_procs[job_id] = proc + try: + stdout, stderr = proc.communicate(timeout=300) + except subprocess.TimeoutExpired: + try: + os.killpg(os.getpgid(proc.pid), signal.SIGTERM) + except (ProcessLookupError, OSError): + pass + stdout, stderr = proc.communicate() + finally: + if job_id: + with _procs_lock: + _active_procs.pop(job_id, None) + return subprocess.CompletedProcess(cmd, proc.returncode, stdout, stderr) + + result = _run_blender(engine) + engine_used = engine + + # Log to uvicorn output + if result.stdout: + for line in result.stdout.splitlines(): + logger.info("[blender] %s", line) + if result.stderr: + for line in result.stderr.splitlines(): + logger.warning("[blender stderr] %s", line) + + # If EEVEE fails with a non-signal error, automatically retry with Cycles. + # A negative returncode means the process was killed by a signal (e.g. cancel) + # — do NOT retry in that case. 
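+    # (subprocess encodes signal termination as returncode == -signum, so the
+    # `> 0` check below also skips runs that were cancelled mid-render.)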
+ if result.returncode > 0 and engine == "eevee": + logger.warning( + "EEVEE render failed (exit %d) – retrying with Cycles (CPU).", + result.returncode, + ) + result = _run_blender("cycles") + engine_used = "cycles (eevee fallback)" + if result.stdout: + for line in result.stdout.splitlines(): + logger.info("[blender-cycles-fallback] %s", line) + if result.stderr: + for line in result.stderr.splitlines(): + logger.warning("[blender-cycles-fallback stderr] %s", line) + + if result.returncode != 0: + stdout_tail = result.stdout[-2000:] if result.stdout else "" + stderr_tail = result.stderr[-2000:] if result.stderr else "" + raise RuntimeError( + f"Blender exited {result.returncode}.\n" + f"STDOUT: {stdout_tail}\nSTDERR: {stderr_tail}" + ) + + log_lines, parts_count = _parse_blender_log(result.stdout) + return log_lines, parts_count, engine_used diff --git a/blender-renderer/blender_render.py b/blender-renderer/blender_render.py new file mode 100644 index 0000000..1b18b16 --- /dev/null +++ b/blender-renderer/blender_render.py @@ -0,0 +1,753 @@ +""" +Blender Python script for rendering an STL file to PNG. +Targets Blender 5.0+ (EEVEE / Cycles). + +Called by Blender: + blender --background --python blender_render.py -- \ + [engine] [samples] + +engine: "cycles" (default) | "eevee" + +Features: +- Disconnected mesh islands split into separate objects and painted with + palette colours (same 10-colour palette as the Three.js renderer). +- Bounding-box-aware camera: object fills ~85 % of the frame. +- Isometric-style angle (elevation 28°, azimuth 40°). +- Dynamic clip planes. +- Standard (non-Filmic) colour management → no grey tint. +- Schaeffler green top bar + model name label via Pillow post-processing. +""" +import sys +import os +import math +import bpy +from mathutils import Vector, Matrix + +# ── Colour palette (matches Three.js renderer) ─────────────────────────────── + +PALETTE_HEX = [ + "#4C9BE8", "#E85B4C", "#4CBE72", "#E8A84C", "#A04CE8", + "#4CD4E8", "#E84CA8", "#7EC850", "#E86B30", "#5088C8", +] + +def _srgb_to_linear(c: int) -> float: + """Convert 0-255 sRGB integer to linear float.""" + v = c / 255.0 + return v / 12.92 if v <= 0.04045 else ((v + 0.055) / 1.055) ** 2.4 + +def _hex_to_linear(hex_color: str) -> tuple: + """Return (r, g, b, 1.0) in Blender linear colour space.""" + h = hex_color.lstrip('#') + return ( + _srgb_to_linear(int(h[0:2], 16)), + _srgb_to_linear(int(h[2:4], 16)), + _srgb_to_linear(int(h[4:6], 16)), + 1.0, + ) + +PALETTE_LINEAR = [_hex_to_linear(h) for h in PALETTE_HEX] + +# ── Parse arguments ─────────────────────────────────────────────────────────── + +argv = sys.argv +if "--" in argv: + argv = argv[argv.index("--") + 1:] +else: + argv = [] + +if len(argv) < 4: + print("Usage: blender --background --python blender_render.py -- " + " [engine] [samples] [smooth_angle] [cycles_device] [transparent_bg]") + sys.exit(1) + +import json as _json + +stl_path = argv[0] +output_path = argv[1] +width = int(argv[2]) +height = int(argv[3]) +engine = argv[4].lower() if len(argv) > 4 else "cycles" +samples = int(argv[5]) if len(argv) > 5 else (64 if engine == "eevee" else 256) +smooth_angle = int(argv[6]) if len(argv) > 6 else 30 # degrees; 0 = flat shading +cycles_device = argv[7].lower() if len(argv) > 7 else "auto" # "auto", "gpu", "cpu" +transparent_bg = argv[8] == "1" if len(argv) > 8 else False +template_path = argv[9] if len(argv) > 9 and argv[9] else "" +target_collection = argv[10] if len(argv) > 10 else "Product" +material_library_path = argv[11] if len(argv) > 
11 and argv[11] else "" +material_map_raw = argv[12] if len(argv) > 12 else "{}" +try: + material_map = _json.loads(material_map_raw) if material_map_raw else {} +except _json.JSONDecodeError: + material_map = {} + +part_names_ordered_raw = argv[13] if len(argv) > 13 else "[]" +try: + part_names_ordered = _json.loads(part_names_ordered_raw) if part_names_ordered_raw else [] +except _json.JSONDecodeError: + part_names_ordered = [] + +lighting_only = argv[14] == "1" if len(argv) > 14 else False +shadow_catcher = argv[15] == "1" if len(argv) > 15 else False +rotation_x = float(argv[16]) if len(argv) > 16 else 0.0 +rotation_y = float(argv[17]) if len(argv) > 17 else 0.0 +rotation_z = float(argv[18]) if len(argv) > 18 else 0.0 +noise_threshold_arg = argv[19] if len(argv) > 19 else "" +denoiser_arg = argv[20] if len(argv) > 20 else "" +denoising_input_passes_arg = argv[21] if len(argv) > 21 else "" +denoising_prefilter_arg = argv[22] if len(argv) > 22 else "" +denoising_quality_arg = argv[23] if len(argv) > 23 else "" +denoising_use_gpu_arg = argv[24] if len(argv) > 24 else "" + +# Validate template path: if provided it MUST exist on disk. +# Fail loudly rather than silently rendering with factory settings. +if template_path and not os.path.isfile(template_path): + print(f"[blender_render] ERROR: template_path was provided but file not found: {template_path}") + print("[blender_render] Check that the blend-templates directory is on the shared volume.") + sys.exit(1) + +use_template = bool(template_path) + +print(f"[blender_render] engine={engine}, samples={samples}, size={width}x{height}, smooth_angle={smooth_angle}°, device={cycles_device}, transparent={transparent_bg}") +print(f"[blender_render] part_names_ordered: {len(part_names_ordered)} entries") +if use_template: + print(f"[blender_render] template={template_path}, collection={target_collection}, lighting_only={lighting_only}") +else: + print("[blender_render] no template — using factory settings (Mode A)") +if material_library_path: + print(f"[blender_render] material_library={material_library_path}, material_map keys={list(material_map.keys())}") + +# ── Helper: find or create collection by name ──────────────────────────────── + +def _ensure_collection(name: str): + """Return a collection by name, creating it if needed.""" + if name in bpy.data.collections: + return bpy.data.collections[name] + col = bpy.data.collections.new(name) + bpy.context.scene.collection.children.link(col) + return col + + +def _apply_smooth(part_obj, angle_deg): + """Apply smooth or flat shading to a mesh object.""" + bpy.context.view_layer.objects.active = part_obj + part_obj.select_set(True) + if angle_deg > 0: + try: + bpy.ops.object.shade_smooth_by_angle(angle=math.radians(angle_deg)) + except AttributeError: + bpy.ops.object.shade_smooth() + part_obj.data.use_auto_smooth = True + part_obj.data.auto_smooth_angle = math.radians(angle_deg) + else: + bpy.ops.object.shade_flat() + + +def _assign_palette_material(part_obj, index): + """Assign a palette colour material to a mesh part.""" + color = PALETTE_LINEAR[index % len(PALETTE_LINEAR)] + mat = bpy.data.materials.new(name=f"Part_{index}") + mat.use_nodes = True + bsdf = mat.node_tree.nodes.get("Principled BSDF") + if bsdf: + bsdf.inputs["Base Color"].default_value = color + bsdf.inputs["Metallic"].default_value = 0.35 + bsdf.inputs["Roughness"].default_value = 0.40 + try: + bsdf.inputs["Specular IOR Level"].default_value = 0.5 + except KeyError: + pass + part_obj.data.materials.clear() + 
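+    # clear() followed by the single append() below leaves the palette material in
+    # slot 0, which all faces of a freshly imported STL use by default.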
part_obj.data.materials.append(mat) + + +import re as _re + + +def _scale_mm_to_m(parts): + """Scale imported STL objects from mm to Blender metres (×0.001). + + STEP/STL coordinates are in mm; Blender's default unit is metres. + Without scaling a 50 mm part appears as 50 m inside Blender — way too large + relative to any template environment designed in metric units. + """ + if not parts: + return + bpy.ops.object.select_all(action='DESELECT') + for p in parts: + p.scale = (0.001, 0.001, 0.001) + p.location *= 0.001 + p.select_set(True) + bpy.context.view_layer.objects.active = parts[0] + bpy.ops.object.transform_apply(scale=True, location=False, rotation=False) + print(f"[blender_render] scaled {len(parts)} parts mm→m (×0.001)") + + +def _apply_rotation(parts, rx, ry, rz): + """Apply Euler rotation (degrees, XYZ order) to all parts around world origin. + + After _import_stl + _scale_mm_to_m the combined bbox center is at world origin, + so rotating around origin is equivalent to rotating around the assembly center. + """ + if not parts or (rx == 0.0 and ry == 0.0 and rz == 0.0): + return + from mathutils import Euler + rot_mat = Euler((math.radians(rx), math.radians(ry), math.radians(rz)), 'XYZ').to_matrix().to_4x4() + for p in parts: + p.matrix_world = rot_mat @ p.matrix_world + # Bake rotation into mesh data so camera bbox calculations see the rotated geometry + bpy.ops.object.select_all(action='DESELECT') + for p in parts: + p.select_set(True) + bpy.context.view_layer.objects.active = parts[0] + bpy.ops.object.transform_apply(location=False, rotation=True, scale=False) + print(f"[blender_render] applied rotation ({rx}°, {ry}°, {rz}°) to {len(parts)} parts") + + +def _import_stl(stl_file): + """Import STL into Blender, using per-part STLs if available. + + Checks for {stl_stem}_parts/manifest.json next to the STL file. + - Per-part mode: imports each part STL, names Blender object after STEP part name. + - Fallback: imports combined STL and splits by loose geometry. + + Returns list of Blender mesh objects, centred at origin. 
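+
+    Expected per-part layout next to the combined STL (written by the FastAPI
+    service's _export_per_part_stls; names shown here are illustrative):
+
+        <stl_stem>_parts/
+            manifest.json          # {"parts": [{"index", "name", "file"}, ...]}
+            00_<part_name>.stl
+            01_<part_name>.stl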
+ """ + stl_dir = os.path.dirname(stl_file) + stl_stem = os.path.splitext(os.path.basename(stl_file))[0] + parts_dir = os.path.join(stl_dir, stl_stem + "_parts") + manifest_path = os.path.join(parts_dir, "manifest.json") + + parts = [] + + if os.path.isfile(manifest_path): + # ── Per-part mode ──────────────────────────────────────────────── + try: + with open(manifest_path, "r") as f: + manifest = _json.loads(f.read()) + part_entries = manifest.get("parts", []) + except Exception as e: + print(f"[blender_render] WARNING: failed to read manifest: {e}") + part_entries = [] + + if part_entries: + for entry in part_entries: + part_file = os.path.join(parts_dir, entry["file"]) + part_name = entry["name"] + if not os.path.isfile(part_file): + print(f"[blender_render] WARNING: part STL missing: {part_file}") + continue + + bpy.ops.object.select_all(action='DESELECT') + bpy.ops.wm.stl_import(filepath=part_file) + imported = bpy.context.selected_objects + if imported: + obj = imported[0] + obj.name = part_name + if obj.data: + obj.data.name = part_name + parts.append(obj) + + if parts: + print(f"[blender_render] imported {len(parts)} named parts from per-part STLs") + + # ── Fallback: combined STL + separate by loose ─────────────────────── + if not parts: + bpy.ops.wm.stl_import(filepath=stl_file) + obj = bpy.context.selected_objects[0] if bpy.context.selected_objects else None + if obj is None: + print(f"ERROR: No objects imported from {stl_file}") + sys.exit(1) + + bpy.context.view_layer.objects.active = obj + bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY', center='BOUNDS') + obj.location = (0.0, 0.0, 0.0) + + bpy.ops.object.mode_set(mode='EDIT') + bpy.ops.mesh.separate(type='LOOSE') + bpy.ops.object.mode_set(mode='OBJECT') + + parts = list(bpy.context.selected_objects) + print(f"[blender_render] fallback: separated into {len(parts)} part(s)") + return parts + + # ── Centre per-part imports at origin (combined bbox) ──────────────── + all_corners = [] + for p in parts: + all_corners.extend(p.matrix_world @ Vector(c) for c in p.bound_box) + + if all_corners: + mins = Vector((min(v.x for v in all_corners), + min(v.y for v in all_corners), + min(v.z for v in all_corners))) + maxs = Vector((max(v.x for v in all_corners), + max(v.y for v in all_corners), + max(v.z for v in all_corners))) + center = (mins + maxs) * 0.5 + for p in parts: + p.location -= center + + return parts + + +def _resolve_part_name(index, part_obj): + """Get the STEP part name for a Blender part by index. + + With per-part import, part_obj.name IS the STEP name (possibly with + Blender .NNN suffix for duplicates). Strip that suffix for lookup. + Falls back to part_names_ordered index mapping for combined-STL mode. + """ + # Strip Blender auto-suffix (.001, .002, etc.) + base_name = _re.sub(r'\.\d{3}$', '', part_obj.name) + # If the base name looks like a real STEP part name (not generic "Cube" etc.), + # use it directly + if part_names_ordered and index < len(part_names_ordered): + return part_names_ordered[index] + return base_name + + +def _apply_material_library(parts, mat_lib_path, mat_map): + """Append materials from library .blend and assign to parts via material_map. + + With per-part STL import, Blender objects are named after STEP parts, + so matching is by name (stripping Blender .NNN suffix for duplicates). + Falls back to part_names_ordered index-based matching for combined-STL mode. + + mat_map: {part_name_lower: material_name} + Parts without a match keep their current material. 
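+
+    Illustrative mat_map entry (values taken from the sample Excel data; whether the
+    STEP leaf names actually match these keys depends on the CAD export):
+        {"tsx25d-g1-hj-gen.prt": "Stahl v2"}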
+ """ + if not mat_lib_path or not os.path.isfile(mat_lib_path): + print(f"[blender_render] material library not found: {mat_lib_path}") + return + + # Collect unique material names needed + needed = set(mat_map.values()) + if not needed: + return + + # Append materials from library + appended = {} + for mat_name in needed: + inner_path = f"{mat_lib_path}/Material/{mat_name}" + try: + bpy.ops.wm.append( + filepath=inner_path, + directory=f"{mat_lib_path}/Material/", + filename=mat_name, + link=False, + ) + if mat_name in bpy.data.materials: + appended[mat_name] = bpy.data.materials[mat_name] + print(f"[blender_render] appended material: {mat_name}") + else: + print(f"[blender_render] WARNING: material '{mat_name}' not found after append") + except Exception as exc: + print(f"[blender_render] WARNING: failed to append material '{mat_name}': {exc}") + + if not appended: + return + + # Assign materials to parts — primary: name-based (per-part STL mode), + # secondary: index-based via part_names_ordered (combined STL fallback) + assigned_count = 0 + for i, part in enumerate(parts): + # Try name-based matching first (strip Blender .NNN suffix) + base_name = _re.sub(r'\.\d{3}$', '', part.name) + part_key = base_name.lower().strip() + mat_name = mat_map.get(part_key) + + # Fall back to index-based matching via part_names_ordered + if not mat_name and part_names_ordered and i < len(part_names_ordered): + step_name = part_names_ordered[i] + part_key = step_name.lower().strip() + mat_name = mat_map.get(part_key) + + if mat_name and mat_name in appended: + part.data.materials.clear() + part.data.materials.append(appended[mat_name]) + assigned_count += 1 + print(f"[blender_render] assigned '{mat_name}' to part '{part.name}'") + + print(f"[blender_render] material assignment: {assigned_count}/{len(parts)} parts matched") + + +# ── SCENE SETUP ────────────────────────────────────────────────────────────── + +if use_template: + # ── MODE B: Template-based render ──────────────────────────────────────── + print(f"[blender_render] Opening template: {template_path}") + bpy.ops.wm.open_mainfile(filepath=template_path) + + # Find or create target collection + target_col = _ensure_collection(target_collection) + + # Import and split STL + parts = _import_stl(stl_path) + # Scale mm→m: STEP coords are mm, Blender default unit is metres + _scale_mm_to_m(parts) + # Apply render position rotation (before camera/bbox calculations) + _apply_rotation(parts, rotation_x, rotation_y, rotation_z) + + # Move imported parts into target collection + for part in parts: + # Remove from all existing collections + for col in list(part.users_collection): + col.objects.unlink(part) + target_col.objects.link(part) + + # Apply smooth shading + for part in parts: + _apply_smooth(part, smooth_angle) + + # Material assignment: library materials if available, otherwise palette + if material_library_path and material_map: + # Build lowercased material_map for matching + mat_map_lower = {k.lower(): v for k, v in material_map.items()} + _apply_material_library(parts, material_library_path, mat_map_lower) + # Parts not matched by library get palette fallback + for i, part in enumerate(parts): + if not part.data.materials or len(part.data.materials) == 0: + _assign_palette_material(part, i) + else: + for i, part in enumerate(parts): + _assign_palette_material(part, i) + + # ── Shadow catcher (Cycles only, template mode only) ───────────────────── + if shadow_catcher: + sc_col_name = "Shadowcatcher" + sc_obj_name = "Shadowcatcher" + # Enable 
the Shadowcatcher collection in all view layers + for vl in bpy.context.scene.view_layers: + def _enable_col_recursive(layer_col): + if layer_col.collection.name == sc_col_name: + layer_col.exclude = False + layer_col.collection.hide_render = False + layer_col.collection.hide_viewport = False + return True + for child in layer_col.children: + if _enable_col_recursive(child): + return True + return False + _enable_col_recursive(vl.layer_collection) + + sc_obj = bpy.data.objects.get(sc_obj_name) + if sc_obj: + # Calculate product bbox min Z (world space) + all_world_corners = [] + for part in parts: + for corner in part.bound_box: + all_world_corners.append((part.matrix_world @ Vector(corner)).z) + if all_world_corners: + sc_obj.location.z = min(all_world_corners) + print(f"[blender_render] shadow catcher enabled, plane Z={sc_obj.location.z:.4f}") + else: + print(f"[blender_render] WARNING: shadow catcher object '{sc_obj_name}' not found in template") + + # lighting_only: use template World/HDRI but force auto-camera UNLESS the shadow + # catcher is enabled — in that case the template camera is already positioned to + # show both the product and its shadow on the ground plane. + needs_auto_camera = (lighting_only and not shadow_catcher) or not bpy.context.scene.camera + if lighting_only and not shadow_catcher: + print("[blender_render] lighting_only mode: using template World/HDRI, forcing auto-camera") + elif needs_auto_camera: + print("[blender_render] WARNING: template has no camera — will create auto-camera") + + # Set very close near clip on template camera for mm-scale parts (now in metres) + if not needs_auto_camera and bpy.context.scene.camera: + bpy.context.scene.camera.data.clip_start = 0.001 + + print(f"[blender_render] template mode: {len(parts)} parts imported into collection '{target_collection}'") + +else: + # ── MODE A: Factory settings (original behavior) ───────────────────────── + needs_auto_camera = True + bpy.ops.wm.read_factory_settings(use_empty=True) + parts = _import_stl(stl_path) + # Scale mm→m: STEP coords are mm, Blender default unit is metres + _scale_mm_to_m(parts) + # Apply render position rotation (before camera/bbox calculations) + _apply_rotation(parts, rotation_x, rotation_y, rotation_z) + + for i, part in enumerate(parts): + _apply_smooth(part, smooth_angle) + _assign_palette_material(part, i) + + # Apply material library on top of palette colours (same logic as Mode B). + # material_library_path / material_map are parsed from argv even in Mode A + # but were previously never used here — that was the bug. 
+ if material_library_path and material_map: + mat_map_lower = {k.lower(): v for k, v in material_map.items()} + _apply_material_library(parts, material_library_path, mat_map_lower) + # Parts not matched by the library keep their palette material (already set above) + +if needs_auto_camera: + # ── Combined bounding box / bounding sphere ────────────────────────────── + all_corners = [] + for part in parts: + all_corners.extend(part.matrix_world @ Vector(c) for c in part.bound_box) + + bbox_min = Vector(( + min(v.x for v in all_corners), + min(v.y for v in all_corners), + min(v.z for v in all_corners), + )) + bbox_max = Vector(( + max(v.x for v in all_corners), + max(v.y for v in all_corners), + max(v.z for v in all_corners), + )) + + bbox_center = (bbox_min + bbox_max) * 0.5 + bbox_dims = bbox_max - bbox_min + bsphere_radius = max(bbox_dims.length * 0.5, 0.001) + + print(f"[blender_render] bbox_dims={tuple(round(d,4) for d in bbox_dims)}, " + f"bsphere_radius={bsphere_radius:.4f}, center={tuple(round(c,4) for c in bbox_center)}") + + # ── Lighting — only in Mode A (factory settings) ───────────────────────── + # In template mode the .blend file provides its own World/HDRI lighting. + # Adding auto-lights would overpower the template's intended look. + if not use_template: + light_dist = bsphere_radius * 6.0 + + bpy.ops.object.light_add(type='SUN', location=( + bbox_center.x + light_dist * 0.5, + bbox_center.y - light_dist * 0.35, + bbox_center.z + light_dist, + )) + sun = bpy.context.active_object + sun.data.energy = 4.0 + sun.rotation_euler = (math.radians(45), 0, math.radians(30)) + + bpy.ops.object.light_add(type='AREA', location=( + bbox_center.x - light_dist * 0.4, + bbox_center.y + light_dist * 0.4, + bbox_center.z + light_dist * 0.7, + )) + fill = bpy.context.active_object + fill.data.energy = max(800.0, bsphere_radius ** 2 * 2000.0) + fill.data.size = max(4.0, bsphere_radius * 4.0) + + # ── Camera ─────────────────────────────────────────────────────────────── + ELEVATION_DEG = 28.0 + AZIMUTH_DEG = 40.0 + LENS_MM = 50.0 + SENSOR_WIDTH_MM = 36.0 + FILL_FACTOR = 0.85 + + elevation_rad = math.radians(ELEVATION_DEG) + azimuth_rad = math.radians(AZIMUTH_DEG) + + cam_dir = Vector(( + math.cos(elevation_rad) * math.cos(azimuth_rad), + math.cos(elevation_rad) * math.sin(azimuth_rad), + math.sin(elevation_rad), + )).normalized() + + fov_h = math.atan(SENSOR_WIDTH_MM / (2.0 * LENS_MM)) + fov_v = math.atan(SENSOR_WIDTH_MM * (height / width) / (2.0 * LENS_MM)) + fov_used = min(fov_h, fov_v) + + dist = (bsphere_radius / math.tan(fov_used)) / FILL_FACTOR + dist = max(dist, bsphere_radius * 1.5) + print(f"[blender_render] camera dist={dist:.4f}, fov={math.degrees(fov_used):.2f}°") + + cam_location = bbox_center + cam_dir * dist + bpy.ops.object.camera_add(location=cam_location) + cam_obj = bpy.context.active_object + cam_obj.data.lens = LENS_MM + bpy.context.scene.camera = cam_obj + + look_dir = (bbox_center - cam_location).normalized() + up_world = Vector((0.0, 0.0, 1.0)) + right = look_dir.cross(up_world) + if right.length < 1e-6: + right = Vector((1.0, 0.0, 0.0)) + right.normalize() + cam_up = right.cross(look_dir).normalized() + + rot_mat = Matrix(( + ( right.x, right.y, right.z), + ( cam_up.x, cam_up.y, cam_up.z), + (-look_dir.x, -look_dir.y, -look_dir.z), + )).transposed() + cam_obj.rotation_euler = rot_mat.to_euler('XYZ') + + cam_obj.data.clip_start = max(dist * 0.001, 0.0001) + cam_obj.data.clip_end = dist + bsphere_radius * 3.0 + print(f"[blender_render] clip 
{cam_obj.data.clip_start:.6f} … {cam_obj.data.clip_end:.4f}") + + # ── World background — only in Mode A ──────────────────────────────────── + # In template mode the .blend file owns its World (HDRI, sky texture, studio + # lighting). Overwriting it would destroy the HDR look the template was + # designed to use (e.g. Alpha-HDR output types with Filmic tonemapping). + if not use_template: + world = bpy.data.worlds.new("World") + bpy.context.scene.world = world + world.use_nodes = True + bg = world.node_tree.nodes["Background"] + bg.inputs["Color"].default_value = (0.96, 0.96, 0.97, 1.0) + bg.inputs["Strength"].default_value = 0.15 + +# ── Render engine ───────────────────────────────────────────────────────────── +scene = bpy.context.scene + +if engine == "eevee": + # Blender 4.x used 'BLENDER_EEVEE_NEXT'; Blender 5.x reverted to 'BLENDER_EEVEE'. + # Try both names so the script works across versions. + set_ok = False + for eevee_id in ('BLENDER_EEVEE', 'BLENDER_EEVEE_NEXT'): + try: + scene.render.engine = eevee_id + set_ok = True + print(f"[blender_render] EEVEE engine id: {eevee_id}") + break + except TypeError: + continue + + if not set_ok: + print("[blender_render] WARNING: could not set EEVEE engine – falling back to Cycles") + engine = "cycles" + + if engine == "eevee": + # Sample attribute name changed across minor versions + for attr in ('taa_render_samples', 'samples'): + try: + setattr(scene.eevee, attr, samples) + print(f"[blender_render] EEVEE samples: scene.eevee.{attr}={samples}") + break + except AttributeError: + continue + +if engine != "eevee": # covers both explicit Cycles and EEVEE-fallback + scene.render.engine = 'CYCLES' + scene.cycles.samples = samples + scene.cycles.use_denoising = True + scene.cycles.denoiser = denoiser_arg if denoiser_arg else 'OPENIMAGEDENOISE' + if denoising_input_passes_arg: + try: scene.cycles.denoising_input_passes = denoising_input_passes_arg + except Exception: pass + if denoising_prefilter_arg: + try: scene.cycles.denoising_prefilter = denoising_prefilter_arg + except Exception: pass + if denoising_quality_arg: + try: scene.cycles.denoising_quality = denoising_quality_arg + except Exception: pass + if denoising_use_gpu_arg: + try: scene.cycles.denoising_use_gpu = (denoising_use_gpu_arg == "1") + except AttributeError: pass + if noise_threshold_arg: + scene.cycles.use_adaptive_sampling = True + scene.cycles.adaptive_threshold = float(noise_threshold_arg) + + # ── Device selection: "cpu" forces CPU, "gpu" forces GPU (fail if unavailable), + # "auto" tries GPU first and falls back to CPU. 
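+    # Probe order below: OPTIX → CUDA → HIP → ONEAPI; the first backend that
+    # reports a non-CPU device wins and all of its GPU devices are enabled.
+    # With cycles_device="cpu" the probe is skipped and Cycles stays on CPU.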
+ gpu_type_found = None + if cycles_device != "cpu": + try: + cycles_prefs = bpy.context.preferences.addons['cycles'].preferences + for device_type in ('OPTIX', 'CUDA', 'HIP', 'ONEAPI'): + try: + cycles_prefs.compute_device_type = device_type + cycles_prefs.get_devices() + gpu_devs = [d for d in cycles_prefs.devices if d.type != 'CPU'] + if gpu_devs: + for d in gpu_devs: + d.use = True + gpu_type_found = device_type + break + except Exception as e: + print(f"[blender_render] {device_type} not available: {e}") + except Exception as e: + print(f"[blender_render] GPU probe failed: {e}") + + if gpu_type_found: + scene.cycles.device = 'GPU' + print(f"[blender_render] Cycles GPU ({gpu_type_found}), samples={samples}") + else: + scene.cycles.device = 'CPU' + print(f"[blender_render] WARNING: GPU not found — falling back to CPU, samples={samples}") + +# ── Colour management ───────────────────────────────────────────────────────── +# In template mode the .blend file owns its colour management (e.g. Filmic/ +# AgX for HDR, custom exposure for Alpha-HDR output types). Overwriting it +# would destroy the look the template was designed for. +# In factory-settings mode (Mode A) force Standard to avoid the grey Filmic +# tint that Blender applies by default. +if not use_template: + scene.view_settings.view_transform = 'Standard' + scene.view_settings.exposure = 0.0 + scene.view_settings.gamma = 1.0 + try: + scene.view_settings.look = 'None' + except Exception: + pass + +# ── Render settings ─────────────────────────────────────────────────────────── +scene.render.resolution_x = width +scene.render.resolution_y = height +scene.render.resolution_percentage = 100 +scene.render.image_settings.file_format = 'PNG' +scene.render.filepath = output_path +scene.render.film_transparent = transparent_bg + +# ── Render ──────────────────────────────────────────────────────────────────── +print(f"[blender_render] Rendering → {output_path} (Blender {bpy.app.version_string})") +bpy.ops.render.render(write_still=True) +print("[blender_render] render done.") + +# ── Pillow post-processing: green bar + model name label ───────────────────── +# Skip overlay for transparent renders to keep clean alpha channel +if transparent_bg: + print("[blender_render] Transparent mode — skipping Pillow overlay.") +else: + try: + from PIL import Image, ImageDraw, ImageFont + + img = Image.open(output_path).convert("RGBA") + draw = ImageDraw.Draw(img) + W, H = img.size + + # Schaeffler green top bar + bar_h = max(8, H // 32) + draw.rectangle([0, 0, W - 1, bar_h - 1], fill=(0, 137, 61, 255)) + + # Model name strip at bottom + model_name = os.path.splitext(os.path.basename(stl_path))[0] + label_h = max(20, H // 20) + img.alpha_composite( + Image.new("RGBA", (W, label_h), (30, 30, 30, 180)), + dest=(0, H - label_h), + ) + + font_size = max(10, label_h - 6) + font = None + for fp in [ + "/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf", + "/usr/share/fonts/truetype/liberation/LiberationSans-Bold.ttf", + "/usr/share/fonts/truetype/freefont/FreeSansBold.ttf", + ]: + if os.path.exists(fp): + try: + font = ImageFont.truetype(fp, font_size) + break + except Exception: + pass + if font is None: + font = ImageFont.load_default() + + tb = draw.textbbox((0, 0), model_name, font=font) + text_w = tb[2] - tb[0] + draw.text( + ((W - text_w) // 2, H - label_h + (label_h - (tb[3] - tb[1])) // 2), + model_name, font=font, fill=(255, 255, 255, 255), + ) + + img.convert("RGB").save(output_path, format="PNG") + print(f"[blender_render] Pillow overlay 
applied.") + + except ImportError: + print("[blender_render] Pillow not in Blender Python – skipping overlay.") + except Exception as exc: + print(f"[blender_render] Pillow overlay failed (non-fatal): {exc}") + +print("[blender_render] Done.") diff --git a/blender-renderer/requirements.txt b/blender-renderer/requirements.txt new file mode 100644 index 0000000..8204681 --- /dev/null +++ b/blender-renderer/requirements.txt @@ -0,0 +1,4 @@ +fastapi>=0.110.0 +uvicorn[standard]>=0.27.0 +cadquery>=2.4.0 +pillow>=10.2.0 diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..da66819 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,223 @@ +services: + postgres: + image: postgres:16-alpine + environment: + POSTGRES_DB: ${POSTGRES_DB:-schaeffler} + POSTGRES_USER: ${POSTGRES_USER:-schaeffler} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-schaeffler} + volumes: + - pgdata:/var/lib/postgresql/data + ports: + - "5432:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-schaeffler}"] + interval: 5s + timeout: 5s + retries: 5 + + redis: + image: redis:7-alpine + ports: + - "6379:6379" + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 5s + timeout: 5s + retries: 5 + + backend: + build: + context: ./backend + dockerfile: Dockerfile + command: /start.sh + environment: + - POSTGRES_DB=${POSTGRES_DB:-schaeffler} + - POSTGRES_USER=${POSTGRES_USER:-schaeffler} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-schaeffler} + - POSTGRES_HOST=postgres + - POSTGRES_PORT=5432 + - REDIS_URL=${REDIS_URL:-redis://redis:6379/0} + - JWT_SECRET_KEY=${JWT_SECRET_KEY:-changeme-in-production} + - JWT_ALGORITHM=${JWT_ALGORITHM:-HS256} + - JWT_ACCESS_TOKEN_EXPIRE_MINUTES=${JWT_ACCESS_TOKEN_EXPIRE_MINUTES:-480} + - AZURE_OPENAI_API_KEY=${AZURE_OPENAI_API_KEY:-} + - AZURE_OPENAI_ENDPOINT=${AZURE_OPENAI_ENDPOINT:-} + - AZURE_OPENAI_DEPLOYMENT=${AZURE_OPENAI_DEPLOYMENT:-gpt-4o} + - AZURE_OPENAI_API_VERSION=${AZURE_OPENAI_API_VERSION:-2024-02-01} + - UPLOAD_DIR=/app/uploads + - MAX_UPLOAD_SIZE_MB=${MAX_UPLOAD_SIZE_MB:-500} + volumes: + - ./backend:/app + - uploads:/app/uploads + - /var/run/docker.sock:/var/run/docker.sock + ports: + - "8888:8888" + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_healthy + + worker: + build: + context: ./backend + dockerfile: Dockerfile + command: celery -A app.tasks.celery_app worker --loglevel=info -Q step_processing,ai_validation --concurrency=${CELERY_WORKER_CONCURRENCY:-8} + environment: + - POSTGRES_DB=${POSTGRES_DB:-schaeffler} + - POSTGRES_USER=${POSTGRES_USER:-schaeffler} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-schaeffler} + - POSTGRES_HOST=postgres + - POSTGRES_PORT=5432 + - REDIS_URL=${REDIS_URL:-redis://redis:6379/0} + - JWT_SECRET_KEY=${JWT_SECRET_KEY:-changeme-in-production} + - AZURE_OPENAI_API_KEY=${AZURE_OPENAI_API_KEY:-} + - AZURE_OPENAI_ENDPOINT=${AZURE_OPENAI_ENDPOINT:-} + - AZURE_OPENAI_DEPLOYMENT=${AZURE_OPENAI_DEPLOYMENT:-gpt-4o} + - AZURE_OPENAI_API_VERSION=${AZURE_OPENAI_API_VERSION:-2024-02-01} + - UPLOAD_DIR=/app/uploads + - CELERY_WORKER_CONCURRENCY=${CELERY_WORKER_CONCURRENCY:-8} + volumes: + - ./backend:/app + - uploads:/app/uploads + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_healthy + + worker-thumbnail: + build: + context: ./backend + dockerfile: Dockerfile + command: celery -A app.tasks.celery_app worker --loglevel=info -Q thumbnail_rendering --concurrency=1 + environment: + - POSTGRES_DB=${POSTGRES_DB:-schaeffler} + - 
POSTGRES_USER=${POSTGRES_USER:-schaeffler} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-schaeffler} + - POSTGRES_HOST=postgres + - POSTGRES_PORT=5432 + - REDIS_URL=${REDIS_URL:-redis://redis:6379/0} + - JWT_SECRET_KEY=${JWT_SECRET_KEY:-changeme-in-production} + - AZURE_OPENAI_API_KEY=${AZURE_OPENAI_API_KEY:-} + - AZURE_OPENAI_ENDPOINT=${AZURE_OPENAI_ENDPOINT:-} + - AZURE_OPENAI_DEPLOYMENT=${AZURE_OPENAI_DEPLOYMENT:-gpt-4o} + - AZURE_OPENAI_API_VERSION=${AZURE_OPENAI_API_VERSION:-2024-02-01} + - UPLOAD_DIR=/app/uploads + volumes: + - ./backend:/app + - uploads:/app/uploads + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_healthy + + beat: + build: + context: ./backend + dockerfile: Dockerfile + command: celery -A app.tasks.celery_app beat --loglevel=info + environment: + - POSTGRES_DB=${POSTGRES_DB:-schaeffler} + - POSTGRES_USER=${POSTGRES_USER:-schaeffler} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-schaeffler} + - POSTGRES_HOST=postgres + - POSTGRES_PORT=5432 + - REDIS_URL=${REDIS_URL:-redis://redis:6379/0} + - JWT_SECRET_KEY=${JWT_SECRET_KEY:-changeme-in-production} + - AZURE_OPENAI_API_KEY=${AZURE_OPENAI_API_KEY:-} + - AZURE_OPENAI_ENDPOINT=${AZURE_OPENAI_ENDPOINT:-} + - AZURE_OPENAI_DEPLOYMENT=${AZURE_OPENAI_DEPLOYMENT:-gpt-4o} + - AZURE_OPENAI_API_VERSION=${AZURE_OPENAI_API_VERSION:-2024-02-01} + - UPLOAD_DIR=/app/uploads + volumes: + - ./backend:/app + - uploads:/app/uploads + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_healthy + + blender-renderer: + build: + context: ./blender-renderer + dockerfile: Dockerfile + ports: + - "8100:8100" + volumes: + - uploads:/app/uploads + - ./blender-renderer:/app + - /opt/blender:/opt/blender:ro + restart: unless-stopped + deploy: + resources: + reservations: + devices: + - driver: nvidia + count: 1 + capabilities: [gpu, compute, utility, graphics] + + threejs-renderer: + build: + context: ./threejs-renderer + dockerfile: Dockerfile + ports: + - "8101:8101" + volumes: + - uploads:/app/uploads + - ./threejs-renderer:/app + restart: unless-stopped + + flamenco-manager: + build: ./flamenco + environment: + - FLAMENCO_MODE=manager + ports: + - "8080:8080" + volumes: + - uploads:/shared + - flamenco-data:/data + - ./flamenco/scripts:/opt/flamenco/scripts + restart: unless-stopped + + flamenco-worker: + build: ./flamenco + environment: + - FLAMENCO_MODE=worker + - FLAMENCO_MANAGER_URL=http://flamenco-manager:8080 + volumes: + - uploads:/shared + - /opt/blender:/opt/blender:ro + - ./flamenco/scripts:/opt/flamenco/scripts + depends_on: + - flamenco-manager + deploy: + replicas: 1 + resources: + reservations: + devices: + - driver: nvidia + count: 1 + capabilities: [gpu, compute, utility, graphics] + restart: unless-stopped + + frontend: + build: + context: ./frontend + dockerfile: Dockerfile + ports: + - "5173:5173" + environment: + - VITE_API_URL=http://backend:8888 + volumes: + - ./frontend:/app + - /app/node_modules + depends_on: + - backend + +volumes: + pgdata: + uploads: + flamenco-data: diff --git a/flamenco/Dockerfile b/flamenco/Dockerfile new file mode 100644 index 0000000..329dfd0 --- /dev/null +++ b/flamenco/Dockerfile @@ -0,0 +1,37 @@ +FROM ubuntu:24.04 + +ENV DEBIAN_FRONTEND=noninteractive +ENV FLAMENCO_VERSION=3.8 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + wget ca-certificates ffmpeg python3 python3-pip python3-venv \ + libgl1 libglib2.0-0 libxrender1 libsm6 libxext6 \ + libegl1 libgles2 \ + && rm -rf /var/lib/apt/lists/* + +# Install 
Flamenco binary +RUN mkdir -p /opt/flamenco && \ + wget -qO /tmp/flamenco.tar.gz \ + "https://flamenco.blender.org/downloads/flamenco-${FLAMENCO_VERSION}-linux-amd64.tar.gz" && \ + tar -xzf /tmp/flamenco.tar.gz -C /opt/flamenco --strip-components=1 && \ + rm /tmp/flamenco.tar.gz + +# Install cadquery for STEP→STL conversion +RUN python3 -m venv /opt/venv && \ + /opt/venv/bin/pip install --no-cache-dir cadquery + +WORKDIR /opt/flamenco + +COPY entrypoint.sh /opt/flamenco/entrypoint.sh +COPY manager-config.yaml /opt/flamenco/flamenco-manager.yaml +COPY worker-config.yaml /opt/flamenco/flamenco-worker.yaml +COPY scripts/ /opt/flamenco/scripts/ + +RUN chmod +x /opt/flamenco/entrypoint.sh + +# Shared storage for render outputs +VOLUME ["/shared", "/data"] + +EXPOSE 8080 + +ENTRYPOINT ["/opt/flamenco/entrypoint.sh"] diff --git a/flamenco/entrypoint.sh b/flamenco/entrypoint.sh new file mode 100644 index 0000000..cf382a8 --- /dev/null +++ b/flamenco/entrypoint.sh @@ -0,0 +1,18 @@ +#!/bin/bash +set -e + +FLAMENCO_MODE="${FLAMENCO_MODE:-manager}" + +if [ "$FLAMENCO_MODE" = "manager" ]; then + echo "Starting Flamenco Manager..." + # Flamenco 3.x reads flamenco-manager.yaml from the working directory + exec /opt/flamenco/flamenco-manager +elif [ "$FLAMENCO_MODE" = "worker" ]; then + echo "Starting Flamenco Worker..." + echo "Manager URL: ${FLAMENCO_MANAGER_URL:-http://flamenco-manager:8080}" + # Flamenco 3.x reads flamenco-worker.yaml from the working directory + exec /opt/flamenco/flamenco-worker +else + echo "ERROR: Unknown FLAMENCO_MODE='${FLAMENCO_MODE}'. Use 'manager' or 'worker'." + exit 1 +fi diff --git a/flamenco/manager-config.yaml b/flamenco/manager-config.yaml new file mode 100644 index 0000000..6b4e4ec --- /dev/null +++ b/flamenco/manager-config.yaml @@ -0,0 +1,29 @@ +_meta: + version: 3 + +manager_name: Schaeffler Render Farm +database: /data/flamenco-manager.sqlite +listen: :8080 +autodiscoverable: true + +# Storage +local_manager_storage_path: /data/manager-storage +shared_storage_path: /shared + +shaman: + enabled: false + +# Timeouts +task_timeout: 30m +worker_timeout: 1m + +# Variables available to job scripts +variables: + blender: + values: + - platform: linux + value: /opt/blender/blender + python: + values: + - platform: linux + value: /opt/venv/bin/python3 diff --git a/flamenco/scripts/convert_step.py b/flamenco/scripts/convert_step.py new file mode 100644 index 0000000..d830ad7 --- /dev/null +++ b/flamenco/scripts/convert_step.py @@ -0,0 +1,216 @@ +"""STEP to STL converter for Flamenco tasks. + +Usage: python convert_step.py {step_path} {stl_path} {quality} + quality: 'low' or 'high' + +Produces: + - Combined STL at {stl_path} (for fallback) + - Per-part STLs in {stl_stem}_parts/ with manifest.json +""" +import sys +import os +import json +import time + + +def _export_per_part_stls(step_path, parts_dir, quality): + """Export one STL per named STEP leaf shape using OCP XCAF. + + Creates parts_dir with individual STL files and a manifest.json: + {"parts": [{"index": 0, "name": "PartName", "file": "00_PartName.stl"}, ...]} + + Returns the manifest list, or empty list on failure.
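+
+    Mesh quality mapping (mirrors the tolerances set just below):
+        'high' → tolerance=0.01, angularTolerance=0.05
+        'low'  → tolerance=0.3,  angularTolerance=0.3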
+ """ + tol = 0.01 if quality == "high" else 0.3 + angular_tol = 0.05 if quality == "high" else 0.3 + + try: + from OCP.STEPCAFControl import STEPCAFControl_Reader + from OCP.XCAFDoc import XCAFDoc_DocumentTool, XCAFDoc_ShapeTool + from OCP.TDataStd import TDataStd_Name + from OCP.TDF import TDF_Label as TDF_Label_cls, TDF_LabelSequence + from OCP.XCAFApp import XCAFApp_Application + from OCP.TDocStd import TDocStd_Document + from OCP.TCollection import TCollection_ExtendedString + from OCP.IFSelect import IFSelect_RetDone + import cadquery as cq + except ImportError as e: + print(f"[convert_step] per-part export skipped (import error): {e}") + return [] + + # Read STEP with XCAF + app = XCAFApp_Application.GetApplication_s() + doc = TDocStd_Document(TCollection_ExtendedString("XmlOcaf")) + app.InitDocument(doc) + + reader = STEPCAFControl_Reader() + reader.SetNameMode(True) + status = reader.ReadFile(str(step_path)) + if status != IFSelect_RetDone: + print(f"[convert_step] XCAF reader failed with status {status}") + return [] + + if not reader.Transfer(doc): + print("[convert_step] XCAF transfer failed") + return [] + + shape_tool = XCAFDoc_DocumentTool.ShapeTool_s(doc.Main()) + name_id = TDataStd_Name.GetID_s() + + # Recursively collect leaf shapes with their names + leaves = [] # list of (name, TopoDS_Shape) + + def _get_label_name(label): + """Extract name string from a TDF_Label.""" + name_attr = TDataStd_Name() + if label.FindAttribute(name_id, name_attr): + return name_attr.Get().ToExtString() + return "" + + def _collect_leaves(label): + """Recursively collect leaf (simple shape) labels.""" + if XCAFDoc_ShapeTool.IsAssembly_s(label): + # Get components of this assembly + components = TDF_LabelSequence() + XCAFDoc_ShapeTool.GetComponents_s(label, components) + for i in range(1, components.Length() + 1): + comp_label = components.Value(i) + if XCAFDoc_ShapeTool.IsReference_s(comp_label): + ref_label = TDF_Label_cls() + XCAFDoc_ShapeTool.GetReferredShape_s(comp_label, ref_label) + # Use the component name (instance name), fall back to referred shape name + comp_name = _get_label_name(comp_label) + ref_name = _get_label_name(ref_label) + # Prefer referred shape name — matches material_map keys + name = ref_name or comp_name + if XCAFDoc_ShapeTool.IsAssembly_s(ref_label): + _collect_leaves(ref_label) + elif XCAFDoc_ShapeTool.IsSimpleShape_s(ref_label): + # Use comp_label shape — includes instance transform (position) + shape = XCAFDoc_ShapeTool.GetShape_s(comp_label) + leaves.append((name or f"unnamed_{len(leaves)}", shape)) + else: + _collect_leaves(comp_label) + elif XCAFDoc_ShapeTool.IsSimpleShape_s(label): + name = _get_label_name(label) + shape = XCAFDoc_ShapeTool.GetShape_s(label) + leaves.append((name or f"unnamed_{len(leaves)}", shape)) + + # Get top-level free shapes + top_labels = TDF_LabelSequence() + shape_tool.GetFreeShapes(top_labels) + for i in range(1, top_labels.Length() + 1): + _collect_leaves(top_labels.Value(i)) + + if not leaves: + print("[convert_step] no leaf shapes found via XCAF") + return [] + + # Export each leaf shape as individual STL + os.makedirs(parts_dir, exist_ok=True) + manifest = [] + + for idx, (name, shape) in enumerate(leaves): + # Sanitize filename: replace problematic chars + safe_name = name.replace("/", "_").replace("\\", "_").replace(" ", "_") + filename = f"{idx:02d}_{safe_name}.stl" + filepath = os.path.join(parts_dir, filename) + + try: + cq_shape = cq.Shape(shape) + cq_shape.exportStl(filepath, tolerance=tol, angularTolerance=angular_tol) 
+ manifest.append({"index": idx, "name": name, "file": filename}) + except Exception as e: + print(f"[convert_step] WARNING: failed to export part '{name}': {e}") + + # Write manifest + manifest_path = os.path.join(parts_dir, "manifest.json") + with open(manifest_path, "w") as f: + json.dump({"parts": manifest}, f, indent=2) + + total_size = sum( + os.path.getsize(os.path.join(parts_dir, p["file"])) + for p in manifest + if os.path.exists(os.path.join(parts_dir, p["file"])) + ) + print(f"[convert_step] exported {len(manifest)} per-part STLs " + f"({total_size / 1024:.0f} KB total) to {parts_dir}") + + return manifest + + +def main(): + if len(sys.argv) < 4: + print("Usage: convert_step.py ") + sys.exit(1) + + step_path = sys.argv[1] + stl_path = sys.argv[2] + quality = sys.argv[3] + + if not os.path.isfile(step_path): + print(f"ERROR: STEP file not found: {step_path}") + sys.exit(1) + + os.makedirs(os.path.dirname(stl_path), exist_ok=True) + + # Cache hit: skip re-conversion if STL already exists and is non-empty + if os.path.isfile(stl_path) and os.path.getsize(stl_path) > 0: + size_kb = os.path.getsize(stl_path) / 1024 + print(f"[convert_step] Cache hit: {stl_path} ({size_kb:.0f} KB) — skipping STEP conversion") + stl_stem = os.path.splitext(stl_path)[0] + parts_dir = stl_stem + "_parts" + manifest_path = os.path.join(parts_dir, "manifest.json") + if not os.path.isfile(manifest_path): + print("[convert_step] Per-part STLs missing — exporting from STEP") + t1 = time.time() + try: + manifest = _export_per_part_stls(step_path, parts_dir, quality) + if manifest: + print(f"[convert_step] per-part export took {time.time() - t1:.1f}s") + else: + print("[convert_step] per-part export empty — combined STL only") + except Exception as e: + print(f"[convert_step] per-part export failed (non-fatal): {e}") + else: + print(f"[convert_step] Per-part STLs exist: {parts_dir}") + return + + print(f"Converting STEP -> STL: {step_path}") + print(f"Quality: {quality}") + t0 = time.time() + + import cadquery as cq + + tol = 0.01 if quality == "high" else 0.3 + angular_tol = 0.05 if quality == "high" else 0.3 + + result = cq.importers.importStep(step_path) + cq.exporters.export( + result, + stl_path, + exportType="STL", + tolerance=tol, + angularTolerance=angular_tol, + ) + + elapsed = time.time() - t0 + size_kb = os.path.getsize(stl_path) / 1024 + print(f"STL written: {stl_path} ({size_kb:.0f} KB, {elapsed:.1f}s)") + + # Export per-part STLs alongside the combined STL (non-fatal) + stl_stem = os.path.splitext(stl_path)[0] + parts_dir = stl_stem + "_parts" + t1 = time.time() + try: + manifest = _export_per_part_stls(step_path, parts_dir, quality) + if manifest: + print(f"[convert_step] per-part export took {time.time() - t1:.1f}s") + else: + print("[convert_step] per-part export failed or empty — combined STL only") + except Exception as e: + print(f"[convert_step] per-part export failed (non-fatal): {e}") + + +if __name__ == "__main__": + main() diff --git a/flamenco/scripts/schaeffler-still.js b/flamenco/scripts/schaeffler-still.js new file mode 100644 index 0000000..9e369d2 --- /dev/null +++ b/flamenco/scripts/schaeffler-still.js @@ -0,0 +1,121 @@ +// Schaeffler Still Render job type for Flamenco 3.x +// Pipeline: STEP -> STL (cadquery) -> Blender single-frame render + +const JOB_TYPE = { + label: "Schaeffler Still", + settings: [ + { key: "step_path", type: "string", required: true, + description: "Absolute path to STEP file" }, + { key: "output_path", type: "string", required: true, + description: "Full 
path for output image (e.g. /shared/render.png)" }, + { key: "width", type: "int32", default: 1024, + description: "Output width in pixels" }, + { key: "height", type: "int32", default: 1024, + description: "Output height in pixels" }, + { key: "engine", type: "string", default: "cycles", + description: "Blender render engine: cycles or eevee" }, + { key: "samples", type: "int32", default: 256, + description: "Render samples" }, + { key: "stl_quality", type: "string", default: "low", + description: "STL mesh quality: low or high" }, + { key: "part_colors_json", type: "string", default: "{}", + description: "JSON dict mapping part names to hex colors" }, + { key: "transparent_bg", type: "bool", default: false, + description: "Render with transparent background (PNG alpha)" }, + { key: "template_path", type: "string", default: "", + description: "Path to .blend template file (empty = factory settings)" }, + { key: "target_collection", type: "string", default: "Product", + description: "Blender collection name to import geometry into" }, + { key: "material_library_path", type: "string", default: "", + description: "Path to material library .blend file" }, + { key: "material_map_json", type: "string", default: "{}", + description: "JSON dict mapping part names to material names" }, + { key: "part_names_ordered_json", type: "string", default: "[]", + description: "JSON array of STEP part names in solid order (for index-based matching)" }, + { key: "lighting_only", type: "bool", default: false, + description: "Use template only for World/HDRI lighting; always auto-frame with computed camera" }, + { key: "cycles_device", type: "string", default: "auto", + description: "Cycles compute device: auto (try GPU, fall back to CPU), gpu (force GPU), cpu (force CPU)" }, + { key: "shadow_catcher", type: "bool", default: false, + description: "Enable Shadowcatcher collection from template and position plane under product (Cycles only)" }, + { key: "rotation_x", type: "float", default: 0.0, + description: "Product rotation around X axis in degrees (render position)" }, + { key: "rotation_y", type: "float", default: 0.0, + description: "Product rotation around Y axis in degrees (render position)" }, + { key: "rotation_z", type: "float", default: 0.0, + description: "Product rotation around Z axis in degrees (render position)" }, + { key: "noise_threshold", type: "string", default: "", + description: "Adaptive sampling noise threshold (empty = Blender default 0.01)" }, + { key: "denoiser", type: "string", default: "", + description: "Cycles denoiser: OPTIX, OPENIMAGEDENOISE, or empty for auto" }, + { key: "denoising_input_passes", type: "string", default: "", + description: "Denoising input passes: RGB, RGB_ALBEDO, RGB_ALBEDO_NORMAL, or empty for default" }, + { key: "denoising_prefilter", type: "string", default: "", + description: "Denoising prefilter: NONE, FAST, ACCURATE, or empty for default" }, + { key: "denoising_quality", type: "string", default: "", + description: "Denoising quality: HIGH, BALANCED, FAST, or empty for default (Blender 4.2+)" }, + { key: "denoising_use_gpu", type: "string", default: "", + description: "Route OIDN denoising through GPU: 1, 0, or empty for auto" }, + ], +}; + +function compileJob(job) { + const settings = job.settings; + // Cache STL next to STEP file: {step_dir}/{step_stem}_{quality}.stl + // This allows re-renders to skip the STEP→STL conversion step. 
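+  // Example (hypothetical path): step_path "/shared/orders/42/model.step" with
+  // stl_quality "low" → stlPath "/shared/orders/42/model_low.stl"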
+ const stepDir = settings.step_path.replace(/\/[^/]+$/, ""); + const stepBasename = settings.step_path.replace(/.*\//, ""); + const stepStem = stepBasename.replace(/\.[^.]+$/, ""); + const stlPath = stepDir + "/" + stepStem + "_" + settings.stl_quality + ".stl"; + + // Task 1: Convert STEP to STL + const convertTask = author.Task("convert-step", "misc"); + convertTask.addCommand(author.Command("exec", { + exe: "{python}", + args: [ + "/opt/flamenco/scripts/convert_step.py", + settings.step_path, + stlPath, + settings.stl_quality, + ], + })); + job.addTask(convertTask); + + // Task 2: Render single image with Blender + const renderTask = author.Task("render-image", "blender"); + renderTask.addCommand(author.Command("exec", { + exe: "{blender}", + args: [ + "--background", "--python", + "/opt/flamenco/scripts/still_render.py", + "--", + stlPath, + settings.output_path, + String(settings.width), + String(settings.height), + settings.engine, + String(settings.samples), + settings.part_colors_json, + settings.transparent_bg ? "1" : "0", + settings.template_path || "", + settings.target_collection || "Product", + settings.material_library_path || "", + settings.material_map_json || "{}", + settings.part_names_ordered_json || "[]", + settings.lighting_only ? "1" : "0", + settings.cycles_device || "auto", + settings.shadow_catcher ? "1" : "0", + String(settings.rotation_x || 0), + String(settings.rotation_y || 0), + String(settings.rotation_z || 0), + settings.noise_threshold || "", + settings.denoiser || "", + settings.denoising_input_passes || "", + settings.denoising_prefilter || "", + settings.denoising_quality || "", + settings.denoising_use_gpu || "", + ], + })); + renderTask.addDependency(convertTask); + job.addTask(renderTask); +} diff --git a/flamenco/scripts/schaeffler-turntable.js b/flamenco/scripts/schaeffler-turntable.js new file mode 100644 index 0000000..1bbb108 --- /dev/null +++ b/flamenco/scripts/schaeffler-turntable.js @@ -0,0 +1,211 @@ +// Schaeffler Turntable Animation job type for Flamenco 3.x +// Pipeline: STEP -> STL (cadquery) -> Blender scene setup -> Blender -a render -> FFmpeg video +// +// Task flow: +// 1. convert-step : STEP → STL via cadquery +// 2. setup-scene : turntable_setup.py imports STL, applies materials/camera/animation, +// saves a ready-to-render .blend to output_dir/scene.blend +// 3. render-frames : blender --background scene.blend --python turntable_gpu_setup.py -a +// Blender's native -a keeps GPU scene (BVH, textures) loaded for ALL +// frames — no per-frame re-upload overhead. +// 4. 
compose-video : FFmpeg encodes frame PNGs → MP4 + +const JOB_TYPE = { + label: "Schaeffler Turntable", + settings: [ + { key: "step_path", type: "string", required: true, + description: "Absolute path to STEP file" }, + { key: "output_dir", type: "string", required: true, + description: "Directory for rendered frames and final video" }, + { key: "output_name", type: "string", required: true, default: "turntable", + description: "Base name for output files" }, + { key: "frame_count", type: "int32", default: 120, + description: "Number of frames to render" }, + { key: "fps", type: "int32", default: 30, + description: "Frames per second for output video" }, + { key: "turntable_degrees", type: "int32", default: 360, + description: "Total rotation in degrees" }, + { key: "width", type: "int32", default: 1920, + description: "Output width in pixels" }, + { key: "height", type: "int32", default: 1080, + description: "Output height in pixels" }, + { key: "engine", type: "string", default: "cycles", + description: "Blender render engine: cycles or eevee" }, + { key: "samples", type: "int32", default: 128, + description: "Render samples" }, + { key: "stl_quality", type: "string", default: "low", + description: "STL mesh quality: low or high" }, + { key: "part_colors_json", type: "string", default: "{}", + description: "JSON dict mapping part names to hex colors" }, + { key: "template_path", type: "string", default: "", + description: "Path to .blend template file (empty = factory settings)" }, + { key: "target_collection", type: "string", default: "Product", + description: "Blender collection name to import geometry into" }, + { key: "material_library_path", type: "string", default: "", + description: "Path to material library .blend file" }, + { key: "material_map_json", type: "string", default: "{}", + description: "JSON dict mapping part names to material names" }, + { key: "part_names_ordered_json", type: "string", default: "[]", + description: "JSON array of STEP part names in solid order (for index-based matching)" }, + { key: "lighting_only", type: "bool", default: false, + description: "Use template only for World/HDRI lighting; always auto-frame with computed camera" }, + { key: "cycles_device", type: "string", default: "auto", + description: "Cycles compute device: auto (try GPU, fall back to CPU), gpu (force GPU), cpu (force CPU)" }, + { key: "shadow_catcher", type: "bool", default: false, + description: "Enable Shadowcatcher collection from template and position plane under product (Cycles only)" }, + { key: "rotation_x", type: "float", default: 0.0, + description: "Product rotation around X axis in degrees (render position)" }, + { key: "rotation_y", type: "float", default: 0.0, + description: "Product rotation around Y axis in degrees (render position)" }, + { key: "rotation_z", type: "float", default: 0.0, + description: "Product rotation around Z axis in degrees (render position)" }, + { key: "turntable_axis", type: "string", default: "world_z", + description: "Turntable rotation axis: world_z (default), world_x, or world_y" }, + { key: "bg_color", type: "string", default: "", + description: "Solid background hex color for compositing (e.g. 
#1a1a2e); empty = HDR visible as background" }, + { key: "camera_orbit", type: "bool", default: true, + description: "Rotate camera around product instead of rotating product (true = better GPU performance, BVH cached)" }, + { key: "noise_threshold", type: "string", default: "", + description: "Adaptive sampling noise threshold (empty = Blender default 0.01)" }, + { key: "denoiser", type: "string", default: "", + description: "Cycles denoiser: OPTIX, OPENIMAGEDENOISE, or empty for auto" }, + { key: "denoising_input_passes", type: "string", default: "", + description: "Denoising input passes: RGB, RGB_ALBEDO, RGB_ALBEDO_NORMAL, or empty for default" }, + { key: "denoising_prefilter", type: "string", default: "", + description: "Denoising prefilter: NONE, FAST, ACCURATE, or empty for default" }, + { key: "denoising_quality", type: "string", default: "", + description: "Denoising quality: HIGH, BALANCED, FAST, or empty for default (Blender 4.2+)" }, + { key: "denoising_use_gpu", type: "string", default: "", + description: "Route OIDN denoising through GPU: 1, 0, or empty for auto" }, + ], +}; + +function compileJob(job) { + const settings = job.settings; + // Cache STL next to STEP file: {step_dir}/{step_stem}_{quality}.stl + const stepDir = settings.step_path.replace(/\/[^/]+$/, ""); + const stepBasename = settings.step_path.replace(/.*\//, ""); + const stepStem = stepBasename.replace(/\.[^.]+$/, ""); + const stlPath = stepDir + "/" + stepStem + "_" + settings.stl_quality + ".stl"; + const framesDir = settings.output_dir + "/frames"; + const scenePath = settings.output_dir + "/scene.blend"; + const videoPath = settings.output_dir + "/" + settings.output_name + ".mp4"; + + // Task 1: Convert STEP to STL + const convertTask = author.Task("convert-step", "misc"); + convertTask.addCommand(author.Command("exec", { + exe: "{python}", + args: [ + "/opt/flamenco/scripts/convert_step.py", + settings.step_path, + stlPath, + settings.stl_quality, + ], + })); + job.addTask(convertTask); + + // Task 2: Setup Blender scene and save to scene.blend + // turntable_setup.py imports the STL, assigns materials, sets up the + // camera rig and pivot animation, configures the compositor (bg_color), + // and saves the complete scene — ready for native -a rendering. + const setupTask = author.Task("setup-scene", "blender"); + setupTask.addCommand(author.Command("exec", { + exe: "{blender}", + args: [ + "--background", "--python", + "/opt/flamenco/scripts/turntable_setup.py", + "--", + stlPath, + framesDir, + String(settings.frame_count), + String(settings.turntable_degrees), + String(settings.width), + String(settings.height), + settings.engine, + String(settings.samples), + settings.part_colors_json, + settings.template_path || "", + settings.target_collection || "Product", + settings.material_library_path || "", + settings.material_map_json || "{}", + settings.part_names_ordered_json || "[]", + settings.lighting_only ? "1" : "0", + settings.cycles_device || "gpu", + settings.shadow_catcher ? "1" : "0", + String(settings.rotation_x || 0), + String(settings.rotation_y || 0), + String(settings.rotation_z || 0), + settings.turntable_axis || "world_z", + settings.bg_color || "", + settings.transparent_bg ? "1" : "0", + scenePath, + settings.camera_orbit !== false ? 
"1" : "0", + settings.noise_threshold || "", + settings.denoiser || "", + settings.denoising_input_passes || "", + settings.denoising_prefilter || "", + settings.denoising_quality || "", + settings.denoising_use_gpu || "", + ], + })); + setupTask.addDependency(convertTask); + job.addTask(setupTask); + + // Task 3: Render all frames using Blender's native -a (--render-anim) + // turntable_gpu_setup.py re-applies GPU preferences (user-level, not stored + // in .blend), then -a renders all frames in one process — GPU scene stays + // loaded between frames, no per-frame BVH re-upload. + const renderTask = author.Task("render-frames", "blender"); + renderTask.addCommand(author.Command("exec", { + exe: "{blender}", + args: [ + "--background", + scenePath, + "--python", + "/opt/flamenco/scripts/turntable_gpu_setup.py", + "-a", + ], + })); + renderTask.addDependency(setupTask); + job.addTask(renderTask); + + // Task 4: Compose video with FFmpeg + // Blender writes transparent PNG frames (film_transparent=True) when bg_color is set. + // FFmpeg composites them over a solid colour background using the lavfi color source. + // Without bg_color, frames are opaque and encoded directly. + const composeTask = author.Task("compose-video", "misc"); + const bgHex = (settings.bg_color || "").replace(/^#/, ""); + const ffmpegArgs = bgHex + ? [ + "-y", + // Background: solid colour at video resolution and frame rate + "-f", "lavfi", + "-i", "color=c=0x" + bgHex + ":size=" + String(settings.width) + "x" + String(settings.height) + ":rate=" + String(settings.fps), + // Foreground: transparent PNG frame sequence + "-framerate", String(settings.fps), + "-i", framesDir + "/frame_%04d.png", + // Composite foreground over background + "-filter_complex", "[0:v][1:v]overlay=0:0:shortest=1", + "-c:v", "libx264", + "-pix_fmt", "yuv420p", + "-preset", "medium", + "-crf", "18", + videoPath, + ] + : [ + "-y", + "-framerate", String(settings.fps), + "-i", framesDir + "/frame_%04d.png", + "-c:v", "libx264", + "-pix_fmt", "yuv420p", + "-preset", "medium", + "-crf", "18", + videoPath, + ]; + composeTask.addCommand(author.Command("exec", { + exe: "ffmpeg", + args: ffmpegArgs, + })); + composeTask.addDependency(renderTask); + job.addTask(composeTask); +} diff --git a/flamenco/scripts/still_render.py b/flamenco/scripts/still_render.py new file mode 100644 index 0000000..c7f3adf --- /dev/null +++ b/flamenco/scripts/still_render.py @@ -0,0 +1,781 @@ +"""Blender Python script: single-frame still render for Flamenco. + +Matches the lighting, camera, materials, and post-processing of the +Celery blender_render.py so that LQ and HQ renders look consistent. 
+ +Usage (from Blender): + blender --background --python still_render.py -- \ + \ + \ + [template_path] [target_collection] [material_library_path] [material_map_json] +""" +import bpy +import sys +import os +import json +import math +from mathutils import Vector, Matrix + +# ── Colour palette (matches blender_render.py / Three.js renderer) ─────────── +PALETTE_HEX = [ + "#4C9BE8", "#E85B4C", "#4CBE72", "#E8A84C", "#A04CE8", + "#4CD4E8", "#E84CA8", "#7EC850", "#E86B30", "#5088C8", +] + +def _srgb_to_linear(c: int) -> float: + v = c / 255.0 + return v / 12.92 if v <= 0.04045 else ((v + 0.055) / 1.055) ** 2.4 + +def _hex_to_linear(hex_color: str) -> tuple: + h = hex_color.lstrip('#') + return ( + _srgb_to_linear(int(h[0:2], 16)), + _srgb_to_linear(int(h[2:4], 16)), + _srgb_to_linear(int(h[4:6], 16)), + 1.0, + ) + +PALETTE_LINEAR = [_hex_to_linear(h) for h in PALETTE_HEX] + +SMOOTH_ANGLE = 30 # degrees + + +# ── Helper functions ───────────────────────────────────────────────────────── + +def _ensure_collection(name: str): + """Return a collection by name, creating it if needed.""" + if name in bpy.data.collections: + return bpy.data.collections[name] + col = bpy.data.collections.new(name) + bpy.context.scene.collection.children.link(col) + return col + + +def _assign_palette_material(part_obj, index): + """Assign a palette colour material to a mesh part.""" + color = PALETTE_LINEAR[index % len(PALETTE_LINEAR)] + mat = bpy.data.materials.new(name=f"Part_{index}") + mat.use_nodes = True + bsdf = mat.node_tree.nodes.get("Principled BSDF") + if bsdf: + bsdf.inputs["Base Color"].default_value = color + bsdf.inputs["Metallic"].default_value = 0.35 + bsdf.inputs["Roughness"].default_value = 0.40 + try: + bsdf.inputs["Specular IOR Level"].default_value = 0.5 + except KeyError: + pass + part_obj.data.materials.clear() + part_obj.data.materials.append(mat) + + +def _apply_smooth(part_obj, angle_deg): + """Apply smooth or flat shading to a mesh object.""" + bpy.context.view_layer.objects.active = part_obj + part_obj.select_set(True) + if angle_deg > 0: + try: + bpy.ops.object.shade_smooth_by_angle(angle=math.radians(angle_deg)) + except AttributeError: + bpy.ops.object.shade_smooth() + part_obj.data.use_auto_smooth = True + part_obj.data.auto_smooth_angle = math.radians(angle_deg) + else: + bpy.ops.object.shade_flat() + + +import re as _re + + +def _scale_mm_to_m(parts): + """Scale imported STL objects from mm to Blender metres (×0.001). + + STEP/STL coordinates are in mm; Blender's default unit is metres. + Without scaling a 50 mm part appears as 50 m inside Blender — way too large + relative to any template environment designed in metric units. 
+ """ + if not parts: + return + bpy.ops.object.select_all(action='DESELECT') + for p in parts: + p.scale = (0.001, 0.001, 0.001) + p.location *= 0.001 + p.select_set(True) + bpy.context.view_layer.objects.active = parts[0] + bpy.ops.object.transform_apply(scale=True, location=False, rotation=False) + print(f"[still_render] scaled {len(parts)} parts mm→m (×0.001)") + + +def _apply_rotation(parts, rx, ry, rz): + """Apply Euler rotation (degrees, XYZ order) to all parts around world origin.""" + if not parts or (rx == 0.0 and ry == 0.0 and rz == 0.0): + return + import math + from mathutils import Euler + rot_mat = Euler((math.radians(rx), math.radians(ry), math.radians(rz)), 'XYZ').to_matrix().to_4x4() + for p in parts: + p.matrix_world = rot_mat @ p.matrix_world + bpy.ops.object.select_all(action='DESELECT') + for p in parts: + p.select_set(True) + bpy.context.view_layer.objects.active = parts[0] + bpy.ops.object.transform_apply(location=False, rotation=True, scale=False) + print(f"[still_render] applied rotation ({rx}°, {ry}°, {rz}°) to {len(parts)} parts") + + +def _import_stl(stl_file): + """Import STL into Blender, using per-part STLs if available. + + Checks for {stl_stem}_parts/manifest.json next to the STL file. + - Per-part mode: imports each part STL, names Blender object after STEP part name. + - Fallback: imports combined STL and splits by loose geometry. + + Returns list of Blender mesh objects, centred at origin. + """ + stl_dir = os.path.dirname(stl_file) + stl_stem = os.path.splitext(os.path.basename(stl_file))[0] + parts_dir = os.path.join(stl_dir, stl_stem + "_parts") + manifest_path = os.path.join(parts_dir, "manifest.json") + + parts = [] + + if os.path.isfile(manifest_path): + # ── Per-part mode ──────────────────────────────────────────────── + try: + with open(manifest_path, "r") as f: + manifest = json.loads(f.read()) + part_entries = manifest.get("parts", []) + except Exception as e: + print(f"[still_render] WARNING: failed to read manifest: {e}") + part_entries = [] + + if part_entries: + for entry in part_entries: + part_file = os.path.join(parts_dir, entry["file"]) + part_name = entry["name"] + if not os.path.isfile(part_file): + print(f"[still_render] WARNING: part STL missing: {part_file}") + continue + + bpy.ops.object.select_all(action='DESELECT') + bpy.ops.wm.stl_import(filepath=part_file) + imported = bpy.context.selected_objects + if imported: + obj = imported[0] + obj.name = part_name + if obj.data: + obj.data.name = part_name + parts.append(obj) + + if parts: + print(f"[still_render] imported {len(parts)} named parts from per-part STLs") + + # ── Fallback: combined STL + separate by loose ─────────────────────── + if not parts: + bpy.ops.wm.stl_import(filepath=stl_file) + obj = bpy.context.selected_objects[0] if bpy.context.selected_objects else None + if obj is None: + print(f"ERROR: No objects imported from {stl_file}") + sys.exit(1) + + bpy.context.view_layer.objects.active = obj + bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY', center='BOUNDS') + obj.location = (0.0, 0.0, 0.0) + + bpy.ops.object.mode_set(mode='EDIT') + bpy.ops.mesh.separate(type='LOOSE') + bpy.ops.object.mode_set(mode='OBJECT') + + parts = list(bpy.context.selected_objects) + print(f"[still_render] fallback: separated into {len(parts)} part(s)") + return parts + + # ── Centre per-part imports at origin (combined bbox) ──────────────── + all_corners = [] + for p in parts: + all_corners.extend(p.matrix_world @ Vector(c) for c in p.bound_box) + + if all_corners: + mins = 
Vector((min(v.x for v in all_corners), + min(v.y for v in all_corners), + min(v.z for v in all_corners))) + maxs = Vector((max(v.x for v in all_corners), + max(v.y for v in all_corners), + max(v.z for v in all_corners))) + center = (mins + maxs) * 0.5 + for p in parts: + p.location -= center + + return parts + + +def _resolve_part_name(index, part_obj, part_names_ordered): + """Get the STEP part name for a Blender part by index. + + With per-part import, part_obj.name IS the STEP name (possibly with + Blender .NNN suffix). Falls back to part_names_ordered for combined-STL mode. + """ + base_name = _re.sub(r'\.\d{3}$', '', part_obj.name) + if part_names_ordered and index < len(part_names_ordered): + return part_names_ordered[index] + return base_name + + +def _apply_material_library(parts, mat_lib_path, mat_map, part_names_ordered=None): + """Append materials from library .blend and assign to parts via material_map. + + With per-part STL import, Blender objects are named after STEP parts, + so matching is by name (stripping Blender .NNN suffix for duplicates). + Falls back to part_names_ordered index-based matching for combined-STL mode. + + mat_map: {part_name_lower: material_name} + Parts without a match keep their current material. + """ + if not mat_lib_path or not os.path.isfile(mat_lib_path): + print(f"[still_render] material library not found: {mat_lib_path}") + return + + # Collect unique material names needed + needed = set(mat_map.values()) + if not needed: + return + + # Append materials from library + appended = {} + for mat_name in needed: + inner_path = f"{mat_lib_path}/Material/{mat_name}" + try: + bpy.ops.wm.append( + filepath=inner_path, + directory=f"{mat_lib_path}/Material/", + filename=mat_name, + link=False, + ) + if mat_name in bpy.data.materials: + appended[mat_name] = bpy.data.materials[mat_name] + print(f"[still_render] appended material: {mat_name}") + else: + print(f"[still_render] WARNING: material '{mat_name}' not found after append") + except Exception as exc: + print(f"[still_render] WARNING: failed to append material '{mat_name}': {exc}") + + if not appended: + return + + # Assign materials to parts — primary: name-based (per-part STL mode), + # secondary: index-based via part_names_ordered (combined STL fallback) + assigned_count = 0 + for i, part in enumerate(parts): + # Try name-based matching first (strip Blender .NNN suffix) + base_name = _re.sub(r'\.\d{3}$', '', part.name) + part_key = base_name.lower().strip() + mat_name = mat_map.get(part_key) + + # Fall back to index-based matching via part_names_ordered + if not mat_name and part_names_ordered and i < len(part_names_ordered): + step_name = part_names_ordered[i] + part_key = step_name.lower().strip() + mat_name = mat_map.get(part_key) + + if mat_name and mat_name in appended: + part.data.materials.clear() + part.data.materials.append(appended[mat_name]) + assigned_count += 1 + print(f"[still_render] assigned '{mat_name}' to part '{part.name}'") + + print(f"[still_render] material assignment: {assigned_count}/{len(parts)} parts matched") + + +def main(): + argv = sys.argv + args = argv[argv.index("--") + 1:] + + stl_path = args[0] + output_path = args[1] + width = int(args[2]) + height = int(args[3]) + engine = args[4] + samples = int(args[5]) + part_colors_json = args[6] if len(args) > 6 else "{}" + transparent_bg = args[7] == "1" if len(args) > 7 else False + + # Template + material library args (passed by schaeffler-still.js) + template_path = args[8] if len(args) > 8 and args[8] else "" + 
target_collection = args[9] if len(args) > 9 else "Product" + material_library_path = args[10] if len(args) > 10 and args[10] else "" + material_map_raw = args[11] if len(args) > 11 else "{}" + part_names_ordered_raw = args[12] if len(args) > 12 else "[]" + lighting_only = args[13] == "1" if len(args) > 13 else False + cycles_device = args[14].lower() if len(args) > 14 else "auto" # "auto", "gpu", "cpu" + shadow_catcher = args[15] == "1" if len(args) > 15 else False + rotation_x = float(args[16]) if len(args) > 16 else 0.0 + rotation_y = float(args[17]) if len(args) > 17 else 0.0 + rotation_z = float(args[18]) if len(args) > 18 else 0.0 + noise_threshold_arg = args[19] if len(args) > 19 else "" + denoiser_arg = args[20] if len(args) > 20 else "" + denoising_input_passes_arg = args[21] if len(args) > 21 else "" + denoising_prefilter_arg = args[22] if len(args) > 22 else "" + denoising_quality_arg = args[23] if len(args) > 23 else "" + denoising_use_gpu_arg = args[24] if len(args) > 24 else "" + + os.makedirs(os.path.dirname(output_path), exist_ok=True) + + try: + part_colors = json.loads(part_colors_json) + except json.JSONDecodeError: + part_colors = {} + + try: + material_map = json.loads(material_map_raw) if material_map_raw else {} + except json.JSONDecodeError: + material_map = {} + + try: + part_names_ordered = json.loads(part_names_ordered_raw) if part_names_ordered_raw else [] + except json.JSONDecodeError: + part_names_ordered = [] + + # Validate template path: if provided it MUST exist on disk. + # A missing template is a configuration error — fail loudly rather than + # silently falling back to factory-settings mode which produces renders that + # look completely wrong. + if template_path and not os.path.isfile(template_path): + print(f"[still_render] ERROR: template_path was provided but file not found: {template_path}") + print("[still_render] Ensure the blend-templates directory is accessible on this worker.") + sys.exit(1) + + use_template = bool(template_path) + + print(f"[still_render] engine={engine}, samples={samples}, size={width}x{height}, transparent={transparent_bg}") + print(f"[still_render] part_names_ordered: {len(part_names_ordered)} entries") + if use_template: + print(f"[still_render] template={template_path}, collection={target_collection}, lighting_only={lighting_only}") + else: + print("[still_render] no template — using factory settings (Mode A)") + if material_library_path: + print(f"[still_render] material_library={material_library_path}, material_map keys={list(material_map.keys())}") + + # ── SCENE SETUP ────────────────────────────────────────────────────────── + + if use_template: + # ── MODE B: Template-based render ──────────────────────────────────── + print(f"[still_render] Opening template: {template_path}") + bpy.ops.wm.open_mainfile(filepath=template_path) + + # Find or create target collection + target_col = _ensure_collection(target_collection) + + # Import and split STL + parts = _import_stl(stl_path) + # Scale mm→m: STEP coords are mm, Blender default unit is metres + _scale_mm_to_m(parts) + # Apply render position rotation (before camera/bbox calculations) + _apply_rotation(parts, rotation_x, rotation_y, rotation_z) + + # Move imported parts into target collection + for part in parts: + for col in list(part.users_collection): + col.objects.unlink(part) + target_col.objects.link(part) + + # Apply smooth shading + for part in parts: + _apply_smooth(part, SMOOTH_ANGLE) + + # Material assignment: library materials if available, otherwise 
palette + if material_library_path and material_map: + mat_map_lower = {k.lower(): v for k, v in material_map.items()} + _apply_material_library(parts, material_library_path, mat_map_lower, part_names_ordered) + # Parts not matched by library get palette fallback + for i, part in enumerate(parts): + if not part.data.materials or len(part.data.materials) == 0: + _assign_palette_material(part, i) + else: + for i, part in enumerate(parts): + step_name = _resolve_part_name(i, part, part_names_ordered) + color_hex = part_colors.get(step_name) + if color_hex: + color = _hex_to_linear(color_hex) + mat = bpy.data.materials.new(name=f"Part_{i}") + mat.use_nodes = True + bsdf = mat.node_tree.nodes.get("Principled BSDF") + if bsdf: + bsdf.inputs["Base Color"].default_value = color + bsdf.inputs["Metallic"].default_value = 0.35 + bsdf.inputs["Roughness"].default_value = 0.40 + try: + bsdf.inputs["Specular IOR Level"].default_value = 0.5 + except KeyError: + pass + part.data.materials.clear() + part.data.materials.append(mat) + else: + _assign_palette_material(part, i) + + # ── Shadow catcher (Cycles only, template mode only) ───────────────── + if shadow_catcher: + sc_col_name = "Shadowcatcher" + sc_obj_name = "Shadowcatcher" + for vl in bpy.context.scene.view_layers: + def _enable_col_recursive(layer_col): + if layer_col.collection.name == sc_col_name: + layer_col.exclude = False + layer_col.collection.hide_render = False + layer_col.collection.hide_viewport = False + return True + for child in layer_col.children: + if _enable_col_recursive(child): + return True + return False + _enable_col_recursive(vl.layer_collection) + + sc_obj = bpy.data.objects.get(sc_obj_name) + if sc_obj: + all_world_z = [] + for part in parts: + for corner in part.bound_box: + all_world_z.append((part.matrix_world @ Vector(corner)).z) + if all_world_z: + sc_obj.location.z = min(all_world_z) + print(f"[still_render] shadow catcher enabled, plane Z={sc_obj.location.z:.4f}") + else: + print(f"[still_render] WARNING: shadow catcher object '{sc_obj_name}' not found in template") + + # lighting_only: use template World/HDRI but force auto-camera UNLESS the shadow + # catcher is enabled — in that case the template camera is already positioned to + # show both the product and its shadow on the ground plane. 
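+        # Summary of the camera decision below (mirrors the boolean expression):
+        #   lighting_only and no shadow catcher  -> force auto-camera
+        #   template provides no camera          -> force auto-camera
+        #   otherwise                            -> keep the template camera as-is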
+ needs_auto_camera = (lighting_only and not shadow_catcher) or not bpy.context.scene.camera + if lighting_only and not shadow_catcher: + print("[still_render] lighting_only mode: using template World/HDRI, forcing auto-camera") + elif needs_auto_camera: + print("[still_render] WARNING: template has no camera — will create auto-camera") + + # Set very close near clip on template camera for mm-scale parts (now in metres) + if not needs_auto_camera and bpy.context.scene.camera: + bpy.context.scene.camera.data.clip_start = 0.001 + + print(f"[still_render] template mode: {len(parts)} parts imported into collection '{target_collection}'") + + else: + # ── MODE A: Factory settings (original behavior) ───────────────────── + needs_auto_camera = True + bpy.ops.wm.read_factory_settings(use_empty=True) + + parts = _import_stl(stl_path) + # Scale mm→m: STEP coords are mm, Blender default unit is metres + _scale_mm_to_m(parts) + # Apply render position rotation (before camera/bbox calculations) + _apply_rotation(parts, rotation_x, rotation_y, rotation_z) + + for i, part in enumerate(parts): + _apply_smooth(part, SMOOTH_ANGLE) + + # Material assignment: library materials if available, else part_colors/palette + if material_library_path and material_map: + mat_map_lower = {k.lower(): v for k, v in material_map.items()} + _apply_material_library(parts, material_library_path, mat_map_lower, part_names_ordered) + # Palette fallback for unmatched parts + for i, part in enumerate(parts): + if not part.data.materials or len(part.data.materials) == 0: + _assign_palette_material(part, i) + else: + # part_colors or palette — use index-based lookup via part_names_ordered + for i, part in enumerate(parts): + step_name = _resolve_part_name(i, part, part_names_ordered) + color_hex = part_colors.get(step_name) + if color_hex: + color = _hex_to_linear(color_hex) + else: + color = PALETTE_LINEAR[i % len(PALETTE_LINEAR)] + + mat = bpy.data.materials.new(name=f"Part_{i}") + mat.use_nodes = True + bsdf = mat.node_tree.nodes.get("Principled BSDF") + if bsdf: + bsdf.inputs["Base Color"].default_value = color + bsdf.inputs["Metallic"].default_value = 0.35 + bsdf.inputs["Roughness"].default_value = 0.40 + try: + bsdf.inputs["Specular IOR Level"].default_value = 0.5 + except KeyError: + pass + part.data.materials.clear() + part.data.materials.append(mat) + + if needs_auto_camera: + # ── Combined bounding box / bounding sphere ────────────────────────── + all_corners = [] + for part in parts: + all_corners.extend(part.matrix_world @ Vector(c) for c in part.bound_box) + + bbox_min = Vector(( + min(v.x for v in all_corners), + min(v.y for v in all_corners), + min(v.z for v in all_corners), + )) + bbox_max = Vector(( + max(v.x for v in all_corners), + max(v.y for v in all_corners), + max(v.z for v in all_corners), + )) + + bbox_center = (bbox_min + bbox_max) * 0.5 + bbox_dims = bbox_max - bbox_min + bsphere_radius = max(bbox_dims.length * 0.5, 0.001) + + print(f"[still_render] bbox_dims={tuple(round(d, 4) for d in bbox_dims)}, " + f"bsphere_radius={bsphere_radius:.4f}") + + # ── Lighting — only in Mode A (factory settings) ───────────────────── + # In template mode the .blend file provides its own World/HDRI lighting. + # Adding auto-lights would overpower the template's intended look. 
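+        # Heuristic sketch: light distance and energy scale with the bounding
+        # sphere so small and large assemblies are lit comparably. The sun sits
+        # about 6 radii away and the area fill's energy grows with radius²
+        # (clamped by the max() calls below); the factors are empirical.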
+ if not use_template: + light_dist = bsphere_radius * 6.0 + + bpy.ops.object.light_add(type='SUN', location=( + bbox_center.x + light_dist * 0.5, + bbox_center.y - light_dist * 0.35, + bbox_center.z + light_dist, + )) + sun = bpy.context.active_object + sun.data.energy = 4.0 + sun.rotation_euler = (math.radians(45), 0, math.radians(30)) + + bpy.ops.object.light_add(type='AREA', location=( + bbox_center.x - light_dist * 0.4, + bbox_center.y + light_dist * 0.4, + bbox_center.z + light_dist * 0.7, + )) + fill = bpy.context.active_object + fill.data.energy = max(800.0, bsphere_radius ** 2 * 2000.0) + fill.data.size = max(4.0, bsphere_radius * 4.0) + + # ── Camera (isometric-style, matches blender_render.py) ────────────── + ELEVATION_DEG = 28.0 + AZIMUTH_DEG = 40.0 + LENS_MM = 50.0 + SENSOR_WIDTH_MM = 36.0 + FILL_FACTOR = 0.85 + + elevation_rad = math.radians(ELEVATION_DEG) + azimuth_rad = math.radians(AZIMUTH_DEG) + + cam_dir = Vector(( + math.cos(elevation_rad) * math.cos(azimuth_rad), + math.cos(elevation_rad) * math.sin(azimuth_rad), + math.sin(elevation_rad), + )).normalized() + + fov_h = math.atan(SENSOR_WIDTH_MM / (2.0 * LENS_MM)) + fov_v = math.atan(SENSOR_WIDTH_MM * (height / width) / (2.0 * LENS_MM)) + fov_used = min(fov_h, fov_v) + + dist = (bsphere_radius / math.tan(fov_used)) / FILL_FACTOR + dist = max(dist, bsphere_radius * 1.5) + + cam_location = bbox_center + cam_dir * dist + bpy.ops.object.camera_add(location=cam_location) + cam_obj = bpy.context.active_object + cam_obj.data.lens = LENS_MM + bpy.context.scene.camera = cam_obj + + # Look-at rotation + look_dir = (bbox_center - cam_location).normalized() + up_world = Vector((0.0, 0.0, 1.0)) + right = look_dir.cross(up_world) + if right.length < 1e-6: + right = Vector((1.0, 0.0, 0.0)) + right.normalize() + cam_up = right.cross(look_dir).normalized() + + rot_mat = Matrix(( + (right.x, right.y, right.z), + (cam_up.x, cam_up.y, cam_up.z), + (-look_dir.x, -look_dir.y, -look_dir.z), + )).transposed() + cam_obj.rotation_euler = rot_mat.to_euler('XYZ') + + cam_obj.data.clip_start = max(dist * 0.001, 0.0001) + cam_obj.data.clip_end = dist + bsphere_radius * 3.0 + + # ── World background — only in Mode A ─────────────────────────────── + # In template mode the .blend file owns its World (HDRI, sky texture, + # studio lighting). Overwriting it would destroy the HDR look the + # template was designed to use (e.g. Alpha-HDR output types). + if not use_template: + world = bpy.data.worlds.new("World") + bpy.context.scene.world = world + world.use_nodes = True + bg = world.node_tree.nodes["Background"] + bg.inputs["Color"].default_value = (0.96, 0.96, 0.97, 1.0) + bg.inputs["Strength"].default_value = 0.15 + + # ── Colour management ──────────────────────────────────────────────────── + # In template mode the .blend file owns its colour management settings + # (e.g. Filmic/AgX for HDR, custom exposure for Alpha-HDR output types). + # Overwriting them would destroy the look the template was designed for. + # In factory-settings mode (Mode A) we force Standard to avoid the grey + # Filmic tint that Blender applies by default. 
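+    # Note: 'Standard' passes the rendered colours through untouched, whereas
+    # Filmic/AgX apply a filmic tone curve, which is what produces the grey-ish
+    # cast mentioned above on the flat palette materials used in Mode A.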
+ scene = bpy.context.scene + if not use_template: + scene.view_settings.view_transform = 'Standard' + scene.view_settings.exposure = 0.0 + scene.view_settings.gamma = 1.0 + try: + scene.view_settings.look = 'None' + except Exception: + pass + + # ── Render engine ──────────────────────────────────────────────────────── + if engine == "eevee": + eevee_ok = False + for eevee_id in ('BLENDER_EEVEE', 'BLENDER_EEVEE_NEXT'): + try: + scene.render.engine = eevee_id + eevee_ok = True + print(f"[still_render] EEVEE engine id: {eevee_id}") + break + except TypeError: + continue + if eevee_ok: + for attr in ('taa_render_samples', 'samples'): + try: + setattr(scene.eevee, attr, samples) + break + except AttributeError: + continue + else: + print("[still_render] WARNING: EEVEE unavailable, falling back to Cycles") + engine = "cycles" + + if engine != "eevee": + scene.render.engine = 'CYCLES' + scene.cycles.samples = samples + scene.cycles.use_denoising = True + scene.cycles.denoiser = denoiser_arg if denoiser_arg else 'OPENIMAGEDENOISE' + if denoising_input_passes_arg: + try: scene.cycles.denoising_input_passes = denoising_input_passes_arg + except Exception: pass + if denoising_prefilter_arg: + try: scene.cycles.denoising_prefilter = denoising_prefilter_arg + except Exception: pass + if denoising_quality_arg: + try: scene.cycles.denoising_quality = denoising_quality_arg + except Exception: pass + if denoising_use_gpu_arg: + try: scene.cycles.denoising_use_gpu = (denoising_use_gpu_arg == "1") + except AttributeError: pass + if noise_threshold_arg: + scene.cycles.use_adaptive_sampling = True + scene.cycles.adaptive_threshold = float(noise_threshold_arg) + # Device selection: "cpu" forces CPU, "gpu" forces GPU (warns if unavailable), + # "auto" (default) tries GPU first and falls back to CPU. 
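+        # The probe below tries each backend in order of preference (OPTIX,
+        # CUDA, HIP, ONEAPI); assigning a compute_device_type that the running
+        # Blender build does not support raises, which the inner try/except
+        # treats as "try the next backend".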
+ print(f"[still_render] cycles_device={cycles_device}") + gpu_found = False + if cycles_device != "cpu": + try: + cycles_prefs = bpy.context.preferences.addons['cycles'].preferences + for device_type in ('OPTIX', 'CUDA', 'HIP', 'ONEAPI'): + try: + cycles_prefs.compute_device_type = device_type + cycles_prefs.get_devices() + gpu_devs = [d for d in cycles_prefs.devices if d.type != 'CPU'] + if gpu_devs: + for d in gpu_devs: + d.use = True + scene.cycles.device = 'GPU' + gpu_found = True + print(f"[still_render] Cycles GPU ({device_type})") + break + except Exception: + continue + except Exception: + pass + if not gpu_found: + scene.cycles.device = 'CPU' + print("[still_render] WARNING: GPU not found — falling back to CPU") + + # ── Render settings ────────────────────────────────────────────────────── + scene.render.resolution_x = width + scene.render.resolution_y = height + scene.render.resolution_percentage = 100 + scene.render.film_transparent = transparent_bg + + ext = os.path.splitext(output_path)[1].lower() + if ext in ('.jpg', '.jpeg'): + scene.render.image_settings.file_format = 'JPEG' + scene.render.image_settings.quality = 92 + else: + scene.render.image_settings.file_format = 'PNG' + + scene.render.filepath = output_path + + # ── Render ─────────────────────────────────────────────────────────────── + print(f"[still_render] Rendering -> {output_path} (Blender {bpy.app.version_string})") + bpy.ops.render.render(write_still=True) + print("[still_render] render done.") + + # ── Pillow post-processing: green bar + model name label ───────────────── + # Skip overlay for transparent renders to keep clean alpha channel + if transparent_bg: + print("[still_render] Transparent mode — skipping Pillow overlay.") + else: + try: + from PIL import Image, ImageDraw, ImageFont + + img = Image.open(output_path).convert("RGBA") + draw = ImageDraw.Draw(img) + W, H = img.size + + # Schaeffler green top bar + bar_h = max(8, H // 32) + draw.rectangle([0, 0, W - 1, bar_h - 1], fill=(0, 137, 61, 255)) + + # Model name strip at bottom + model_name = os.path.splitext(os.path.basename(stl_path))[0] + label_h = max(20, H // 20) + img.alpha_composite( + Image.new("RGBA", (W, label_h), (30, 30, 30, 180)), + dest=(0, H - label_h), + ) + + font_size = max(10, label_h - 6) + font = None + for fp in [ + "/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf", + "/usr/share/fonts/truetype/liberation/LiberationSans-Bold.ttf", + "/usr/share/fonts/truetype/freefont/FreeSansBold.ttf", + ]: + if os.path.exists(fp): + try: + font = ImageFont.truetype(fp, font_size) + break + except Exception: + pass + if font is None: + font = ImageFont.load_default() + + tb = draw.textbbox((0, 0), model_name, font=font) + text_w = tb[2] - tb[0] + draw.text( + ((W - text_w) // 2, H - label_h + (label_h - (tb[3] - tb[1])) // 2), + model_name, font=font, fill=(255, 255, 255, 255), + ) + + # Save in original format + if ext in ('.jpg', '.jpeg'): + img.convert("RGB").save(output_path, format="JPEG", quality=92) + else: + img.convert("RGB").save(output_path, format="PNG") + print("[still_render] Pillow overlay applied.") + + except ImportError: + print("[still_render] Pillow not available - skipping overlay.") + except Exception as exc: + print(f"[still_render] Pillow overlay failed (non-fatal): {exc}") + + print("[still_render] Done.") + + +if __name__ == "__main__": + main() diff --git a/flamenco/scripts/turntable_gpu_setup.py b/flamenco/scripts/turntable_gpu_setup.py new file mode 100644 index 0000000..33bae75 --- /dev/null +++ 
b/flamenco/scripts/turntable_gpu_setup.py @@ -0,0 +1,74 @@ +"""Blender GPU preferences setup for native animation render (-a). + +Called as: + blender --background scene.blend --python turntable_gpu_setup.py -a + +Reads the intended cycles_device from the scene custom property set by +turntable_setup.py, then applies the matching GPU compute device preferences. +GPU preferences are user-level and not stored in .blend, so they must be +re-applied at render time. + +After this script runs, Blender processes -a and renders all animation frames +natively — keeping the GPU scene (BVH, textures) loaded across all frames. +""" +import bpy + +scene = bpy.context.scene +cycles_device = scene.get("_cycles_device", "gpu") +denoiser_override = scene.get("_denoiser_override", "") + +if scene.render.engine != 'CYCLES': + # EEVEE or other engine — no Cycles GPU preferences needed + print(f"[turntable_gpu] engine={scene.render.engine} — no Cycles GPU setup needed") +elif cycles_device == "cpu": + scene.cycles.device = 'CPU' + print("[turntable_gpu] Using CPU (explicit override)") +else: + gpu_found = False + try: + cycles_prefs = bpy.context.preferences.addons['cycles'].preferences + for device_type in ('OPTIX', 'CUDA', 'HIP', 'ONEAPI'): + try: + cycles_prefs.compute_device_type = device_type + cycles_prefs.get_devices() + gpu_devs = [d for d in cycles_prefs.devices if d.type != 'CPU'] + if gpu_devs: + for d in gpu_devs: + d.use = True + scene.cycles.device = 'GPU' + gpu_found = True + + # OptiX denoiser is fully GPU-native and faster than OIDN on NVIDIA. + # Fall back to OIDN (also GPU-accelerated) on CUDA/HIP. + if not denoiser_override: + if device_type == 'OPTIX': + try: + scene.cycles.denoiser = 'OPTIX' + print("[turntable_gpu] OptiX denoiser active (GPU-native)") + except Exception: + pass # Keep OIDN + else: + try: + scene.cycles.denoiser = denoiser_override + print(f"[turntable_gpu] Denoiser override: {denoiser_override}") + except Exception: + pass + + # Blender 4.x+: explicitly route OIDN through GPU path + try: + scene.cycles.denoising_use_gpu = True + except AttributeError: + pass # Older Blender — OIDN uses GPU automatically when device=GPU + + print(f"[turntable_gpu] Cycles GPU ({device_type}) — rendering {scene.frame_end - scene.frame_start + 1} frames") + break + except Exception: + continue + except Exception: + pass + + if not gpu_found: + scene.cycles.device = 'CPU' + print("[turntable_gpu] WARNING: GPU not found — falling back to CPU") + +print(f"[turntable_gpu] Output: {scene.render.filepath}#### (frames {scene.frame_start}–{scene.frame_end})") diff --git a/flamenco/scripts/turntable_render.py b/flamenco/scripts/turntable_render.py new file mode 100644 index 0000000..2a274da --- /dev/null +++ b/flamenco/scripts/turntable_render.py @@ -0,0 +1,762 @@ +"""Blender Python script: turntable animation render for Flamenco. 
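+
+Writes one PNG per frame (frame_0001.png, frame_0002.png, ...) into the frames
+directory; a separate compose-video task assembles them into the final clip.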
+ +Usage (from Blender): + blender --background --python turntable_render.py -- \ + \ + \ + [template_path] [target_collection] [material_library_path] [material_map_json] +""" +import bpy +import sys +import os +import json +import math +from mathutils import Vector, Matrix + +# ── Colour palette (matches blender_render.py / Three.js renderer) ─────────── +PALETTE_HEX = [ + "#4C9BE8", "#E85B4C", "#4CBE72", "#E8A84C", "#A04CE8", + "#4CD4E8", "#E84CA8", "#7EC850", "#E86B30", "#5088C8", +] + +def _srgb_to_linear(c: int) -> float: + v = c / 255.0 + return v / 12.92 if v <= 0.04045 else ((v + 0.055) / 1.055) ** 2.4 + +def _hex_to_linear(hex_color: str) -> tuple: + h = hex_color.lstrip('#') + return ( + _srgb_to_linear(int(h[0:2], 16)), + _srgb_to_linear(int(h[2:4], 16)), + _srgb_to_linear(int(h[4:6], 16)), + 1.0, + ) + +PALETTE_LINEAR = [_hex_to_linear(h) for h in PALETTE_HEX] + +SMOOTH_ANGLE = 30 # degrees + + +# ── Helper functions ───────────────────────────────────────────────────────── + +def _ensure_collection(name: str): + """Return a collection by name, creating it if needed.""" + if name in bpy.data.collections: + return bpy.data.collections[name] + col = bpy.data.collections.new(name) + bpy.context.scene.collection.children.link(col) + return col + + +def _assign_palette_material(part_obj, index): + """Assign a palette colour material to a mesh part.""" + color = PALETTE_LINEAR[index % len(PALETTE_LINEAR)] + mat = bpy.data.materials.new(name=f"Part_{index}") + mat.use_nodes = True + bsdf = mat.node_tree.nodes.get("Principled BSDF") + if bsdf: + bsdf.inputs["Base Color"].default_value = color + bsdf.inputs["Metallic"].default_value = 0.35 + bsdf.inputs["Roughness"].default_value = 0.40 + try: + bsdf.inputs["Specular IOR Level"].default_value = 0.5 + except KeyError: + pass + part_obj.data.materials.clear() + part_obj.data.materials.append(mat) + + +def _apply_smooth(part_obj, angle_deg): + """Apply smooth or flat shading to a mesh object.""" + bpy.context.view_layer.objects.active = part_obj + part_obj.select_set(True) + if angle_deg > 0: + try: + bpy.ops.object.shade_smooth_by_angle(angle=math.radians(angle_deg)) + except AttributeError: + bpy.ops.object.shade_smooth() + part_obj.data.use_auto_smooth = True + part_obj.data.auto_smooth_angle = math.radians(angle_deg) + else: + bpy.ops.object.shade_flat() + + +import re as _re + + +def _apply_rotation(parts, rx, ry, rz): + """Apply Euler XYZ rotation (degrees) to all parts by modifying matrix_world. + + Rotates around world origin, which equals the assembly centre because + _import_stl already centres parts there. Applied before material assignment + and camera/bbox calculations so everything downstream sees the final pose. 
+ """ + if not parts or (rx == 0.0 and ry == 0.0 and rz == 0.0): + return + from mathutils import Euler + rot_mat = Euler((math.radians(rx), math.radians(ry), math.radians(rz)), 'XYZ').to_matrix().to_4x4() + for p in parts: + p.matrix_world = rot_mat @ p.matrix_world + bpy.ops.object.select_all(action='DESELECT') + for p in parts: + p.select_set(True) + bpy.context.view_layer.objects.active = parts[0] + bpy.ops.object.transform_apply(location=False, rotation=True, scale=False) + print(f"[turntable_render] applied rotation ({rx}°, {ry}°, {rz}°) to {len(parts)} parts") + + +def _axis_rotation(axis: str, degrees: float) -> tuple: + """Map turntable axis name to Euler (x, y, z) rotation in radians.""" + rad = math.radians(degrees) + if axis == "world_x": + return (rad, 0.0, 0.0) + elif axis == "world_y": + return (0.0, rad, 0.0) + else: # "world_z" default + return (0.0, 0.0, rad) + + +def _set_fcurves_linear(action): + """Set LINEAR interpolation on all fcurves. + + Handles both the legacy Blender < 4.4 API (action.fcurves) and the new + Baklava layered-action API introduced in Blender 4.4 / 5.x + (action.layers[*].strips[*].channelbags[*].fcurves). + """ + try: + # New layered-action API (Blender 4.4+ / 5.x) + for layer in action.layers: + for strip in layer.strips: + for channelbag in strip.channelbags: + for fc in channelbag.fcurves: + for kp in fc.keyframe_points: + kp.interpolation = 'LINEAR' + except AttributeError: + # Legacy API (Blender < 4.4) + for fc in action.fcurves: + for kp in fc.keyframe_points: + kp.interpolation = 'LINEAR' + + +def _scale_mm_to_m(parts): + """Scale imported STL objects from mm to Blender metres (×0.001). + + STEP/STL coordinates are in mm; Blender's default unit is metres. + Without scaling a 50 mm part appears as 50 m inside Blender — way too large + relative to any template environment designed in metric units. + """ + if not parts: + return + bpy.ops.object.select_all(action='DESELECT') + for p in parts: + p.scale = (0.001, 0.001, 0.001) + p.location *= 0.001 + p.select_set(True) + bpy.context.view_layer.objects.active = parts[0] + bpy.ops.object.transform_apply(scale=True, location=False, rotation=False) + print(f"[turntable_render] scaled {len(parts)} parts mm→m (×0.001)") + + +def _import_stl(stl_file): + """Import STL into Blender, using per-part STLs if available. + + Checks for {stl_stem}_parts/manifest.json next to the STL file. + - Per-part mode: imports each part STL, names Blender object after STEP part name. + - Fallback: imports combined STL and splits by loose geometry. + + Returns list of Blender mesh objects, centred at origin. 
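+
+    Illustrative manifest shape (file and part names here are examples; only the
+    keys "parts", "file" and "name" are assumed by the reader below):
+        {"parts": [{"file": "part_000.stl", "name": "Housing"}, ...]}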
+ """ + stl_dir = os.path.dirname(stl_file) + stl_stem = os.path.splitext(os.path.basename(stl_file))[0] + parts_dir = os.path.join(stl_dir, stl_stem + "_parts") + manifest_path = os.path.join(parts_dir, "manifest.json") + + parts = [] + + if os.path.isfile(manifest_path): + # ── Per-part mode ──────────────────────────────────────────────── + try: + with open(manifest_path, "r") as f: + manifest = json.loads(f.read()) + part_entries = manifest.get("parts", []) + except Exception as e: + print(f"[turntable_render] WARNING: failed to read manifest: {e}") + part_entries = [] + + if part_entries: + for entry in part_entries: + part_file = os.path.join(parts_dir, entry["file"]) + part_name = entry["name"] + if not os.path.isfile(part_file): + print(f"[turntable_render] WARNING: part STL missing: {part_file}") + continue + + bpy.ops.object.select_all(action='DESELECT') + bpy.ops.wm.stl_import(filepath=part_file) + imported = bpy.context.selected_objects + if imported: + obj = imported[0] + obj.name = part_name + if obj.data: + obj.data.name = part_name + parts.append(obj) + + if parts: + print(f"[turntable_render] imported {len(parts)} named parts from per-part STLs") + + # ── Fallback: combined STL + separate by loose ─────────────────────── + if not parts: + bpy.ops.wm.stl_import(filepath=stl_file) + obj = bpy.context.selected_objects[0] if bpy.context.selected_objects else None + if obj is None: + print(f"ERROR: No objects imported from {stl_file}") + sys.exit(1) + + bpy.context.view_layer.objects.active = obj + bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY', center='BOUNDS') + obj.location = (0.0, 0.0, 0.0) + + bpy.ops.object.mode_set(mode='EDIT') + bpy.ops.mesh.separate(type='LOOSE') + bpy.ops.object.mode_set(mode='OBJECT') + + parts = list(bpy.context.selected_objects) + print(f"[turntable_render] fallback: separated into {len(parts)} part(s)") + return parts + + # ── Centre per-part imports at origin (combined bbox) ──────────────── + all_corners = [] + for p in parts: + all_corners.extend(p.matrix_world @ Vector(c) for c in p.bound_box) + + if all_corners: + mins = Vector((min(v.x for v in all_corners), + min(v.y for v in all_corners), + min(v.z for v in all_corners))) + maxs = Vector((max(v.x for v in all_corners), + max(v.y for v in all_corners), + max(v.z for v in all_corners))) + center = (mins + maxs) * 0.5 + for p in parts: + p.location -= center + + return parts + + +def _resolve_part_name(index, part_obj, part_names_ordered): + """Get the STEP part name for a Blender part by index. + + With per-part import, part_obj.name IS the STEP name (possibly with + Blender .NNN suffix). Falls back to part_names_ordered for combined-STL mode. + """ + base_name = _re.sub(r'\.\d{3}$', '', part_obj.name) + if part_names_ordered and index < len(part_names_ordered): + return part_names_ordered[index] + return base_name + + +def _apply_material_library(parts, mat_lib_path, mat_map, part_names_ordered=None): + """Append materials from library .blend and assign to parts via material_map. + + With per-part STL import, Blender objects are named after STEP parts, + so matching is by name (stripping Blender .NNN suffix for duplicates). + Falls back to part_names_ordered index-based matching for combined-STL mode. + + mat_map: {part_name_lower: material_name} + Parts without a match keep their current material. 
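+
+    Illustrative mat_map (keys are lower-cased STEP part names, values are
+    material datablock names in the library; names here are examples only):
+        {"housing": "steel_brushed", "seal ring": "rubber_black"}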
+ """ + if not mat_lib_path or not os.path.isfile(mat_lib_path): + print(f"[turntable_render] material library not found: {mat_lib_path}") + return + + # Collect unique material names needed + needed = set(mat_map.values()) + if not needed: + return + + # Append materials from library + appended = {} + for mat_name in needed: + inner_path = f"{mat_lib_path}/Material/{mat_name}" + try: + bpy.ops.wm.append( + filepath=inner_path, + directory=f"{mat_lib_path}/Material/", + filename=mat_name, + link=False, + ) + if mat_name in bpy.data.materials: + appended[mat_name] = bpy.data.materials[mat_name] + print(f"[turntable_render] appended material: {mat_name}") + else: + print(f"[turntable_render] WARNING: material '{mat_name}' not found after append") + except Exception as exc: + print(f"[turntable_render] WARNING: failed to append material '{mat_name}': {exc}") + + if not appended: + return + + # Assign materials to parts — primary: name-based (per-part STL mode), + # secondary: index-based via part_names_ordered (combined STL fallback) + assigned_count = 0 + for i, part in enumerate(parts): + # Try name-based matching first (strip Blender .NNN suffix) + base_name = _re.sub(r'\.\d{3}$', '', part.name) + part_key = base_name.lower().strip() + mat_name = mat_map.get(part_key) + + # Fall back to index-based matching via part_names_ordered + if not mat_name and part_names_ordered and i < len(part_names_ordered): + step_name = part_names_ordered[i] + part_key = step_name.lower().strip() + mat_name = mat_map.get(part_key) + + if mat_name and mat_name in appended: + part.data.materials.clear() + part.data.materials.append(appended[mat_name]) + assigned_count += 1 + print(f"[turntable_render] assigned '{mat_name}' to part '{part.name}'") + + print(f"[turntable_render] material assignment: {assigned_count}/{len(parts)} parts matched") + + +def main(): + argv = sys.argv + # Everything after "--" is our args + args = argv[argv.index("--") + 1:] + + stl_path = args[0] + frames_dir = args[1] + frame_count = int(args[2]) + degrees = int(args[3]) + width = int(args[4]) + height = int(args[5]) + engine = args[6] + samples = int(args[7]) + part_colors_json = args[8] if len(args) > 8 else "{}" + + # Template + material library args (passed by schaeffler-turntable.js) + template_path = args[9] if len(args) > 9 and args[9] else "" + target_collection = args[10] if len(args) > 10 else "Product" + material_library_path = args[11] if len(args) > 11 and args[11] else "" + material_map_raw = args[12] if len(args) > 12 else "{}" + part_names_ordered_raw = args[13] if len(args) > 13 else "[]" + lighting_only = args[14] == "1" if len(args) > 14 else False + cycles_device = args[15].lower() if len(args) > 15 else "auto" # "auto", "gpu", "cpu" + shadow_catcher = args[16] == "1" if len(args) > 16 else False + rotation_x = float(args[17]) if len(args) > 17 else 0.0 + rotation_y = float(args[18]) if len(args) > 18 else 0.0 + rotation_z = float(args[19]) if len(args) > 19 else 0.0 + turntable_axis = args[20] if len(args) > 20 else "world_z" + bg_color = args[21] if len(args) > 21 else "" + transparent_bg = args[22] == "1" if len(args) > 22 else False + + os.makedirs(frames_dir, exist_ok=True) + + try: + part_colors = json.loads(part_colors_json) + except json.JSONDecodeError: + part_colors = {} + + try: + material_map = json.loads(material_map_raw) if material_map_raw else {} + except json.JSONDecodeError: + material_map = {} + + try: + part_names_ordered = json.loads(part_names_ordered_raw) if part_names_ordered_raw else [] + 
except json.JSONDecodeError: + part_names_ordered = [] + + # Validate template path: if provided it MUST exist on disk. + if template_path and not os.path.isfile(template_path): + print(f"[turntable_render] ERROR: template_path was provided but file not found: {template_path}") + print("[turntable_render] Ensure the blend-templates directory is accessible on this worker.") + sys.exit(1) + + use_template = bool(template_path) + + print(f"[turntable_render] engine={engine}, samples={samples}, size={width}x{height}, " + f"frames={frame_count}, degrees={degrees}") + print(f"[turntable_render] part_names_ordered: {len(part_names_ordered)} entries") + if use_template: + print(f"[turntable_render] template={template_path}, collection={target_collection}, lighting_only={lighting_only}") + else: + print("[turntable_render] no template — using factory settings (Mode A)") + if material_library_path: + print(f"[turntable_render] material_library={material_library_path}, material_map keys={list(material_map.keys())}") + + # ── SCENE SETUP ────────────────────────────────────────────────────────── + + if use_template: + # ── MODE B: Template-based render ──────────────────────────────────── + print(f"[turntable_render] Opening template: {template_path}") + bpy.ops.wm.open_mainfile(filepath=template_path) + + # Find or create target collection + target_col = _ensure_collection(target_collection) + + # Import and split STL + parts = _import_stl(stl_path) + # Scale mm→m: STEP coords are mm, Blender default unit is metres + _scale_mm_to_m(parts) + # Apply render position rotation before material/camera setup + _apply_rotation(parts, rotation_x, rotation_y, rotation_z) + + # Move imported parts into target collection + for part in parts: + for col in list(part.users_collection): + col.objects.unlink(part) + target_col.objects.link(part) + + # Apply smooth shading + for part in parts: + _apply_smooth(part, SMOOTH_ANGLE) + + # Material assignment: library materials if available, otherwise palette + if material_library_path and material_map: + mat_map_lower = {k.lower(): v for k, v in material_map.items()} + _apply_material_library(parts, material_library_path, mat_map_lower, part_names_ordered) + # Parts not matched by library get palette fallback + for i, part in enumerate(parts): + if not part.data.materials or len(part.data.materials) == 0: + _assign_palette_material(part, i) + else: + for i, part in enumerate(parts): + step_name = _resolve_part_name(i, part, part_names_ordered) + color_hex = part_colors.get(step_name) + if not color_hex: + _assign_palette_material(part, i) + + # ── Shadow catcher (Cycles only, template mode only) ───────────────── + if shadow_catcher: + sc_col_name = "Shadowcatcher" + sc_obj_name = "Shadowcatcher" + for vl in bpy.context.scene.view_layers: + def _enable_col_recursive(layer_col): + if layer_col.collection.name == sc_col_name: + layer_col.exclude = False + layer_col.collection.hide_render = False + layer_col.collection.hide_viewport = False + return True + for child in layer_col.children: + if _enable_col_recursive(child): + return True + return False + _enable_col_recursive(vl.layer_collection) + + sc_obj = bpy.data.objects.get(sc_obj_name) + if sc_obj: + all_world_z = [] + for part in parts: + for corner in part.bound_box: + all_world_z.append((part.matrix_world @ Vector(corner)).z) + if all_world_z: + sc_obj.location.z = min(all_world_z) + print(f"[turntable_render] shadow catcher enabled, plane Z={sc_obj.location.z:.4f}") + else: + print(f"[turntable_render] WARNING: 
shadow catcher object '{sc_obj_name}' not found in template") + + # lighting_only: always use auto-framing; normal template: use camera if present + needs_auto_camera = (lighting_only and not shadow_catcher) or not bpy.context.scene.camera + if lighting_only and not shadow_catcher: + print("[turntable_render] lighting_only mode: using template World/HDRI, forcing auto-camera") + elif needs_auto_camera: + print("[turntable_render] WARNING: template has no camera — will create auto-camera") + + # Set very close near clip on template camera for mm-scale parts (now in metres) + if not needs_auto_camera and bpy.context.scene.camera: + bpy.context.scene.camera.data.clip_start = 0.001 + + print(f"[turntable_render] template mode: {len(parts)} parts imported into collection '{target_collection}'") + + else: + # ── MODE A: Factory settings ───────────────────────────────────────── + needs_auto_camera = True + bpy.ops.wm.read_factory_settings(use_empty=True) + + parts = _import_stl(stl_path) + # Scale mm→m: STEP coords are mm, Blender default unit is metres + _scale_mm_to_m(parts) + # Apply render position rotation before material/camera setup + _apply_rotation(parts, rotation_x, rotation_y, rotation_z) + + for i, part in enumerate(parts): + _apply_smooth(part, SMOOTH_ANGLE) + + # Material assignment: library materials if available, else part_colors/palette + if material_library_path and material_map: + mat_map_lower = {k.lower(): v for k, v in material_map.items()} + _apply_material_library(parts, material_library_path, mat_map_lower, part_names_ordered) + # Palette fallback for unmatched parts + for i, part in enumerate(parts): + if not part.data.materials or len(part.data.materials) == 0: + _assign_palette_material(part, i) + else: + # part_colors or palette — use index-based lookup via part_names_ordered + for i, part in enumerate(parts): + step_name = _resolve_part_name(i, part, part_names_ordered) + color_hex = part_colors.get(step_name) + if color_hex: + mat = bpy.data.materials.new(name=f"mat_{part.name}") + mat.use_nodes = True + bsdf = mat.node_tree.nodes.get("Principled BSDF") + if bsdf: + color = _hex_to_linear(color_hex) + bsdf.inputs["Base Color"].default_value = color + bsdf.inputs["Metallic"].default_value = 0.35 + bsdf.inputs["Roughness"].default_value = 0.40 + try: + bsdf.inputs["Specular IOR Level"].default_value = 0.5 + except KeyError: + pass + part.data.materials.clear() + part.data.materials.append(mat) + else: + _assign_palette_material(part, i) + + if needs_auto_camera: + # ── Combined bounding box / bounding sphere ────────────────────────── + all_corners = [] + for part in parts: + all_corners.extend(part.matrix_world @ Vector(c) for c in part.bound_box) + + bbox_min = Vector(( + min(v.x for v in all_corners), + min(v.y for v in all_corners), + min(v.z for v in all_corners), + )) + bbox_max = Vector(( + max(v.x for v in all_corners), + max(v.y for v in all_corners), + max(v.z for v in all_corners), + )) + + bbox_center = (bbox_min + bbox_max) * 0.5 + bbox_dims = bbox_max - bbox_min + bsphere_radius = max(bbox_dims.length * 0.5, 0.001) + + print(f"[turntable_render] bbox_dims={tuple(round(d, 4) for d in bbox_dims)}, " + f"bsphere_radius={bsphere_radius:.4f}") + + # ── Lighting — only in Mode A (factory settings) ───────────────────── + # In template mode the .blend file provides its own World/HDRI lighting. + # Adding auto-lights would overpower the template's intended look. 
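+        # Same empirical scaling as in still_render.py: the sun sits about 6
+        # bounding-sphere radii away and the area fill's energy grows with
+        # radius² (clamped by the max() calls below).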
+ if not use_template: + light_dist = bsphere_radius * 6.0 + + bpy.ops.object.light_add(type='SUN', location=( + bbox_center.x + light_dist * 0.5, + bbox_center.y - light_dist * 0.35, + bbox_center.z + light_dist, + )) + sun = bpy.context.active_object + sun.data.energy = 4.0 + sun.rotation_euler = (math.radians(45), 0, math.radians(30)) + + bpy.ops.object.light_add(type='AREA', location=( + bbox_center.x - light_dist * 0.4, + bbox_center.y + light_dist * 0.4, + bbox_center.z + light_dist * 0.7, + )) + fill = bpy.context.active_object + fill.data.energy = max(800.0, bsphere_radius ** 2 * 2000.0) + fill.data.size = max(4.0, bsphere_radius * 4.0) + + # ── Camera ─────────────────────────────────────────────────────────── + cam_dist = bsphere_radius * 2.5 + cam_location = Vector(( + bbox_center.x + cam_dist, + bbox_center.y, + bbox_center.z + bsphere_radius * 0.5, + )) + bpy.ops.object.camera_add(location=cam_location) + camera = bpy.context.active_object + bpy.context.scene.camera = camera + camera.data.clip_start = max(cam_dist * 0.001, 0.0001) + camera.data.clip_end = cam_dist * 10.0 + + # Track-to constraint for look-at + empty = bpy.data.objects.new("target", None) + bpy.context.collection.objects.link(empty) + empty.location = bbox_center + + track = camera.constraints.new(type='TRACK_TO') + track.target = empty + track.track_axis = 'TRACK_NEGATIVE_Z' + track.up_axis = 'UP_Y' + + # ── World background — only in Mode A ─────────────────────────────── + # In template mode the .blend file owns its World (HDRI, sky texture, + # studio lighting). Overwriting it would destroy the HDR look. + if not use_template: + world = bpy.data.worlds.new("World") + bpy.context.scene.world = world + world.use_nodes = True + bg = world.node_tree.nodes["Background"] + bg.inputs["Color"].default_value = (0.96, 0.96, 0.97, 1.0) + bg.inputs["Strength"].default_value = 0.15 + + # ── Turntable pivot ────────────────────────────────────────────────── + pivot = bpy.data.objects.new("pivot", None) + bpy.context.collection.objects.link(pivot) + pivot.location = bbox_center + + # Parent camera to pivot + camera.parent = pivot + camera.location = (cam_dist, 0, bsphere_radius * 0.5) + + # Keyframe pivot rotation + scene = bpy.context.scene + scene.frame_start = 1 + scene.frame_end = frame_count + + pivot.rotation_euler = (0, 0, 0) + pivot.keyframe_insert(data_path="rotation_euler", frame=1) + pivot.rotation_euler = _axis_rotation(turntable_axis, degrees) + pivot.keyframe_insert(data_path="rotation_euler", frame=frame_count + 1) + + # Linear interpolation — frame N+1 is never rendered, giving N uniform steps + _set_fcurves_linear(pivot.animation_data.action) + + else: + # Template has camera — set up turntable on the model parts instead + scene = bpy.context.scene + scene.frame_start = 1 + scene.frame_end = frame_count + + # Calculate model center for pivot + all_corners = [] + for part in parts: + all_corners.extend(part.matrix_world @ Vector(c) for c in part.bound_box) + + bbox_center = Vector(( + (min(v.x for v in all_corners) + max(v.x for v in all_corners)) * 0.5, + (min(v.y for v in all_corners) + max(v.y for v in all_corners)) * 0.5, + (min(v.z for v in all_corners) + max(v.z for v in all_corners)) * 0.5, + )) + + # Create a pivot empty and parent all parts to it + pivot = bpy.data.objects.new("turntable_pivot", None) + bpy.context.collection.objects.link(pivot) + pivot.location = bbox_center + + for part in parts: + part.parent = pivot + + # Keyframe pivot rotation + pivot.rotation_euler = (0, 0, 0) + 
pivot.keyframe_insert(data_path="rotation_euler", frame=1) + pivot.rotation_euler = _axis_rotation(turntable_axis, degrees) + pivot.keyframe_insert(data_path="rotation_euler", frame=frame_count + 1) + + # Linear interpolation — frame N+1 is never rendered, giving N uniform steps + _set_fcurves_linear(pivot.animation_data.action) + + # ── Colour management ──────────────────────────────────────────────────── + # In template mode the .blend file owns its colour management settings. + # Overwriting them would destroy the intended HDR/tonemapping look. + # In factory-settings mode force Standard to avoid the grey Filmic tint. + scene = bpy.context.scene + if not use_template: + scene.view_settings.view_transform = 'Standard' + scene.view_settings.exposure = 0.0 + scene.view_settings.gamma = 1.0 + try: + scene.view_settings.look = 'None' + except Exception: + pass + + # ── Render engine ──────────────────────────────────────────────────────── + if engine == "eevee": + eevee_ok = False + for eevee_id in ('BLENDER_EEVEE', 'BLENDER_EEVEE_NEXT'): + try: + scene.render.engine = eevee_id + eevee_ok = True + print(f"[turntable_render] EEVEE engine id: {eevee_id}") + break + except TypeError: + continue + if eevee_ok: + for attr in ('taa_render_samples', 'samples'): + try: + setattr(scene.eevee, attr, samples) + break + except AttributeError: + continue + else: + print("[turntable_render] WARNING: EEVEE not available, falling back to Cycles") + engine = "cycles" + + if engine != "eevee": + scene.render.engine = 'CYCLES' + scene.cycles.samples = samples + scene.cycles.use_denoising = True + scene.cycles.denoiser = 'OPENIMAGEDENOISE' # GPU-accelerated when CUDA/OptiX active + # Device selection: "cpu" forces CPU, "gpu" forces GPU (warns if unavailable), + # "auto" (default) tries GPU first and falls back to CPU. + print(f"[turntable_render] cycles_device={cycles_device}") + gpu_found = False + if cycles_device != "cpu": + try: + cycles_prefs = bpy.context.preferences.addons['cycles'].preferences + for device_type in ('OPTIX', 'CUDA', 'HIP', 'ONEAPI'): + try: + cycles_prefs.compute_device_type = device_type + cycles_prefs.get_devices() + gpu_devs = [d for d in cycles_prefs.devices if d.type != 'CPU'] + if gpu_devs: + for d in gpu_devs: + d.use = True + scene.cycles.device = 'GPU' + gpu_found = True + print(f"[turntable_render] Cycles GPU ({device_type})") + break + except Exception: + continue + except Exception: + pass + if not gpu_found: + scene.cycles.device = 'CPU' + print("[turntable_render] WARNING: GPU not found — falling back to CPU") + + # ── Render settings ────────────────────────────────────────────────────── + scene.render.resolution_x = width + scene.render.resolution_y = height + scene.render.resolution_percentage = 100 + scene.render.image_settings.file_format = 'PNG' + + # ── Transparent background ──────────────────────────────────────────────── + # bg_color compositing is handled by FFmpeg in the compose-video task. + # Blender renders transparent PNG frames when bg_color is set. + if bg_color or transparent_bg: + scene.render.film_transparent = True + if bg_color: + print(f"[turntable_render] film_transparent=True for FFmpeg bg_color compositing ({bg_color})") + else: + print("[turntable_render] transparent_bg enabled (alpha PNG frames)") + + # ── Render all frames ──────────────────────────────────────────────────── + # Per-frame loop with write_still=True. 
In a single Blender session, + # Cycles keeps the GPU scene (BVH, textures, material graph) loaded + # between frames — only the animated pivot transform is updated each step. + # bpy.ops.render.render(animation=True) does NOT work reliably in + # background mode after wm.open_mainfile() in Blender 5.x (silently + # writes no files), so we use the explicit per-frame approach. + import time as _time + _render_start = _time.time() + for frame in range(1, frame_count + 1): + scene.frame_set(frame) + scene.render.filepath = os.path.join(frames_dir, f"frame_{frame:04d}") + bpy.ops.render.render(write_still=True) + elapsed = _time.time() - _render_start + fps_so_far = frame / elapsed + print(f"[turntable_render] Frame {frame}/{frame_count} — {elapsed:.1f}s elapsed ({fps_so_far:.2f} fps)") + + total = _time.time() - _render_start + print(f"[turntable_render] Turntable render complete: {frame_count} frames in {total:.1f}s ({frame_count/total:.2f} fps avg)") + + +if __name__ == "__main__": + main() diff --git a/flamenco/scripts/turntable_setup.py b/flamenco/scripts/turntable_setup.py new file mode 100644 index 0000000..d17726a --- /dev/null +++ b/flamenco/scripts/turntable_setup.py @@ -0,0 +1,688 @@ +"""Blender Python script: scene setup for turntable animation (Flamenco). + +Performs all scene preparation — STL import, materials, camera, pivot animation, +compositor — then SAVES the resulting .blend file to . + +The saved .blend is then rendered by a separate Flamenco task: + blender --background --python turntable_gpu_setup.py -a + +Using Blender's native -a (--render-anim) keeps the GPU scene (BVH, textures) +loaded for ALL frames in one process, avoiding per-frame GPU re-upload overhead. + +Usage (from Blender): + blender --background --python turntable_setup.py -- \\ + \\ + \\ + [template_path] [target_collection] [material_library_path] \\ + [material_map_json] [part_names_ordered_json] [lighting_only] \\ + [cycles_device] [shadow_catcher] [rotation_x] [rotation_y] [rotation_z] \\ + [turntable_axis] [bg_color] [transparent_bg] [scene_path] [camera_orbit] +""" +import bpy +import sys +import os +import json +import math +from mathutils import Vector, Matrix + +# ── Colour palette ──────────────────────────────────────────────────────────── +PALETTE_HEX = [ + "#4C9BE8", "#E85B4C", "#4CBE72", "#E8A84C", "#A04CE8", + "#4CD4E8", "#E84CA8", "#7EC850", "#E86B30", "#5088C8", +] + +def _srgb_to_linear(c: int) -> float: + v = c / 255.0 + return v / 12.92 if v <= 0.04045 else ((v + 0.055) / 1.055) ** 2.4 + +def _hex_to_linear(hex_color: str) -> tuple: + h = hex_color.lstrip('#') + return ( + _srgb_to_linear(int(h[0:2], 16)), + _srgb_to_linear(int(h[2:4], 16)), + _srgb_to_linear(int(h[4:6], 16)), + 1.0, + ) + +PALETTE_LINEAR = [_hex_to_linear(h) for h in PALETTE_HEX] +SMOOTH_ANGLE = 30 + + +# ── Helpers (kept in sync with turntable_render.py) ────────────────────────── + +def _ensure_collection(name: str): + if name in bpy.data.collections: + return bpy.data.collections[name] + col = bpy.data.collections.new(name) + bpy.context.scene.collection.children.link(col) + return col + + +def _assign_palette_material(part_obj, index): + color = PALETTE_LINEAR[index % len(PALETTE_LINEAR)] + mat = bpy.data.materials.new(name=f"Part_{index}") + mat.use_nodes = True + bsdf = mat.node_tree.nodes.get("Principled BSDF") + if bsdf: + bsdf.inputs["Base Color"].default_value = color + bsdf.inputs["Metallic"].default_value = 0.35 + bsdf.inputs["Roughness"].default_value = 0.40 + try: + bsdf.inputs["Specular IOR 
Level"].default_value = 0.5 + except KeyError: + pass + part_obj.data.materials.clear() + part_obj.data.materials.append(mat) + + +def _apply_smooth(part_obj, angle_deg): + bpy.context.view_layer.objects.active = part_obj + part_obj.select_set(True) + if angle_deg > 0: + try: + bpy.ops.object.shade_smooth_by_angle(angle=math.radians(angle_deg)) + except AttributeError: + bpy.ops.object.shade_smooth() + part_obj.data.use_auto_smooth = True + part_obj.data.auto_smooth_angle = math.radians(angle_deg) + else: + bpy.ops.object.shade_flat() + + +import re as _re + + +def _apply_rotation(parts, rx, ry, rz): + if not parts or (rx == 0.0 and ry == 0.0 and rz == 0.0): + return + from mathutils import Euler + rot_mat = Euler((math.radians(rx), math.radians(ry), math.radians(rz)), 'XYZ').to_matrix().to_4x4() + for p in parts: + p.matrix_world = rot_mat @ p.matrix_world + bpy.ops.object.select_all(action='DESELECT') + for p in parts: + p.select_set(True) + bpy.context.view_layer.objects.active = parts[0] + bpy.ops.object.transform_apply(location=False, rotation=True, scale=False) + print(f"[turntable_setup] applied rotation ({rx}°, {ry}°, {rz}°) to {len(parts)} parts") + + +def _axis_rotation(axis: str, degrees: float) -> tuple: + rad = math.radians(degrees) + if axis == "world_x": + return (rad, 0.0, 0.0) + elif axis == "world_y": + return (0.0, rad, 0.0) + else: + return (0.0, 0.0, rad) + + +def _set_fcurves_linear(action): + try: + for layer in action.layers: + for strip in layer.strips: + for channelbag in strip.channelbags: + for fc in channelbag.fcurves: + for kp in fc.keyframe_points: + kp.interpolation = 'LINEAR' + except AttributeError: + for fc in action.fcurves: + for kp in fc.keyframe_points: + kp.interpolation = 'LINEAR' + + +def _scale_mm_to_m(parts): + if not parts: + return + bpy.ops.object.select_all(action='DESELECT') + for p in parts: + p.scale = (0.001, 0.001, 0.001) + p.location *= 0.001 + p.select_set(True) + bpy.context.view_layer.objects.active = parts[0] + bpy.ops.object.transform_apply(scale=True, location=False, rotation=False) + print(f"[turntable_setup] scaled {len(parts)} parts mm→m (×0.001)") + + +def _import_stl(stl_file): + stl_dir = os.path.dirname(stl_file) + stl_stem = os.path.splitext(os.path.basename(stl_file))[0] + parts_dir = os.path.join(stl_dir, stl_stem + "_parts") + manifest_path = os.path.join(parts_dir, "manifest.json") + + parts = [] + + if os.path.isfile(manifest_path): + try: + with open(manifest_path, "r") as f: + manifest = json.loads(f.read()) + part_entries = manifest.get("parts", []) + except Exception as e: + print(f"[turntable_setup] WARNING: failed to read manifest: {e}") + part_entries = [] + + if part_entries: + for entry in part_entries: + part_file = os.path.join(parts_dir, entry["file"]) + part_name = entry["name"] + if not os.path.isfile(part_file): + print(f"[turntable_setup] WARNING: part STL missing: {part_file}") + continue + bpy.ops.object.select_all(action='DESELECT') + bpy.ops.wm.stl_import(filepath=part_file) + imported = bpy.context.selected_objects + if imported: + obj = imported[0] + obj.name = part_name + if obj.data: + obj.data.name = part_name + parts.append(obj) + + if parts: + print(f"[turntable_setup] imported {len(parts)} named parts from per-part STLs") + + if not parts: + bpy.ops.wm.stl_import(filepath=stl_file) + obj = bpy.context.selected_objects[0] if bpy.context.selected_objects else None + if obj is None: + print(f"ERROR: No objects imported from {stl_file}") + sys.exit(1) + + bpy.context.view_layer.objects.active = 
obj + bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY', center='BOUNDS') + obj.location = (0.0, 0.0, 0.0) + bpy.ops.object.mode_set(mode='EDIT') + bpy.ops.mesh.separate(type='LOOSE') + bpy.ops.object.mode_set(mode='OBJECT') + parts = list(bpy.context.selected_objects) + print(f"[turntable_setup] fallback: separated into {len(parts)} part(s)") + return parts + + all_corners = [] + for p in parts: + all_corners.extend(p.matrix_world @ Vector(c) for c in p.bound_box) + + if all_corners: + mins = Vector((min(v.x for v in all_corners), + min(v.y for v in all_corners), + min(v.z for v in all_corners))) + maxs = Vector((max(v.x for v in all_corners), + max(v.y for v in all_corners), + max(v.z for v in all_corners))) + center = (mins + maxs) * 0.5 + for p in parts: + p.location -= center + + return parts + + +def _resolve_part_name(index, part_obj, part_names_ordered): + base_name = _re.sub(r'\.\d{3}$', '', part_obj.name) + if part_names_ordered and index < len(part_names_ordered): + return part_names_ordered[index] + return base_name + + +def _apply_material_library(parts, mat_lib_path, mat_map, part_names_ordered=None): + if not mat_lib_path or not os.path.isfile(mat_lib_path): + print(f"[turntable_setup] material library not found: {mat_lib_path}") + return + + needed = set(mat_map.values()) + if not needed: + return + + appended = {} + for mat_name in needed: + inner_path = f"{mat_lib_path}/Material/{mat_name}" + try: + bpy.ops.wm.append( + filepath=inner_path, + directory=f"{mat_lib_path}/Material/", + filename=mat_name, + link=False, + ) + if mat_name in bpy.data.materials: + appended[mat_name] = bpy.data.materials[mat_name] + print(f"[turntable_setup] appended material: {mat_name}") + else: + print(f"[turntable_setup] WARNING: material '{mat_name}' not found after append") + except Exception as exc: + print(f"[turntable_setup] WARNING: failed to append material '{mat_name}': {exc}") + + if not appended: + return + + assigned_count = 0 + for i, part in enumerate(parts): + base_name = _re.sub(r'\.\d{3}$', '', part.name) + part_key = base_name.lower().strip() + mat_name = mat_map.get(part_key) + + if not mat_name and part_names_ordered and i < len(part_names_ordered): + step_name = part_names_ordered[i] + part_key = step_name.lower().strip() + mat_name = mat_map.get(part_key) + + if mat_name and mat_name in appended: + part.data.materials.clear() + part.data.materials.append(appended[mat_name]) + assigned_count += 1 + print(f"[turntable_setup] assigned '{mat_name}' to part '{part.name}'") + + print(f"[turntable_setup] material assignment: {assigned_count}/{len(parts)} parts matched") + + +def main(): + argv = sys.argv + args = argv[argv.index("--") + 1:] + + stl_path = args[0] + frames_dir = args[1] + frame_count = int(args[2]) + degrees = int(args[3]) + width = int(args[4]) + height = int(args[5]) + engine = args[6] + samples = int(args[7]) + part_colors_json = args[8] if len(args) > 8 else "{}" + template_path = args[9] if len(args) > 9 and args[9] else "" + target_collection = args[10] if len(args) > 10 else "Product" + material_library_path = args[11] if len(args) > 11 and args[11] else "" + material_map_raw = args[12] if len(args) > 12 else "{}" + part_names_ordered_raw = args[13] if len(args) > 13 else "[]" + lighting_only = args[14] == "1" if len(args) > 14 else False + cycles_device = args[15].lower() if len(args) > 15 else "auto" + shadow_catcher = args[16] == "1" if len(args) > 16 else False + rotation_x = float(args[17]) if len(args) > 17 else 0.0 + rotation_y = float(args[18]) if 
len(args) > 18 else 0.0 + rotation_z = float(args[19]) if len(args) > 19 else 0.0 + turntable_axis = args[20] if len(args) > 20 else "world_z" + bg_color = args[21] if len(args) > 21 else "" + transparent_bg = args[22] == "1" if len(args) > 22 else False + scene_path = args[23] if len(args) > 23 else os.path.join(os.path.dirname(frames_dir), "scene.blend") + camera_orbit = args[24] != "0" if len(args) > 24 else True + noise_threshold_arg = args[25] if len(args) > 25 else "" + denoiser_arg = args[26] if len(args) > 26 else "" + denoising_input_passes_arg = args[27] if len(args) > 27 else "" + denoising_prefilter_arg = args[28] if len(args) > 28 else "" + denoising_quality_arg = args[29] if len(args) > 29 else "" + denoising_use_gpu_arg = args[30] if len(args) > 30 else "" + + os.makedirs(frames_dir, exist_ok=True) + os.makedirs(os.path.dirname(scene_path), exist_ok=True) + + try: + part_colors = json.loads(part_colors_json) + except json.JSONDecodeError: + part_colors = {} + + try: + material_map = json.loads(material_map_raw) if material_map_raw else {} + except json.JSONDecodeError: + material_map = {} + + try: + part_names_ordered = json.loads(part_names_ordered_raw) if part_names_ordered_raw else [] + except json.JSONDecodeError: + part_names_ordered = [] + + if template_path and not os.path.isfile(template_path): + print(f"[turntable_setup] ERROR: template_path not found: {template_path}") + sys.exit(1) + + use_template = bool(template_path) + + print(f"[turntable_setup] engine={engine}, samples={samples}, size={width}x{height}, " + f"frames={frame_count}, degrees={degrees}") + print(f"[turntable_setup] part_names_ordered: {len(part_names_ordered)} entries") + if use_template: + print(f"[turntable_setup] template={template_path}, collection={target_collection}, lighting_only={lighting_only}") + else: + print("[turntable_setup] no template — using factory settings (Mode A)") + if material_library_path: + print(f"[turntable_setup] material_library={material_library_path}, material_map keys={list(material_map.keys())}") + + # ── SCENE SETUP ────────────────────────────────────────────────────────── + + if use_template: + print(f"[turntable_setup] Opening template: {template_path}") + bpy.ops.wm.open_mainfile(filepath=template_path) + + target_col = _ensure_collection(target_collection) + parts = _import_stl(stl_path) + _scale_mm_to_m(parts) + _apply_rotation(parts, rotation_x, rotation_y, rotation_z) + + for part in parts: + for col in list(part.users_collection): + col.objects.unlink(part) + target_col.objects.link(part) + + for part in parts: + _apply_smooth(part, SMOOTH_ANGLE) + + if material_library_path and material_map: + mat_map_lower = {k.lower(): v for k, v in material_map.items()} + _apply_material_library(parts, material_library_path, mat_map_lower, part_names_ordered) + for i, part in enumerate(parts): + if not part.data.materials or len(part.data.materials) == 0: + _assign_palette_material(part, i) + else: + for i, part in enumerate(parts): + step_name = _resolve_part_name(i, part, part_names_ordered) + color_hex = part_colors.get(step_name) + if not color_hex: + _assign_palette_material(part, i) + + if shadow_catcher: + sc_col_name = "Shadowcatcher" + sc_obj_name = "Shadowcatcher" + for vl in bpy.context.scene.view_layers: + def _enable_col_recursive(layer_col): + if layer_col.collection.name == sc_col_name: + layer_col.exclude = False + layer_col.collection.hide_render = False + layer_col.collection.hide_viewport = False + return True + for child in layer_col.children: + if 
_enable_col_recursive(child): + return True + return False + _enable_col_recursive(vl.layer_collection) + + sc_obj = bpy.data.objects.get(sc_obj_name) + if sc_obj: + all_world_z = [] + for part in parts: + for corner in part.bound_box: + all_world_z.append((part.matrix_world @ Vector(corner)).z) + if all_world_z: + sc_obj.location.z = min(all_world_z) + print(f"[turntable_setup] shadow catcher enabled, plane Z={sc_obj.location.z:.4f}") + else: + print(f"[turntable_setup] WARNING: shadow catcher object '{sc_obj_name}' not found") + + needs_auto_camera = (lighting_only and not shadow_catcher) or not bpy.context.scene.camera + if not needs_auto_camera and bpy.context.scene.camera: + bpy.context.scene.camera.data.clip_start = 0.001 + + print(f"[turntable_setup] template mode: {len(parts)} parts imported into '{target_collection}'") + + else: + needs_auto_camera = True + bpy.ops.wm.read_factory_settings(use_empty=True) + + parts = _import_stl(stl_path) + _scale_mm_to_m(parts) + _apply_rotation(parts, rotation_x, rotation_y, rotation_z) + + for i, part in enumerate(parts): + _apply_smooth(part, SMOOTH_ANGLE) + + if material_library_path and material_map: + mat_map_lower = {k.lower(): v for k, v in material_map.items()} + _apply_material_library(parts, material_library_path, mat_map_lower, part_names_ordered) + for i, part in enumerate(parts): + if not part.data.materials or len(part.data.materials) == 0: + _assign_palette_material(part, i) + else: + for i, part in enumerate(parts): + step_name = _resolve_part_name(i, part, part_names_ordered) + color_hex = part_colors.get(step_name) + if color_hex: + mat = bpy.data.materials.new(name=f"mat_{part.name}") + mat.use_nodes = True + bsdf = mat.node_tree.nodes.get("Principled BSDF") + if bsdf: + color = _hex_to_linear(color_hex) + bsdf.inputs["Base Color"].default_value = color + bsdf.inputs["Metallic"].default_value = 0.35 + bsdf.inputs["Roughness"].default_value = 0.40 + try: + bsdf.inputs["Specular IOR Level"].default_value = 0.5 + except KeyError: + pass + part.data.materials.clear() + part.data.materials.append(mat) + else: + _assign_palette_material(part, i) + + if needs_auto_camera: + all_corners = [] + for part in parts: + all_corners.extend(part.matrix_world @ Vector(c) for c in part.bound_box) + + bbox_min = Vector((min(v.x for v in all_corners), min(v.y for v in all_corners), min(v.z for v in all_corners))) + bbox_max = Vector((max(v.x for v in all_corners), max(v.y for v in all_corners), max(v.z for v in all_corners))) + bbox_center = (bbox_min + bbox_max) * 0.5 + bbox_dims = bbox_max - bbox_min + bsphere_radius = max(bbox_dims.length * 0.5, 0.001) + + print(f"[turntable_setup] bbox_dims={tuple(round(d, 4) for d in bbox_dims)}, bsphere_radius={bsphere_radius:.4f}") + + if not use_template: + light_dist = bsphere_radius * 6.0 + bpy.ops.object.light_add(type='SUN', location=( + bbox_center.x + light_dist * 0.5, + bbox_center.y - light_dist * 0.35, + bbox_center.z + light_dist, + )) + sun = bpy.context.active_object + sun.data.energy = 4.0 + sun.rotation_euler = (math.radians(45), 0, math.radians(30)) + + bpy.ops.object.light_add(type='AREA', location=( + bbox_center.x - light_dist * 0.4, + bbox_center.y + light_dist * 0.4, + bbox_center.z + light_dist * 0.7, + )) + fill = bpy.context.active_object + fill.data.energy = max(800.0, bsphere_radius ** 2 * 2000.0) + fill.data.size = max(4.0, bsphere_radius * 4.0) + + cam_dist = bsphere_radius * 2.5 + cam_location = Vector((bbox_center.x + cam_dist, bbox_center.y, bbox_center.z + bsphere_radius * 
0.5)) + bpy.ops.object.camera_add(location=cam_location) + camera = bpy.context.active_object + bpy.context.scene.camera = camera + camera.data.clip_start = max(cam_dist * 0.001, 0.0001) + camera.data.clip_end = cam_dist * 10.0 + + empty = bpy.data.objects.new("target", None) + bpy.context.collection.objects.link(empty) + empty.location = bbox_center + + track = camera.constraints.new(type='TRACK_TO') + track.target = empty + track.track_axis = 'TRACK_NEGATIVE_Z' + track.up_axis = 'UP_Y' + + if not use_template: + world = bpy.data.worlds.new("World") + bpy.context.scene.world = world + world.use_nodes = True + bg = world.node_tree.nodes["Background"] + bg.inputs["Color"].default_value = (0.96, 0.96, 0.97, 1.0) + bg.inputs["Strength"].default_value = 0.15 + + pivot = bpy.data.objects.new("pivot", None) + bpy.context.collection.objects.link(pivot) + pivot.location = bbox_center + camera.parent = pivot + camera.location = (cam_dist, 0, bsphere_radius * 0.5) + + scene = bpy.context.scene + scene.frame_start = 1 + scene.frame_end = frame_count + + pivot.rotation_euler = (0, 0, 0) + pivot.keyframe_insert(data_path="rotation_euler", frame=1) + pivot.rotation_euler = _axis_rotation(turntable_axis, degrees) + pivot.keyframe_insert(data_path="rotation_euler", frame=frame_count + 1) + _set_fcurves_linear(pivot.animation_data.action) + + else: + scene = bpy.context.scene + scene.frame_start = 1 + scene.frame_end = frame_count + + all_corners = [] + for part in parts: + all_corners.extend(part.matrix_world @ Vector(c) for c in part.bound_box) + + bbox_center = Vector(( + (min(v.x for v in all_corners) + max(v.x for v in all_corners)) * 0.5, + (min(v.y for v in all_corners) + max(v.y for v in all_corners)) * 0.5, + (min(v.z for v in all_corners) + max(v.z for v in all_corners)) * 0.5, + )) + + if camera_orbit and bpy.context.scene.camera: + # Camera-orbit mode: rotate camera around static product. + # Parts stay stationary → Cycles BVH cached across all frames → ~40% speedup. 
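+            # For Cycles renders, use_persistent_data is enabled further down, so the
+            # BVH built for the static parts on the first frame is reused on every
+            # later frame; only the camera transform changes per frame.
+            # Loop arithmetic: with keys at frame 1 (0 deg) and frame frame_count + 1
+            # (degrees) and LINEAR interpolation, frame N carries
+            # degrees * (N - 1) / frame_count of rotation. A full 360 deg turn over
+            # e.g. 120 frames steps 3 deg per frame, frame 120 ends at 357 deg, and
+            # the would-be frame 121 coincides with frame 1, so the rendered sequence
+            # loops seamlessly.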
+ camera = bpy.context.scene.camera + cam_world = camera.matrix_world.copy() + + cam_pivot = bpy.data.objects.new("cam_pivot", None) + bpy.context.collection.objects.link(cam_pivot) + cam_pivot.location = bbox_center + + camera.parent = cam_pivot + # Restore world-space transform after parenting (Blender recomputes local matrix) + camera.matrix_world = cam_world + + cam_pivot.rotation_euler = (0, 0, 0) + cam_pivot.keyframe_insert(data_path="rotation_euler", frame=1) + cam_pivot.rotation_euler = _axis_rotation(turntable_axis, degrees) + cam_pivot.keyframe_insert(data_path="rotation_euler", frame=frame_count + 1) + _set_fcurves_linear(cam_pivot.animation_data.action) + print(f"[turntable_setup] camera-orbit mode: cam_pivot at {tuple(round(c, 4) for c in bbox_center)}") + else: + # Product-rotation mode: parts parent to pivot (default fallback when no camera) + pivot = bpy.data.objects.new("turntable_pivot", None) + bpy.context.collection.objects.link(pivot) + pivot.location = bbox_center + + for part in parts: + part.parent = pivot + + pivot.rotation_euler = (0, 0, 0) + pivot.keyframe_insert(data_path="rotation_euler", frame=1) + pivot.rotation_euler = _axis_rotation(turntable_axis, degrees) + pivot.keyframe_insert(data_path="rotation_euler", frame=frame_count + 1) + _set_fcurves_linear(pivot.animation_data.action) + print(f"[turntable_setup] product-rotation mode: {len(parts)} parts parented to turntable_pivot") + + # ── Colour management ──────────────────────────────────────────────────── + scene = bpy.context.scene + if not use_template: + scene.view_settings.view_transform = 'Standard' + scene.view_settings.exposure = 0.0 + scene.view_settings.gamma = 1.0 + try: + scene.view_settings.look = 'None' + except Exception: + pass + + # ── Render engine ──────────────────────────────────────────────────────── + if engine == "eevee": + eevee_ok = False + for eevee_id in ('BLENDER_EEVEE', 'BLENDER_EEVEE_NEXT'): + try: + scene.render.engine = eevee_id + eevee_ok = True + print(f"[turntable_setup] EEVEE engine id: {eevee_id}") + break + except TypeError: + continue + if eevee_ok: + for attr in ('taa_render_samples', 'samples'): + try: + setattr(scene.eevee, attr, samples) + break + except AttributeError: + continue + else: + print("[turntable_setup] WARNING: EEVEE not available, falling back to Cycles") + engine = "cycles" + + if engine != "eevee": + scene.render.engine = 'CYCLES' + scene.cycles.samples = samples + scene.cycles.use_denoising = True + scene.cycles.denoiser = denoiser_arg if denoiser_arg else 'OPENIMAGEDENOISE' + if denoising_input_passes_arg: + try: scene.cycles.denoising_input_passes = denoising_input_passes_arg + except Exception: pass + if denoising_prefilter_arg: + try: scene.cycles.denoising_prefilter = denoising_prefilter_arg + except Exception: pass + if denoising_quality_arg: + try: scene.cycles.denoising_quality = denoising_quality_arg + except Exception: pass + if denoising_use_gpu_arg: + try: scene.cycles.denoising_use_gpu = (denoising_use_gpu_arg == "1") + except AttributeError: pass + if noise_threshold_arg: + scene.cycles.use_adaptive_sampling = True + scene.cycles.adaptive_threshold = float(noise_threshold_arg) + if denoiser_arg: + scene["_denoiser_override"] = denoiser_arg + # scene.cycles.device is set by turntable_gpu_setup.py at render time + # (GPU preferences are user-level and not stored in .blend) + # We set the intended device here so gpu_setup can read it. 
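+        # Rough sketch of the consuming side, for orientation only (the real
+        # turntable_gpu_setup.py may differ; this just uses the standard Cycles
+        # preferences API):
+        #   device = bpy.context.scene.get("_cycles_device", "auto")
+        #   prefs = bpy.context.preferences.addons["cycles"].preferences
+        #   if device in ("cuda", "optix", "hip", "metal"):
+        #       prefs.compute_device_type = device.upper()
+        #       prefs.get_devices()
+        #       for d in prefs.devices:
+        #           d.use = True
+        #       bpy.context.scene.cycles.device = 'GPU'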
+ scene["_cycles_device"] = cycles_device + # Keep BVH, textures, and scene data resident on GPU between frames. + # Critical for -a mode: prevents Cycles from re-uploading data each frame. + scene.render.use_persistent_data = True + # No motion blur needed for static mechanical parts — eliminates per-frame + # CPU deformation calculations. + scene.render.use_motion_blur = False + print(f"[turntable_setup] cycles_device preference saved: {cycles_device}") + print("[turntable_setup] use_persistent_data=True, use_motion_blur=False") + + # ── Render output settings ─────────────────────────────────────────────── + scene.render.resolution_x = width + scene.render.resolution_y = height + scene.render.resolution_percentage = 100 + scene.render.image_settings.file_format = 'PNG' + # Blender -a appends 4-digit frame number: "frame_" → "frame_0001.png" + scene.render.filepath = os.path.join(frames_dir, "frame_") + + # ── Transparent background ──────────────────────────────────────────────── + # bg_color compositing is done by FFmpeg in the compose-video task. + # Blender renders transparent PNG frames (film_transparent=True) when + # bg_color is set; FFmpeg then overlays them over a solid colour background. + if bg_color or transparent_bg: + scene.render.film_transparent = True + if bg_color: + print(f"[turntable_setup] film_transparent=True for FFmpeg bg_color compositing ({bg_color})") + else: + print("[turntable_setup] transparent_bg enabled (alpha PNG frames)") + + # ── Save scene ─────────────────────────────────────────────────────────── + # save_as_mainfile saves to an explicit new path (like File > Save As). + # save_mainfile would save back to the originally-opened template path. + print(f"[turntable_setup] Saving scene to {scene_path} …") + result = bpy.ops.wm.save_as_mainfile(filepath=scene_path) + if 'FINISHED' not in result: + print(f"[turntable_setup] ERROR: save_as_mainfile returned {result} — aborting") + sys.exit(1) + if not os.path.isfile(scene_path): + print(f"[turntable_setup] ERROR: scene file not found after save: {scene_path}") + sys.exit(1) + size_mb = os.path.getsize(scene_path) / 1024 / 1024 + print(f"[turntable_setup] Scene saved → {scene_path} ({size_mb:.1f} MB)") + print(f"[turntable_setup] Ready for: blender --background {scene_path} --python turntable_gpu_setup.py -a") + + +if __name__ == "__main__": + try: + main() + except SystemExit: + raise + except Exception as _exc: + import traceback + traceback.print_exc() + print(f"[turntable_setup] FATAL: unhandled exception — {_exc}") + sys.exit(1) diff --git a/flamenco/worker-config.yaml b/flamenco/worker-config.yaml new file mode 100644 index 0000000..974108a --- /dev/null +++ b/flamenco/worker-config.yaml @@ -0,0 +1,2 @@ +manager_url: http://flamenco-manager:8080/ +task_types: [blender, ffmpeg, file-management, misc] diff --git a/frontend/Dockerfile b/frontend/Dockerfile new file mode 100644 index 0000000..597940a --- /dev/null +++ b/frontend/Dockerfile @@ -0,0 +1,12 @@ +FROM node:20-alpine + +WORKDIR /app + +COPY package.json . +RUN npm install + +COPY . . + +EXPOSE 5173 + +CMD ["npm", "run", "dev"] diff --git a/frontend/index.html b/frontend/index.html new file mode 100644 index 0000000..7fbee91 --- /dev/null +++ b/frontend/index.html @@ -0,0 +1,13 @@ + + + + + + + Schaeffler Automat + + +
+ + + diff --git a/frontend/package-lock.json b/frontend/package-lock.json new file mode 100644 index 0000000..fd230af --- /dev/null +++ b/frontend/package-lock.json @@ -0,0 +1,3121 @@ +{ + "name": "schaefflerautomat-frontend", + "version": "0.1.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "schaefflerautomat-frontend", + "version": "0.1.0", + "dependencies": { + "@react-three/drei": "^9.102.3", + "@react-three/fiber": "^8.16.2", + "@tanstack/react-query": "^5.28.4", + "@tanstack/react-table": "^8.14.0", + "axios": "^1.6.8", + "clsx": "^2.1.0", + "lucide-react": "^0.363.0", + "react": "^18.3.1", + "react-dom": "^18.3.1", + "react-dropzone": "^14.2.3", + "react-router-dom": "^6.22.3", + "recharts": "^3.7.0", + "sonner": "^1.4.41", + "tailwind-merge": "^2.2.2", + "three": "^0.163.0", + "zustand": "^4.5.2" + }, + "devDependencies": { + "@types/react": "^18.2.74", + "@types/react-dom": "^18.2.23", + "@types/three": "^0.163.0", + "@vitejs/plugin-react": "^4.2.1", + "autoprefixer": "^10.4.19", + "postcss": "^8.4.38", + "tailwindcss": "^3.4.3", + "typescript": "^5.4.3", + "vite": "^5.2.6" + } + }, + "node_modules/@alloc/quick-lru": { + "version": "5.2.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.29.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.28.5", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.29.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.29.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-compilation-targets": "^7.28.6", + "@babel/helper-module-transforms": "^7.28.6", + "@babel/helpers": "^7.28.6", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/traverse": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/remapping": "^2.3.5", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": "7.29.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.28.6", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.28.6", + "@babel/helper-validator-option": "^7.27.1", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.28.6", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + 
"node_modules/@babel/helper-module-transforms": { + "version": "7.28.6", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.28.6", + "@babel/helper-validator-identifier": "^7.28.5", + "@babel/traverse": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.28.6", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.28.5", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.28.6", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.29.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.29.0" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-self": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-source": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/runtime": { + "version": "7.28.6", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/template": { + "version": "7.28.6", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.28.6", + "@babel/parser": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.29.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/types": "^7.29.0", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.29.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.21.5", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@mediapipe/tasks-vision": { + "version": "0.10.17", + "license": "Apache-2.0" + }, + "node_modules/@monogrid/gainmap-js": { + "version": "3.4.0", + "license": "MIT", + "dependencies": { + "promise-worker-transferable": "^1.0.4" + }, + "peerDependencies": { + "three": ">= 0.159.0" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@react-spring/animated": { + "version": "9.7.5", + "license": "MIT", + "dependencies": { + "@react-spring/shared": "~9.7.5", + "@react-spring/types": "~9.7.5" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/@react-spring/core": { + "version": "9.7.5", + "license": "MIT", + "dependencies": { + "@react-spring/animated": "~9.7.5", + "@react-spring/shared": "~9.7.5", + "@react-spring/types": "~9.7.5" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/react-spring/donate" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/@react-spring/rafz": { + "version": "9.7.5", + "license": "MIT" + }, + "node_modules/@react-spring/shared": { + "version": "9.7.5", + "license": "MIT", + "dependencies": { + "@react-spring/rafz": "~9.7.5", + "@react-spring/types": "~9.7.5" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/@react-spring/three": { + "version": "9.7.5", + "license": "MIT", + "dependencies": { + "@react-spring/animated": "~9.7.5", + "@react-spring/core": "~9.7.5", + "@react-spring/shared": "~9.7.5", + "@react-spring/types": "~9.7.5" + }, + "peerDependencies": { + "@react-three/fiber": ">=6.0", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0", + "three": ">=0.126" + } + }, + "node_modules/@react-spring/types": { + "version": "9.7.5", + "license": "MIT" + }, + "node_modules/@react-three/drei": { + "version": "9.122.0", + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.26.0", + "@mediapipe/tasks-vision": "0.10.17", + "@monogrid/gainmap-js": "^3.0.6", + "@react-spring/three": "~9.7.5", + "@use-gesture/react": "^10.3.1", + "camera-controls": "^2.9.0", + "cross-env": "^7.0.3", + "detect-gpu": "^5.0.56", + "glsl-noise": "^0.0.0", + "hls.js": "^1.5.17", + "maath": "^0.10.8", + "meshline": "^3.3.1", + "react-composer": "^5.0.3", + "stats-gl": "^2.2.8", + "stats.js": "^0.17.0", + "suspend-react": "^0.1.3", + "three-mesh-bvh": "^0.7.8", + "three-stdlib": "^2.35.6", + "troika-three-text": "^0.52.0", + 
"tunnel-rat": "^0.1.2", + "utility-types": "^3.11.0", + "zustand": "^5.0.1" + }, + "peerDependencies": { + "@react-three/fiber": "^8", + "react": "^18", + "react-dom": "^18", + "three": ">=0.137" + }, + "peerDependenciesMeta": { + "react-dom": { + "optional": true + } + } + }, + "node_modules/@react-three/drei/node_modules/zustand": { + "version": "5.0.11", + "license": "MIT", + "engines": { + "node": ">=12.20.0" + }, + "peerDependencies": { + "@types/react": ">=18.0.0", + "immer": ">=9.0.6", + "react": ">=18.0.0", + "use-sync-external-store": ">=1.2.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "immer": { + "optional": true + }, + "react": { + "optional": true + }, + "use-sync-external-store": { + "optional": true + } + } + }, + "node_modules/@react-three/fiber": { + "version": "8.18.0", + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.17.8", + "@types/react-reconciler": "^0.26.7", + "@types/webxr": "*", + "base64-js": "^1.5.1", + "buffer": "^6.0.3", + "its-fine": "^1.0.6", + "react-reconciler": "^0.27.0", + "react-use-measure": "^2.1.7", + "scheduler": "^0.21.0", + "suspend-react": "^0.1.3", + "zustand": "^3.7.1" + }, + "peerDependencies": { + "expo": ">=43.0", + "expo-asset": ">=8.4", + "expo-file-system": ">=11.0", + "expo-gl": ">=11.0", + "react": ">=18 <19", + "react-dom": ">=18 <19", + "react-native": ">=0.64", + "three": ">=0.133" + }, + "peerDependenciesMeta": { + "expo": { + "optional": true + }, + "expo-asset": { + "optional": true + }, + "expo-file-system": { + "optional": true + }, + "expo-gl": { + "optional": true + }, + "react-dom": { + "optional": true + }, + "react-native": { + "optional": true + } + } + }, + "node_modules/@react-three/fiber/node_modules/zustand": { + "version": "3.7.2", + "license": "MIT", + "engines": { + "node": ">=12.7.0" + }, + "peerDependencies": { + "react": ">=16.8" + }, + "peerDependenciesMeta": { + "react": { + "optional": true + } + } + }, + "node_modules/@reduxjs/toolkit": { + "version": "2.11.2", + "resolved": "https://registry.npmjs.org/@reduxjs/toolkit/-/toolkit-2.11.2.tgz", + "integrity": "sha512-Kd6kAHTA6/nUpp8mySPqj3en3dm0tdMIgbttnQ1xFMVpufoj+ADi8pXLBsd4xzTRHQa7t/Jv8W5UnCuW4kuWMQ==", + "license": "MIT", + "dependencies": { + "@standard-schema/spec": "^1.0.0", + "@standard-schema/utils": "^0.3.0", + "immer": "^11.0.0", + "redux": "^5.0.1", + "redux-thunk": "^3.1.0", + "reselect": "^5.1.0" + }, + "peerDependencies": { + "react": "^16.9.0 || ^17.0.0 || ^18 || ^19", + "react-redux": "^7.2.1 || ^8.1.3 || ^9.0.0" + }, + "peerDependenciesMeta": { + "react": { + "optional": true + }, + "react-redux": { + "optional": true + } + } + }, + "node_modules/@reduxjs/toolkit/node_modules/immer": { + "version": "11.1.4", + "resolved": "https://registry.npmjs.org/immer/-/immer-11.1.4.tgz", + "integrity": "sha512-XREFCPo6ksxVzP4E0ekD5aMdf8WMwmdNaz6vuvxgI40UaEiu6q3p8X52aU6GdyvLY3XXX/8R7JOTXStz/nBbRw==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/immer" + } + }, + "node_modules/@remix-run/router": { + "version": "1.23.2", + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@rolldown/pluginutils": { + "version": "1.0.0-beta.27", + "dev": true, + "license": "MIT" + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.59.0", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@standard-schema/spec": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/@standard-schema/spec/-/spec-1.1.0.tgz", + "integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==", + "license": "MIT" + }, + "node_modules/@standard-schema/utils": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@standard-schema/utils/-/utils-0.3.0.tgz", + "integrity": "sha512-e7Mew686owMaPJVNNLs55PUvgz371nKgwsc4vxE49zsODpJEnxgxRo2y/OKrqueavXgZNMDVj3DdHFlaSAeU8g==", + "license": "MIT" + }, + "node_modules/@tanstack/query-core": { + "version": "5.90.20", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + } + }, + "node_modules/@tanstack/react-query": { + "version": "5.90.21", + "license": "MIT", + "dependencies": { + "@tanstack/query-core": "5.90.20" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + }, + "peerDependencies": { + "react": "^18 || ^19" + } + }, + "node_modules/@tanstack/react-table": { + "version": "8.21.3", + "license": "MIT", + "dependencies": { + "@tanstack/table-core": "8.21.3" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + }, + "peerDependencies": { + "react": ">=16.8", + "react-dom": ">=16.8" + } + }, + "node_modules/@tanstack/table-core": { + "version": "8.21.3", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + } + }, + "node_modules/@tweenjs/tween.js": { + "version": "23.1.3", + "license": "MIT" + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.28.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.2" + } + }, + "node_modules/@types/d3-array": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.2.tgz", + "integrity": "sha512-hOLWVbm7uRza0BYXpIIW5pxfrKe0W+D5lrFiAEYR+pb6w3N2SwSMaJbXdUfSEv+dT4MfHBLtn5js0LAWaO6otw==", + "license": "MIT" + }, + "node_modules/@types/d3-color": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.3.tgz", + "integrity": "sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==", + "license": "MIT" + }, + "node_modules/@types/d3-ease": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-ease/-/d3-ease-3.0.2.tgz", + "integrity": "sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==", + "license": "MIT" + }, + "node_modules/@types/d3-interpolate": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.4.tgz", + "integrity": "sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==", + "license": "MIT", + "dependencies": { 
+ "@types/d3-color": "*" + } + }, + "node_modules/@types/d3-path": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@types/d3-path/-/d3-path-3.1.1.tgz", + "integrity": "sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==", + "license": "MIT" + }, + "node_modules/@types/d3-scale": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.9.tgz", + "integrity": "sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==", + "license": "MIT", + "dependencies": { + "@types/d3-time": "*" + } + }, + "node_modules/@types/d3-shape": { + "version": "3.1.8", + "resolved": "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.8.tgz", + "integrity": "sha512-lae0iWfcDeR7qt7rA88BNiqdvPS5pFVPpo5OfjElwNaT2yyekbM0C9vK+yqBqEmHr6lDkRnYNoTBYlAgJa7a4w==", + "license": "MIT", + "dependencies": { + "@types/d3-path": "*" + } + }, + "node_modules/@types/d3-time": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.4.tgz", + "integrity": "sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==", + "license": "MIT" + }, + "node_modules/@types/d3-timer": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-timer/-/d3-timer-3.0.2.tgz", + "integrity": "sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==", + "license": "MIT" + }, + "node_modules/@types/draco3d": { + "version": "1.4.10", + "license": "MIT" + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/offscreencanvas": { + "version": "2019.7.3", + "license": "MIT" + }, + "node_modules/@types/prop-types": { + "version": "15.7.15", + "license": "MIT" + }, + "node_modules/@types/react": { + "version": "18.3.28", + "license": "MIT", + "dependencies": { + "@types/prop-types": "*", + "csstype": "^3.2.2" + } + }, + "node_modules/@types/react-dom": { + "version": "18.3.7", + "dev": true, + "license": "MIT", + "peerDependencies": { + "@types/react": "^18.0.0" + } + }, + "node_modules/@types/react-reconciler": { + "version": "0.26.7", + "license": "MIT", + "dependencies": { + "@types/react": "*" + } + }, + "node_modules/@types/stats.js": { + "version": "0.17.4", + "license": "MIT" + }, + "node_modules/@types/three": { + "version": "0.163.0", + "license": "MIT", + "dependencies": { + "@tweenjs/tween.js": "~23.1.1", + "@types/stats.js": "*", + "@types/webxr": "*", + "fflate": "~0.8.2", + "meshoptimizer": "~0.18.1" + } + }, + "node_modules/@types/use-sync-external-store": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/@types/use-sync-external-store/-/use-sync-external-store-0.0.6.tgz", + "integrity": "sha512-zFDAD+tlpf2r4asuHEj0XH6pY6i0g5NeAHPn+15wk3BV6JA69eERFXC1gyGThDkVa1zCyKr5jox1+2LbV/AMLg==", + "license": "MIT" + }, + "node_modules/@types/webxr": { + "version": "0.5.24", + "license": "MIT" + }, + "node_modules/@use-gesture/core": { + "version": "10.3.1", + "license": "MIT" + }, + "node_modules/@use-gesture/react": { + "version": "10.3.1", + "license": "MIT", + "dependencies": { + "@use-gesture/core": "10.3.1" + }, + "peerDependencies": { + "react": ">= 16.8.0" + } + }, + "node_modules/@vitejs/plugin-react": { + "version": "4.7.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.28.0", + "@babel/plugin-transform-react-jsx-self": "^7.27.1", + 
"@babel/plugin-transform-react-jsx-source": "^7.27.1", + "@rolldown/pluginutils": "1.0.0-beta.27", + "@types/babel__core": "^7.20.5", + "react-refresh": "^0.17.0" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "peerDependencies": { + "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" + } + }, + "node_modules/any-promise": { + "version": "1.3.0", + "dev": true, + "license": "MIT" + }, + "node_modules/anymatch": { + "version": "3.1.3", + "dev": true, + "license": "ISC", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/arg": { + "version": "5.0.2", + "dev": true, + "license": "MIT" + }, + "node_modules/asynckit": { + "version": "0.4.0", + "license": "MIT" + }, + "node_modules/attr-accept": { + "version": "2.2.5", + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/autoprefixer": { + "version": "10.4.27", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/autoprefixer" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "browserslist": "^4.28.1", + "caniuse-lite": "^1.0.30001774", + "fraction.js": "^5.3.4", + "picocolors": "^1.1.1", + "postcss-value-parser": "^4.2.0" + }, + "bin": { + "autoprefixer": "bin/autoprefixer" + }, + "engines": { + "node": "^10 || ^12 || >=14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/axios": { + "version": "1.13.6", + "license": "MIT", + "dependencies": { + "follow-redirects": "^1.15.11", + "form-data": "^4.0.5", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/base64-js": { + "version": "1.5.1", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/baseline-browser-mapping": { + "version": "2.10.0", + "dev": true, + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.cjs" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/bidi-js": { + "version": "1.0.3", + "license": "MIT", + "dependencies": { + "require-from-string": "^2.0.2" + } + }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.28.1", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "baseline-browser-mapping": "^2.9.0", + "caniuse-lite": "^1.0.30001759", + "electron-to-chromium": "^1.5.263", + "node-releases": "^2.0.27", + "update-browserslist-db": "^1.2.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/buffer": { + "version": "6.0.3", + 
"funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/camelcase-css": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/camera-controls": { + "version": "2.10.1", + "license": "MIT", + "peerDependencies": { + "three": ">=0.126.1" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001775", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/chokidar": { + "version": "3.6.0", + "dev": true, + "license": "MIT", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chokidar/node_modules/glob-parent": { + "version": "5.1.2", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/clsx": { + "version": "2.1.1", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "license": "MIT", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/commander": { + "version": "4.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/cross-env": { + "version": "7.0.3", + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.1" + }, + "bin": { + "cross-env": "src/bin/cross-env.js", + "cross-env-shell": "src/bin/cross-env-shell.js" + }, + "engines": { + "node": ">=10.14", + "npm": ">=6", + "yarn": ">=1" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/cssesc": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/csstype": { + "version": "3.2.3", + "license": "MIT" + }, + "node_modules/d3-array": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz", + "integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==", + "license": "ISC", + "dependencies": { + "internmap": "1 - 2" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-color": { + "version": "3.1.0", + "resolved": 
"https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", + "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-ease": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", + "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-format": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.2.tgz", + "integrity": "sha512-AJDdYOdnyRDV5b6ArilzCPPwc1ejkHcoyFarqlPqT7zRYjhavcT3uSrqcMvsgh2CgoPbK3RCwyHaVyxYcP2Arg==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-interpolate": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", + "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", + "license": "ISC", + "dependencies": { + "d3-color": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-path": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz", + "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-scale": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz", + "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==", + "license": "ISC", + "dependencies": { + "d3-array": "2.10.0 - 3", + "d3-format": "1 - 3", + "d3-interpolate": "1.2.0 - 3", + "d3-time": "2.1.1 - 3", + "d3-time-format": "2 - 4" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-shape": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz", + "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==", + "license": "ISC", + "dependencies": { + "d3-path": "^3.1.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz", + "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==", + "license": "ISC", + "dependencies": { + "d3-array": "2 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time-format": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz", + "integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==", + "license": "ISC", + "dependencies": { + "d3-time": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-timer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", + "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": 
true + } + } + }, + "node_modules/decimal.js-light": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/decimal.js-light/-/decimal.js-light-2.5.1.tgz", + "integrity": "sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg==", + "license": "MIT" + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "license": "MIT", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/detect-gpu": { + "version": "5.0.70", + "license": "MIT", + "dependencies": { + "webgl-constants": "^1.1.1" + } + }, + "node_modules/didyoumean": { + "version": "1.2.2", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/dlv": { + "version": "1.1.3", + "dev": true, + "license": "MIT" + }, + "node_modules/draco3d": { + "version": "1.5.7", + "license": "Apache-2.0" + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/electron-to-chromium": { + "version": "1.5.302", + "dev": true, + "license": "ISC" + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-toolkit": { + "version": "1.44.0", + "resolved": "https://registry.npmjs.org/es-toolkit/-/es-toolkit-1.44.0.tgz", + "integrity": "sha512-6penXeZalaV88MM3cGkFZZfOoLGWshWWfdy0tWw/RlVVyhvMaWSBTOvXNeiW3e5FwdS5ePW0LGEu17zT139ktg==", + "license": "MIT", + "workspaces": [ + "docs", + "benchmarks" + ] + }, + "node_modules/esbuild": { + "version": "0.21.5", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.21.5", + "@esbuild/android-arm": "0.21.5", + "@esbuild/android-arm64": "0.21.5", + "@esbuild/android-x64": "0.21.5", + "@esbuild/darwin-arm64": "0.21.5", + "@esbuild/darwin-x64": "0.21.5", + "@esbuild/freebsd-arm64": "0.21.5", + "@esbuild/freebsd-x64": "0.21.5", + "@esbuild/linux-arm": "0.21.5", + "@esbuild/linux-arm64": "0.21.5", + "@esbuild/linux-ia32": "0.21.5", + "@esbuild/linux-loong64": "0.21.5", + "@esbuild/linux-mips64el": "0.21.5", + "@esbuild/linux-ppc64": "0.21.5", + "@esbuild/linux-riscv64": "0.21.5", + "@esbuild/linux-s390x": "0.21.5", + "@esbuild/linux-x64": "0.21.5", + "@esbuild/netbsd-x64": "0.21.5", + "@esbuild/openbsd-x64": "0.21.5", + "@esbuild/sunos-x64": "0.21.5", + "@esbuild/win32-arm64": "0.21.5", + "@esbuild/win32-ia32": "0.21.5", + "@esbuild/win32-x64": "0.21.5" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/eventemitter3": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.4.tgz", + "integrity": 
"sha512-mlsTRyGaPBjPedk6Bvw+aqbsXDtoAyAzm5MO7JgU+yVRyMQ5O8bD4Kcci7BS85f93veegeCPkL8R4GLClnjLFw==", + "license": "MIT" + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fastq": { + "version": "1.20.1", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fflate": { + "version": "0.8.2", + "license": "MIT" + }, + "node_modules/file-selector": { + "version": "2.1.2", + "license": "MIT", + "dependencies": { + "tslib": "^2.7.0" + }, + "engines": { + "node": ">= 12" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/follow-redirects": { + "version": "1.15.11", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "license": "MIT", + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/form-data": { + "version": "4.0.5", + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fraction.js": { + "version": "5.3.4", + "dev": true, + "license": "MIT", + "engines": { + "node": "*" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/rawify" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/glsl-noise": { + "version": "0.0.0", + "license": "MIT" + }, + "node_modules/gopd": { + "version": "1.2.0", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + 
"node_modules/has-tostringtag": { + "version": "1.0.2", + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/hls.js": { + "version": "1.6.15", + "license": "Apache-2.0" + }, + "node_modules/ieee754": { + "version": "1.2.1", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/immediate": { + "version": "3.0.6", + "license": "MIT" + }, + "node_modules/immer": { + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/immer/-/immer-10.2.0.tgz", + "integrity": "sha512-d/+XTN3zfODyjr89gM3mPq1WNX2B8pYsu7eORitdwyA2sBubnTl3laYlBk4sXY5FUa5qTZGBDPJICVbvqzjlbw==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/immer" + } + }, + "node_modules/internmap": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", + "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-promise": { + "version": "2.2.2", + "license": "MIT" + }, + "node_modules/isexe": { + "version": "2.0.0", + "license": "ISC" + }, + "node_modules/its-fine": { + "version": "1.2.5", + "license": "MIT", + "dependencies": { + "@types/react-reconciler": "^0.28.0" + }, + "peerDependencies": { + "react": ">=18.0" + } + }, + "node_modules/its-fine/node_modules/@types/react-reconciler": { + "version": "0.28.9", + "license": "MIT", + "peerDependencies": { + "@types/react": "*" + } + }, + "node_modules/jiti": { + "version": "1.21.7", + "dev": true, + "license": "MIT", + "bin": { + "jiti": "bin/jiti.js" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "license": "MIT" + }, + "node_modules/jsesc": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json5": { + "version": "2.2.3", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/lie": { + "version": "3.3.0", + "license": "MIT", + "dependencies": { + "immediate": "~3.0.5" + } 
+ }, + "node_modules/lilconfig": { + "version": "3.1.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "dev": true, + "license": "MIT" + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "license": "MIT", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/lucide-react": { + "version": "0.363.0", + "license": "ISC", + "peerDependencies": { + "react": "^16.5.1 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/maath": { + "version": "0.10.8", + "license": "MIT", + "peerDependencies": { + "@types/three": ">=0.134.0", + "three": ">=0.134.0" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/meshline": { + "version": "3.3.1", + "license": "MIT", + "peerDependencies": { + "three": ">=0.137" + } + }, + "node_modules/meshoptimizer": { + "version": "0.18.1", + "license": "MIT" + }, + "node_modules/micromatch": { + "version": "4.0.8", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "dev": true, + "license": "MIT" + }, + "node_modules/mz": { + "version": "2.7.0", + "dev": true, + "license": "MIT", + "dependencies": { + "any-promise": "^1.0.0", + "object-assign": "^4.0.1", + "thenify-all": "^1.0.0" + } + }, + "node_modules/nanoid": { + "version": "3.3.11", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/node-releases": { + "version": "2.0.27", + "dev": true, + "license": "MIT" + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-hash": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "dev": true, + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.1.1", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pify": { + "version": "2.3.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/pirates": { + "version": "4.0.7", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/postcss": { + "version": "8.5.6", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postcss-import": { + "version": "15.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "postcss-value-parser": "^4.0.0", + "read-cache": "^1.0.0", + "resolve": "^1.1.7" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "postcss": "^8.0.0" + } + }, + "node_modules/postcss-js": { + "version": "4.1.0", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "camelcase-css": "^2.0.1" + }, + "engines": { + "node": "^12 || ^14 || >= 16" + }, + "peerDependencies": { + "postcss": "^8.4.21" + } + }, + "node_modules/postcss-load-config": { + "version": "6.0.1", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "lilconfig": "^3.1.1" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "jiti": ">=1.21.0", + "postcss": ">=8.0.9", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + }, + "postcss": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/postcss-nested": { + "version": "6.2.0", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "postcss-selector-parser": "^6.1.1" + }, + "engines": { + "node": ">=12.0" + }, + "peerDependencies": { + "postcss": "^8.2.14" + } + }, + "node_modules/postcss-selector-parser": { + "version": "6.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postcss-value-parser": { + "version": "4.2.0", + "dev": true, + "license": "MIT" + }, + "node_modules/potpack": { + "version": "1.0.2", + "license": "ISC" + }, + "node_modules/promise-worker-transferable": { + "version": "1.0.4", + "license": "Apache-2.0", + "dependencies": { + "is-promise": "^2.1.0", + "lie": "^3.0.2" + } + }, + "node_modules/prop-types": { + "version": "15.8.1", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.13.1" + } + }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "license": "MIT" + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": 
"https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/react": { + "version": "18.3.1", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-composer": { + "version": "5.0.3", + "license": "MIT", + "dependencies": { + "prop-types": "^15.6.0" + }, + "peerDependencies": { + "react": "^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/react-dom": { + "version": "18.3.1", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0", + "scheduler": "^0.23.2" + }, + "peerDependencies": { + "react": "^18.3.1" + } + }, + "node_modules/react-dom/node_modules/scheduler": { + "version": "0.23.2", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0" + } + }, + "node_modules/react-dropzone": { + "version": "14.4.1", + "license": "MIT", + "dependencies": { + "attr-accept": "^2.2.4", + "file-selector": "^2.1.0", + "prop-types": "^15.8.1" + }, + "engines": { + "node": ">= 10.13" + }, + "peerDependencies": { + "react": ">= 16.8 || 18.0.0" + } + }, + "node_modules/react-is": { + "version": "16.13.1", + "license": "MIT" + }, + "node_modules/react-reconciler": { + "version": "0.27.0", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0", + "scheduler": "^0.21.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "peerDependencies": { + "react": "^18.0.0" + } + }, + "node_modules/react-redux": { + "version": "9.2.0", + "resolved": "https://registry.npmjs.org/react-redux/-/react-redux-9.2.0.tgz", + "integrity": "sha512-ROY9fvHhwOD9ySfrF0wmvu//bKCQ6AeZZq1nJNtbDC+kk5DuSuNX/n6YWYF/SYy7bSba4D4FSz8DJeKY/S/r+g==", + "license": "MIT", + "dependencies": { + "@types/use-sync-external-store": "^0.0.6", + "use-sync-external-store": "^1.4.0" + }, + "peerDependencies": { + "@types/react": "^18.2.25 || ^19", + "react": "^18.0 || ^19", + "redux": "^5.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "redux": { + "optional": true + } + } + }, + "node_modules/react-refresh": { + "version": "0.17.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-router": { + "version": "6.30.3", + "license": "MIT", + "dependencies": { + "@remix-run/router": "1.23.2" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "react": ">=16.8" + } + }, + "node_modules/react-router-dom": { + "version": "6.30.3", + "license": "MIT", + "dependencies": { + "@remix-run/router": "1.23.2", + "react-router": "6.30.3" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "react": ">=16.8", + "react-dom": ">=16.8" + } + }, + "node_modules/react-use-measure": { + "version": "2.1.7", + "license": "MIT", + "peerDependencies": { + "react": ">=16.13", + "react-dom": ">=16.13" + }, + "peerDependenciesMeta": { + "react-dom": { + "optional": true + } + } + }, + "node_modules/read-cache": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "pify": "^2.3.0" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "dev": true, + "license": "MIT", + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/recharts": { + "version": "3.7.0", + "resolved": "https://registry.npmjs.org/recharts/-/recharts-3.7.0.tgz", + "integrity": "sha512-l2VCsy3XXeraxIID9fx23eCb6iCBsxUQDnE8tWm6DFdszVAO7WVY/ChAD9wVit01y6B2PMupYiMmQwhgPHc9Ew==", + "license": "MIT", + "workspaces": [ + "www" + ], + "dependencies": { + 
"@reduxjs/toolkit": "1.x.x || 2.x.x", + "clsx": "^2.1.1", + "decimal.js-light": "^2.5.1", + "es-toolkit": "^1.39.3", + "eventemitter3": "^5.0.1", + "immer": "^10.1.1", + "react-redux": "8.x.x || 9.x.x", + "reselect": "5.1.1", + "tiny-invariant": "^1.3.3", + "use-sync-external-store": "^1.2.2", + "victory-vendor": "^37.0.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-is": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/redux": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/redux/-/redux-5.0.1.tgz", + "integrity": "sha512-M9/ELqF6fy8FwmkpnF0S3YKOqMyoWJ4+CS5Efg2ct3oY9daQvd/Pc71FpGZsVsbl3Cpb+IIcjBDUnnyBdQbq4w==", + "license": "MIT" + }, + "node_modules/redux-thunk": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/redux-thunk/-/redux-thunk-3.1.0.tgz", + "integrity": "sha512-NW2r5T6ksUKXCabzhL9z+h206HQw/NJkcLm1GPImRQ8IzfXwRGqjVhKJGauHirT0DAuyy6hjdnMZaRoAcy0Klw==", + "license": "MIT", + "peerDependencies": { + "redux": "^5.0.0" + } + }, + "node_modules/require-from-string": { + "version": "2.0.2", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/reselect": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/reselect/-/reselect-5.1.1.tgz", + "integrity": "sha512-K/BG6eIky/SBpzfHZv/dd+9JBFiS4SWV7FIujVyJRux6e45+73RaUHXLmIR1f7WOMaQ0U1km6qwklRQxpJJY0w==", + "license": "MIT" + }, + "node_modules/resolve": { + "version": "1.22.11", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rollup": { + "version": "4.59.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.59.0", + "@rollup/rollup-android-arm64": "4.59.0", + "@rollup/rollup-darwin-arm64": "4.59.0", + "@rollup/rollup-darwin-x64": "4.59.0", + "@rollup/rollup-freebsd-arm64": "4.59.0", + "@rollup/rollup-freebsd-x64": "4.59.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.59.0", + "@rollup/rollup-linux-arm-musleabihf": "4.59.0", + "@rollup/rollup-linux-arm64-gnu": "4.59.0", + "@rollup/rollup-linux-arm64-musl": "4.59.0", + "@rollup/rollup-linux-loong64-gnu": "4.59.0", + "@rollup/rollup-linux-loong64-musl": "4.59.0", + "@rollup/rollup-linux-ppc64-gnu": "4.59.0", + "@rollup/rollup-linux-ppc64-musl": "4.59.0", + "@rollup/rollup-linux-riscv64-gnu": "4.59.0", + "@rollup/rollup-linux-riscv64-musl": "4.59.0", + "@rollup/rollup-linux-s390x-gnu": "4.59.0", + "@rollup/rollup-linux-x64-gnu": "4.59.0", + "@rollup/rollup-linux-x64-musl": "4.59.0", + "@rollup/rollup-openbsd-x64": "4.59.0", + "@rollup/rollup-openharmony-arm64": "4.59.0", + "@rollup/rollup-win32-arm64-msvc": "4.59.0", + "@rollup/rollup-win32-ia32-msvc": "4.59.0", + "@rollup/rollup-win32-x64-gnu": "4.59.0", + "@rollup/rollup-win32-x64-msvc": "4.59.0", + "fsevents": "~2.3.2" + } + }, + "node_modules/run-parallel": { + "version": 
"1.2.0", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/scheduler": { + "version": "0.21.0", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0" + } + }, + "node_modules/semver": { + "version": "6.3.1", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/sonner": { + "version": "1.7.4", + "license": "MIT", + "peerDependencies": { + "react": "^18.0.0 || ^19.0.0 || ^19.0.0-rc", + "react-dom": "^18.0.0 || ^19.0.0 || ^19.0.0-rc" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/stats-gl": { + "version": "2.4.2", + "license": "MIT", + "dependencies": { + "@types/three": "*", + "three": "^0.170.0" + }, + "peerDependencies": { + "@types/three": "*", + "three": "*" + } + }, + "node_modules/stats-gl/node_modules/three": { + "version": "0.170.0", + "license": "MIT" + }, + "node_modules/stats.js": { + "version": "0.17.0", + "license": "MIT" + }, + "node_modules/sucrase": { + "version": "3.35.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.2", + "commander": "^4.0.0", + "lines-and-columns": "^1.1.6", + "mz": "^2.7.0", + "pirates": "^4.0.1", + "tinyglobby": "^0.2.11", + "ts-interface-checker": "^0.1.9" + }, + "bin": { + "sucrase": "bin/sucrase", + "sucrase-node": "bin/sucrase-node" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/suspend-react": { + "version": "0.1.3", + "license": "MIT", + "peerDependencies": { + "react": ">=17.0" + } + }, + "node_modules/tailwind-merge": { + "version": "2.6.1", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/dcastil" + } + }, + "node_modules/tailwindcss": { + "version": "3.4.19", + "dev": true, + "license": "MIT", + "dependencies": { + "@alloc/quick-lru": "^5.2.0", + "arg": "^5.0.2", + "chokidar": "^3.6.0", + "didyoumean": "^1.2.2", + "dlv": "^1.1.3", + "fast-glob": "^3.3.2", + "glob-parent": "^6.0.2", + "is-glob": "^4.0.3", + "jiti": "^1.21.7", + "lilconfig": "^3.1.3", + "micromatch": "^4.0.8", + "normalize-path": "^3.0.0", + "object-hash": "^3.0.0", + "picocolors": "^1.1.1", + "postcss": "^8.4.47", + "postcss-import": "^15.1.0", + "postcss-js": "^4.0.1", + "postcss-load-config": "^4.0.2 || ^5.0 || ^6.0", + "postcss-nested": "^6.2.0", + "postcss-selector-parser": "^6.1.2", + "resolve": "^1.22.8", + "sucrase": "^3.35.0" + }, + "bin": { + "tailwind": "lib/cli.js", + "tailwindcss": "lib/cli.js" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/thenify": { + "version": "3.3.1", + "dev": true, + "license": "MIT", + "dependencies": { + "any-promise": "^1.0.0" + } + }, + 
"node_modules/thenify-all": { + "version": "1.6.0", + "dev": true, + "license": "MIT", + "dependencies": { + "thenify": ">= 3.1.0 < 4" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/three": { + "version": "0.163.0", + "license": "MIT" + }, + "node_modules/three-mesh-bvh": { + "version": "0.7.8", + "license": "MIT", + "peerDependencies": { + "three": ">= 0.151.0" + } + }, + "node_modules/three-stdlib": { + "version": "2.36.1", + "license": "MIT", + "dependencies": { + "@types/draco3d": "^1.4.0", + "@types/offscreencanvas": "^2019.6.4", + "@types/webxr": "^0.5.2", + "draco3d": "^1.4.1", + "fflate": "^0.6.9", + "potpack": "^1.0.1" + }, + "peerDependencies": { + "three": ">=0.128.0" + } + }, + "node_modules/three-stdlib/node_modules/fflate": { + "version": "0.6.10", + "license": "MIT" + }, + "node_modules/tiny-invariant": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz", + "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==", + "license": "MIT" + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyglobby/node_modules/fdir": { + "version": "6.5.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/tinyglobby/node_modules/picomatch": { + "version": "4.0.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/troika-three-text": { + "version": "0.52.4", + "license": "MIT", + "dependencies": { + "bidi-js": "^1.0.2", + "troika-three-utils": "^0.52.4", + "troika-worker-utils": "^0.52.0", + "webgl-sdf-generator": "1.1.1" + }, + "peerDependencies": { + "three": ">=0.125.0" + } + }, + "node_modules/troika-three-utils": { + "version": "0.52.4", + "license": "MIT", + "peerDependencies": { + "three": ">=0.125.0" + } + }, + "node_modules/troika-worker-utils": { + "version": "0.52.0", + "license": "MIT" + }, + "node_modules/ts-interface-checker": { + "version": "0.1.13", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/tslib": { + "version": "2.8.1", + "license": "0BSD" + }, + "node_modules/tunnel-rat": { + "version": "0.1.2", + "license": "MIT", + "dependencies": { + "zustand": "^4.3.2" + } + }, + "node_modules/typescript": { + "version": "5.9.3", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.2.3", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + 
"update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/use-sync-external-store": { + "version": "1.6.0", + "license": "MIT", + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "dev": true, + "license": "MIT" + }, + "node_modules/utility-types": { + "version": "3.11.0", + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/victory-vendor": { + "version": "37.3.6", + "resolved": "https://registry.npmjs.org/victory-vendor/-/victory-vendor-37.3.6.tgz", + "integrity": "sha512-SbPDPdDBYp+5MJHhBCAyI7wKM3d5ivekigc2Dk2s7pgbZ9wIgIBYGVw4zGHBml/qTFbexrofXW6Gu4noGxrOwQ==", + "license": "MIT AND ISC", + "dependencies": { + "@types/d3-array": "^3.0.3", + "@types/d3-ease": "^3.0.0", + "@types/d3-interpolate": "^3.0.1", + "@types/d3-scale": "^4.0.2", + "@types/d3-shape": "^3.1.0", + "@types/d3-time": "^3.0.0", + "@types/d3-timer": "^3.0.0", + "d3-array": "^3.1.6", + "d3-ease": "^3.0.1", + "d3-interpolate": "^3.0.1", + "d3-scale": "^4.0.2", + "d3-shape": "^3.1.0", + "d3-time": "^3.0.0", + "d3-timer": "^3.0.1" + } + }, + "node_modules/vite": { + "version": "5.4.21", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.21.3", + "postcss": "^8.4.43", + "rollup": "^4.20.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || >=20.0.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + } + } + }, + "node_modules/webgl-constants": { + "version": "1.1.1" + }, + "node_modules/webgl-sdf-generator": { + "version": "1.1.1", + "license": "MIT" + }, + "node_modules/which": { + "version": "2.0.2", + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "dev": true, + "license": "ISC" + }, + "node_modules/zustand": { + "version": "4.5.7", + "license": "MIT", + "dependencies": { + "use-sync-external-store": "^1.2.2" + }, + "engines": { + "node": ">=12.7.0" + }, + "peerDependencies": { + "@types/react": ">=16.8", + "immer": ">=9.0.6", + "react": ">=16.8" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "immer": { + "optional": true + }, + "react": { + "optional": true + } + } + } + } +} diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000..200c6f3 --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,40 @@ +{ + "name": "schaefflerautomat-frontend", + "private": true, + "version": "0.1.0", + "type": "module", + "scripts": { + "dev": "vite --host 0.0.0.0 --port 5173", + "build": "tsc && vite build", + "preview": "vite preview" + }, + "dependencies": { + "@react-three/drei": "^9.102.3", + "@react-three/fiber": "^8.16.2", + "@tanstack/react-query": "^5.28.4", + 
"@tanstack/react-table": "^8.14.0", + "axios": "^1.6.8", + "clsx": "^2.1.0", + "lucide-react": "^0.363.0", + "react": "^18.3.1", + "react-dom": "^18.3.1", + "react-dropzone": "^14.2.3", + "react-router-dom": "^6.22.3", + "recharts": "^3.7.0", + "sonner": "^1.4.41", + "tailwind-merge": "^2.2.2", + "three": "^0.163.0", + "zustand": "^4.5.2" + }, + "devDependencies": { + "@types/react": "^18.2.74", + "@types/react-dom": "^18.2.23", + "@types/three": "^0.163.0", + "@vitejs/plugin-react": "^4.2.1", + "autoprefixer": "^10.4.19", + "postcss": "^8.4.38", + "tailwindcss": "^3.4.3", + "typescript": "^5.4.3", + "vite": "^5.2.6" + } +} diff --git a/frontend/postcss.config.js b/frontend/postcss.config.js new file mode 100644 index 0000000..2e7af2b --- /dev/null +++ b/frontend/postcss.config.js @@ -0,0 +1,6 @@ +export default { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +} diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx new file mode 100644 index 0000000..175e50b --- /dev/null +++ b/frontend/src/App.tsx @@ -0,0 +1,71 @@ +import { BrowserRouter, Routes, Route, Navigate } from 'react-router-dom' +import { useAuthStore } from './store/auth' +import Layout from './components/layout/Layout' +import LoginPage from './pages/Login' +import DashboardPage from './pages/Dashboard' +import OrdersPage from './pages/Orders' +import OrderDetailPage from './pages/OrderDetail' +import NewOrderPage from './pages/NewOrder' +import UploadPage from './pages/Upload' +import AdminPage from './pages/Admin' +import CadPreviewPage from './pages/CadPreview' +import MaterialsPage from './pages/Materials' +import WorkerActivityPage from './pages/WorkerActivity' +import ProductLibraryPage from './pages/ProductLibrary' +import ProductDetailPage from './pages/ProductDetail' +import NewProductOrderPage from './pages/NewProductOrder' +import NotificationsPage from './pages/Notifications' +import PreferencesPage from './pages/Preferences' + +function ProtectedRoute({ children }: { children: React.ReactNode }) { + const token = useAuthStore((s) => s.token) + if (!token) return + return <>{children} +} + +function AdminRoute({ children }: { children: React.ReactNode }) { + const { token, user } = useAuthStore() + if (!token) return + if (user?.role !== 'admin' && user?.role !== 'project_manager') return + return <>{children} +} + +export default function App() { + return ( + + + } /> + + + + } + > + } /> + } /> + } /> + } /> + } /> + } /> + + + + } + /> + } /> + } /> + } /> + } /> + } /> + } /> + } /> + + + + ) +} diff --git a/frontend/src/api/analytics.ts b/frontend/src/api/analytics.ts new file mode 100644 index 0000000..7798866 --- /dev/null +++ b/frontend/src/api/analytics.ts @@ -0,0 +1,137 @@ +import api from './client' + +export interface TopLevelSummary { + total_orders: number + completed_orders: number + total_revenue: number + total_rendering_items: number +} + +export interface ThroughputPoint { + week: string + count: number + completed: number +} + +export interface RevenuePoint { + month: string + revenue: number + order_count: number +} + +export interface ProcessingTimeStats { + avg_submit_to_complete_s: number | null + avg_submit_to_processing_s: number | null + p50_s: number | null + p95_s: number | null +} + +export interface ItemStatusBreakdown { + pending: number + approved: number + rejected: number +} + +export interface RenderTimeBreakdown { + avg_stl_s: number | null + avg_render_s: number | null + avg_total_s: number | null + sample_count: number +} + +export interface CategoryCount { + 
category: string + count: number +} + +export interface ProductCategoryStats { + unique_products_rendered: number + total_products: number + products_with_cad: number + products_by_category: CategoryCount[] +} + +export interface OutputTypeUsagePoint { + output_type: string + count: number +} + +export interface RenderStatusDistribution { + pending: number + processing: number + completed: number + failed: number +} + +export interface RendererUsagePoint { + renderer: string + count: number +} + +export interface TopProductEntry { + pim_id: string + product_name: string | null + category: string + order_count: number +} + +export interface CategoryRevenueEntry { + category: string + order_count: number + revenue: number +} + +export interface RenderBackendStatsEntry { + backend: string + total: number + completed: number + failed: number + avg_render_s: number | null + p50_render_s: number | null +} + +export interface RenderTimeByOutputType { + output_type: string + job_count: number + avg_render_s: number | null + min_render_s: number | null + max_render_s: number | null + p50_render_s: number | null +} + +export interface OrdersByUserEntry { + full_name: string + email: string + role: string + order_count: number + revenue: number +} + +export interface DashboardKPIs { + summary: TopLevelSummary + throughput: ThroughputPoint[] + revenue: RevenuePoint[] + processing_times: ProcessingTimeStats + item_status: ItemStatusBreakdown + render_times: RenderTimeBreakdown + product_stats: ProductCategoryStats + output_type_usage: OutputTypeUsagePoint[] + render_status: RenderStatusDistribution + renderer_usage: RendererUsagePoint[] + top_products: TopProductEntry[] + orders_by_user: OrdersByUserEntry[] + category_revenue: CategoryRevenueEntry[] + render_backend_stats: RenderBackendStatsEntry[] + render_time_by_output_type: RenderTimeByOutputType[] +} + +export async function getDashboardKPIs( + dateFrom?: string, + dateTo?: string, +): Promise { + const params: Record = {} + if (dateFrom) params.date_from = dateFrom + if (dateTo) params.date_to = dateTo + const res = await api.get('/analytics/dashboard', { params }) + return res.data +} diff --git a/frontend/src/api/cad.ts b/frontend/src/api/cad.ts new file mode 100644 index 0000000..dec4624 --- /dev/null +++ b/frontend/src/api/cad.ts @@ -0,0 +1,105 @@ +import api from './client' + +// --------------------------------------------------------------------------- +// Types +// --------------------------------------------------------------------------- + +export interface CadObjects { + cad_file_id: string + original_name: string + processing_status: 'pending' | 'processing' | 'completed' | 'failed' + parsed_objects: Record | null +} + +export interface RegenerateThumbnailResponse { + cad_file_id: string + original_name: string + status: 'queued' + task_id: string | null +} + +// --------------------------------------------------------------------------- +// API functions +// --------------------------------------------------------------------------- + +/** + * Returns the URL to the thumbnail PNG for a CAD file. + * Use directly in – the browser will + * handle the authenticated request via the axios interceptor when called + * programmatically, or you can construct the URL for use in img tags when + * the auth token is set as a header. + * + * For use in tags without auth headers, prefer fetching as a blob and + * creating an object URL (see fetchThumbnailBlob below). 
+ */ +export function getCadThumbnailUrl(cadFileId: string): string { + return `/api/cad/${cadFileId}/thumbnail` +} + +/** + * Fetch the thumbnail PNG as a Blob and return an object URL suitable for + * use in without needing explicit auth headers. + * Remember to call URL.revokeObjectURL() when the component unmounts. + */ +export async function fetchThumbnailBlob(cadFileId: string): Promise { + const res = await api.get(`/cad/${cadFileId}/thumbnail`, { + responseType: 'blob', + }) + return URL.createObjectURL(res.data) +} + +/** + * Fetch the glTF model file as a Blob and return an object URL. + * Remember to call URL.revokeObjectURL() when the consumer is done. + */ +export async function fetchModelBlob(cadFileId: string): Promise { + const res = await api.get(`/cad/${cadFileId}/model`, { + responseType: 'blob', + }) + return URL.createObjectURL(res.data) +} + +/** + * Return the parsed_objects JSON for a CAD file. + */ +export async function getCadObjects(cadFileId: string): Promise { + const res = await api.get(`/cad/${cadFileId}/objects`) + return res.data +} + +/** + * Download the cached STL for a CAD file as a file-save dialog. + * quality: 'low' | 'high' + * The backend returns a human-readable filename, but we derive it client-side too. + */ +export async function downloadStl(cadFileId: string, quality: 'low' | 'high', suggestedName?: string): Promise { + const res = await api.get(`/cad/${cadFileId}/stl/${quality}`, { + responseType: 'blob', + }) + const url = URL.createObjectURL(res.data) + const a = document.createElement('a') + a.href = url + a.download = suggestedName ? `${suggestedName}_${quality}.stl` : `model_${quality}.stl` + document.body.appendChild(a) + a.click() + document.body.removeChild(a) + URL.revokeObjectURL(url) +} + +export async function generateStl(cadFileId: string, quality: 'low' | 'high'): Promise<{ task_id: string }> { + const res = await api.post<{ task_id: string }>(`/cad/${cadFileId}/generate-stl/${quality}`) + return res.data +} + +/** + * Ask the backend to re-queue STEP processing for a CAD file (admin only). + * Returns the Celery task_id (or null if the worker is not available). 
+ */ +export async function regenerateThumbnail( + cadFileId: string, +): Promise { + const res = await api.post( + `/cad/${cadFileId}/regenerate-thumbnail`, + ) + return res.data +} diff --git a/frontend/src/api/client.ts b/frontend/src/api/client.ts new file mode 100644 index 0000000..380e526 --- /dev/null +++ b/frontend/src/api/client.ts @@ -0,0 +1,26 @@ +import axios from 'axios' +import { useAuthStore } from '../store/auth' + +const api = axios.create({ + baseURL: '/api', + headers: { 'Content-Type': 'application/json' }, +}) + +api.interceptors.request.use((config) => { + const token = useAuthStore.getState().token + if (token) config.headers.Authorization = `Bearer ${token}` + return config +}) + +api.interceptors.response.use( + (res) => res, + (err) => { + if (err.response?.status === 401) { + useAuthStore.getState().logout() + window.location.href = '/login' + } + return Promise.reject(err) + }, +) + +export default api diff --git a/frontend/src/api/materials.ts b/frontend/src/api/materials.ts new file mode 100644 index 0000000..32d84e5 --- /dev/null +++ b/frontend/src/api/materials.ts @@ -0,0 +1,86 @@ +import api from './client' + +export interface Material { + id: string + name: string + description: string | null + source: string + schaeffler_code: number | null + created_by_name: string | null + aliases: string[] + created_at: string + updated_at: string +} + +export interface MaterialAlias { + id: string + alias: string + created_at: string +} + +export async function listMaterials() { + const res = await api.get('/materials') + return res.data +} + +export async function createMaterial(data: { + name: string + description?: string + source?: string + schaeffler_code?: number | null +}) { + const res = await api.post('/materials', data) + return res.data +} + +export async function updateMaterial(id: string, data: { name?: string; description?: string }) { + const res = await api.patch(`/materials/${id}`, data) + return res.data +} + +export async function deleteMaterial(id: string) { + await api.delete(`/materials/${id}`) +} + +export async function saveCadPartMaterials( + orderId: string, + itemId: string, + parts: Array<{ part_name: string; material: string }>, +) { + const res = await api.put(`/orders/${orderId}/items/${itemId}/cad-materials`, { parts }) + return res.data +} + +export async function seedSchaefflerMaterials() { + const res = await api.post<{ inserted: number; total: number }>('/materials/seed-schaeffler') + return res.data +} + +export async function getNextCode(typePrefix: string) { + const res = await api.get<{ next_code: number; prefix: string; next_consecutive: number }>( + `/materials/next-code`, + { params: { type_prefix: typePrefix } }, + ) + return res.data +} + +// --- Alias endpoints --- + +export async function listAliases(materialId: string): Promise { + const res = await api.get(`/materials/${materialId}/aliases`) + return res.data +} + +export async function addAlias(materialId: string, alias: string): Promise { + const res = await api.post(`/materials/${materialId}/aliases`, { alias }) + return res.data +} + +export async function deleteAlias(aliasId: string): Promise { + await api.delete(`/materials/aliases/${aliasId}`) +} + +export async function seedAliases(): Promise<{ inserted: number; total: number }> { + const res = await api.post<{ inserted: number; total: number }>('/materials/seed-aliases') + return res.data +} diff --git a/frontend/src/api/notifications.ts b/frontend/src/api/notifications.ts new file mode 100644 index 
0000000..c506580 --- /dev/null +++ b/frontend/src/api/notifications.ts @@ -0,0 +1,39 @@ +import api from './client' + +export interface Notification { + id: string + action: string + entity_type: string | null + entity_id: string | null + details: Record | null + timestamp: string + read_at: string | null +} + +export interface NotificationListResponse { + items: Notification[] + unread_count: number + total: number +} + +export async function getNotifications(params?: { + limit?: number + offset?: number + unread_only?: boolean +}): Promise { + const { data } = await api.get('/notifications', { params }) + return data +} + +export async function getUnreadCount(): Promise { + const { data } = await api.get('/notifications/unread-count') + return data.unread_count +} + +export async function markAsRead(ids?: string[]): Promise { + await api.post('/notifications/mark-read', { notification_ids: ids ?? null }) +} + +export async function markOneAsRead(id: string): Promise { + await api.post(`/notifications/${id}/mark-read`) +} diff --git a/frontend/src/api/orders.ts b/frontend/src/api/orders.ts new file mode 100644 index 0000000..f0fd55e --- /dev/null +++ b/frontend/src/api/orders.ts @@ -0,0 +1,250 @@ +import api from './client' +import type { Product } from './products' +import type { OutputType } from './outputTypes' + +export interface OrderLine { + id: string + order_id: string + product_id: string + product: Product + output_type_id: string | null + output_type: OutputType | null + gewuenschte_bildnummer: string | null + item_status: 'pending' | 'approved' | 'rejected' + render_status: 'pending' | 'processing' | 'completed' | 'failed' | 'cancelled' + result_path: string | null + thumbnail_url: string | null + ai_validation_status: string + ai_validation_result: Record | null + render_backend_used: string | null + flamenco_job_id: string | null + unit_price: number | null + render_position_id: string | null + render_position_name: string | null + notes: string | null + created_at: string + updated_at: string +} + +export interface OrderLineCreate { + product_id: string + output_type_id?: string | null + render_position_id?: string | null + gewuenschte_bildnummer?: string | null + notes?: string | null +} + +export interface Order { + id: string + order_number: string + template_id: string | null + status: 'draft' | 'submitted' | 'processing' | 'completed' | 'rejected' + created_by: string + source_excel: string | null + notes: string | null + created_at: string + updated_at: string + submitted_at: string | null + completed_at: string | null + estimated_price: number | null + item_count: number + line_count: number + render_progress: { + total: number + completed: number + processing: number + failed: number + pending: number + cancelled: number + } | null +} + +export interface OrderItem { + id: string + order_id: string + row_index: number + ebene1: string | null + ebene2: string | null + baureihe: string | null + pim_id: string | null + produkt_baureihe: string | null + gewaehltes_produkt: string | null + name_cad_modell: string | null + gewuenschte_bildnummer: string | null + lagertyp: string | null + medias_rendering: boolean | null + components: Array<{ part_name: string | null; material: string | null; component_type: string | null; column_index: number }> + cad_file_id: string | null + thumbnail_path: string | null + ai_validation_status: string + ai_validation_result: Record | null + item_status: 'pending' | 'approved' | 'rejected' + notes: string | null + created_at: string +} + 
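// ---------------------------------------------------------------------------
// Editor's note — illustrative sketch, not part of this commit. It shows one
// way a consumer elsewhere in frontend/src/ might combine getOrder() (defined
// further down in this file) with the OrderItem/OrderDetail shapes above to
// tally item statuses; the function name countItemStatuses is hypothetical.
// ---------------------------------------------------------------------------
// async function countItemStatuses(orderId: string) {
//   const order: OrderDetail = await getOrder(orderId)
//   // item_status is the 'pending' | 'approved' | 'rejected' union from OrderItem
//   const counts: Record<OrderItem['item_status'], number> = {
//     pending: 0,
//     approved: 0,
//     rejected: 0,
//   }
//   for (const item of order.items) counts[item.item_status] += 1
//   return counts
// }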
+export interface OrderDetail extends Order { + items: OrderItem[] + lines: OrderLine[] +} + +export async function listOrders(params?: { status?: string; skip?: number; limit?: number }) { + const res = await api.get('/orders', { params }) + return res.data +} + +export async function searchOrders(params: { + q?: string + statuses?: string[] + date_from?: string + date_to?: string + limit?: number +}): Promise { + const res = await api.get('/orders/search', { + params: { + q: params.q || '', + statuses: params.statuses?.join(',') || '', + date_from: params.date_from || '', + date_to: params.date_to || '', + limit: params.limit || 50, + }, + }) + return res.data +} + +export async function getOrder(id: string) { + const res = await api.get(`/orders/${id}`) + return res.data +} + +export async function patchOrderItem( + orderId: string, + itemId: string, + patch: Partial<{ + ebene1: string | null + ebene2: string | null + baureihe: string | null + pim_id: string | null + produkt_baureihe: string | null + gewaehltes_produkt: string | null + name_cad_modell: string | null + gewuenschte_bildnummer: string | null + lagertyp: string | null + medias_rendering: boolean | null + notes: string | null + }>, +) { + const res = await api.patch(`/orders/${orderId}/items/${itemId}`, patch) + return res.data +} + +export async function createOrder(data: { + template_id?: string + source_excel?: string + notes?: string + items?: Array<{ + row_index: number + ebene1?: string | null + ebene2?: string | null + baureihe?: string | null + pim_id?: string | null + produkt_baureihe?: string | null + gewaehltes_produkt?: string | null + name_cad_modell?: string | null + gewuenschte_bildnummer?: string | null + lagertyp?: string | null + medias_rendering?: boolean | null + components: Array<{ part_name?: string | null; material?: string | null; component_type?: string | null; column_index: number }> + }> + lines?: OrderLineCreate[] +}) { + const res = await api.post('/orders', data) + return res.data +} + +export async function addOrderLine(orderId: string, data: OrderLineCreate): Promise { + const res = await api.post(`/orders/${orderId}/lines`, data) + return res.data +} + +export async function removeOrderLine(orderId: string, lineId: string): Promise { + await api.delete(`/orders/${orderId}/lines/${lineId}`) +} + +export async function submitOrder(id: string) { + const res = await api.post(`/orders/${id}/submit`) + return res.data +} + +export async function deleteOrder(id: string) { + await api.delete(`/orders/${id}`) +} + +export async function unlinkCadFile(orderId: string, itemId: string) { + await api.delete(`/orders/${orderId}/items/${itemId}/cad-file`) +} + +export async function dispatchRenders(orderId: string) { + const res = await api.post<{ dispatched: number }>(`/orders/${orderId}/dispatch-renders`) + return res.data +} + +export async function cancelLineRender(orderId: string, lineId: string) { + const res = await api.post<{ cancelled: boolean; line_id: string; backend: string; errors: string[] | null }>( + `/orders/${orderId}/lines/${lineId}/cancel-render` + ) + return res.data +} + +export async function cancelOrderRenders(orderId: string) { + const res = await api.post<{ cancelled: number; order_status: string; errors: string[] | null }>( + `/orders/${orderId}/cancel-renders` + ) + return res.data +} + +export async function regenerateItemThumbnail(orderId: string, itemId: string) { + const res = await api.post<{ status: string; task_id: string; cad_file_id: string }>( + 
`/orders/${orderId}/items/${itemId}/regenerate-thumbnail` + ) + return res.data +} + +export interface SplitMissingStepResult { + new_order_id: string + new_order_number: string + moved_item_count: number + moved_line_count: number +} + +export async function splitMissingStep(orderId: string): Promise { + const res = await api.post(`/orders/${orderId}/split-missing-step`) + return res.data +} + +export interface GenerateLinesResult { + created: number + skipped: number + no_product_count: number + no_step_count: number +} + +export async function generateLinesFromItems( + orderId: string, + outputTypeIds: string[], +): Promise { + const res = await api.post(`/orders/${orderId}/generate-lines`, { + output_type_ids: outputTypeIds, + }) + return res.data +} + +export async function downloadOrderRenders(orderId: string, orderNumber: string): Promise { + const res = await api.get(`/orders/${orderId}/download-renders`, { responseType: 'blob' }) + const url = URL.createObjectURL(res.data) + const a = document.createElement('a') + a.href = url + a.download = `${orderNumber}_renders.zip` + a.click() + URL.revokeObjectURL(url) +} diff --git a/frontend/src/api/outputTypes.ts b/frontend/src/api/outputTypes.ts new file mode 100644 index 0000000..301057f --- /dev/null +++ b/frontend/src/api/outputTypes.ts @@ -0,0 +1,46 @@ +import api from './client' + +export interface OutputType { + id: string + name: string + description: string | null + renderer: string + render_settings: Record + output_format: string + sort_order: number + compatible_categories: string[] + render_backend: string + is_animation: boolean + transparent_bg: boolean + cycles_device: string | null + pricing_tier_id: number | null + pricing_tier_name: string | null + price_per_item: number | null + is_active: boolean + created_at: string + updated_at: string +} + +export async function listOutputTypes( + includeInactive = false, + category?: string, +): Promise { + const params: Record = { include_inactive: includeInactive } + if (category) params.category = category + const res = await api.get('/output-types', { params }) + return res.data +} + +export async function createOutputType(data: Partial): Promise { + const res = await api.post('/output-types', data) + return res.data +} + +export async function updateOutputType(id: string, data: Partial): Promise { + const res = await api.patch(`/output-types/${id}`, data) + return res.data +} + +export async function deleteOutputType(id: string): Promise { + await api.delete(`/output-types/${id}`) +} diff --git a/frontend/src/api/pricing.ts b/frontend/src/api/pricing.ts new file mode 100644 index 0000000..b1caff2 --- /dev/null +++ b/frontend/src/api/pricing.ts @@ -0,0 +1,61 @@ +import api from './client' + +export interface PricingTier { + id: number + category_key: string + quality_level: string + price_per_item: number + description: string | null + is_active: boolean + created_at: string + updated_at: string +} + +export async function listPricingTiers(): Promise { + const res = await api.get('/pricing') + return res.data +} + +export async function createPricingTier(data: { + category_key: string + quality_level: string + price_per_item: number + description?: string + is_active?: boolean +}): Promise { + const res = await api.post('/pricing', data) + return res.data +} + +export async function updatePricingTier( + id: number, + data: { category_key?: string; quality_level?: string; price_per_item?: number; description?: string; is_active?: boolean }, +): Promise { + const res = await 
api.patch(`/pricing/${id}`, data) + return res.data +} + +export async function deletePricingTier(id: number): Promise { + await api.delete(`/pricing/${id}`) +} + +export interface PriceEstimateLine { + product_id: string + output_type_id: string | null +} + +export interface PriceEstimate { + total: number + line_count: number + breakdown: Array<{ + output_type_id: string | null + product_id: string | null + unit_price: number | null + }> + has_unpriced: boolean +} + +export async function estimatePrice(lines: PriceEstimateLine[]): Promise { + const res = await api.post('/pricing/estimate', { lines }) + return res.data +} diff --git a/frontend/src/api/products.ts b/frontend/src/api/products.ts new file mode 100644 index 0000000..855a868 --- /dev/null +++ b/frontend/src/api/products.ts @@ -0,0 +1,189 @@ +import api from './client' + +export interface RenderPosition { + id: string + product_id: string + name: string + rotation_x: number + rotation_y: number + rotation_z: number + is_default: boolean + sort_order: number + created_at: string +} + +export interface ComponentData { + part_name: string | null + material: string | null + component_type: string | null + column_index: number +} + +export interface CadPartMaterial { + part_name: string + material: string +} + +export interface Product { + id: string + pim_id: string + name: string | null + category_key: string | null + ebene1: string | null + ebene2: string | null + baureihe: string | null + produkt_baureihe: string | null + lagertyp: string | null + name_cad_modell: string | null + gewuenschte_bildnummer: string | null + medias_rendering: boolean | null + components: ComponentData[] + cad_part_materials: CadPartMaterial[] + cad_file_id: string | null + thumbnail_url: string | null + render_image_url: string | null + processing_status: string | null + stl_cached: string[] + cad_parsed_objects: string[] | null + arbeitspaket: string | null + notes: string | null + is_active: boolean + source_excel: string | null + render_positions: RenderPosition[] + created_at: string + updated_at: string +} + +export interface ProductListParams { + q?: string + category_key?: string + has_cad?: boolean + ready_only?: boolean + materials_filter?: string // "complete" | "incomplete" | "" + skip?: number + limit?: number +} + +export async function listProducts(params?: ProductListParams): Promise { + const res = await api.get('/products', { params }) + return res.data +} + +export async function getProduct(id: string): Promise { + const res = await api.get(`/products/${id}`) + return res.data +} + +export async function createProduct(data: Partial): Promise { + const res = await api.post('/products', data) + return res.data +} + +export async function updateProduct(id: string, data: Partial): Promise { + const res = await api.patch(`/products/${id}`, data) + return res.data +} + +export async function deleteProduct(id: string, hard = false): Promise { + await api.delete(`/products/${id}`, { params: hard ? 
{ hard: true } : undefined }) +} + +export async function uploadProductCad(id: string, file: File) { + const form = new FormData() + form.append('file', file) + const res = await api.post(`/products/${id}/cad`, form, { + headers: { 'Content-Type': 'multipart/form-data' }, + }) + return res.data +} + +export async function saveProductCadMaterials(id: string, parts: CadPartMaterial[]): Promise { + const res = await api.post(`/products/${id}/cad-materials`, { parts }) + return res.data +} + +export async function regenerateProduct(id: string) { + const res = await api.post(`/products/${id}/regenerate`) + return res.data +} + +export async function reprocessProduct(id: string) { + const res = await api.post(`/products/${id}/reprocess`) + return res.data +} + +export async function reassignMaterialsFromExcel(id: string): Promise { + const res = await api.post(`/products/${id}/reassign-materials-from-excel`) + return res.data +} + +export interface ProductRender { + order_line_id: string + order_number: string | null + output_type_name: string | null + render_url: string + is_video: boolean + render_backend: string | null + completed_at: string | null +} + +export async function getProductRenders(id: string): Promise { + const res = await api.get(`/products/${id}/renders`) + return res.data +} + +export async function deleteProductRender(productId: string, orderLineId: string): Promise { + await api.delete(`/products/${productId}/renders/${orderLineId}`) +} + +export async function downloadProductRenders( + productId: string, + orderLineIds: string[], + filename?: string, +): Promise { + const res = await api.post( + `/products/${productId}/download-renders`, + { order_line_ids: orderLineIds }, + { responseType: 'blob' }, + ) + const url = URL.createObjectURL(res.data) + const a = document.createElement('a') + a.href = url + a.download = filename ?? 
'renders.zip' + document.body.appendChild(a) + a.click() + document.body.removeChild(a) + URL.revokeObjectURL(url) +} + +export async function getProductOrders(id: string) { + const res = await api.get(`/products/${id}/orders`) + return res.data +} + +export async function listRenderPositions(productId: string): Promise { + const res = await api.get(`/products/${productId}/render-positions`) + return res.data +} + +export async function createRenderPosition( + productId: string, + data: { name: string; rotation_x: number; rotation_y: number; rotation_z: number; is_default?: boolean; sort_order?: number }, +): Promise { + const res = await api.post(`/products/${productId}/render-positions`, data) + return res.data +} + +export async function updateRenderPosition( + productId: string, + posId: string, + data: Partial<{ name: string; rotation_x: number; rotation_y: number; rotation_z: number; is_default: boolean; sort_order: number }>, +): Promise { + const res = await api.patch(`/products/${productId}/render-positions/${posId}`, data) + return res.data +} + +export async function deleteRenderPosition(productId: string, posId: string): Promise { + await api.delete(`/products/${productId}/render-positions/${posId}`) +} + diff --git a/frontend/src/api/renderTemplates.ts b/frontend/src/api/renderTemplates.ts new file mode 100644 index 0000000..d78895f --- /dev/null +++ b/frontend/src/api/renderTemplates.ts @@ -0,0 +1,77 @@ +import api from './client'; + +export interface RenderTemplate { + id: string; + name: string; + category_key: string | null; + output_type_id: string | null; + output_type_name: string | null; + blend_file_path: string; + original_filename: string; + target_collection: string; + material_replace_enabled: boolean; + lighting_only: boolean; + shadow_catcher_enabled: boolean; + camera_orbit: boolean; + is_active: boolean; + created_at: string; + updated_at: string; +} + +export interface MaterialLibraryInfo { + exists: boolean; + filename: string | null; + size_bytes: number | null; + path: string | null; +} + +export async function listRenderTemplates(): Promise { + const { data } = await api.get('/render-templates'); + return data; +} + +export async function createRenderTemplate(formData: FormData): Promise { + const { data } = await api.post('/render-templates', formData, { + headers: { 'Content-Type': 'multipart/form-data' }, + }); + return data; +} + +export async function updateRenderTemplate( + id: string, + updates: Partial>, +): Promise { + const { data } = await api.patch(`/render-templates/${id}`, updates); + return data; +} + +export async function deleteRenderTemplate(id: string): Promise { + await api.delete(`/render-templates/${id}`); +} + +export async function reuploadBlendFile(id: string, file: File): Promise { + const fd = new FormData(); + fd.append('file', file); + const { data } = await api.post(`/render-templates/${id}/upload`, fd, { + headers: { 'Content-Type': 'multipart/form-data' }, + }); + return data; +} + +export async function uploadMaterialLibrary(file: File): Promise { + const fd = new FormData(); + fd.append('file', file); + const { data } = await api.post('/admin/settings/material-library', fd, { + headers: { 'Content-Type': 'multipart/form-data' }, + }); + return data; +} + +export async function getMaterialLibraryInfo(): Promise { + const { data } = await api.get('/admin/settings/material-library'); + return data; +} + +export async function deleteMaterialLibrary(): Promise { + await api.delete('/admin/settings/material-library'); +} diff 
--git a/frontend/src/api/uploads.ts b/frontend/src/api/uploads.ts new file mode 100644 index 0000000..8c2cda0 --- /dev/null +++ b/frontend/src/api/uploads.ts @@ -0,0 +1,102 @@ +import api from './client' +import type { OrderDetail } from './orders' + +export interface ExcelPreviewRow { + row_index: number + pim_id: string | null + produkt_baureihe: string | null + gewaehltes_produkt: string | null + product_exists: boolean + product_id: string | null + medias_rendering: boolean | null + category_key: string | null + has_step: boolean + is_duplicate: boolean + duplicate_of_row: number | null +} + +export interface ExcelPreviewResult { + excel_path: string + filename: string + category_key: string | null + row_count: number + existing_product_count: number + new_product_count: number + no_pim_id_count: number + has_step_count: number + no_step_count: number + duplicate_count: number + warnings: string[] + rows: ExcelPreviewRow[] + column_headers: string[] + template_name: string | null +} + +export interface ParsedComponent { + part_name: string | null + material: string | null + component_type: string | null + column_index: number +} + +export interface ParsedRow { + row_index: number + ebene1: string | null + ebene2: string | null + baureihe: string | null + pim_id: string | null + produkt_baureihe: string | null + gewaehltes_produkt: string | null + name_cad_modell: string | null + gewuenschte_bildnummer: string | null + lagertyp: string | null + medias_rendering: boolean | null + components: ParsedComponent[] +} + +export interface ParsedExcelResponse { + filename: string + excel_path?: string + category_key: string | null + template_name: string | null + row_count: number + column_headers: string[] + rows: ParsedRow[] + warnings: string[] +} + +export interface OutputTypeSelection { + row_index: number + output_type_ids: string[] +} + +export interface ExcelFinalizeRequest { + excel_path: string + included_row_indices: number[] + output_type_selections: OutputTypeSelection[] + notes?: string | null + template_id?: string | null +} + +export async function uploadExcel(file: File): Promise { + const form = new FormData() + form.append('file', file) + const res = await api.post('/uploads/excel', form, { + headers: { 'Content-Type': 'multipart/form-data' }, + }) + return res.data +} + +export async function finalizeExcelImport(data: ExcelFinalizeRequest): Promise { + const res = await api.post('/uploads/excel/finalize', data) + return res.data +} + +export async function uploadStep(file: File) { + const form = new FormData() + form.append('file', file) + const res = await api.post('/uploads/step', form, { + headers: { 'Content-Type': 'multipart/form-data' }, + }) + return res.data +} diff --git a/frontend/src/api/worker.ts b/frontend/src/api/worker.ts new file mode 100644 index 0000000..c676063 --- /dev/null +++ b/frontend/src/api/worker.ts @@ -0,0 +1,125 @@ +import api from './client' + +export interface RenderLog { + renderer: string + format?: string + engine?: string + engine_used?: string + samples?: number + cycles_device?: string + stl_quality?: string + smooth_angle?: number + width?: number + height?: number + total_duration_s?: number + stl_duration_s?: number + render_duration_s?: number + stl_size_bytes?: number + output_size_bytes?: number + parts_count?: number + log_lines?: string[] + fallback?: boolean + started_at?: string + completed_at?: string +} + +export interface CadActivityEntry { + cad_file_id: string + original_name: string + file_size: number | null + 
processing_status: 'pending' | 'processing' | 'completed' | 'failed' | string + error_message: string | null + updated_at: string + created_at: string + order_numbers: string[] + render_log: RenderLog | null +} + +export interface RenderJobEntry { + order_line_id: string + order_number: string | null + product_name: string | null + output_type_name: string | null + render_status: 'processing' | 'completed' | 'failed' | string + render_backend_used: string | null + flamenco_job_id: string | null + render_started_at: string | null + render_completed_at: string | null + updated_at: string +} + +export interface WorkerActivity { + cad_processing: CadActivityEntry[] + active_count: number + failed_count: number + render_jobs: RenderJobEntry[] + render_active_count: number + render_failed_count: number +} + +export async function getWorkerActivity(): Promise { + const res = await api.get('/worker/activity') + return res.data +} + +export async function reprocessCadFile(cad_file_id: string): Promise { + await api.post(`/worker/activity/${cad_file_id}/reprocess`) +} + +export interface RenderLogEntry { + ts: number + t: string + level: 'info' | 'error' | 'success' | string + msg: string +} + +export async function getRenderLog(orderLineId: string, after: number = 0): Promise<{ + entries: RenderLogEntry[] + total: number + next_after: number +}> { + const res = await api.get(`/worker/render-log/${orderLineId}`, { params: { after } }) + return res.data +} + +/** Returns the SSE URL for streaming render logs (needs token in query param). */ +export function renderLogStreamUrl(orderLineId: string): string { + return `/api/worker/render-log/${orderLineId}/stream` +} + +// --------------------------------------------------------------------------- +// Queue inspection + control +// --------------------------------------------------------------------------- + +export interface QueueTask { + task_id: string + task_name: string + args: any[] + argsrepr: string + status: 'pending' | 'active' | 'reserved' + worker?: string + queue?: string +} + +export interface QueueStatus { + queue_depths: Record + pending_count: number + active: QueueTask[] + reserved: QueueTask[] + pending: QueueTask[] +} + +export async function getQueueStatus(): Promise { + const res = await api.get('/worker/queue') + return res.data +} + +export async function purgeQueue(): Promise<{ purged: number; message: string }> { + const res = await api.post<{ purged: number; message: string }>('/worker/queue/purge') + return res.data +} + +export async function cancelTask(taskId: string): Promise<{ revoked: string }> { + const res = await api.post<{ revoked: string }>(`/worker/queue/cancel/${taskId}`) + return res.data +} diff --git a/frontend/src/components/LiveRenderLog.tsx b/frontend/src/components/LiveRenderLog.tsx new file mode 100644 index 0000000..3a4844e --- /dev/null +++ b/frontend/src/components/LiveRenderLog.tsx @@ -0,0 +1,127 @@ +import { useState, useEffect, useRef } from 'react' +import { useQuery } from '@tanstack/react-query' +import { Terminal, ChevronDown, ChevronUp } from 'lucide-react' +import { getRenderLog } from '../api/worker' +import type { RenderLogEntry } from '../api/worker' + +const LEVEL_COLORS: Record = { + info: 'text-gray-300', + error: 'text-red-400', + success: 'text-green-400', + warn: 'text-yellow-400', +} + +/** + * Live render log panel — polls Redis-backed log entries every 2s. + * Shows a compact terminal-style output for a render job. 
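+ * (Note: the worker API above also exposes an incremental cursor — `getRenderLog(id, after)`
+ * returns `next_after` — so a consumer could fetch only the new entries each time; this
+ * panel simply refetches from index 0 on every poll.)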
+ * + * Always does an initial fetch to check if entries exist (so failed jobs + * still show their log). Polls only when isActive. + */ +export default function LiveRenderLog({ + orderLineId, + isActive, + compact = false, +}: { + orderLineId: string + /** Whether the render is still processing — enables polling */ + isActive: boolean + /** Compact mode (inline, no border) for table rows */ + compact?: boolean +}) { + const [expanded, setExpanded] = useState(isActive) + const scrollRef = useRef(null) + + // Always fetch once to probe for existing entries; poll only when active + const { data } = useQuery({ + queryKey: ['render-log', orderLineId], + queryFn: () => getRenderLog(orderLineId), + refetchInterval: isActive ? 2000 : false, + }) + + const entries: RenderLogEntry[] = data?.entries ?? [] + const hasEntries = entries.length > 0 + + // Auto-scroll to bottom when new entries arrive + useEffect(() => { + if (scrollRef.current && isActive) { + scrollRef.current.scrollTop = scrollRef.current.scrollHeight + } + }, [entries.length, isActive]) + + // Auto-expand when active + useEffect(() => { + if (isActive) setExpanded(true) + }, [isActive]) + + // Nothing to show at all + if (!hasEntries && !isActive) return null + + if (compact) { + return ( +
+ + {expanded && hasEntries && ( + + )} +
+ ) + } + + return ( +
+ + {expanded && ( + + )} +
+ ) +} + +function LogPanel({ + entries, + isActive, + scrollRef, + maxHeight, +}: { + entries: RenderLogEntry[] + isActive: boolean + scrollRef: React.RefObject + maxHeight: string +}) { + return ( +
+ {entries.map((entry, i) => ( +
+ {entry.t} + {entry.msg} +
+ ))} + {isActive && entries.length > 0 && ( +
...
+ )} + {entries.length === 0 && ( +
No log entries yet
+ )} +
+ ) +} diff --git a/frontend/src/components/MaterialWizard.tsx b/frontend/src/components/MaterialWizard.tsx new file mode 100644 index 0000000..84dfd80 --- /dev/null +++ b/frontend/src/components/MaterialWizard.tsx @@ -0,0 +1,307 @@ +import { useState, useEffect, useMemo } from 'react' +import { useMutation, useQueryClient } from '@tanstack/react-query' +import { toast } from 'sonner' +import { X, ChevronLeft, ChevronRight, Wrench, Paintbrush, Shapes, FlaskConical, HelpCircle } from 'lucide-react' +import { createMaterial, getNextCode } from '../api/materials' + +interface Props { + open: boolean + onClose: () => void + onCreated?: (name: string) => void +} + +const MATERIAL_TYPES = [ + { code: '01', label: 'Metals', icon: Wrench, color: 'bg-slate-100 text-slate-700 border-slate-300', activeColor: 'bg-slate-600 text-white border-slate-600' }, + { code: '02', label: 'Coatings', icon: Paintbrush, color: 'bg-status-info-bg text-status-info-text border-border-default', activeColor: 'bg-blue-600 text-white border-blue-600' }, + { code: '03', label: 'Non-metals', icon: Shapes, color: 'bg-status-warning-bg text-status-warning-text border-border-default', activeColor: 'bg-amber-600 text-white border-amber-600' }, + { code: '04', label: 'Compounds', icon: FlaskConical, color: 'bg-purple-50 text-purple-700 border-purple-300', activeColor: 'bg-purple-600 text-white border-purple-600' }, + { code: '05', label: 'Misc', icon: HelpCircle, color: 'bg-surface-alt text-content-secondary border-border-default', activeColor: 'bg-gray-600 text-white border-gray-600' }, +] as const + +const SUBTYPE_PRESETS: Record> = { + '01': [ + { code: '01', label: 'Steel' }, + { code: '02', label: 'Niro' }, + { code: '03', label: 'Tin' }, + { code: '04', label: 'Aluminium' }, + { code: '05', label: 'Brass' }, + { code: '06', label: 'Bronze' }, + ], + '02': [ + { code: '01', label: 'Durotect' }, + { code: '02', label: 'Coat' }, + ], + '03': [ + { code: '01', label: 'Elastomer' }, + { code: '02', label: 'Plastic (opaque)' }, + { code: '03', label: 'Plastic (translucent)' }, + { code: '04', label: 'TPU' }, + { code: '05', label: 'Ceramic' }, + ], + '04': [ + { code: '01', label: 'E-series' }, + { code: '02', label: 'Elgo-series' }, + { code: '03', label: 'PTFE / GFK' }, + ], + '05': [], +} + +export default function MaterialWizard({ open, onClose, onCreated }: Props) { + const qc = useQueryClient() + const [step, setStep] = useState(1) + const [typeCode, setTypeCode] = useState('') + const [subTypeCode, setSubTypeCode] = useState('') + const [customSubType, setCustomSubType] = useState('') + const [consecutive, setConsecutive] = useState(null) + const [nameParts, setNameParts] = useState('') + const [description, setDescription] = useState('') + const [loadingCode, setLoadingCode] = useState(false) + + const effectiveSubType = subTypeCode || customSubType + + // Reset on open + useEffect(() => { + if (open) { + setStep(1) + setTypeCode('') + setSubTypeCode('') + setCustomSubType('') + setConsecutive(null) + setNameParts('') + setDescription('') + } + }, [open]) + + // Fetch next consecutive number when type + subtype are set + useEffect(() => { + if (!typeCode || !effectiveSubType || effectiveSubType.length !== 2) { + setConsecutive(null) + return + } + const prefix = typeCode + effectiveSubType + setLoadingCode(true) + getNextCode(prefix) + .then((res) => setConsecutive(res.next_consecutive)) + .catch(() => setConsecutive(1)) + .finally(() => setLoadingCode(false)) + }, [typeCode, effectiveSubType]) + + const fullCode = 
useMemo(() => { + if (!typeCode || !effectiveSubType || consecutive === null) return null + return `${typeCode}${effectiveSubType}${String(consecutive).padStart(2, '0')}` + }, [typeCode, effectiveSubType, consecutive]) + + const sanitizedName = nameParts + .replace(/\s+/g, '-') + .replace(/[^a-zA-Z0-9\-]/g, '') + .replace(/-+/g, '-') + .replace(/^-|-$/g, '') + + const fullMaterialName = fullCode && sanitizedName + ? `SCHAEFFLER_${fullCode}_${sanitizedName}` + : null + + const schaefflerCodeInt = fullCode ? parseInt(fullCode, 10) : null + + const createMut = useMutation({ + mutationFn: () => + createMaterial({ + name: fullMaterialName!, + description: description.trim() || undefined, + source: 'manual', + schaeffler_code: schaefflerCodeInt, + }), + onSuccess: () => { + toast.success('Material created') + qc.invalidateQueries({ queryKey: ['materials'] }) + if (onCreated && fullMaterialName) onCreated(fullMaterialName) + onClose() + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to create material'), + }) + + if (!open) return null + + const nameValid = sanitizedName.length >= 2 + + return ( +
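+  // Worked example of the code assembled above (illustrative values, not project data):
+  // type 01 "Metals" + sub-type 04 "Aluminium" + next consecutive 07 → fullCode "010407";
+  // nameParts "High Grade" sanitises to "High-Grade", so the preview reads
+  // SCHAEFFLER_010407_High-Grade, and parseInt stores schaeffler_code as 10407
+  // (the leading zero is dropped).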
+
e.stopPropagation()} + > + {/* Header */} +
+
+

Schaeffler Material Wizard

+

Step {step} of 3

+
+ +
+ + {/* Steps */} +
+ {step === 1 && ( +
+

Select material type:

+
+ {MATERIAL_TYPES.map((t) => { + const Icon = t.icon + const active = typeCode === t.code + return ( + + ) + })} +
+
+ )} + + {step === 2 && ( +
+

+ Sub-type for {MATERIAL_TYPES.find((t) => t.code === typeCode)?.label}: +

+ {(SUBTYPE_PRESETS[typeCode] ?? []).length > 0 && ( +
+ {SUBTYPE_PRESETS[typeCode]!.map((st) => ( + + ))} +
+ )} + +
+ + { + const v = e.target.value.replace(/\D/g, '') + setCustomSubType(v) + if (v.length > 0) setSubTypeCode('') + }} + className="w-24 px-3 py-1.5 border border-border-default rounded-md text-sm focus:outline-none focus:border-accent" + /> +
+ + {effectiveSubType.length === 2 && ( +
+

Next consecutive number:

+

+ {loadingCode ? '...' : consecutive !== null ? String(consecutive).padStart(2, '0') : '--'} +

+
+ )} +
+ )} + + {step === 3 && ( +
+

Material name and description:

+
+
+ + setNameParts(e.target.value)} + className="w-full px-3 py-2 border border-border-default rounded-md text-sm focus:outline-none focus:border-accent" + /> + {nameParts && !nameValid && ( +

Name must be at least 2 characters (a-z, 0-9, dashes)

+ )} +
+
+ + setDescription(e.target.value)} + className="w-full px-3 py-2 border border-border-default rounded-md text-sm focus:outline-none focus:border-accent" + /> +
+
+
+ )} +
+ + {/* Live preview bar */} +
+

Preview:

+

+ {fullMaterialName || ( + + SCHAEFFLER_{typeCode || 'XX'}{effectiveSubType || 'YY'}{consecutive !== null ? String(consecutive).padStart(2, '0') : 'ZZ'}_{sanitizedName || 'Name'} + + )} +

+
+ + {/* Footer buttons */} +
+ + + {step < 3 ? ( + + ) : ( + + )} +
+
+
+ ) +} diff --git a/frontend/src/components/admin/MaterialLibrary.tsx b/frontend/src/components/admin/MaterialLibrary.tsx new file mode 100644 index 0000000..e29abf9 --- /dev/null +++ b/frontend/src/components/admin/MaterialLibrary.tsx @@ -0,0 +1,171 @@ +import { useState } from 'react' +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query' +import { toast } from 'sonner' +import { Plus, Trash2, Pencil, Check, X } from 'lucide-react' +import { listMaterials, createMaterial, updateMaterial, deleteMaterial } from '../../api/materials' +import type { Material } from '../../api/materials' + +export default function MaterialLibrary() { + const qc = useQueryClient() + const [showAdd, setShowAdd] = useState(false) + const [newName, setNewName] = useState('') + const [newDesc, setNewDesc] = useState('') + const [editingId, setEditingId] = useState(null) + const [editName, setEditName] = useState('') + const [editDesc, setEditDesc] = useState('') + + const { data: materials = [] } = useQuery({ + queryKey: ['materials'], + queryFn: listMaterials, + }) + + const createMut = useMutation({ + mutationFn: () => createMaterial({ name: newName.trim(), description: newDesc.trim() || undefined }), + onSuccess: () => { + toast.success('Material added') + qc.invalidateQueries({ queryKey: ['materials'] }) + setShowAdd(false) + setNewName('') + setNewDesc('') + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to add material'), + }) + + const updateMut = useMutation({ + mutationFn: (id: string) => updateMaterial(id, { name: editName.trim(), description: editDesc.trim() || undefined }), + onSuccess: () => { + toast.success('Material updated') + qc.invalidateQueries({ queryKey: ['materials'] }) + setEditingId(null) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to update'), + }) + + const deleteMut = useMutation({ + mutationFn: deleteMaterial, + onSuccess: () => { + toast.success('Material deleted') + qc.invalidateQueries({ queryKey: ['materials'] }) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to delete'), + }) + + const startEdit = (mat: Material) => { + setEditingId(mat.id) + setEditName(mat.name) + setEditDesc(mat.description ?? '') + } + + return ( +
+
+
+

Material Library

+

+ Shared materials that are available when assigning materials to CAD parts. +

+
+ +
+ + {showAdd && ( +
+
+ + setNewName(e.target.value)} + onKeyDown={(e) => e.key === 'Enter' && newName.trim() && createMut.mutate()} + className="input-base" + /> +
+
+ + setNewDesc(e.target.value)} + onKeyDown={(e) => e.key === 'Enter' && newName.trim() && createMut.mutate()} + className="input-base" + /> +
+
+ + +
+
+ )} + + {materials.length === 0 ? ( +
+ No materials yet. Add the first one above. +
+ ) : ( +
+ {materials.map((mat) => ( +
+ {editingId === mat.id ? ( + <> + setEditName(e.target.value)} + className="flex-1 px-2 py-1 border border-border-default rounded text-sm focus:outline-none focus:border-accent" + /> + setEditDesc(e.target.value)} + placeholder="Description" + className="flex-1 px-2 py-1 border border-border-default rounded text-sm focus:outline-none focus:border-accent" + /> + + + + ) : ( + <> +
+

{mat.name}

+ {mat.description && ( +

{mat.description}

+ )} +
+ + + + )} +
+ ))} +
+ )} +
+ ) +} diff --git a/frontend/src/components/admin/OutputTypeTable.tsx b/frontend/src/components/admin/OutputTypeTable.tsx new file mode 100644 index 0000000..6396850 --- /dev/null +++ b/frontend/src/components/admin/OutputTypeTable.tsx @@ -0,0 +1,1094 @@ +import { useState, useRef, useEffect } from 'react' +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query' +import { Pencil, Trash2, Plus, Check, X, ChevronDown } from 'lucide-react' +import { toast } from 'sonner' +import { + listOutputTypes, createOutputType, updateOutputType, deleteOutputType, +} from '../../api/outputTypes' +import type { OutputType } from '../../api/outputTypes' +import { listPricingTiers } from '../../api/pricing' +import type { PricingTier } from '../../api/pricing' + +const RENDERERS = ['threejs', 'blender', 'pillow'] +const FORMATS = ['png', 'jpg', 'gltf', 'stl', 'mp4', 'webm'] +const BACKENDS = ['auto', 'celery', 'flamenco'] +const ALL_CATEGORIES = [ + { key: 'TRB', label: 'TRB' }, + { key: 'Kugellager', label: 'Kugellager' }, + { key: 'CRB', label: 'CRB' }, + { key: 'Gleitlager', label: 'Gleitlager' }, + { key: 'SRB_TORB', label: 'SRB/TORB' }, + { key: 'Linear_schiene', label: 'Linear' }, + { key: 'Anschlagplatten', label: 'Anschlag' }, +] +const EMPTY_FORM ={ name: '', description: '', renderer: 'threejs', output_format: 'png', sort_order: 0, compatible_categories: [] as string[], render_backend: 'auto', is_animation: false, transparent_bg: false, cycles_device: '' as string, pricing_tier_id: null as number | null, width: '', height: '', engine: '', samples: '', frame_count: '', fps: '', turntable_axis: 'world_z', bg_color: '', noise_threshold: '', denoiser: '', denoising_input_passes: '', denoising_prefilter: '', denoising_quality: '', denoising_use_gpu: '' } + +export default function OutputTypeTable() { + const qc = useQueryClient() + const [showAdd, setShowAdd] = useState(false) + const [form, setForm] = useState(EMPTY_FORM) + const [editingId, setEditingId] = useState(null) + const [editDraft, setEditDraft] = useState>({}) + + const { data: types, isLoading } = useQuery({ + queryKey: ['output-types-admin'], + queryFn: () => listOutputTypes(true), + }) + + const { data: pricingTiers } = useQuery({ + queryKey: ['pricing-tiers'], + queryFn: listPricingTiers, + }) + + const createMut = useMutation({ + mutationFn: () => { + const rs: Record = {} + if (form.width) rs.width = Number(form.width) + if (form.height) rs.height = Number(form.height) + if (form.engine) rs.engine = form.engine + if (form.samples) rs.samples = Number(form.samples) + if (form.is_animation) { + if (form.frame_count) rs.frame_count = Number(form.frame_count) + if (form.fps) rs.fps = Number(form.fps) + if (form.turntable_axis) rs.turntable_axis = form.turntable_axis + if (form.bg_color) rs.bg_color = form.bg_color + } + if (form.noise_threshold) rs.noise_threshold = form.noise_threshold + if (form.denoiser) rs.denoiser = form.denoiser + if (form.denoising_input_passes) rs.denoising_input_passes = form.denoising_input_passes + if (form.denoising_prefilter) rs.denoising_prefilter = form.denoising_prefilter + if (form.denoising_quality) rs.denoising_quality = form.denoising_quality + if (form.denoising_use_gpu) rs.denoising_use_gpu = form.denoising_use_gpu + return createOutputType({ + name: form.name.trim(), + description: form.description.trim() || undefined, + renderer: form.renderer, + output_format: form.output_format, + sort_order: Number(form.sort_order), + compatible_categories: form.compatible_categories, + 
render_backend: form.render_backend, + is_animation: form.is_animation, + transparent_bg: form.transparent_bg, + cycles_device: form.cycles_device || null, + pricing_tier_id: form.pricing_tier_id, + render_settings: Object.keys(rs).length > 0 ? rs : {}, + }) + }, + onSuccess: () => { + toast.success('Output type created') + qc.invalidateQueries({ queryKey: ['output-types-admin'] }) + qc.invalidateQueries({ queryKey: ['output-types'] }) + setForm(EMPTY_FORM) + setShowAdd(false) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to create output type'), + }) + + const updateMut = useMutation({ + mutationFn: ({ id, data }: { id: string; data: Partial & { _width?: string; _height?: string; _engine?: string; _samples?: string; _frame_count?: string; _fps?: string; _turntable_axis?: string; _bg_color?: string; _noise_threshold?: string; _denoiser?: string; _denoising_input_passes?: string; _denoising_prefilter?: string; _denoising_quality?: string; _denoising_use_gpu?: string } }) => { + const { _width, _height, _engine, _samples, _frame_count, _fps, _turntable_axis, _bg_color, _noise_threshold, _denoiser, _denoising_input_passes, _denoising_prefilter, _denoising_quality, _denoising_use_gpu, ...rest } = data + if (_width !== undefined || _height !== undefined || _engine !== undefined || _samples !== undefined || _frame_count !== undefined || _fps !== undefined || _turntable_axis !== undefined || _bg_color !== undefined || _noise_threshold !== undefined || _denoiser !== undefined || _denoising_input_passes !== undefined || _denoising_prefilter !== undefined || _denoising_quality !== undefined || _denoising_use_gpu !== undefined) { + const ot = types?.find((t) => t.id === id) + const existing = ot?.render_settings || {} + const rs = { ...existing } + if (_width !== undefined) { + if (_width) rs.width = Number(_width); else delete rs.width + } + if (_height !== undefined) { + if (_height) rs.height = Number(_height); else delete rs.height + } + if (_engine !== undefined) { + if (_engine) rs.engine = _engine; else delete rs.engine + } + if (_samples !== undefined) { + if (_samples) rs.samples = Number(_samples); else delete rs.samples + } + if (_frame_count !== undefined) { + if (_frame_count) rs.frame_count = Number(_frame_count); else delete rs.frame_count + } + if (_fps !== undefined) { + if (_fps) rs.fps = Number(_fps); else delete rs.fps + } + if (_turntable_axis !== undefined) { + if (_turntable_axis) rs.turntable_axis = _turntable_axis; else delete rs.turntable_axis + } + if (_bg_color !== undefined) { + if (_bg_color) rs.bg_color = _bg_color; else delete rs.bg_color + } + if (_noise_threshold !== undefined) { + if (_noise_threshold) rs.noise_threshold = _noise_threshold; else delete rs.noise_threshold + } + if (_denoiser !== undefined) { + if (_denoiser) rs.denoiser = _denoiser; else delete rs.denoiser + } + if (_denoising_input_passes !== undefined) { + if (_denoising_input_passes) rs.denoising_input_passes = _denoising_input_passes; else delete rs.denoising_input_passes + } + if (_denoising_prefilter !== undefined) { + if (_denoising_prefilter) rs.denoising_prefilter = _denoising_prefilter; else delete rs.denoising_prefilter + } + if (_denoising_quality !== undefined) { + if (_denoising_quality) rs.denoising_quality = _denoising_quality; else delete rs.denoising_quality + } + if (_denoising_use_gpu !== undefined) { + if (_denoising_use_gpu) rs.denoising_use_gpu = _denoising_use_gpu; else delete rs.denoising_use_gpu + } + rest.render_settings = rs + } + return 
updateOutputType(id, rest)
+    },
+    onSuccess: () => {
+      toast.success('Output type updated')
+      qc.invalidateQueries({ queryKey: ['output-types-admin'] })
+      qc.invalidateQueries({ queryKey: ['output-types'] })
+      setEditingId(null)
+      setEditDraft({})
+    },
+    onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to update'),
+  })
+
+  const deleteMut = useMutation({
+    mutationFn: (id: string) => deleteOutputType(id),
+    onSuccess: () => {
+      toast.success('Output type deleted')
+      qc.invalidateQueries({ queryKey: ['output-types-admin'] })
+      qc.invalidateQueries({ queryKey: ['output-types'] })
+    },
+    onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to delete'),
+  })
+
+  // Check if transparent_bg / bg_color controls should be visible
+  function showTransparentBg(renderer: string, _format: string) {
+    return renderer === 'blender'
+  }
+
+  // Check if blender-specific engine/samples columns should be visible
+  function showBlenderSettings(renderer: string) {
+    return renderer === 'blender'
+  }
+
+  return (
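+  // (Editing note: in updateMut above, an undefined draft value such as `_width` means
+  //  "field not touched", while an empty string means "remove this key from
+  //  render_settings" — hence every key is checked against undefined before the merge.)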
+ + + + + + + + + + + + + + + + + + + + + + + + {isLoading && ( + + + + )} + {types?.map((ot) => ( + + {editingId === ot.id ? ( + <> + + + + + + + + + + + + + + + + + + + ) : ( + <> + + + + + + + + + + + + + + + + + + + )} + + ))} + + {/* Add row */} + {showAdd && ( + + + + + + + + + + + + + + + + + + + + )} + +
NameRendererFormatBackendAnimTurntableBGDeviceEngineSamplesDenoiseCategoriesResolutionPricingSortActiveActions
Loading…
+ setEditDraft({ ...editDraft, name: e.target.value })} + /> + + + + + + + + setEditDraft({ ...editDraft, is_animation: e.target.checked })} + /> + + {(editDraft.is_animation ?? ot.is_animation) ? ( +
+
+ setEditDraft({ ...editDraft, _frame_count: e.target.value } as any)} + title="Frame count" + /> + f + setEditDraft({ ...editDraft, _fps: e.target.value } as any)} + title="FPS" + /> + fps +
+ +
+ ) : ( + + )} +
+ {showTransparentBg(editDraft.renderer ?? ot.renderer, editDraft.output_format ?? ot.output_format) ? ( +
+ + {(() => { + const cur = (editDraft as any)._bg_color !== undefined + ? (editDraft as any)._bg_color as string + : (ot.render_settings?.bg_color as string || '') + const enabled = cur !== '' + return ( + <> + + {enabled && ( +
+ setEditDraft({ ...editDraft, _bg_color: e.target.value } as any)} + /> + setEditDraft({ ...editDraft, _bg_color: e.target.value } as any)} + /> +
+ )} + + ) + })()} +
+ ) : ( + + )} +
+ {showBlenderSettings(editDraft.renderer ?? ot.renderer) ? ( + + ) : ( + + )} + + {showBlenderSettings(editDraft.renderer ?? ot.renderer) ? ( + + ) : ( + + )} + + {showBlenderSettings(editDraft.renderer ?? ot.renderer) ? ( + setEditDraft({ ...editDraft, _samples: e.target.value } as any)} + /> + ) : ( + + )} + + {showBlenderSettings(editDraft.renderer ?? ot.renderer) ? ( +
+ + setEditDraft({ ...editDraft, _noise_threshold: e.target.value } as any)} + title="Noise threshold (adaptive sampling)" + /> + + + + +
+ ) : ( + + )} +
+ setEditDraft({ ...editDraft, compatible_categories: cats })} + /> + +
+ setEditDraft({ ...editDraft, _width: e.target.value } as any)} + /> + x + setEditDraft({ ...editDraft, _height: e.target.value } as any)} + /> +
+
+ + + setEditDraft({ ...editDraft, sort_order: Number(e.target.value) })} + /> + + setEditDraft({ ...editDraft, is_active: e.target.checked })} + /> + + + + {ot.name}{ot.renderer}{ot.output_format} + + {ot.render_backend} + + + {ot.is_animation && ( + video + )} + + {ot.is_animation ? ( +
+ + {(ot.render_settings?.frame_count as number) || 120}f / {(ot.render_settings?.fps as number) || 30}fps + + + 360° {({'world_z': 'World Z', 'world_x': 'World X', 'world_y': 'World Y'} as Record)[ot.render_settings?.turntable_axis as string] ?? 'World Z'} + +
+ ) : ( + + )} +
+ {showTransparentBg(ot.renderer, ot.output_format) && ( +
+ {ot.transparent_bg && ( + alpha + )} + {ot.render_settings?.bg_color && ( +
+ + {ot.render_settings.bg_color} +
+ )} +
+ )} +
+ {showBlenderSettings(ot.renderer) ? ( + ot.cycles_device ? ( + + {ot.cycles_device.toUpperCase()} + + ) : ( + system + ) + ) : ( + + )} + + {showBlenderSettings(ot.renderer) ? ( + ot.render_settings?.engine ? ( + + {ot.render_settings.engine === 'cycles' ? 'Cycles' : 'EEVEE'} + + ) : ( + default + ) + ) : ( + + )} + + {showBlenderSettings(ot.renderer) ? ( + ot.render_settings?.samples ? ( + {ot.render_settings.samples} + ) : ( + default + ) + ) : ( + + )} + + {showBlenderSettings(ot.renderer) ? ( +
+ {ot.render_settings?.denoiser ? ( + + {ot.render_settings.denoiser === 'OPTIX' ? 'OptiX' : 'OIDN'} + + ) : null} + {ot.render_settings?.noise_threshold ? ( + t={ot.render_settings.noise_threshold} + ) : null} + {ot.render_settings?.denoising_prefilter ? ( + {ot.render_settings.denoising_prefilter as string} + ) : null} + {!ot.render_settings?.denoiser && !ot.render_settings?.noise_threshold && !ot.render_settings?.denoising_prefilter && ( + default + )} +
+ ) : ( + + )} +
+ {(!ot.compatible_categories || ot.compatible_categories.length === 0) ? ( + All + ) : ( +
+ {ot.compatible_categories.map((c: string) => ( + + {ALL_CATEGORIES.find((ac) => ac.key === c)?.label || c} + + ))} +
+ )} +
+ {ot.render_settings?.width || ot.render_settings?.height + ? `${ot.render_settings.width || '?'}x${ot.render_settings.height || '?'}` + : default} + + {ot.pricing_tier_name ? ( +
+ + {ot.pricing_tier_name} + + {ot.price_per_item != null && ( + + {ot.price_per_item.toFixed(2)} + + )} +
+ ) : ( + Category default + )} +
{ot.sort_order} + + {ot.is_active ? 'active' : 'inactive'} + + + + +
+ setForm({ ...form, name: e.target.value })} + /> + + + + + + + + setForm({ ...form, is_animation: e.target.checked })} + /> + + {form.is_animation ? ( +
+
+ setForm({ ...form, frame_count: e.target.value })} + title="Frame count" + /> + f + setForm({ ...form, fps: e.target.value })} + title="FPS" + /> + fps +
+ +
+ ) : ( + + )} +
+ {showTransparentBg(form.renderer, form.output_format) ? ( +
+ + + {form.bg_color && ( +
+ setForm({ ...form, bg_color: e.target.value })} + /> + setForm({ ...form, bg_color: e.target.value })} + /> +
+ )} +
+ ) : ( + + )} +
+ {showBlenderSettings(form.renderer) ? ( + + ) : ( + + )} + + {showBlenderSettings(form.renderer) ? ( + + ) : ( + + )} + + {showBlenderSettings(form.renderer) ? ( + setForm({ ...form, samples: e.target.value })} + /> + ) : ( + + )} + + {showBlenderSettings(form.renderer) ? ( +
+ + setForm({ ...form, noise_threshold: e.target.value })} + title="Noise threshold (adaptive sampling)" + /> + + + + +
+ ) : ( + + )} +
+ setForm({ ...form, compatible_categories: cats })} + /> + +
+ setForm({ ...form, width: e.target.value })} + /> + x + setForm({ ...form, height: e.target.value })} + /> +
+
+ + + setForm({ ...form, sort_order: Number(e.target.value) })} + /> + + + +
+ + {!showAdd && ( +
+ +
+ )} +
+ ) +} + + +function CategoryMultiSelect({ + value, + onChange, +}: { + value: string[] + onChange: (cats: string[]) => void +}) { + const [open, setOpen] = useState(false) + const ref = useRef(null) + + useEffect(() => { + function handleClick(e: MouseEvent) { + if (ref.current && !ref.current.contains(e.target as Node)) setOpen(false) + } + document.addEventListener('mousedown', handleClick) + return () => document.removeEventListener('mousedown', handleClick) + }, []) + + function toggle(key: string) { + if (value.includes(key)) { + onChange(value.filter((k) => k !== key)) + } else { + onChange([...value, key]) + } + } + + return ( +
+ + {open && ( +
+ {ALL_CATEGORIES.map((cat) => ( + + ))} + {value.length > 0 && ( + + )} +
+ )} +
+ ) +} diff --git a/frontend/src/components/admin/PricingTierTable.tsx b/frontend/src/components/admin/PricingTierTable.tsx new file mode 100644 index 0000000..5e70ffa --- /dev/null +++ b/frontend/src/components/admin/PricingTierTable.tsx @@ -0,0 +1,299 @@ +import { useState } from 'react' +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query' +import { Pencil, Trash2, Plus, Check, X } from 'lucide-react' +import { toast } from 'sonner' +import { listPricingTiers, createPricingTier, updatePricingTier, deletePricingTier } from '../../api/pricing' +import type { PricingTier } from '../../api/pricing' + +const EMPTY_FORM = { category_key: '', quality_level: 'Normal', price_per_item: '', description: '' } + +export default function PricingTierTable() { + const qc = useQueryClient() + const [showAdd, setShowAdd] = useState(false) + const [form, setForm] = useState(EMPTY_FORM) + const [editingId, setEditingId] = useState(null) + const [editDraft, setEditDraft] = useState>({}) + + const { data: tiers, isLoading } = useQuery({ + queryKey: ['pricing-tiers'], + queryFn: listPricingTiers, + }) + + const createMut = useMutation({ + mutationFn: () => + createPricingTier({ + category_key: form.category_key.trim(), + quality_level: form.quality_level.trim() || 'Normal', + price_per_item: parseFloat(form.price_per_item), + description: form.description.trim() || undefined, + }), + onSuccess: () => { + toast.success('Pricing tier created') + qc.invalidateQueries({ queryKey: ['pricing-tiers'] }) + setForm(EMPTY_FORM) + setShowAdd(false) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to create tier'), + }) + + const updateMut = useMutation({ + mutationFn: ({ id, data }: { id: number; data: Partial }) => + updatePricingTier(id, { + category_key: data.category_key, + quality_level: data.quality_level, + price_per_item: data.price_per_item != null ? Number(data.price_per_item) : undefined, + description: data.description !== undefined ? data.description ?? undefined : undefined, + is_active: data.is_active, + }), + onSuccess: () => { + toast.success('Tier updated') + qc.invalidateQueries({ queryKey: ['pricing-tiers'] }) + setEditingId(null) + setEditDraft({}) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to update tier'), + }) + + const deleteMut = useMutation({ + mutationFn: (id: number) => deletePricingTier(id), + onSuccess: () => { + toast.success('Tier deleted') + qc.invalidateQueries({ queryKey: ['pricing-tiers'] }) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to delete tier'), + }) + + function startEdit(tier: PricingTier) { + setEditingId(tier.id) + setEditDraft({ + category_key: tier.category_key, + quality_level: tier.quality_level, + price_per_item: tier.price_per_item, + description: tier.description ?? '', + is_active: tier.is_active, + }) + } + + function cancelEdit() { + setEditingId(null) + setEditDraft({}) + } + + const canCreate = form.category_key.trim() !== '' && form.price_per_item !== '' && !isNaN(parseFloat(form.price_per_item)) + + return ( +
+ {/* Add form toggle */} +
+ {showAdd ? ( +
+
+ setForm({ ...form, category_key: e.target.value })} + className="input-base" + title="Product category key this tier applies to (e.g. TRB, Kugellager). Leave empty for the global fallback tier." + /> + setForm({ ...form, quality_level: e.target.value })} + className="input-base" + title="Quality level label for this tier (e.g. Normal, Premium). Used for display purposes." + /> + setForm({ ...form, price_per_item: e.target.value })} + className="input-base" + /> + setForm({ ...form, description: e.target.value })} + className="input-base" + /> +
+
+ + +
+
+ ) : ( + + )} +
+ + {/* Table */} + {isLoading ? ( +
Loading…
+ ) : !tiers || tiers.length === 0 ? ( +
+ No pricing tiers configured. Add one above. +
+ ) : ( +
+ {/* Warning if no default tier */} + {!tiers.some((t) => t.category_key === 'default') && ( +
+ No global default tier configured. Orders without a category-specific tier will have no price. +
+ )} + + + + + + + + + + + + {[...tiers].sort((a, b) => { + // Sort 'default' to top + if (a.category_key === 'default' && b.category_key !== 'default') return -1 + if (b.category_key === 'default' && a.category_key !== 'default') return 1 + return 0 + }).map((tier) => { + const isEditing = editingId === tier.id + const isDefault = tier.category_key === 'default' + return ( + + + + + + + + + ) + })} + +
CategoryQuality Level€ / ItemDescriptionActive +
+ {isEditing ? ( + setEditDraft((d) => ({ ...d, category_key: e.target.value }))} + className="w-full px-2 py-1 border border-border-default rounded text-sm focus:outline-none focus:border-accent" + /> + ) : ( +
+ {tier.category_key} + {isDefault && ( + + Global Fallback + + )} +
+ )} +
+ {isEditing ? ( + setEditDraft((d) => ({ ...d, quality_level: e.target.value }))} + className="w-full px-2 py-1 border border-border-default rounded text-sm focus:outline-none focus:border-accent" + /> + ) : ( + tier.quality_level + )} + + {isEditing ? ( + setEditDraft((d) => ({ ...d, price_per_item: parseFloat(e.target.value) }))} + className="w-24 px-2 py-1 border border-border-default rounded text-sm text-right focus:outline-none focus:border-accent" + /> + ) : ( + € {Number(tier.price_per_item).toFixed(2)} + )} + + {isEditing ? ( + setEditDraft((d) => ({ ...d, description: e.target.value }))} + className="w-full px-2 py-1 border border-border-default rounded text-sm focus:outline-none focus:border-accent" + /> + ) : ( + tier.description || + )} + + {isEditing ? ( + setEditDraft((d) => ({ ...d, is_active: e.target.checked }))} + className="w-4 h-4" + /> + ) : ( + + {tier.is_active ? 'yes' : 'no'} + + )} + +
+ {isEditing ? ( + <> + + + + ) : ( + <> + + + + )} +
+
+
+ )} +
+ ) +} diff --git a/frontend/src/components/admin/RenderTemplateTable.tsx b/frontend/src/components/admin/RenderTemplateTable.tsx new file mode 100644 index 0000000..d5881bb --- /dev/null +++ b/frontend/src/components/admin/RenderTemplateTable.tsx @@ -0,0 +1,492 @@ +import { useState, useRef } from 'react' +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query' +import { Pencil, Trash2, Plus, Check, X, Upload, Download } from 'lucide-react' +import { toast } from 'sonner' +import { + listRenderTemplates, + createRenderTemplate, + updateRenderTemplate, + deleteRenderTemplate, + reuploadBlendFile, +} from '../../api/renderTemplates' +import type { RenderTemplate } from '../../api/renderTemplates' +import { listOutputTypes } from '../../api/outputTypes' +import type { OutputType } from '../../api/outputTypes' + +const ALL_CATEGORIES = [ + { key: 'TRB', label: 'TRB' }, + { key: 'Kugellager', label: 'Kugellager' }, + { key: 'CRB', label: 'CRB' }, + { key: 'Gleitlager', label: 'Gleitlager' }, + { key: 'SRB_TORB', label: 'SRB/TORB' }, + { key: 'Linear_schiene', label: 'Linear' }, + { key: 'Anschlagplatten', label: 'Anschlag' }, +] + +const EMPTY_FORM = { + name: '', + category_key: '' as string, + output_type_id: '' as string, + target_collection: 'Product', + material_replace_enabled: false, + lighting_only: false, + shadow_catcher_enabled: false, + camera_orbit: true, +} + +export default function RenderTemplateTable() { + const qc = useQueryClient() + const [showAdd, setShowAdd] = useState(false) + const [form, setForm] = useState(EMPTY_FORM) + const [addFile, setAddFile] = useState(null) + const [editingId, setEditingId] = useState(null) + const [editDraft, setEditDraft] = useState>({}) + const fileInputRef = useRef(null) + const reuploadRef = useRef(null) + const [reuploadId, setReuploadId] = useState(null) + + const { data: templates, isLoading } = useQuery({ + queryKey: ['render-templates'], + queryFn: listRenderTemplates, + }) + + const { data: outputTypes } = useQuery({ + queryKey: ['output-types-admin'], + queryFn: () => listOutputTypes(true), + }) + + const createMut = useMutation({ + mutationFn: () => { + if (!addFile) throw new Error('Please select a .blend file') + const fd = new FormData() + fd.append('name', form.name.trim()) + fd.append('file', addFile) + fd.append('category_key', form.category_key || '') + fd.append('output_type_id', form.output_type_id || '') + fd.append('target_collection', form.target_collection || 'Product') + fd.append('material_replace_enabled', String(form.material_replace_enabled)) + fd.append('lighting_only', String(form.lighting_only)) + fd.append('shadow_catcher_enabled', String(form.shadow_catcher_enabled)) + fd.append('camera_orbit', String(form.camera_orbit)) + return createRenderTemplate(fd) + }, + onSuccess: () => { + toast.success('Render template created') + qc.invalidateQueries({ queryKey: ['render-templates'] }) + setForm(EMPTY_FORM) + setAddFile(null) + setShowAdd(false) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to create template'), + }) + + const updateMut = useMutation({ + mutationFn: ({ id, data }: { id: string; data: Record }) => + updateRenderTemplate(id, data as any), + onSuccess: () => { + toast.success('Template updated') + qc.invalidateQueries({ queryKey: ['render-templates'] }) + setEditingId(null) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to update'), + }) + + const deleteMut = useMutation({ + mutationFn: deleteRenderTemplate, + onSuccess: 
() => { + toast.success('Template deleted') + qc.invalidateQueries({ queryKey: ['render-templates'] }) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to delete'), + }) + + const reuploadMut = useMutation({ + mutationFn: ({ id, file }: { id: string; file: File }) => reuploadBlendFile(id, file), + onSuccess: () => { + toast.success('.blend file updated') + qc.invalidateQueries({ queryKey: ['render-templates'] }) + setReuploadId(null) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to upload'), + }) + + function startEdit(t: RenderTemplate) { + setEditingId(t.id) + setEditDraft({ + name: t.name, + category_key: t.category_key, + output_type_id: t.output_type_id, + target_collection: t.target_collection, + material_replace_enabled: t.material_replace_enabled, + lighting_only: t.lighting_only, + shadow_catcher_enabled: t.shadow_catcher_enabled, + camera_orbit: t.camera_orbit, + is_active: t.is_active, + }) + } + + function saveEdit() { + if (!editingId) return + updateMut.mutate({ id: editingId, data: editDraft as Record }) + } + + const inputCls = 'px-2 py-1 text-sm border border-border-default rounded bg-surface focus:outline-none focus:ring-1 focus:ring-blue-400' + + return ( +
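+  // (Note: createMut above sends a single multipart/form-data request — the .blend file
+  //  plus the boolean flags serialised via String(...) as "true"/"false" — so the file
+  //  and its metadata reach the backend together.)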
+
+

Render Templates

+ +
+ + {/* Hidden file inputs */} + { + const file = e.target.files?.[0] + if (file && reuploadId) reuploadMut.mutate({ id: reuploadId, file }) + e.target.value = '' + }} + /> + +
+ + + + + + + + + + + + + + + + + + {/* Add row */} + {showAdd && ( + + + + + + + + + + + + + )} + + {/* Template rows */} + {isLoading && ( + + )} + {templates?.map((t) => { + const isEditing = editingId === t.id + return ( + + + + + + + + + + + + + + ) + })} + + {!isLoading && (!templates || templates.length === 0) && !showAdd && ( + + + + )} + +
NameCategoryOutput TypeCollectionMat. ReplaceLighting OnlyShadow CatcherCam Orbit.blend FileActiveActions
+ setForm({ ...form, name: e.target.value })} + /> + + + + + + setForm({ ...form, target_collection: e.target.value })} + /> + + setForm({ ...form, material_replace_enabled: e.target.checked })} + /> + + setForm({ ...form, lighting_only: e.target.checked })} + /> + + setForm({ ...form, shadow_catcher_enabled: e.target.checked })} + /> + + setForm({ ...form, camera_orbit: e.target.checked })} + /> + + + + +
+ + +
+
Loading...
+ {isEditing ? ( + setEditDraft({ ...editDraft, name: e.target.value })} + /> + ) : ( + {t.name} + )} + + {isEditing ? ( + + ) : ( + t.category_key || Any + )} + + {isEditing ? ( + + ) : ( + t.output_type_name || Any + )} + + {isEditing ? ( + setEditDraft({ ...editDraft, target_collection: e.target.value })} + /> + ) : ( + {t.target_collection} + )} + + {isEditing ? ( + setEditDraft({ ...editDraft, material_replace_enabled: e.target.checked })} + /> + ) : ( + t.material_replace_enabled ? ( + Yes + ) : ( + No + ) + )} + + {isEditing ? ( + setEditDraft({ ...editDraft, lighting_only: e.target.checked })} + /> + ) : ( + t.lighting_only ? ( + HDR + ) : ( + + ) + )} + + {isEditing ? ( + setEditDraft({ ...editDraft, shadow_catcher_enabled: e.target.checked })} + /> + ) : ( + t.shadow_catcher_enabled ? ( + On + ) : ( + + ) + )} + + {isEditing ? ( + setEditDraft({ ...editDraft, camera_orbit: e.target.checked })} + /> + ) : ( + t.camera_orbit ? ( + Cam + ) : ( + Obj + ) + )} + +
+ + {t.original_filename} + + + + + +
+
+ {isEditing ? ( + setEditDraft({ ...editDraft, is_active: e.target.checked })} + /> + ) : ( + t.is_active ? ( + + ) : ( + + ) + )} + + {isEditing ? ( +
+ + +
+ ) : ( +
+ + +
+ )} +
+ No render templates configured. Click "Add Template" to create one. +
+
+

+ Templates define pre-designed .blend studio setups. When rendering, the system matches templates by Category + Output Type with a fallback cascade. +

+
+ ) +} diff --git a/frontend/src/components/admin/TemplateEditor.tsx b/frontend/src/components/admin/TemplateEditor.tsx new file mode 100644 index 0000000..2410dbd --- /dev/null +++ b/frontend/src/components/admin/TemplateEditor.tsx @@ -0,0 +1,479 @@ +import { useState, useEffect } from 'react' +import { useMutation, useQueryClient } from '@tanstack/react-query' +import { toast } from 'sonner' +import { + Save, Plus, Trash2, ChevronUp, ChevronDown, + GripVertical, Eye, EyeOff, ToggleLeft, ToggleRight, +} from 'lucide-react' +import api from '../../api/client' + +// --------------------------------------------------------------------------- +// Types +// --------------------------------------------------------------------------- + +type Visibility = 'required' | 'optional' | 'hidden' + +interface StdField { + key: string + label: string + visibility: Visibility +} + +interface CompPair { + component_type: string + required: boolean +} + +interface Template { + id: string + name: string + category_key: string + description?: string | null + is_active: boolean + standard_fields: any + component_schema: any +} + +// --------------------------------------------------------------------------- +// All canonical standard field definitions (maps to DB columns in order_items) +// --------------------------------------------------------------------------- + +const ALL_FIELD_DEFS: { key: string; defaultLabel: string }[] = [ + { key: 'ebene1', defaultLabel: 'Ebene 1' }, + { key: 'ebene2', defaultLabel: 'Ebene 2' }, + { key: 'baureihe', defaultLabel: 'Baureihe' }, + { key: 'pim_id', defaultLabel: 'PIM-ID' }, + { key: 'produkt_baureihe', defaultLabel: 'Produkt / Baureihe' }, + { key: 'gewaehltes_produkt', defaultLabel: 'Gewähltes Produkt' }, + { key: 'name_cad_modell', defaultLabel: 'Name CAD-Modell' }, + { key: 'gewuenschte_bildnummer',defaultLabel: 'Gewünschte Bildnummer' }, + { key: 'lagertyp', defaultLabel: 'Lagertyp' }, + { key: 'medias_rendering', defaultLabel: 'Medias Rendering' }, +] + +// --------------------------------------------------------------------------- +// Normalisation helpers +// --------------------------------------------------------------------------- + +function normalizeFields(raw: any): StdField[] { + // New array format: [{key, label, visibility}] + if (Array.isArray(raw) && raw.length > 0 && raw[0].key) { + const existing = new Map(raw.map((f: StdField) => [f.key, f])) + // Preserve saved order, then append any missing canonical fields + const ordered: StdField[] = raw.filter((f: StdField) => + ALL_FIELD_DEFS.some((d) => d.key === f.key), + ) + ALL_FIELD_DEFS.forEach(({ key, defaultLabel }) => { + if (!existing.has(key)) { + ordered.push({ key, label: defaultLabel, visibility: 'optional' }) + } + }) + return ordered + } + // Legacy dict format {"0": {label, required}} or empty — use canonical defaults + return ALL_FIELD_DEFS.map(({ key, defaultLabel }) => ({ + key, + label: defaultLabel, + visibility: 'optional' as Visibility, + })) +} + +function normalizePairs(raw: any): CompPair[] { + if (!raw) return [] + if (Array.isArray(raw.pairs)) return raw.pairs.map((p: any) => ({ + component_type: p.component_type ?? p.part_name ?? '', + required: p.required ?? false, + })) + if (Array.isArray(raw)) return raw.map((p: any) => ({ + component_type: p.component_type ?? p.part_name ?? '', + required: p.required ?? 
false, + })) + return [] +} + +// --------------------------------------------------------------------------- +// Small helpers +// --------------------------------------------------------------------------- + +function moveItem(arr: T[], from: number, to: number): T[] { + const next = [...arr] + const [item] = next.splice(from, 1) + next.splice(to, 0, item) + return next +} + +const VIS_STYLES: Record = { + required: 'bg-accent text-white', + optional: 'bg-blue-500 text-white', + hidden: 'bg-surface-muted text-content-secondary', +} + +function VisibilityToggle({ + value, + onChange, +}: { + value: Visibility + onChange: (v: Visibility) => void +}) { + const cycle: Visibility[] = ['required', 'optional', 'hidden'] + const labels: Record = { required: 'Required', optional: 'Optional', hidden: 'Hidden' } + return ( + + ) +} + +// --------------------------------------------------------------------------- +// Main component +// --------------------------------------------------------------------------- + +export default function TemplateEditor({ + template, + onClose, +}: { + template: Template + onClose: () => void +}) { + const qc = useQueryClient() + + const [name, setName] = useState(template.name) + const [description, setDescription] = useState(template.description ?? '') + const [isActive, setIsActive] = useState(template.is_active) + const [fields, setFields] = useState(() => normalizeFields(template.standard_fields)) + const [pairs, setPairs] = useState(() => normalizePairs(template.component_schema)) + const [showHidden, setShowHidden] = useState(false) + const [newFieldKey, setNewFieldKey] = useState('') + + useEffect(() => { + setName(template.name) + setDescription(template.description ?? '') + setIsActive(template.is_active) + setFields(normalizeFields(template.standard_fields)) + setPairs(normalizePairs(template.component_schema)) + }, [template.id]) // eslint-disable-line react-hooks/exhaustive-deps + + const saveMut = useMutation({ + mutationFn: () => + api.patch(`/templates/${template.id}`, { + name, + description: description || null, + is_active: isActive, + standard_fields: fields, + component_schema: { pairs }, + }), + onSuccess: () => { + toast.success('Template saved') + qc.invalidateQueries({ queryKey: ['admin-templates'] }) + qc.invalidateQueries({ queryKey: ['templates'] }) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to save'), + }) + + // ---- Standard fields helpers ---- + function updateField(i: number, patch: Partial) { + setFields((f) => f.map((x, j) => (j === i ? { ...x, ...patch } : x))) + } + function removeField(i: number) { + // Don't delete — mark hidden so it stays in DB but hidden in UI + updateField(i, { visibility: 'hidden' }) + } + const hiddenKeys = new Set(fields.filter((f) => f.visibility === 'hidden').map((f) => f.key)) + const availableToAdd = ALL_FIELD_DEFS.filter((d) => hiddenKeys.has(d.key)) + + function restoreField(key: string) { + setFields((f) => + f.map((x) => (x.key === key ? { ...x, visibility: 'optional' } : x)), + ) + setNewFieldKey('') + } + + // ---- Component pair helpers ---- + function updatePair(i: number, patch: Partial) { + setPairs((p) => p.map((x, j) => (j === i ? { ...x, ...patch } : x))) + } + function addPair() { + setPairs((p) => [...p, { component_type: '', required: false }]) + } + function removePair(i: number) { + setPairs((p) => p.filter((_, j) => j !== i)) + } + + // ---- Visible fields for rendering ---- + const visibleFields = showHidden ? 
fields : fields.filter((f) => f.visibility !== 'hidden')
+
+  // ---- Shared styles ----
+  const ROW = 'flex items-center gap-2 px-3 py-2 rounded-lg border border-border-light bg-surface-alt group'
+  const ICON_BTN = 'p-1 rounded text-content-muted hover:text-content-secondary hover:bg-surface transition-colors disabled:opacity-30'
+  const INPUT = 'flex-1 min-w-0 text-sm bg-transparent border-b border-transparent focus:border-accent focus:outline-none py-0.5 text-content'
+
+  return (
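+  // (Recap of normalizeFields above: legacy dict payloads like {"0": {label, required}}
+  //  are replaced by the canonical ALL_FIELD_DEFS defaults, while array payloads keep
+  //  their saved order and any missing canonical keys are appended as "optional".)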
+ {/* ------------------------------------------------------------------ */} + {/* Header */} + {/* ------------------------------------------------------------------ */} +
+
+ {/* Editable name */} + setName(e.target.value)} + className="text-base font-semibold text-content bg-transparent border-b border-transparent focus:border-accent focus:outline-none w-full" + placeholder="Template name" + /> + {/* Category key (read-only) + active toggle */} +
+ {template.category_key} + +
+ {/* Description */} + setDescription(e.target.value)} + className="text-xs text-content-muted bg-transparent border-b border-transparent focus:border-accent focus:outline-none w-full" + placeholder="Description (optional)" + /> +
+
+ +
+
+ +
+ {/* ---------------------------------------------------------------- */} + {/* Standard Fields */} + {/* ---------------------------------------------------------------- */} +
+
+
+

+ Standard Fields +

+

+ Rename, reorder, and set visibility for each column. Hidden fields are excluded from forms. +

+
+ +
+ +
+ {visibleFields.map((field, i) => { + // Real index in fields array (needed for moveItem / updateField) + const realIdx = fields.indexOf(field) + const isHidden = field.visibility === 'hidden' + return ( +
+ {/* Reorder */} +
+ + +
+ + + + {/* Label */} + updateField(realIdx, { label: e.target.value })} + className={INPUT} + placeholder="Field label" + /> + + {/* Key badge */} + + {field.key} + + + {/* Visibility */} + updateField(realIdx, { visibility: v })} + /> + + {/* Hide button */} + +
+ ) + })} +
+ + {/* Restore hidden field */} + {availableToAdd.length > 0 && ( +
+ + {newFieldKey && ( + + )} +
+ )} +
+ + {/* ---------------------------------------------------------------- */} + {/* Component Schema */} + {/* ---------------------------------------------------------------- */} +
+
+

+ Component Schema +

+

+ Define the expected component types that appear as column pairs in the Excel file (cols 11+). +

+
+ +
+ {pairs.length === 0 && ( +

No component types defined.

+ )} + {pairs.map((pair, i) => ( +
+ {/* Reorder */} +
+ + +
+ + + + {/* Index badge */} + + {i + 1} + + + {/* Component type name */} + updatePair(i, { component_type: e.target.value })} + placeholder="Component type name" + className={INPUT} + /> + + {/* Required toggle */} + + + {/* Delete */} + +
+ ))} +
+ + +
+
+
+ ) +} diff --git a/frontend/src/components/cad/ThreeDViewer.tsx b/frontend/src/components/cad/ThreeDViewer.tsx new file mode 100644 index 0000000..07964f5 --- /dev/null +++ b/frontend/src/components/cad/ThreeDViewer.tsx @@ -0,0 +1,255 @@ +import { + Suspense, + useRef, + useCallback, + useState, + useEffect, + Component, + type ErrorInfo, + type ReactNode, +} from 'react' +import { Canvas, useThree, useFrame } from '@react-three/fiber' +import { OrbitControls, useGLTF, Environment } from '@react-three/drei' +import { toast } from 'sonner' +import { X, Camera, Loader2, AlertTriangle } from 'lucide-react' +import api from '../../api/client' + +// --------------------------------------------------------------------------- +// Types +// --------------------------------------------------------------------------- + +interface ThreeDViewerProps { + cadFileId: string + onClose: () => void +} + +// --------------------------------------------------------------------------- +// Inner model loader – separated so Suspense can catch it +// --------------------------------------------------------------------------- + +function GltfModel({ url }: { url: string }) { + const { scene } = useGLTF(url) + return +} + +// --------------------------------------------------------------------------- +// Screenshot helper – lives inside Canvas so it can access gl / useThree +// --------------------------------------------------------------------------- + +interface ScreenshotCaptureProps { + enabled: boolean + cadFileId: string + onDone: () => void +} + +function ScreenshotCapture({ enabled, cadFileId, onDone }: ScreenshotCaptureProps) { + const { gl } = useThree() + const didCapture = useRef(false) + + useFrame(() => { + if (!enabled || didCapture.current) return + didCapture.current = true + + // Grab the canvas as a data-URL after the current frame has been rendered + const dataUrl = gl.domElement.toDataURL('image/png') + + // Convert data-URL → Blob without a network fetch: + // data:[][;base64], + const [header, base64Data] = dataUrl.split(',') + const mimeMatch = header.match(/:(.*?);/) + const mimeType = mimeMatch ? mimeMatch[1] : 'image/png' + const byteCharacters = atob(base64Data) + const byteArray = new Uint8Array(byteCharacters.length) + for (let i = 0; i < byteCharacters.length; i++) { + byteArray[i] = byteCharacters.charCodeAt(i) + } + const blob = new Blob([byteArray], { type: mimeType }) + + const formData = new FormData() + formData.append('thumbnail', blob, 'thumbnail.png') + + api + .post(`/cad/${cadFileId}/regenerate-thumbnail`, formData, { + headers: { 'Content-Type': 'multipart/form-data' }, + }) + .then(() => { + toast.success('Thumbnail captured and saved') + }) + .catch((err: unknown) => { + const msg = err instanceof Error ? 
err.message : 'Unknown error'
+        console.error('Thumbnail upload failed', msg)
+        toast.error('Failed to save thumbnail')
+      })
+      .finally(() => {
+        didCapture.current = false
+        onDone()
+      })
+  })
+
+  return null
+}
+
+// ---------------------------------------------------------------------------
+// Error boundary for the GLTF loader inside Suspense
+// ---------------------------------------------------------------------------
+
+class GltfErrorBoundary extends Component<
+  { children: ReactNode; onError: (msg: string) => void },
+  { hasError: boolean }
+> {
+  constructor(props: { children: ReactNode; onError: (msg: string) => void }) {
+    super(props)
+    this.state = { hasError: false }
+  }
+
+  static getDerivedStateFromError(): { hasError: boolean } {
+    return { hasError: true }
+  }
+
+  componentDidCatch(error: Error, _info: ErrorInfo): void {
+    this.props.onError(error.message || 'Failed to parse GLTF')
+  }
+
+  render(): ReactNode {
+    if (this.state.hasError) return null
+    return this.props.children
+  }
+}
+
+// ---------------------------------------------------------------------------
+// Loading overlay (shown while model resolves inside Canvas)
+// ---------------------------------------------------------------------------
+
+function LoadingOverlay() {
+  return (
+ +

Loading 3D model…

+
+ ) +} + +// --------------------------------------------------------------------------- +// Model loader with resolved tracking +// --------------------------------------------------------------------------- + +interface ModelWithReadyProps { + url: string + onReady: () => void +} + +function ModelWithReady({ url, onReady }: ModelWithReadyProps) { + const { scene } = useGLTF(url) + + useEffect(() => { + onReady() + }, [onReady]) + + return +} + +// --------------------------------------------------------------------------- +// Main exported component +// --------------------------------------------------------------------------- + +export default function ThreeDViewer({ cadFileId, onClose }: ThreeDViewerProps) { + const modelUrl = `/api/cad/${cadFileId}/model` + + const [capturing, setCapturing] = useState(false) + const [loadError, setLoadError] = useState(null) + const [modelReady, setModelReady] = useState(false) + + const handleModelReady = useCallback(() => setModelReady(true), []) + const handleError = useCallback((msg: string) => setLoadError(msg), []) + const handleCaptureDone = useCallback(() => setCapturing(false), []) + + return ( +
+ {/* ------------------------------------------------------------------ */} + {/* Toolbar */} + {/* ------------------------------------------------------------------ */} +
+ 3D Viewer +
+ + + +
+
+ + {/* ------------------------------------------------------------------ */} + {/* Viewport */} + {/* ------------------------------------------------------------------ */} +
+ {/* Error state */} + {loadError && ( +
+ +

Failed to load 3D model

+

{loadError}

+ +
+ )} + + {/* Loading overlay – visible until model signals ready */} + {!modelReady && !loadError && } + + {/* Three.js Canvas */} + + {/* Lights */} + + + + + {/* GLTF model */} + + + + + + + {/* Camera controls */} + + + {/* Environment map for PBR materials */} + + + {/* Screenshot capture – only active when triggered */} + {capturing && ( + + )} + +
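+ {/* Capture note: gl.domElement.toDataURL() runs inside useFrame, i.e. right after a frame is drawn, so the WebGL buffer is still readable.
+ If the capture ever moved outside the frame loop, the renderer buffer would need to be kept alive; a sketch (assumption, not part of this diff): <Canvas gl={{ preserveDrawingBuffer: true }}>…</Canvas> */}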
+
+ ) +} diff --git a/frontend/src/components/dashboard/AdminDashboard.tsx b/frontend/src/components/dashboard/AdminDashboard.tsx new file mode 100644 index 0000000..025f7e8 --- /dev/null +++ b/frontend/src/components/dashboard/AdminDashboard.tsx @@ -0,0 +1,593 @@ +import { useState, useMemo } from 'react' +import { useQuery } from '@tanstack/react-query' +import { + LineChart, Line, BarChart, Bar, PieChart, Pie, Cell, Legend, + XAxis, YAxis, CartesianGrid, Tooltip, ResponsiveContainer, +} from 'recharts' +import { getDashboardKPIs } from '../../api/analytics' + +const SCHAEFFLER_GREEN = '#00893d' +const INDIGO = '#6366f1' +const AMBER = '#f59e0b' +const GREEN = '#22c55e' +const RED = '#ef4444' +const BLUE = '#3b82f6' +const PURPLE = '#8b5cf6' +const TEAL = '#14b8a6' +const ROSE = '#f43f5e' +const CYAN = '#06b6d4' + +const CATEGORY_COLORS = [SCHAEFFLER_GREEN, INDIGO, AMBER, BLUE, PURPLE, TEAL, ROSE, CYAN] + +const CHART_TOOLTIP_STYLE = { + backgroundColor: 'var(--color-bg-surface)', + border: '1px solid var(--color-border)', + borderRadius: '8px', + color: 'var(--color-text)', +} + +type Preset = '4w' | '3m' | '6m' | '1y' | 'all' | 'custom' + +const PRESETS: { key: Preset; label: string }[] = [ + { key: '4w', label: '4 W' }, + { key: '3m', label: '3 M' }, + { key: '6m', label: '6 M' }, + { key: '1y', label: '1 Y' }, + { key: 'all', label: 'All' }, + { key: 'custom', label: 'Custom' }, +] + +function toISO(d: Date): string { + return d.toISOString().slice(0, 10) +} + +function presetRange(key: Preset): { from: string; to: string } | null { + const now = new Date() + const to = toISO(now) + switch (key) { + case '4w': { const d = new Date(now); d.setDate(d.getDate() - 28); return { from: toISO(d), to } } + case '3m': { const d = new Date(now); d.setMonth(d.getMonth() - 3); return { from: toISO(d), to } } + case '6m': { const d = new Date(now); d.setMonth(d.getMonth() - 6); return { from: toISO(d), to } } + case '1y': { const d = new Date(now); d.setFullYear(d.getFullYear() - 1); return { from: toISO(d), to } } + case 'all': return null // no params → backend defaults omitted, we send nothing + case 'custom': return null + } +} + +function presetSubtitle(key: Preset, customFrom: string, customTo: string): string { + switch (key) { + case '4w': return 'Last 4 weeks' + case '3m': return 'Last 3 months' + case '6m': return 'Last 6 months' + case '1y': return 'Last year' + case 'all': return 'All time' + case 'custom': { + if (customFrom && customTo) { + const f = new Date(customFrom + 'T00:00:00') + const t = new Date(customTo + 'T00:00:00') + const fmt = (d: Date) => d.toLocaleDateString('en-US', { month: 'short', day: 'numeric', year: 'numeric' }) + return `${fmt(f)} – ${fmt(t)}` + } + return 'Select a date range' + } + } +} + +function fmtSeconds(s: number | null | undefined): string { + if (s == null) return '—' + if (s >= 60) return `${(s / 60).toFixed(1)} min` + return `${s.toFixed(1)} s` +} + +export default function AdminDashboard() { + const [preset, setPreset] = useState('6m') + const [customFrom, setCustomFrom] = useState('') + const [customTo, setCustomTo] = useState('') + + const { dateFrom, dateTo } = useMemo(() => { + if (preset === 'custom') { + return { dateFrom: customFrom || undefined, dateTo: customTo || undefined } + } + if (preset === 'all') { + return { dateFrom: '2000-01-01', dateTo: toISO(new Date()) } + } + const range = presetRange(preset) + return range ? 
{ dateFrom: range.from, dateTo: range.to } : { dateFrom: undefined, dateTo: undefined } + }, [preset, customFrom, customTo]) + + const { data, isLoading, error } = useQuery({ + queryKey: ['dashboard-kpis', dateFrom, dateTo], + queryFn: () => getDashboardKPIs(dateFrom, dateTo), + staleTime: 60_000, + }) + + function selectPreset(key: Preset) { + setPreset(key) + if (key !== 'custom') { + setCustomFrom('') + setCustomTo('') + } + } + + const subtitle = presetSubtitle(preset, customFrom, customTo) + + if (isLoading) return
Loading analytics…
+ if (error) return
Failed to load analytics
+ if (!data) return null + + const { + summary, throughput, revenue, processing_times, item_status, render_times, + product_stats, output_type_usage, render_status, renderer_usage, + top_products, orders_by_user, category_revenue, render_backend_stats, + render_time_by_output_type, + } = data + + const pieData = [ + { name: 'Pending', value: item_status.pending, color: AMBER }, + { name: 'Approved', value: item_status.approved, color: GREEN }, + { name: 'Rejected', value: item_status.rejected, color: RED }, + ] + + const renderStatusPieData = [ + { name: 'Pending', value: render_status.pending, color: AMBER }, + { name: 'Processing', value: render_status.processing, color: BLUE }, + { name: 'Completed', value: render_status.completed, color: GREEN }, + { name: 'Failed', value: render_status.failed, color: RED }, + ] + + const rendererPieData = renderer_usage.map((r, i) => ({ + name: r.renderer || 'unknown', + value: r.count, + color: CATEGORY_COLORS[i % CATEGORY_COLORS.length], + })) + + return ( +
+
+

Analytics Dashboard

+

{subtitle} · refreshes every 60 s

+
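+ {/* "refreshes every 60 s" maps to staleTime: 60_000 on the KPI query below: data is refetched once it turns stale (remount / window focus) or when the selected range changes the query key, not via a polling interval. */}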
+ + {/* Timeframe selector bar */} +
+ {PRESETS.map(({ key, label }) => ( + + ))} + + {preset === 'custom' && ( +
+ setCustomFrom(e.target.value)} + className="border border-border-default rounded px-2 py-1 text-xs" + /> + + setCustomTo(e.target.value)} + className="border border-border-default rounded px-2 py-1 text-xs" + /> +
+ )} +
+ + {/* Row 1 — Summary cards */} +
+ + + + + + +
+ + {/* Row 2 — Throughput + Item status */} +
+
+

Order Throughput (weekly)

+ {throughput.length === 0 ? ( +

No data yet

+ ) : ( + + + + + + + + + + + + )} +
+ +
+

Item Status

+ {pieData.every((d) => d.value === 0) ? ( +

No data yet

+ ) : ( + + + `${name}: ${value}`} + labelLine={false} + > + {pieData.map((entry) => ( + + ))} + + + + + + )} +
+
+ + {/* Row 3 — Revenue + Processing times */} +
+
+

Revenue per Month (€)

+ {revenue.length === 0 ? ( +

No data yet

+ ) : ( + + + + + + v != null ? [`€ ${v.toFixed(2)}`, 'Revenue'] : ['—', 'Revenue']} /> + + + + )} +
+ +
+

Processing Times

+ + + + + + + +
+ +

Render Time Breakdown

+ + + + + +
+
+
+ + {/* Row 3b — Render Time by Output Type */} + {render_time_by_output_type && render_time_by_output_type.length > 0 && ( +
+

Render Time per Output Type

+
+ {/* Horizontal bar chart: Avg + P50 per output type */} + + + + v >= 60 ? `${(v / 60).toFixed(0)}m` : `${v.toFixed(0)}s`} + /> + + [ + v != null ? (v >= 60 ? `${(v / 60).toFixed(1)} min` : `${v.toFixed(0)} s`) : '—', + name, + ]} + /> + + + + + + + {/* Detail table */} +
+ + + + + + + + + + + + + {render_time_by_output_type.map((r) => ( + + + + + + + + + ))} + +
Output Type · Jobs · Ø · P50 · Min · Max
+ {r.output_type} + {r.job_count}{fmtSeconds(r.avg_render_s)}{fmtSeconds(r.p50_render_s)}{fmtSeconds(r.min_render_s)}{fmtSeconds(r.max_render_s)}
+
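+ {/* Table columns: Ø = average render time, P50 = median; values are formatted by fmtSeconds (plain seconds under a minute, minutes from one minute upwards). */}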
+
+
+ )} + + {/* Row 4 — Output Type Usage + Render Status */} +
+
+

Output Type Usage

+ {output_type_usage.length === 0 ? ( +

No data yet

+ ) : ( + + + + + + + + + + )} +
+ +
+

Render Status

+ {renderStatusPieData.every((d) => d.value === 0) ? ( +

No data yet

+ ) : ( + + + `${name}: ${value}`} + labelLine={false} + > + {renderStatusPieData.map((entry) => ( + + ))} + + + + + + )} +
+
+ + {/* Row 5 — Products by Category + Renderer Usage */} +
+
+

Products by Category

+ {product_stats.products_by_category.length === 0 ? ( +

No data yet

+ ) : ( + + + + + + + + {product_stats.products_by_category.map((_, i) => ( + + ))} + + + + )} +
+ +
+

Renderer Usage

+ {rendererPieData.length === 0 ? ( +

No data yet

+ ) : ( + + + `${name}: ${value}`} + labelLine={false} + > + {rendererPieData.map((entry) => ( + + ))} + + + + + + )} +
+
+ + {/* Row 5b — Render Backend Comparison */} + {render_backend_stats.length > 0 && ( +
+
+

Render Backend — Job Count

+ + + + + + + + + + + +
+ +
+

Render Backend — Avg Time

+ + + + + + v != null ? [`${v.toFixed(1)}s`, ''] : ['—', '']} /> + + + + + +
+
+ )} + + {/* Row 6 — Top 10 Products + Category Revenue */} +
+
+

Top 10 Products

+ {top_products.length === 0 ? ( +

No data yet

+ ) : ( + + + + + + + + + + + {top_products.map((p) => ( + + + + + + + ))} + +
PIM-ID · Product · Category · Orders
{p.pim_id}{p.product_name || '—'}{p.category}{p.order_count}
+ )} +
+ +
+

Revenue by Category (€)

+ {category_revenue.length === 0 ? ( +

No data yet

+ ) : ( + + + + + + v != null ? [`€ ${v.toFixed(2)}`, 'Revenue'] : ['—', 'Revenue']} /> + + {category_revenue.map((_, i) => ( + + ))} + + + + )} +
+
+ + {/* Row 7 — Orders by User */} +
+

Orders by User

+ {orders_by_user.length === 0 ? ( +

No data yet

+ ) : ( + + + + + + + + + + + + {orders_by_user.map((u) => ( + + + + + + + + ))} + +
Name · Email · Role · Orders · Revenue (€)
{u.full_name}{u.email} + + {u.role === 'project_manager' ? 'PM' : u.role} + + {u.order_count}€ {u.revenue.toFixed(2)}
+ )} +
+
+ ) +} + +function SummaryCard({ label, value }: { label: string; value: number | string }) { + return ( +
+

{value}

+

{label}

+
+ ) +} + +function MetricRow({ label, value }: { label: string; value: string }) { + return ( + + {label} + {value} + + ) +} diff --git a/frontend/src/components/dashboard/ClientDashboard.tsx b/frontend/src/components/dashboard/ClientDashboard.tsx new file mode 100644 index 0000000..dad4695 --- /dev/null +++ b/frontend/src/components/dashboard/ClientDashboard.tsx @@ -0,0 +1,105 @@ +import { useQuery } from '@tanstack/react-query' +import { Link } from 'react-router-dom' +import { Package, Upload, CheckCircle, Clock, AlertCircle } from 'lucide-react' +import { listOrders } from '../../api/orders' +import { useAuthStore } from '../../store/auth' + +export default function ClientDashboard() { + const user = useAuthStore((s) => s.user) + const { data: orders } = useQuery({ queryKey: ['orders'], queryFn: () => listOrders({ limit: 100 }) }) + + const stats = { + total: orders?.length ?? 0, + draft: orders?.filter((o) => o.status === 'draft').length ?? 0, + submitted: orders?.filter((o) => o.status === 'submitted').length ?? 0, + completed: orders?.filter((o) => o.status === 'completed').length ?? 0, + } + + return ( +
+
+

Welcome, {user?.full_name}

+

Schaeffler Media Creation Pipeline

+
+ +
+ + + + +
+ +
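+ {/* The counts above are derived client-side from listOrders({ limit: 100 }), so they only reflect the user's first 100 orders. */}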
+
+

Quick Actions

+
+ + + Upload Excel Order List + + + + View All Orders + +
+
+ +
+

Recent Orders

+ {orders && orders.length > 0 ? ( +
+ {orders.slice(0, 5).map((order) => ( + + {order.order_number} +
+ {order.estimated_price != null && ( + + € {Number(order.estimated_price).toFixed(2)} + + )} + +
+ + ))} +
+ ) : ( +

No orders yet. Upload an Excel file to get started.

+ )} +
+
+
+ ) +} + +function StatCard({ label, value, icon: Icon, color }: { label: string; value: number; icon: any; color: string }) { + const colors: Record = { + blue: 'text-status-info-text bg-status-info-bg', + yellow: 'text-yellow-600 bg-yellow-50', + orange: 'text-status-warning-text bg-status-warning-bg', + green: 'text-status-success-text bg-status-success-bg', + } + return ( +
+
+ +
+

{value}

+

{label}

+
+ ) +} + +function StatusBadge({ status }: { status: string }) { + const map: Record = { + draft: 'badge-gray', + submitted: 'badge-blue', + processing: 'badge-yellow', + completed: 'badge-green', + rejected: 'badge-red', + } + return {status} +} diff --git a/frontend/src/components/layout/Layout.tsx b/frontend/src/components/layout/Layout.tsx new file mode 100644 index 0000000..60595ff --- /dev/null +++ b/frontend/src/components/layout/Layout.tsx @@ -0,0 +1,153 @@ +import { Outlet, NavLink, useNavigate, Link } from 'react-router-dom' +import { LayoutDashboard, Package, Settings, LogOut, FlaskConical, Activity, Library, Plus, SlidersHorizontal } from 'lucide-react' +import { useAuthStore } from '../../store/auth' +import { clsx } from 'clsx' +import { useQuery } from '@tanstack/react-query' +import { getWorkerActivity } from '../../api/worker' +import { listOrders } from '../../api/orders' +import NotificationCenter from './NotificationCenter' + +const nav = [ + { to: '/', icon: LayoutDashboard, label: 'Dashboard', end: true }, + { to: '/orders', icon: Package, label: 'Orders' }, + { to: '/products', icon: Library, label: 'Products' }, + { to: '/materials', icon: FlaskConical, label: 'Materials' }, + { to: '/activity', icon: Activity, label: 'Activity' }, + { to: '/preferences', icon: SlidersHorizontal, label: 'Preferences' }, +] + +export default function Layout() { + const { user, logout } = useAuthStore() + const navigate = useNavigate() + + const { data: activity } = useQuery({ + queryKey: ['worker-activity'], + queryFn: getWorkerActivity, + refetchInterval: 8000, + staleTime: 4000, + }) + + const { data: draftOrders } = useQuery({ + queryKey: ['orders', 'draft-count'], + queryFn: () => listOrders({ status: 'draft' }), + staleTime: 10_000, + refetchInterval: 30_000, + }) + const draftCount = draftOrders?.length ?? 0 + + function handleLogout() { + logout() + navigate('/login') + } + + return ( +
+ {/* Sidebar */} + + + {/* Main content */} +
+ +
+
+ ) +} diff --git a/frontend/src/components/layout/NotificationCenter.tsx b/frontend/src/components/layout/NotificationCenter.tsx new file mode 100644 index 0000000..73b9d86 --- /dev/null +++ b/frontend/src/components/layout/NotificationCenter.tsx @@ -0,0 +1,235 @@ +import { useState, useRef, useEffect } from 'react' +import { createPortal } from 'react-dom' +import { useNavigate } from 'react-router-dom' +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query' +import { + Bell, Send, PlayCircle, CheckCircle, XCircle, Image, AlertTriangle, X, +} from 'lucide-react' +import { clsx } from 'clsx' +import { + getNotifications, getUnreadCount, markAsRead, markOneAsRead, + type Notification, +} from '../../api/notifications' + +const ACTION_CONFIG: Record | null) => string; color: string }> = { + 'order.submitted': { + icon: Send, + label: (d) => `Order ${d?.order_number ?? '?'} submitted`, + color: 'text-blue-500', + }, + 'order.processing': { + icon: PlayCircle, + label: (d) => `Order ${d?.order_number ?? '?'} is processing`, + color: 'text-yellow-500', + }, + 'order.completed': { + icon: CheckCircle, + label: (d) => `Order ${d?.order_number ?? '?'} completed`, + color: 'text-status-success-text', + }, + 'order.rejected': { + icon: XCircle, + label: (d) => `Order ${d?.order_number ?? '?'} was rejected`, + color: 'text-red-500', + }, + 'render.completed': { + icon: Image, + label: (d) => `Render done: ${d?.product_name ?? 'unknown'} — ${d?.output_type ?? ''}`, + color: 'text-status-success-text', + }, + 'render.failed': { + icon: AlertTriangle, + label: (d) => `Render failed: ${d?.product_name ?? 'unknown'} — ${d?.output_type ?? ''}`, + color: 'text-red-500', + }, + 'excel.import_warnings': { + icon: AlertTriangle, + label: (d) => `Excel '${d?.filename ?? '?'}' had ${d?.warning_count ?? '?'} warning(s)`, + color: 'text-amber-500', + }, + 'excel.import_error': { + icon: XCircle, + label: (d) => `Excel parse failed: ${d?.filename ?? '?'}`, + color: 'text-red-500', + }, + 'excel.finalize_error': { + icon: XCircle, + label: (d) => `Order creation failed: ${d?.filename ?? 
'?'}`, + color: 'text-red-500', + }, +} + +function relativeTime(ts: string): string { + const diff = Date.now() - new Date(ts).getTime() + const seconds = Math.floor(diff / 1000) + if (seconds < 60) return 'just now' + const minutes = Math.floor(seconds / 60) + if (minutes < 60) return `${minutes}m ago` + const hours = Math.floor(minutes / 60) + if (hours < 24) return `${hours}h ago` + const days = Math.floor(hours / 24) + return `${days}d ago` +} + +export default function NotificationCenter() { + const [open, setOpen] = useState(false) + const bellRef = useRef(null) + const dropdownRef = useRef(null) + const navigate = useNavigate() + const qc = useQueryClient() + + const { data: unreadCount = 0 } = useQuery({ + queryKey: ['notifications', 'unread-count'], + queryFn: getUnreadCount, + refetchInterval: 15_000, + staleTime: 5_000, + }) + + const { data } = useQuery({ + queryKey: ['notifications', 'list'], + queryFn: () => getNotifications({ limit: 20 }), + enabled: open, + staleTime: 5_000, + }) + + const markAllMutation = useMutation({ + mutationFn: () => markAsRead(), + onSuccess: () => { + qc.invalidateQueries({ queryKey: ['notifications'] }) + }, + }) + + const markOneMutation = useMutation({ + mutationFn: (id: string) => markOneAsRead(id), + onSuccess: () => { + qc.invalidateQueries({ queryKey: ['notifications'] }) + }, + }) + + // Click-outside to close + useEffect(() => { + if (!open) return + function handleClick(e: MouseEvent) { + if ( + dropdownRef.current && !dropdownRef.current.contains(e.target as Node) && + bellRef.current && !bellRef.current.contains(e.target as Node) + ) { + setOpen(false) + } + } + document.addEventListener('mousedown', handleClick) + return () => document.removeEventListener('mousedown', handleClick) + }, [open]) + + function handleNotificationClick(n: Notification) { + if (!n.read_at) markOneMutation.mutate(n.id) + if (n.entity_type === 'order' && n.entity_id) { + navigate(`/orders/${n.entity_id}`) + } + setOpen(false) + } + + // Position dropdown relative to bell button + const bellRect = bellRef.current?.getBoundingClientRect() + + return ( + <> + + + {open && bellRect && createPortal( +
+ {/* Header */} +
+ Notifications +
+ {unreadCount > 0 && ( + + )} + +
+
+ + {/* List */} +
+ {!data?.items.length && ( +
No notifications
+ )} + {data?.items.map((n) => { + const cfg = ACTION_CONFIG[n.action] ?? { + icon: Bell, + label: () => n.action, + color: 'text-content-secondary', + } + const Icon = cfg.icon + return ( + + ) + })} +
+ + {/* Footer */} +
+ +
+
, + document.body, + )} + + ) +} diff --git a/frontend/src/components/layout/ThemePreferences.tsx b/frontend/src/components/layout/ThemePreferences.tsx new file mode 100644 index 0000000..fbc4671 --- /dev/null +++ b/frontend/src/components/layout/ThemePreferences.tsx @@ -0,0 +1,63 @@ +import { Sun, Monitor, Moon } from 'lucide-react' +import { clsx } from 'clsx' +import { useThemeStore, ACCENT_PRESETS, type ThemeMode } from '../../store/theme' + +const MODES: { key: ThemeMode; icon: typeof Sun; label: string }[] = [ + { key: 'light', icon: Sun, label: 'Light' }, + { key: 'system', icon: Monitor, label: 'System' }, + { key: 'dark', icon: Moon, label: 'Dark' }, +] + +export default function ThemePreferences() { + const { mode, accent, setMode, setAccent } = useThemeStore() + + return ( +
+ {/* Mode row */} +
+ Theme +
+ {MODES.map(({ key, icon: Icon, label }) => ( + + ))} +
+
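+ {/* Mode and accent are persisted by the zustand theme store (localStorage key 'schaeffler-theme', read early in main.tsx to avoid a flash) and applied through the .dark class and data-accent attribute on the document root that index.css keys off. */}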
+ + {/* Accent row */} +
+ Accent +
+ {ACCENT_PRESETS.map(({ key, label, hex }) => ( +
+
+
+ ) +} diff --git a/frontend/src/components/orders/CadPartMaterials.tsx b/frontend/src/components/orders/CadPartMaterials.tsx new file mode 100644 index 0000000..7accac4 --- /dev/null +++ b/frontend/src/components/orders/CadPartMaterials.tsx @@ -0,0 +1,176 @@ +import { useState, useEffect } from 'react' +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query' +import { toast } from 'sonner' +import { Save, FlaskConical, AlertCircle } from 'lucide-react' +import { listMaterials, saveCadPartMaterials } from '../../api/materials' +import MaterialInput from '../shared/MaterialInput' +import MaterialWizard from '../MaterialWizard' + +interface CadPartRow { + part_name: string + material: string +} + +interface ExcelComponent { + part_name: string | null + material: string | null + component_type: string | null + column_index: number +} + +interface Props { + orderId: string + itemId: string + partNames: string[] // from cad_parsed_objects + savedMaterials: CadPartRow[] // from cad_part_materials + excelComponents?: ExcelComponent[] // from item.components (Excel data) +} + +function normName(s: string) { + return s.trim().toLowerCase() +} + +export default function CadPartMaterials({ orderId, itemId, partNames, savedMaterials, excelComponents = [] }: Props) { + const qc = useQueryClient() + const [wizardOpen, setWizardOpen] = useState(false) + const [wizardTargetIdx, setWizardTargetIdx] = useState(null) + + const initRows = (): CadPartRow[] => + partNames.map((name) => { + // 1. Use saved value if present + const saved = savedMaterials.find((s) => s.part_name === name) + if (saved) return { part_name: name, material: saved.material } + // 2. Fall back to Excel component data (case-insensitive match) + const excelMatch = excelComponents.find( + (c) => c.part_name && normName(c.part_name) === normName(name), + ) + return { part_name: name, material: excelMatch?.material ?? '' } + }) + + const [rows, setRows] = useState(initRows) + + // Re-sync when props change (e.g. after save or STEP file change) + useEffect(() => { + setRows(initRows()) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [partNames.join(','), savedMaterials.length, excelComponents.length]) + + const { data: library = [] } = useQuery({ + queryKey: ['materials'], + queryFn: listMaterials, + }) + + const saveMut = useMutation({ + mutationFn: () => saveCadPartMaterials(orderId, itemId, rows.filter((r) => r.material.trim())), + onSuccess: () => { + toast.success('Materials saved') + qc.invalidateQueries({ queryKey: ['order', orderId] }) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Save failed'), + }) + + const isDirty = rows.some((r) => { + const saved = savedMaterials.find((s) => s.part_name === r.part_name)?.material ?? '' + return r.material !== saved + }) + + const missingCount = rows.filter((r) => !r.material.trim()).length + + const setMaterial = (idx: number, value: string) => + setRows((prev) => prev.map((r, i) => (i === idx ? { ...r, material: value } : r))) + + return ( +
+
+ +

+ CAD Part Materials ({partNames.length}) +

+ {missingCount > 0 && ( + + + {missingCount} missing + + )} +
+ +
+ {/* Header */} +
+

Part Name

+

Material

+
+ + {/* Rows */} + {rows.map((row, idx) => { + const missing = !row.material.trim() + return ( +
+
+ {missing && } + + {row.part_name} + +
+
+ setMaterial(idx, v)} + library={library} + missing={missing} + onOpenWizard={() => { + setWizardTargetIdx(idx) + setWizardOpen(true) + }} + /> +
+
+ ) + })} +
+ + {(isDirty || missingCount > 0) && ( +
+ {isDirty && ( + + )} + {missingCount > 0 && !isDirty && ( +

+ + {missingCount} part{missingCount !== 1 ? 's' : ''} {missingCount !== 1 ? 'have' : 'has'} no material assigned

+ )} +
+ )} + + {/* Material Wizard (opened from MaterialInput) */} + { setWizardOpen(false); setWizardTargetIdx(null) }} + onCreated={(name) => { + if (wizardTargetIdx !== null) { + setMaterial(wizardTargetIdx, name) + } + setWizardTargetIdx(null) + }} + /> +
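+ {/* The wizard tracks which row opened it (wizardTargetIdx); onCreated writes the newly created material name back into exactly that row before clearing the target. */}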
+ ) +} diff --git a/frontend/src/components/shared/MaterialInput.tsx b/frontend/src/components/shared/MaterialInput.tsx new file mode 100644 index 0000000..bfb1958 --- /dev/null +++ b/frontend/src/components/shared/MaterialInput.tsx @@ -0,0 +1,146 @@ +import { useState, useRef, useEffect, useMemo } from 'react' +import { Wand2 } from 'lucide-react' +import type { Material } from '../../api/materials' + +const TYPE_GROUPS: Record = { + '01': { label: 'Metals', color: 'text-slate-500' }, + '02': { label: 'Coatings', color: 'text-blue-500' }, + '03': { label: 'Non-metals', color: 'text-amber-600' }, + '04': { label: 'Compounds', color: 'text-purple-500' }, + '05': { label: 'Misc', color: 'text-content-muted' }, +} + +function getTypeCode(mat: Material): string | null { + if (mat.schaeffler_code == null) return null + const s = String(mat.schaeffler_code).padStart(6, '0') + return s.slice(0, 2) +} + +/** Extract the human-readable short name after the last underscore: SCHAEFFLER_010101_Steel-Bare -> Steel-Bare */ +function shortName(name: string): string { + const match = name.match(/^SCHAEFFLER_\d{6}_(.+)$/) + return match ? match[1].replace(/-/g, ' ') : name +} + +export interface MaterialInputProps { + value: string + onChange: (v: string) => void + library: Material[] + missing: boolean + onOpenWizard: () => void +} + +export default function MaterialInput({ value, onChange, library, missing, onOpenWizard }: MaterialInputProps) { + const [open, setOpen] = useState(false) + const wrapRef = useRef(null) + + const trimmed = value.trim() + const suggestions = trimmed + ? library.filter((m) => m.name.toLowerCase().includes(trimmed.toLowerCase()) + || shortName(m.name).toLowerCase().includes(trimmed.toLowerCase()) + || (m.description ?? '').toLowerCase().includes(trimmed.toLowerCase())) + : library + + // Group suggestions by type code + const grouped = useMemo(() => { + const groups: Array<{ code: string | null; label: string; color: string; items: Material[] }> = [] + const buckets = new Map() + + for (const m of suggestions) { + const tc = getTypeCode(m) + if (!buckets.has(tc)) buckets.set(tc, []) + buckets.get(tc)!.push(m) + } + + // Sorted type codes first, then non-schaeffler + const sortedKeys = [...buckets.keys()].sort((a, b) => { + if (a === null) return 1 + if (b === null) return -1 + return a.localeCompare(b) + }) + + for (const key of sortedKeys) { + const info = key ? TYPE_GROUPS[key] : null + groups.push({ + code: key, + label: info?.label ?? 'Custom', + color: info?.color ?? 'text-content-muted', + items: buckets.get(key)!, + }) + } + return groups + }, [suggestions]) + + useEffect(() => { + const handler = (e: MouseEvent) => { + if (wrapRef.current && !wrapRef.current.contains(e.target as Node)) { + setOpen(false) + } + } + document.addEventListener('mousedown', handler) + return () => document.removeEventListener('mousedown', handler) + }, []) + + const select = (name: string) => { + onChange(name) + setOpen(false) + } + + return ( +
+ { onChange(e.target.value); setOpen(true) }} + onFocus={() => setOpen(true)} + placeholder={missing ? 'Required — assign a material' : 'Search materials...'} + className={`w-full px-2 py-1 text-sm border rounded focus:outline-none bg-surface ${ + missing + ? 'border-red-300 focus:border-red-500 placeholder-red-300' + : 'border-border-default focus:border-accent' + }`} + /> + + {open && (suggestions.length > 0 || true) && ( +
+ {grouped.map((group) => ( +
+ {/* Group header */} +
+ + {group.code ? `${group.code} ` : ''}{group.label} + +
+ {group.items.map((m) => ( + + ))} +
+ ))} + + {suggestions.length === 0 && ( +
No materials match "{trimmed}"
+ )} + + {/* Create new material via wizard */} + +
+ )} +
+ ) +} diff --git a/frontend/src/components/shared/Modal.tsx b/frontend/src/components/shared/Modal.tsx new file mode 100644 index 0000000..eedff23 --- /dev/null +++ b/frontend/src/components/shared/Modal.tsx @@ -0,0 +1,81 @@ +import { useEffect, useRef } from 'react' +import { X } from 'lucide-react' +import { cn } from '../../utils/format' + +interface ModalProps { + title: string + onClose: () => void + children: React.ReactNode + /** Extra classes applied to the inner panel */ + className?: string + /** Width preset – defaults to 'md' */ + size?: 'sm' | 'md' | 'lg' | 'xl' | 'full' +} + +const sizeMap: Record, string> = { + sm: 'max-w-sm', + md: 'max-w-lg', + lg: 'max-w-2xl', + xl: 'max-w-4xl', + full: 'max-w-full mx-4', +} + +export default function Modal({ title, onClose, children, className, size = 'md' }: ModalProps) { + const backdropRef = useRef(null) + + /* Close on Escape */ + useEffect(() => { + const handler = (e: KeyboardEvent) => { + if (e.key === 'Escape') onClose() + } + window.addEventListener('keydown', handler) + return () => window.removeEventListener('keydown', handler) + }, [onClose]) + + /* Prevent scroll on body while modal is open */ + useEffect(() => { + document.body.style.overflow = 'hidden' + return () => { document.body.style.overflow = '' } + }, []) + + function handleBackdropClick(e: React.MouseEvent) { + if (e.target === backdropRef.current) onClose() + } + + return ( +
+
+ {/* Header */} +
+ + +
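+ {/* Close behaviour: Escape is handled by the window keydown listener, body scrolling is locked while the modal is open, and a backdrop click only closes when e.target is the backdrop itself, so clicks inside the panel never dismiss it. */}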
+ + {/* Body */} +
{children}
+
+
+ ) +} diff --git a/frontend/src/components/upload/ExcelSpreadsheet.tsx b/frontend/src/components/upload/ExcelSpreadsheet.tsx new file mode 100644 index 0000000..92966e8 --- /dev/null +++ b/frontend/src/components/upload/ExcelSpreadsheet.tsx @@ -0,0 +1,168 @@ +import React from 'react' +import { ParsedRow, ParsedComponent, ParsedExcelResponse } from '../../api/uploads' + +interface Props { + parsed: ParsedExcelResponse + rows: ParsedRow[] + onChange: (rows: ParsedRow[]) => void +} + +const STANDARD_FIELDS: { key: keyof ParsedRow; label: string; width: number; mono?: boolean }[] = [ + { key: 'ebene1', label: 'Ebene 1', width: 140 }, + { key: 'ebene2', label: 'Ebene 2', width: 120 }, + { key: 'baureihe', label: 'Baureihe', width: 160 }, + { key: 'pim_id', label: 'PIM-ID', width: 110 }, + { key: 'produkt_baureihe', label: 'Produkt-Baureihe', width: 150 }, + { key: 'gewaehltes_produkt', label: 'Gewähltes Produkt', width: 150 }, + { key: 'name_cad_modell', label: 'CAD-Modell', width: 190, mono: true }, + { key: 'gewuenschte_bildnummer', label: 'Bildnummer', width: 170, mono: true }, + { key: 'lagertyp', label: 'Lagertyp', width: 100 }, +] + +export default function ExcelSpreadsheet({ parsed, rows, onChange }: Props) { + const maxComps = Math.max(0, ...rows.map((r) => r.components.length)) + + function updateField(ri: number, field: keyof ParsedRow, value: string | boolean | null) { + const next = rows.map((r, i) => (i === ri ? { ...r, [field]: value } : r)) + onChange(next) + } + + function updateComp(ri: number, ci: number, field: keyof ParsedComponent, value: string) { + const next = rows.map((r, i) => { + if (i !== ri) return r + const comps = r.components.map((c, j) => + j === ci ? { ...c, [field]: value || null } : c, + ) + // If the row doesn't have this component slot yet, pad it + while (comps.length <= ci) { + comps.push({ part_name: null, material: null, component_type: null, column_index: 11 + comps.length * 2 }) + } + comps[ci] = { ...comps[ci], [field]: value || null } + return { ...r, components: comps } + }) + onChange(next) + } + + const cell = + 'w-full px-2 py-1 text-xs bg-transparent border-0 focus:outline-none focus:ring-1 focus:ring-inset focus:ring-accent rounded focus:bg-surface' + const th = + 'px-2 py-2 text-left text-xs font-semibold text-content-secondary whitespace-nowrap bg-surface-alt border-b border-r border-border-default sticky top-0 z-10' + const td = 'border-b border-r border-border-light p-0' + + return ( +
+
+
+

+ {parsed.template_name || parsed.category_key} — {rows.length} rows +

+

Click any cell to edit before creating the order

+
+ {maxComps} component columns +
+ +
+ + + {/* Group header row */} + + + + + {Array.from({ length: maxComps }, (_, i) => ( + + ))} + + + {/* Field name row */} + + + {STANDARD_FIELDS.map((f) => ( + + ))} + + {Array.from({ length: maxComps }, (_, i) => ( + + + + + ))} + + + + + {rows.map((row, ri) => ( + + {/* Row number */} + + + {/* Standard text fields */} + {STANDARD_FIELDS.map((f) => ( + + ))} + + {/* Rendering checkbox */} + + + {/* Component pairs */} + {Array.from({ length: maxComps }, (_, ci) => { + const comp = row.components[ci] + return ( + + + + + ) + })} + + ))} + +
# + Standard Fields + Rendering + Component {i + 1} +
# + {f.label} + + Rendering + + Part Name + + Material +
+ {row.row_index} + + updateField(ri, f.key, e.target.value || null)} + className={`${cell} ${f.mono ? 'font-mono' : ''}`} + /> + + updateField(ri, 'medias_rendering', e.target.checked)} + className="w-3.5 h-3.5" + /> + + updateComp(ri, ci, 'part_name', e.target.value)} + className={`${cell} font-mono`} + placeholder="—" + /> + + updateComp(ri, ci, 'material', e.target.value)} + className={cell} + placeholder="—" + /> +
+
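+ {/* Component columns travel in part-name / material pairs: padded slots get column_index = 11 + slot * 2, i.e. the pairs start at spreadsheet column 11 and advance two columns per component. */}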
+
+ ) +} diff --git a/frontend/src/components/upload/StepDropzone.tsx b/frontend/src/components/upload/StepDropzone.tsx new file mode 100644 index 0000000..b58e225 --- /dev/null +++ b/frontend/src/components/upload/StepDropzone.tsx @@ -0,0 +1,272 @@ +/** + * StepDropzone — Phase 3 + * + * Accepts one or more .stp/.step files via react-dropzone, uploads each to + * POST /api/uploads/step, then calls POST /api/cad/match-to-order to link + * matched files to order items by filename. + */ +import { useState, useCallback } from 'react' +import { useDropzone } from 'react-dropzone' +import { Upload, CheckCircle, XCircle, Loader2, Link2 } from 'lucide-react' +import { toast } from 'sonner' +import api from '../../api/client' + +// --------------------------------------------------------------------------- +// Types +// --------------------------------------------------------------------------- + +interface StepUploadResponse { + cad_file_id: string + original_name: string + file_hash: string + status: string +} + +interface MatchedItem { + item_id: string + cad_file_id: string + item_name: string + cad_name: string +} + +interface MatchToOrderResponse { + matched: MatchedItem[] + unmatched_cad: string[] + unmatched_items: string[] +} + +type FileStatus = 'idle' | 'uploading' | 'done' | 'error' + +interface FileEntry { + file: File + status: FileStatus + errorMsg?: string + cadFileId?: string +} + +interface StepDropzoneProps { + orderId: string + /** Called after matching completes so the parent can refresh the order */ + onMatchComplete?: (result: MatchToOrderResponse) => void +} + +// --------------------------------------------------------------------------- +// Component +// --------------------------------------------------------------------------- + +export default function StepDropzone({ orderId, onMatchComplete }: StepDropzoneProps) { + const [entries, setEntries] = useState([]) + const [matching, setMatching] = useState(false) + const [matchResult, setMatchResult] = useState(null) + + // Update a single entry by index + const updateEntry = useCallback( + (idx: number, patch: Partial) => + setEntries((prev) => prev.map((e, i) => (i === idx ? { ...e, ...patch } : e))), + [], + ) + + const onDrop = useCallback( + async (accepted: File[]) => { + if (accepted.length === 0) return + + // Append new file entries + const startIdx = entries.length + const newEntries: FileEntry[] = accepted.map((f) => ({ file: f, status: 'uploading' })) + setEntries((prev) => [...prev, ...newEntries]) + setMatchResult(null) + + // Upload each file sequentially to avoid overwhelming the server + const uploadedIds: string[] = [] + for (let i = 0; i < accepted.length; i++) { + const globalIdx = startIdx + i + const file = accepted[i] + const form = new FormData() + form.append('file', file) + try { + const res = await api.post('/uploads/step', form, { + headers: { 'Content-Type': 'multipart/form-data' }, + }) + const { cad_file_id } = res.data + uploadedIds.push(cad_file_id) + updateEntry(globalIdx, { status: 'done', cadFileId: cad_file_id }) + } catch (err: any) { + const msg: string = + err?.response?.data?.detail ?? err?.message ?? 
'Upload failed' + updateEntry(globalIdx, { status: 'error', errorMsg: msg }) + toast.error(`${file.name}: ${msg}`) + } + } + + // Collect all successful cad_file_ids from this session (including previous uploads) + const allSuccessfulIds: string[] = [ + ...entries + .filter((e) => e.status === 'done' && e.cadFileId) + .map((e) => e.cadFileId as string), + ...uploadedIds, + ] + + if (allSuccessfulIds.length === 0) return + + // Match to order + setMatching(true) + try { + const res = await api.post('/cad/match-to-order', { + order_id: orderId, + cad_file_ids: allSuccessfulIds, + }) + setMatchResult(res.data) + const { matched, unmatched_cad } = res.data + if (matched.length > 0) { + toast.success(`Matched ${matched.length} file(s) to order items`) + } + if (unmatched_cad.length > 0) { + toast.warning(`${unmatched_cad.length} file(s) could not be matched to any item`) + } + onMatchComplete?.(res.data) + } catch (err: any) { + const msg: string = + err?.response?.data?.detail ?? err?.message ?? 'Matching failed' + toast.error(`CAD matching error: ${msg}`) + } finally { + setMatching(false) + } + }, + // eslint-disable-next-line react-hooks/exhaustive-deps + [entries, orderId, onMatchComplete, updateEntry], + ) + + const { getRootProps, getInputProps, isDragActive } = useDropzone({ + onDrop, + accept: { 'application/octet-stream': ['.stp', '.step'] }, + multiple: true, + }) + + const hasEntries = entries.length > 0 + + return ( +
+ {/* Drop target */} +
+ + + {isDragActive ? ( +

Drop STEP files here

+ ) : ( + <> +

+ Drag and drop .stp / .step files here +

+

or click to browse

+ + )} +
+ + {/* Per-file status list */} + {hasEntries && ( +
    + {entries.map((entry, idx) => ( +
  • + +
    +

    + {entry.file.name} +

    + {entry.status === 'error' && ( +

    {entry.errorMsg}

    + )} + {entry.status === 'done' && ( +

    + ID: {entry.cadFileId} +

    + )} +
    + +
+ ))} +
+ )} + + {/* Matching spinner */} + {matching && ( +
+ + Matching files to order items... +
+ )} + + {/* Match result summary */} + {matchResult && !matching && ( +
+
+ + Matching Results +
+ + {matchResult.matched.length > 0 && ( +
+

+ Matched ({matchResult.matched.length}) +

+
    + {matchResult.matched.map((m) => ( +
+ + {m.cad_name} + + {m.item_name} +
+ ))} +
+
+ )} + + {matchResult.unmatched_cad.length > 0 && ( +
+

+ Unmatched CAD files ({matchResult.unmatched_cad.length}) +

+
    + {matchResult.unmatched_cad.map((id) => ( +
+ {entries.find((e) => e.cadFileId === id)?.file.name ?? id} +
+ ))} +
+
+ )} + + {matchResult.matched.length === 0 && matchResult.unmatched_cad.length === 0 && ( +

No files were processed.

+ )} +
+ )} +
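+ {/* Matching is re-run on every drop with all successful cad_file_ids from this session (earlier uploads included), so adding files re-matches the complete set against the order, not just the newest batch. */}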
+ ) +} + +// --------------------------------------------------------------------------- +// Sub-components +// --------------------------------------------------------------------------- + +function FileStatusIcon({ status }: { status: FileStatus }) { + if (status === 'uploading') return + if (status === 'done') return + if (status === 'error') return + return
+} + +function StatusLabel({ status }: { status: FileStatus }) { + if (status === 'uploading') return Uploading... + if (status === 'done') return Uploaded + if (status === 'error') return Failed + return null +} diff --git a/frontend/src/components/upload/StepPreUpload.tsx b/frontend/src/components/upload/StepPreUpload.tsx new file mode 100644 index 0000000..3b523d2 --- /dev/null +++ b/frontend/src/components/upload/StepPreUpload.tsx @@ -0,0 +1,223 @@ +/** + * StepPreUpload — STEP file uploader used during order creation (before an + * order ID exists). Files are uploaded immediately to /api/uploads/step so + * we have cad_file_ids ready. Client-side filename matching gives the user + * live feedback on which Excel rows already have a STEP file. + */ +import { useState, useCallback } from 'react' +import { useDropzone } from 'react-dropzone' +import { Upload, CheckCircle, XCircle, Loader2, FileBox } from 'lucide-react' +import { toast } from 'sonner' +import api from '../../api/client' + +// --------------------------------------------------------------------------- +// Types +// --------------------------------------------------------------------------- + +interface StepUploadResponse { + cad_file_id: string + original_name: string + file_hash: string + status: string +} + +type FileStatus = 'uploading' | 'done' | 'error' + +interface FileEntry { + file: File + status: FileStatus + errorMsg?: string + cadFileId?: string +} + +export interface StepUploadState { + ids: string[] // cad_file_ids of successfully uploaded files + names: string[] // original_names of successfully uploaded files +} + +interface Props { + /** name_cad_modell values from parsed rows — used for match preview */ + itemNames: string[] + /** Called whenever the set of successfully uploaded files changes */ + onUpdate: (state: StepUploadState) => void +} + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +function normStem(name: string): string { + return name.trim().toLowerCase().replace(/\.(step|stp)$/i, '') +} + +// --------------------------------------------------------------------------- +// Component +// --------------------------------------------------------------------------- + +export default function StepPreUpload({ itemNames, onUpdate }: Props) { + const [entries, setEntries] = useState([]) + + const getSuccessState = (updated: FileEntry[]): StepUploadState => ({ + ids: updated.filter((e) => e.status === 'done' && e.cadFileId).map((e) => e.cadFileId!), + names: updated.filter((e) => e.status === 'done').map((e) => e.file.name), + }) + + const onDrop = useCallback( + async (accepted: File[]) => { + if (accepted.length === 0) return + + const startIdx = entries.length + const newEntries: FileEntry[] = accepted.map((f) => ({ file: f, status: 'uploading' })) + const merged = [...entries, ...newEntries] + setEntries(merged) + + let working = [...merged] + + for (let i = 0; i < accepted.length; i++) { + const idx = startIdx + i + const file = accepted[i] + const form = new FormData() + form.append('file', file) + try { + const res = await api.post('/uploads/step', form, { + headers: { 'Content-Type': 'multipart/form-data' }, + }) + working = working.map((e, j) => + j === idx ? { ...e, status: 'done', cadFileId: res.data.cad_file_id } : e, + ) + } catch (err: any) { + const msg: string = err?.response?.data?.detail ?? err?.message ?? 
'Upload failed' + working = working.map((e, j) => + j === idx ? { ...e, status: 'error', errorMsg: msg } : e, + ) + toast.error(`${file.name}: ${msg}`) + } + setEntries([...working]) + } + + onUpdate(getSuccessState(working)) + }, + // eslint-disable-next-line react-hooks/exhaustive-deps + [entries, onUpdate], + ) + + const { getRootProps, getInputProps, isDragActive } = useDropzone({ + onDrop, + accept: { 'application/octet-stream': ['.stp', '.step'] }, + multiple: true, + }) + + // Client-side match preview + const uploadedStems = new Set( + entries.filter((e) => e.status === 'done').map((e) => normStem(e.file.name)), + ) + const matched = itemNames.filter((n) => uploadedStems.has(normStem(n))) + const missing = itemNames.filter((n) => !uploadedStems.has(normStem(n))) + + return ( +
+ {/* Match status bar */} + {itemNames.length > 0 && ( +
+
+ + {matched.length} matched +
+ {missing.length > 0 && ( +
+ + {missing.length} still need a STEP file +
+ )} + {missing.length === 0 && ( + All items covered ✓ + )} +
+ )} + + {/* Drop zone */} +
+ + + {isDragActive ? ( +

Drop STEP files here

+ ) : ( + <> +

Drag & drop .stp / .step files

+

or click to browse — multiple files at once

+ + )} +
+ + {/* Uploaded file list */} + {entries.length > 0 && ( +
    + {entries.map((entry, idx) => { + const stem = normStem(entry.file.name) + const isMatched = itemNames.some((n) => normStem(n) === stem) + return ( +
+ {entry.status === 'uploading' && ( + + )} + {entry.status === 'done' && ( + + )} + {entry.status === 'error' && ( + + )} +
    +

    {entry.file.name}

    + {entry.status === 'error' && ( +

    {entry.errorMsg}

    + )} + {entry.status === 'done' && !isMatched && ( +

    No matching row in Excel

    + )} +
    + {entry.status === 'uploading' && ( + Uploading… + )} + {entry.status === 'done' && ( + + {isMatched ? 'Matched' : 'Unmatched'} + + )} +
+ ) + })} +
+ )} + + {/* Missing items list */} + {missing.length > 0 && entries.some((e) => e.status === 'done') && ( +
+

+ Still missing ({missing.length}): +

+
+ {missing.slice(0, 12).map((name) => ( + + {name} + + ))} + {missing.length > 12 && ( + +{missing.length - 12} more + )} +
+
+ )} +
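+ {/* The matched / missing preview is client-side only: normStem lowercases the filename and strips the .stp/.step suffix, e.g. normStem('BR123_Model.STEP') → 'br123_model', and compares it against the Excel name_cad_modell values; it is feedback only and does not itself link files to order items. */}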
+ ) +} diff --git a/frontend/src/index.css b/frontend/src/index.css new file mode 100644 index 0000000..1e92395 --- /dev/null +++ b/frontend/src/index.css @@ -0,0 +1,260 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; + +/* ============================================================ + ACCENT PRESETS + Applied via data-accent="" on + ============================================================ */ + +/* Default / Schaeffler Green */ +:root, +[data-accent="green"] { + --color-accent: #00893d; + --color-accent-hover: #006e31; + --color-accent-light: #e6f4ec; + --color-accent-text: #ffffff; +} + +[data-accent="blue"] { + --color-accent: #2563eb; + --color-accent-hover: #1d4ed8; + --color-accent-light: #dbeafe; + --color-accent-text: #ffffff; +} + +[data-accent="purple"] { + --color-accent: #7c3aed; + --color-accent-hover: #6d28d9; + --color-accent-light: #ede9fe; + --color-accent-text: #ffffff; +} + +[data-accent="amber"] { + --color-accent: #d97706; + --color-accent-hover: #b45309; + --color-accent-light: #fef3c7; + --color-accent-text: #ffffff; +} + +[data-accent="teal"] { + --color-accent: #0d9488; + --color-accent-hover: #0f766e; + --color-accent-light: #ccfbf1; + --color-accent-text: #ffffff; +} + +/* ============================================================ + LIGHT THEME (default) + ============================================================ */ +:root { + /* Surfaces */ + --color-bg-app: #f9fafb; + --color-bg-surface: #ffffff; + --color-bg-surface-hover: #f9fafb; + --color-bg-muted: #f3f4f6; + + /* Text */ + --color-text: #111827; + --color-text-secondary: #4b5563; + --color-text-muted: #9ca3af; + --color-text-inverse: #ffffff; + + /* Borders */ + --color-border: #e5e7eb; + --color-border-light: #f3f4f6; + + /* Status — Success */ + --color-status-success-bg: #dcfce7; + --color-status-success-text: #166534; + + /* Status — Warning */ + --color-status-warning-bg: #fef9c3; + --color-status-warning-text: #854d0e; + + /* Status — Error */ + --color-status-error-bg: #fee2e2; + --color-status-error-text: #991b1b; + + /* Status — Info */ + --color-status-info-bg: #dbeafe; + --color-status-info-text: #1e40af; +} + +/* ============================================================ + DARK THEME + Applied via .dark class on + ============================================================ */ +:root.dark { + /* Surfaces */ + --color-bg-app: #0f172a; + --color-bg-surface: #1e293b; + --color-bg-surface-hover: #334155; + --color-bg-muted: #1e293b; + + /* Text */ + --color-text: #f1f5f9; + --color-text-secondary: #94a3b8; + --color-text-muted: #64748b; + --color-text-inverse: #0f172a; + + /* Borders */ + --color-border: #334155; + --color-border-light: #1e293b; + + /* Status — Success */ + --color-status-success-bg: rgba(34, 197, 94, 0.15); + --color-status-success-text: #4ade80; + + /* Status — Warning */ + --color-status-warning-bg: rgba(234, 179, 8, 0.15); + --color-status-warning-text: #facc15; + + /* Status — Error */ + --color-status-error-bg: rgba(239, 68, 68, 0.15); + --color-status-error-text: #f87171; + + /* Status — Info */ + --color-status-info-bg: rgba(59, 130, 246, 0.15); + --color-status-info-text: #60a5fa; +} + +/* Dark accent-light overrides (rgba instead of solid pastel) */ +:root.dark, +:root.dark [data-accent="green"] { + --color-accent-light: rgba(0, 137, 61, 0.15); +} +:root.dark [data-accent="blue"] { + --color-accent-light: rgba(37, 99, 235, 0.15); +} +:root.dark [data-accent="purple"] { + --color-accent-light: rgba(124, 58, 237, 0.15); +} +:root.dark 
[data-accent="amber"] { + --color-accent-light: rgba(217, 119, 6, 0.15); +} +:root.dark [data-accent="teal"] { + --color-accent-light: rgba(13, 148, 136, 0.15); +} + +/* ============================================================ + BASE LAYER + ============================================================ */ +@layer base { + body { + @apply antialiased; + background-color: var(--color-bg-app); + color: var(--color-text); + transition: background-color 200ms ease, color 200ms ease; + } + + /* Native color scheme for form controls */ + :root { + color-scheme: light; + } + :root.dark { + color-scheme: dark; + } + + /* Checkbox / radio accent color */ + input[type="checkbox"], + input[type="radio"] { + accent-color: var(--color-accent); + } +} + +/* ============================================================ + COMPONENT CLASSES + ============================================================ */ +@layer components { + /* Buttons */ + .btn { + @apply inline-flex items-center gap-2 px-4 py-2 rounded-md font-medium text-sm transition-colors focus:outline-none focus:ring-2 focus:ring-offset-2 disabled:opacity-50 disabled:cursor-not-allowed; + } + .btn-primary { + @apply btn; + background-color: var(--color-accent); + color: var(--color-accent-text); + --tw-ring-color: var(--color-accent); + } + .btn-primary:hover:not(:disabled) { + background-color: var(--color-accent-hover); + } + .btn-secondary { + @apply btn border; + background-color: var(--color-bg-surface); + color: var(--color-text-secondary); + border-color: var(--color-border); + --tw-ring-color: var(--color-accent); + } + .btn-secondary:hover:not(:disabled) { + background-color: var(--color-bg-surface-hover); + } + .btn-danger { + @apply btn bg-red-600 text-white hover:bg-red-700 focus:ring-red-500; + } + + /* Cards */ + .card { + @apply rounded-lg border shadow-sm; + background-color: var(--color-bg-surface); + border-color: var(--color-border); + } + + /* Badges */ + .badge { + @apply inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium; + } + .badge-green { + @apply badge; + background-color: var(--color-status-success-bg); + color: var(--color-status-success-text); + } + .badge-yellow { + @apply badge; + background-color: var(--color-status-warning-bg); + color: var(--color-status-warning-text); + } + .badge-red { + @apply badge; + background-color: var(--color-status-error-bg); + color: var(--color-status-error-text); + } + .badge-blue { + @apply badge; + background-color: var(--color-status-info-bg); + color: var(--color-status-info-text); + } + .badge-gray { + @apply badge; + background-color: var(--color-bg-muted); + color: var(--color-text-secondary); + } + + /* Input base — replaces repeated inline input patterns */ + .input-base { + @apply w-full px-3 py-2 rounded-md text-sm border focus:outline-none focus:ring-2 transition-colors; + background-color: var(--color-bg-surface); + color: var(--color-text); + border-color: var(--color-border); + --tw-ring-color: var(--color-accent); + } + .input-base::placeholder { + color: var(--color-text-muted); + } + .input-base:focus { + border-color: var(--color-accent); + } + + /* Small input variant (used in admin tables) */ + .input-sm { + @apply px-2 py-1 rounded text-sm border focus:outline-none focus:ring-1 transition-colors; + background-color: var(--color-bg-surface); + color: var(--color-text); + border-color: var(--color-border); + --tw-ring-color: var(--color-accent); + } + .input-sm:focus { + border-color: var(--color-accent); + } +} diff --git 
a/frontend/src/main.tsx b/frontend/src/main.tsx new file mode 100644 index 0000000..7baac5d --- /dev/null +++ b/frontend/src/main.tsx @@ -0,0 +1,70 @@ +import React, { useEffect } from 'react' +import ReactDOM from 'react-dom/client' +import { QueryClient, QueryClientProvider } from '@tanstack/react-query' +import { Toaster } from 'sonner' +import App from './App' +import './index.css' +import { useThemeStore, applyTheme, resolveTheme, type ThemeMode, type AccentKey } from './store/theme' + +/* --------------------------------------------------------------- + Flash prevention: apply theme BEFORE React hydrates. + Reads directly from localStorage to avoid the Zustand wrapper. + --------------------------------------------------------------- */ +;(function () { + try { + const raw = localStorage.getItem('schaeffler-theme') + if (raw) { + const { state } = JSON.parse(raw) as { state: { mode: ThemeMode; accent: AccentKey } } + applyTheme(state.mode ?? 'light', state.accent ?? 'green') + } + } catch { + // ignore — default theme already applied by CSS + } +})() + +const queryClient = new QueryClient({ + defaultOptions: { + queries: { + staleTime: 30_000, + retry: 1, + }, + }, +}) + +/** Subscribes to store changes and system preference changes */ +function ThemeProvider({ children }: { children: React.ReactNode }) { + const mode = useThemeStore((s) => s.mode) + const accent = useThemeStore((s) => s.accent) + const resolvedTheme = resolveTheme(mode) + + // Apply whenever mode or accent changes + useEffect(() => { + applyTheme(mode, accent) + }, [mode, accent]) + + // Listen to system preference changes when mode='system' + useEffect(() => { + if (mode !== 'system') return + const mq = window.matchMedia('(prefers-color-scheme: dark)') + const handler = () => applyTheme('system', accent) + mq.addEventListener('change', handler) + return () => mq.removeEventListener('change', handler) + }, [mode, accent]) + + return ( + <> + {children} + + + ) +} + +ReactDOM.createRoot(document.getElementById('root')!).render( + + + + + + + , +) diff --git a/frontend/src/pages/Admin.tsx b/frontend/src/pages/Admin.tsx new file mode 100644 index 0000000..edb2de8 --- /dev/null +++ b/frontend/src/pages/Admin.tsx @@ -0,0 +1,1168 @@ +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query' +import { useState } from 'react' +import { toast } from 'sonner' +import { UserPlus, Trash2, Pencil, ChevronDown, ChevronUp, ChevronRight, Settings, RefreshCw, CheckCircle2, XCircle, Clock, DollarSign, Layers, Server, ExternalLink, AlertTriangle, Upload, FileBox, Plus, X } from 'lucide-react' +import { Link } from 'react-router-dom' +import api from '../api/client' +import TemplateEditor from '../components/admin/TemplateEditor' +import PricingTierTable from '../components/admin/PricingTierTable' +import OutputTypeTable from '../components/admin/OutputTypeTable' +import RenderTemplateTable from '../components/admin/RenderTemplateTable' +import { useAuthStore } from '../store/auth' +import { getMaterialLibraryInfo, uploadMaterialLibrary, deleteMaterialLibrary } from '../api/renderTemplates' +import type { MaterialLibraryInfo } from '../api/renderTemplates' +import { listPricingTiers } from '../api/pricing' +import { listOutputTypes } from '../api/outputTypes' + +export default function AdminPage() { + const qc = useQueryClient() + const user = useAuthStore((s) => s.user) + const isAdmin = user?.role === 'admin' + const [showNewUser, setShowNewUser] = useState(false) + const [newUser, setNewUser] = useState({ email: 
'', password: '', full_name: '', role: 'client' }) + const [editingTemplateId, setEditingTemplateId] = useState(null) + const [showFlamencoAdvanced, setShowFlamencoAdvanced] = useState(false) + const [flamencoUrlDraft, setFlamencoUrlDraft] = useState('') + const [workerCountDraft, setWorkerCountDraft] = useState(1) + const [priorityNewEntry, setPriorityNewEntry] = useState('') + + const { data: users } = useQuery({ + queryKey: ['admin-users'], + queryFn: async () => { + const res = await api.get('/admin/users') + return res.data as any[] + }, + }) + + const { data: templates } = useQuery({ + queryKey: ['admin-templates'], + queryFn: async () => { + const res = await api.get('/templates?include_inactive=true') + return res.data as any[] + }, + }) + + const createUserMut = useMutation({ + mutationFn: (data: typeof newUser) => api.post('/admin/users', data), + onSuccess: () => { + toast.success('User created') + qc.invalidateQueries({ queryKey: ['admin-users'] }) + setShowNewUser(false) + setNewUser({ email: '', password: '', full_name: '', role: 'client' }) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed'), + }) + + const deleteUserMut = useMutation({ + mutationFn: (id: string) => api.delete(`/admin/users/${id}`), + onSuccess: () => { + toast.success('User deleted') + qc.invalidateQueries({ queryKey: ['admin-users'] }) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed'), + }) + + type Settings = { + thumbnail_renderer: string + blender_engine: string + blender_cycles_samples: number + blender_eevee_samples: number + threejs_render_size: number + thumbnail_format: string + stl_quality: string + blender_smooth_angle: number + cycles_device: string + blender_max_concurrent_renders: number + render_stall_timeout_minutes: number + product_thumbnail_priority: string // JSON array + } + + const { data: settings } = useQuery({ + queryKey: ['admin-settings'], + queryFn: async () => { + const res = await api.get('/admin/settings') + return res.data as Settings + }, + }) + + const { data: outputTypes } = useQuery({ + queryKey: ['output-types-all'], + queryFn: () => listOutputTypes(false), + enabled: isAdmin, + }) + + // Local draft for Blender options so the user can change multiple fields before saving + const [blenderDraft, setBlenderDraft] = useState>({}) + const blender = { ...settings, ...blenderDraft } as Settings + + const { data: rendererStatus, refetch: refetchStatus } = useQuery({ + queryKey: ['renderer-status'], + queryFn: async () => { + const res = await api.get('/admin/settings/renderer-status') + return res.data as Record + }, + refetchInterval: 30000, + }) + + const updateSettingsMut = useMutation({ + mutationFn: (data: Partial) => api.put('/admin/settings', data), + onSuccess: () => { + toast.success('Settings saved') + qc.invalidateQueries({ queryKey: ['admin-settings'] }) + setBlenderDraft({}) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed'), + }) + + const processUnprocessedMut = useMutation({ + mutationFn: () => api.post('/admin/settings/process-unprocessed'), + onSuccess: (res) => { + toast.success(res.data.message || 'Processing queued') + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed'), + }) + + const regenerateMut = useMutation({ + mutationFn: () => api.post('/admin/settings/regenerate-thumbnails'), + onSuccess: (res) => { + toast.success(res.data.message || 'Thumbnails re-queued') + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed'), + }) + + 
const generateMissingStlsMut = useMutation({ + mutationFn: () => api.post('/admin/settings/generate-missing-stls'), + onSuccess: (res) => { + toast.success(res.data.message || 'STL generation queued') + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed'), + }) + + type FlamencoStatus = { + manager: { available: boolean; version: string | null; name: string | null; error?: string } + workers: any[] + manager_url: string + } + + const { data: flamencoStatus, refetch: refetchFlamenco } = useQuery({ + queryKey: ['flamenco-status'], + queryFn: async () => { + const res = await api.get('/admin/settings/flamenco-status') + return res.data as FlamencoStatus + }, + refetchInterval: 30000, + enabled: isAdmin, + }) + + const { data: actualWorkers, refetch: refetchActualWorkers } = useQuery({ + queryKey: ['flamenco-worker-actual'], + queryFn: () => api.get('/admin/settings/flamenco-worker-actual').then(r => r.data as { running: number; available: boolean }), + refetchInterval: 10000, + enabled: isAdmin, + }) + + const setWorkerCountMut = useMutation({ + mutationFn: (count: number) => api.post('/admin/settings/flamenco-worker-count', { count }), + onSuccess: (res) => { + const d = res.data + if (d.current >= 0) { + toast.success(`Workers scaled: ${d.previous} → ${d.current}`) + } else { + toast.warning(d.message || 'Setting saved — manual scaling may be needed') + } + qc.invalidateQueries({ queryKey: ['admin-settings'] }) + refetchActualWorkers() + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed'), + }) + + return ( +
+

Admin

+ + {/* ------------------------------------------------------------------ */} + {/* Pricing Summary */} + {/* ------------------------------------------------------------------ */} + + + {/* ------------------------------------------------------------------ */} + {/* Pricing Tiers */} + {/* ------------------------------------------------------------------ */} +
+
+ +
+

Pricing Tiers

+

+ Configure price per rendering item by category and quality level. +

+
+
+ +
+ + {/* ------------------------------------------------------------------ */} + {/* Output Types */} + {/* ------------------------------------------------------------------ */} +
+
+ +
+

Output Types

+

+ Define what kinds of outputs orders can request (thumbnails, views, formats). +

+
+
+ +
+ + {/* ------------------------------------------------------------------ */} + {/* Render Templates (admin/PM) */} + {/* ------------------------------------------------------------------ */} +
+
+ +
+

Render Templates

+

+ Upload .blend studio setups matched by Category + Output Type. Geometry is imported into the template at render time. +

+
+
+
+ +
+ + {/* Material Library sub-section */} +
+ +
+
+ + {/* ------------------------------------------------------------------ */} + {/* Render Farm (admin only) */} + {/* ------------------------------------------------------------------ */} + {isAdmin &&
+
+
+ +
+

Render Farm

+

+ Route render jobs to Celery (stills) or Flamenco (animations). +

+
+
+ +
+ +
+ {/* Global backend selector */} +
+ + {(['celery', 'flamenco', 'auto'] as const).map((b) => ( + + ))} + {settings?.render_backend === 'auto' && ( +

Stills via Celery, animations via Flamenco

+ )} +
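+              {/* Sketch (assumption, mirroring the hint above): with render_backend === 'auto'
+                  the backend presumably picks the queue per job, roughly
+                    const backend = setting === 'auto'
+                      ? (job.isAnimation ? 'flamenco' : 'celery')
+                      : setting
+                  where `job.isAnimation` is an illustrative name, not a confirmed field. */}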
+ + {/* Flamenco status panel */} +
+
+

Flamenco Status

+ {flamencoStatus?.manager?.available && ( + + Open Flamenco Web UI + + )} +
+ +
+ {/* Manager health */} +
+ {flamencoStatus?.manager?.available + ? + : } +
+

Manager

+

+ {flamencoStatus?.manager?.available + ? `v${flamencoStatus.manager.version || '?'}` + : flamencoStatus?.manager?.error || 'Offline'} +

+
+
+ + {/* Workers */} +
+

+ Workers: {flamencoStatus?.workers?.length ?? 0} +

+ {flamencoStatus?.workers && flamencoStatus.workers.length > 0 && ( +
+ {flamencoStatus.workers.slice(0, 5).map((w: any, i: number) => ( +
+ + {w.name || `worker-${i + 1}`} + {w.status || '—'} +
+ ))} +
+ )} +
+
+ + {/* Worker count control */} +
+ Worker count: + setWorkerCountDraft(Number(e.target.value))} + title="Number of Flamenco worker containers to run (1–16). Each worker handles one render job at a time." + className="w-20 px-3 py-1.5 border border-border-default rounded-md text-sm focus:outline-none focus:border-accent" + /> + + {actualWorkers?.available ? ( + + {actualWorkers.running} running + + ) : ( + Docker socket unavailable + )} +
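+              {/* The worker-count endpoint's response is assumed (from setWorkerCountMut above)
+                  to look like { previous: number; current: number; message?: string }; a negative
+                  `current` means the containers could not be scaled automatically and only the
+                  setting was saved. */}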
+ + {/* Advanced: Manager URL */} +
+ + {showFlamencoAdvanced && ( +
+ + setFlamencoUrlDraft(e.target.value)} + className="flex-1 px-3 py-1.5 border border-border-default rounded-md text-sm focus:outline-none focus:border-accent" + placeholder="http://flamenco-manager:8080" + /> + +
+ )} +
+
+
+
} + + {/* ------------------------------------------------------------------ */} + {/* Users (admin only) */} + {/* ------------------------------------------------------------------ */} + {isAdmin &&
+
+

Users

+ +
+ + {showNewUser && ( +
+
+ setNewUser({ ...newUser, full_name: e.target.value })} + className="px-3 py-2 border border-border-default rounded-md text-sm" + /> + setNewUser({ ...newUser, email: e.target.value })} + className="px-3 py-2 border border-border-default rounded-md text-sm" + /> + setNewUser({ ...newUser, password: e.target.value })} + className="px-3 py-2 border border-border-default rounded-md text-sm" + /> + +
+ +
+ )} + +
+ {users?.map((user) => ( +
+
+

{user.full_name}

+

{user.email}

+
+ + {user.role} + + + {user.is_active ? 'active' : 'inactive'} + + +
+ ))} +
+
} + + {/* ------------------------------------------------------------------ */} + {/* Renderer Settings (admin only) */} + {/* ------------------------------------------------------------------ */} + {isAdmin &&
+
+
+ +

Thumbnail Renderer

+
+ +
+ +
+ {/* Renderer picker */} +
+ + {(['pillow', 'blender', 'threejs'] as const).map((r) => ( + + ))} +
+ + {/* Blender options — shown only when blender is the active renderer */} + {settings?.thumbnail_renderer === 'blender' && ( +
+

Blender 5 Options

+ + {/* Engine */} +
+ Render engine + {(['cycles', 'eevee'] as const).map((eng) => ( + + ))} +
+ + {/* Cycles device — only relevant for Cycles */} + {blender.blender_engine === 'cycles' && ( +
+ Cycles device + {(['auto', 'gpu', 'cpu'] as const).map((dev) => ( + + ))} +

+ {blender.cycles_device === 'auto' + ? 'Tries OptiX / CUDA / HIP, falls back to CPU if no GPU is available.' + : blender.cycles_device === 'gpu' + ? 'Always use GPU. Logs a warning if no compatible GPU is found.' + : 'Always use CPU — useful for debugging or when GPU is busy.'} +

+
+ )} + + {/* Sample counts */} +
+
+ + setBlenderDraft((d) => ({ ...d, blender_cycles_samples: Number(e.target.value) }))} + title="Number of Cycles path-tracing samples (1–4096). Higher values = better quality + longer render time. Default: 256" + className="w-full px-3 py-1.5 border border-border-default rounded-md text-sm focus:outline-none focus:border-blue-400" + /> +

Higher = better quality, slower

+
+
+ + setBlenderDraft((d) => ({ ...d, blender_eevee_samples: Number(e.target.value) }))} + title="EEVEE anti-aliasing sample count (1–1024). Higher values = smoother edges + longer render time. Default: 64" + className="w-full px-3 py-1.5 border border-border-default rounded-md text-sm focus:outline-none focus:border-blue-400" + /> +

Higher = better AA, slower

+
+
+ + {/* STL quality */} +
+ STL quality + {(['low', 'high'] as const).map((q) => ( + + ))} +

+ {blender.stl_quality === 'high' + ? 'Fine mesh (tol=0.01) — slower STEP→STL, sharper edges.' + : 'Coarse mesh (tol=0.3) — faster, good for previews.'} +

+
+ + {/* Smooth by angle */} +
+ Smooth angle + setBlenderDraft((d) => ({ ...d, blender_smooth_angle: Number(e.target.value) }))} + title="Auto-smooth angle in degrees (0–180°). Faces with dihedral angles below this threshold are shaded smooth; sharper edges stay hard. 30° works well for most mechanical parts." + className="w-24 px-3 py-1.5 border border-border-default rounded-md text-sm focus:outline-none focus:border-blue-400" + /> + ° +

+ {(blender.blender_smooth_angle ?? 30) === 0 + ? '0° = flat shading on all faces.' + : `Faces with edges sharper than ${blender.blender_smooth_angle ?? 30}° stay hard; others smooth. 30° works well for most mechanical parts.`} +

+
+ + {/* Max concurrent renders */} +
+ Max concurrent + setBlenderDraft((d) => ({ ...d, blender_max_concurrent_renders: Number(e.target.value) }))} + title="Maximum parallel Blender render jobs (1–16). Each job uses ~400 MB RAM. Applied live without restart. Default: 3" + className="w-24 px-3 py-1.5 border border-border-default rounded-md text-sm focus:outline-none focus:border-blue-400" + /> +

+ Max parallel Blender render jobs (1–16). Higher values use more RAM (~400 MB each). Applied live without restart. +

+
+ +
+ Stall timeout + setBlenderDraft((d) => ({ ...d, render_stall_timeout_minutes: Number(e.target.value) }))} + title="Minutes before a stuck render job is automatically restarted (10–10080). The watchdog checks every 5 minutes. Default: 120" + className="w-24 px-3 py-1.5 border border-border-default rounded-md text-sm focus:outline-none focus:border-blue-400" + /> +

+ Minutes before a stuck render job is auto-restarted (10–10080). Checked every 5 min by the watchdog. +

+
+ + {/* Save blender options */} + {Object.keys(blenderDraft).length > 0 && ( + + )} +
+ )} + + {/* Three.js options — shown only when threejs is the active renderer */} + {settings?.thumbnail_renderer === 'threejs' && ( +
+

Three.js (WebGL) Options

+
+ Render size + {([512, 1024, 2048] as const).map((size) => ( + + ))} +
+

+ Higher resolution = larger PNG thumbnails. 1024px recommended for most screens. +

+
+ )} + + {/* Output format — always visible, applies to all renderers */} +
+ + {(['jpg', 'png'] as const).map((fmt) => ( + + ))} +

+ {settings?.thumbnail_format === 'jpg' + ? 'JPEG — ~3–5× smaller files, minimal quality loss at 92% quality.' + : 'PNG — lossless, larger files.'} +

+
+ + {/* Product thumbnail priority chain */} + {(() => { + let priorityList: string[] = ['latest_render', 'cad_thumbnail'] + try { + const parsed = JSON.parse(settings?.product_thumbnail_priority ?? '["latest_render","cad_thumbnail"]') + if (Array.isArray(parsed)) priorityList = parsed + } catch {} + + const savePriority = (list: string[]) => { + updateSettingsMut.mutate({ product_thumbnail_priority: JSON.stringify(list) } as any) + } + + const moveUp = (i: number) => { + if (i === 0) return + const next = [...priorityList] + ;[next[i - 1], next[i]] = [next[i], next[i - 1]] + savePriority(next) + } + const moveDown = (i: number) => { + if (i === priorityList.length - 1) return + const next = [...priorityList] + ;[next[i], next[i + 1]] = [next[i + 1], next[i]] + savePriority(next) + } + const remove = (i: number) => savePriority(priorityList.filter((_, j) => j !== i)) + const addEntry = () => { + if (!priorityNewEntry || priorityList.includes(priorityNewEntry)) return + savePriority([...priorityList, priorityNewEntry]) + setPriorityNewEntry('') + } + + const entryLabel = (e: string) => + e === 'cad_thumbnail' ? 'CAD Thumbnail' + : e === 'latest_render' ? 'Latest Render (any type)' + : outputTypes?.find((ot) => ot.id === e)?.name ?? `Output type …${e.slice(-8)}` + + const entryColor = (e: string) => + e === 'cad_thumbnail' ? 'bg-surface-alt border-border-default text-content-muted' + : e === 'latest_render' ? 'bg-status-info-bg border-border-default text-status-info-text' + : 'bg-status-success-bg border-border-default text-status-success-text' + + // Options not yet in the list + const addableOptions = [ + ...(['latest_render', 'cad_thumbnail'] as string[]).filter((v) => !priorityList.includes(v)), + ...(outputTypes ?? []).filter((ot) => !priorityList.includes(ot.id)).map((ot) => ot.id), + ] + + return ( +
+ +
+ {priorityList.map((entry, i) => ( +
+ {i + 1} +
+ {entryLabel(entry)} + {entry !== 'cad_thumbnail' && entry !== 'latest_render' && ( + newest completed render + )} +
+ + + +
+ ))} + + {addableOptions.length > 0 && ( +
+ + +
+ )} + +

+ Sources are tried top to bottom. For specific output types, the newest completed render of that type is used. "CAD Thumbnail" always matches and stops the search. +

+
+
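+            {/* Resolution sketch (assumption, matching the explanation above): the backend walks
+                the list top to bottom and returns the first hit, roughly
+                  for (const entry of priorityList) {
+                    if (entry === 'cad_thumbnail') return cadThumbnail()   // always matches
+                    const r = newestCompletedRender(entry === 'latest_render' ? undefined : entry)
+                    if (r) return r
+                  }
+                cadThumbnail / newestCompletedRender are illustrative names only. */}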
+ ) + })()} + + {/* Service health */} +
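+          {/* Entry shape assumed for /admin/settings/renderer-status, inferred from the fields
+              read below: Record<string, { available: boolean; url: string | null; note?: string }> */}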
+ {rendererStatus && Object.entries(rendererStatus).map(([name, info]) => ( +
+ {info.available + ? + : info.url === null + ? + : + } +
+

{name}

+

{info.note || (info.available ? 'Online' : 'Offline')}

+
+
+ ))} + {!rendererStatus && ( +
+ Checking service status… +
+ )} +
+ + {/* Process unprocessed / Regenerate thumbnails */} +
+
+ +

+ Queues all pending and failed STEP files for initial processing. +

+
+
+ +

+ Re-processes all existing STEP files with the currently selected renderer. +

+
+
+ +

+ Generates low + high STL files for any completed STEP file that is missing them. +

+
+
+
+
} + + {/* ------------------------------------------------------------------ */} + {/* Templates */} + {/* ------------------------------------------------------------------ */} +
+
+

Templates

+

+ Click Edit to configure standard fields and component schema for each template. +

+
+
+ {templates?.map((t) => { + const isEditing = editingTemplateId === t.id + return ( +
+ {/* Row */} +
+ +
+

{t.name}

+

{t.category_key}

+
+ + {t.is_active ? 'active' : 'inactive'} + + +
+ + {/* Inline editor panel */} + {isEditing && ( +
+ setEditingTemplateId(null)} + /> +
+ )} +
+ ) + })} +
+
+ + {/* ------------------------------------------------------------------ */} + {/* Material Library link */} + {/* ------------------------------------------------------------------ */} +
+
+

Material Library

+

+ Manage shared materials for CAD part assignments. +

+
+ + Open Material Library → + +
+
+ ) +} + + +function MaterialLibraryPanel() { + const qc = useQueryClient() + + const { data: info } = useQuery({ + queryKey: ['material-library-info'], + queryFn: getMaterialLibraryInfo, + }) + + const uploadMut = useMutation({ + mutationFn: (file: File) => uploadMaterialLibrary(file), + onSuccess: () => { + toast.success('Material library uploaded') + qc.invalidateQueries({ queryKey: ['material-library-info'] }) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Upload failed'), + }) + + const deleteMut = useMutation({ + mutationFn: deleteMaterialLibrary, + onSuccess: () => { + toast.success('Material library removed') + qc.invalidateQueries({ queryKey: ['material-library-info'] }) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Delete failed'), + }) + + function handleFileChange(e: React.ChangeEvent) { + const file = e.target.files?.[0] + if (file) uploadMut.mutate(file) + e.target.value = '' + } + + return ( +
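+      {/* handleFileChange above clears e.target.value after queueing the upload so that choosing
+          the same .blend file again still fires onChange; uploadMaterialLibrary is assumed to
+          send the file as multipart/form-data. */}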
+

Material Library (.blend)

+

+ Materials in this file can be assigned to product parts when "Material Replace" is enabled on a template. +

+ + {info?.exists ? ( +
+ +
+

{info.filename}

+

+ {info.size_bytes ? `${(info.size_bytes / 1024 / 1024).toFixed(1)} MB` : ''} +

+
+ + +
+ ) : ( + + )} +
+ ) +} + + +function PricingSummaryCard() { + const { data: tiers } = useQuery({ + queryKey: ['pricing-tiers'], + queryFn: listPricingTiers, + }) + const { data: outputTypes } = useQuery({ + queryKey: ['output-types-admin'], + queryFn: () => listOutputTypes(true), + }) + + const defaultTier = tiers?.find((t) => t.category_key === 'default' && t.is_active) + const activeTiers = tiers?.filter((t) => t.is_active).length ?? 0 + const totalOTs = outputTypes?.length ?? 0 + const otsWithTier = outputTypes?.filter((ot) => ot.pricing_tier_id != null).length ?? 0 + + return ( +
+
+ +

Pricing Overview

+
+
+
+

+ {defaultTier ? `${Number(defaultTier.price_per_item).toFixed(2)}` : '—'} +

+

Global default price

+ {!defaultTier && ( +

+ Not configured +

+ )} +
+
+

{activeTiers}

+

Active pricing tiers

+
+
+

{otsWithTier} / {totalOTs}

+

Output types with explicit tier

+
+
+

{totalOTs - otsWithTier}

+

Using category default

+
+
+
+ ) +} diff --git a/frontend/src/pages/CadPreview.tsx b/frontend/src/pages/CadPreview.tsx new file mode 100644 index 0000000..fcd7bf3 --- /dev/null +++ b/frontend/src/pages/CadPreview.tsx @@ -0,0 +1,36 @@ +import { useParams, useNavigate } from 'react-router-dom' +import { ArrowLeft } from 'lucide-react' +import ThreeDViewer from '../components/cad/ThreeDViewer' + +/** + * Route: /cad/:id + * + * Renders the full-screen 3D viewer for a specific CAD file. + * When the viewer is closed the user is navigated back. + */ +export default function CadPreviewPage() { + const { id } = useParams<{ id: string }>() + const navigate = useNavigate() + + if (!id) { + return ( +
+

No CAD file ID provided.

+ +
+    )
+  }
+
+  return (
+      navigate(-1)}
+    />
+  )
+}
diff --git a/frontend/src/pages/Dashboard.tsx b/frontend/src/pages/Dashboard.tsx
new file mode 100644
index 0000000..aa8cb7f
--- /dev/null
+++ b/frontend/src/pages/Dashboard.tsx
@@ -0,0 +1,9 @@
+import { useAuthStore } from '../store/auth'
+import AdminDashboard from '../components/dashboard/AdminDashboard'
+import ClientDashboard from '../components/dashboard/ClientDashboard'
+
+export default function DashboardPage() {
+  const user = useAuthStore((s) => s.user)
+  const isPrivileged = user?.role === 'admin' || user?.role === 'project_manager'
+  return isPrivileged ? <AdminDashboard /> : <ClientDashboard />
+}
diff --git a/frontend/src/pages/Login.tsx b/frontend/src/pages/Login.tsx
new file mode 100644
index 0000000..cf45d17
--- /dev/null
+++ b/frontend/src/pages/Login.tsx
@@ -0,0 +1,68 @@
+import { useState } from 'react'
+import { useNavigate } from 'react-router-dom'
+import { toast } from 'sonner'
+import api from '../api/client'
+import { useAuthStore } from '../store/auth'
+
+export default function LoginPage() {
+  const navigate = useNavigate()
+  const setAuth = useAuthStore((s) => s.setAuth)
+  const [email, setEmail] = useState('')
+  const [password, setPassword] = useState('')
+  const [loading, setLoading] = useState(false)
+
+  async function handleSubmit(e: React.FormEvent) {
+    e.preventDefault()
+    setLoading(true)
+    try {
+      const res = await api.post('/auth/login', { email, password })
+      setAuth(res.data.access_token, res.data.user)
+      navigate('/')
+    } catch (err: any) {
+      toast.error(err.response?.data?.detail || 'Login failed')
+    } finally {
+      setLoading(false)
+    }
+  }
+
+  return (
+
+
+
+
+ S +
+

Schaeffler Automat

+

Media Creation Pipeline

+
+ +
+
+ + setEmail(e.target.value)} + required + className="input-base w-full" + placeholder="admin@schaeffler.com" + /> +
+
+ + setPassword(e.target.value)} + required + className="input-base w-full" + /> +
+ +
+
+
+ ) +} diff --git a/frontend/src/pages/Materials.tsx b/frontend/src/pages/Materials.tsx new file mode 100644 index 0000000..9014626 --- /dev/null +++ b/frontend/src/pages/Materials.tsx @@ -0,0 +1,541 @@ +import { useState, useMemo } from 'react' +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query' +import { toast } from 'sonner' +import { + Plus, Trash2, Pencil, Check, X, FlaskConical, Search, Wand2, Download, + Wrench, Paintbrush, Shapes, HelpCircle, ChevronDown, ChevronRight, Tag, +} from 'lucide-react' +import { + listMaterials, createMaterial, updateMaterial, deleteMaterial, + seedSchaefflerMaterials, addAlias, deleteAlias, seedAliases, +} from '../api/materials' +import type { Material } from '../api/materials' +import MaterialWizard from '../components/MaterialWizard' + +const TYPE_GROUPS = [ + { code: '01', label: 'Metals', icon: Wrench, bg: 'bg-slate-50', border: 'border-slate-200', text: 'text-slate-700' }, + { code: '02', label: 'Coatings', icon: Paintbrush, bg: 'bg-status-info-bg', border: 'border-border-default', text: 'text-status-info-text' }, + { code: '03', label: 'Non-metals', icon: Shapes, bg: 'bg-status-warning-bg', border: 'border-border-default', text: 'text-status-warning-text' }, + { code: '04', label: 'Compounds', icon: FlaskConical, bg: 'bg-purple-50', border: 'border-purple-200', text: 'text-purple-700' }, + { code: '05', label: 'Misc', icon: HelpCircle, bg: 'bg-surface-alt', border: 'border-border-default', text: 'text-content-secondary' }, +] as const + +function getTypeCode(mat: Material): string | null { + if (mat.schaeffler_code == null) return null + return String(mat.schaeffler_code).padStart(6, '0').slice(0, 2) +} + +interface MaterialGroup { + code: string | null + label: string + icon: typeof Wrench + bg: string + border: string + text: string + items: Material[] +} + +export default function MaterialsPage() { + const qc = useQueryClient() + const [search, setSearch] = useState('') + const [showAdd, setShowAdd] = useState(false) + const [showWizard, setShowWizard] = useState(false) + const [newName, setNewName] = useState('') + const [newDesc, setNewDesc] = useState('') + const [editingId, setEditingId] = useState(null) + const [editName, setEditName] = useState('') + const [editDesc, setEditDesc] = useState('') + const [collapsed, setCollapsed] = useState>(new Set()) + const [expandedAliases, setExpandedAliases] = useState>(new Set()) + const [aliasInput, setAliasInput] = useState>({}) + + const { data: materials = [], isLoading } = useQuery({ + queryKey: ['materials'], + queryFn: listMaterials, + }) + + const createMut = useMutation({ + mutationFn: () => createMaterial({ name: newName.trim(), description: newDesc.trim() || undefined }), + onSuccess: () => { + toast.success('Material added') + qc.invalidateQueries({ queryKey: ['materials'] }) + setShowAdd(false) + setNewName('') + setNewDesc('') + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to add material'), + }) + + const updateMut = useMutation({ + mutationFn: (id: string) => updateMaterial(id, { name: editName.trim(), description: editDesc.trim() || undefined }), + onSuccess: () => { + toast.success('Material updated') + qc.invalidateQueries({ queryKey: ['materials'] }) + setEditingId(null) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to update'), + }) + + const deleteMut = useMutation({ + mutationFn: deleteMaterial, + onSuccess: () => { + toast.success('Material deleted') + qc.invalidateQueries({ queryKey: 
['materials'] }) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to delete'), + }) + + const seedMut = useMutation({ + mutationFn: seedSchaefflerMaterials, + onSuccess: (data) => { + if (data.inserted > 0) { + toast.success(`Imported ${data.inserted} of ${data.total} Schaeffler standard materials`) + } else { + toast.info('All Schaeffler standard materials already exist') + } + qc.invalidateQueries({ queryKey: ['materials'] }) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to import'), + }) + + const seedAliasMut = useMutation({ + mutationFn: seedAliases, + onSuccess: (data) => { + if (data.inserted > 0) { + toast.success(`Seeded ${data.inserted} aliases (${data.total} total checked)`) + } else { + toast.info('All aliases already exist') + } + qc.invalidateQueries({ queryKey: ['materials'] }) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to seed aliases'), + }) + + const addAliasMut = useMutation({ + mutationFn: ({ materialId, alias }: { materialId: string; alias: string }) => addAlias(materialId, alias), + onSuccess: (_data, vars) => { + toast.success('Alias added') + qc.invalidateQueries({ queryKey: ['materials'] }) + setAliasInput((prev) => ({ ...prev, [vars.materialId]: '' })) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to add alias'), + }) + + const deleteAliasMut = useMutation({ + mutationFn: deleteAlias, + onSuccess: () => { + toast.success('Alias removed') + qc.invalidateQueries({ queryKey: ['materials'] }) + }, + onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to remove alias'), + }) + + const startEdit = (mat: Material) => { + setEditingId(mat.id) + setEditName(mat.name) + setEditDesc(mat.description ?? '') + } + + const toggleAliases = (id: string) => { + setExpandedAliases((prev) => { + const next = new Set(prev) + if (next.has(id)) next.delete(id) + else next.add(id) + return next + }) + } + + const handleAddAlias = (materialId: string) => { + const val = (aliasInput[materialId] || '').trim() + if (val) addAliasMut.mutate({ materialId, alias: val }) + } + + // Search filters include aliases + const filtered = search.trim() + ? 
materials.filter((m) => { + const q = search.toLowerCase() + return ( + m.name.toLowerCase().includes(q) || + m.description?.toLowerCase().includes(q) || + m.aliases.some((a) => a.toLowerCase().includes(q)) + ) + }) + : materials + + // Group filtered materials by type code + const groups = useMemo((): MaterialGroup[] => { + const buckets = new Map() + for (const m of filtered) { + const tc = getTypeCode(m) + if (!buckets.has(tc)) buckets.set(tc, []) + buckets.get(tc)!.push(m) + } + + const result: MaterialGroup[] = [] + // Known type groups first + for (const tg of TYPE_GROUPS) { + const items = buckets.get(tg.code) + if (items && items.length > 0) { + result.push({ code: tg.code, label: tg.label, icon: tg.icon, bg: tg.bg, border: tg.border, text: tg.text, items }) + buckets.delete(tg.code) + } + } + // Custom / non-schaeffler materials + const custom = buckets.get(null) + if (custom && custom.length > 0) { + result.push({ code: null, label: 'Custom', icon: Plus, bg: 'bg-surface-alt', border: 'border-border-default', text: 'text-content-secondary', items: custom }) + } + return result + }, [filtered]) + + const toggleGroup = (code: string | null) => { + setCollapsed((prev) => { + const next = new Set(prev) + if (next.has(code)) next.delete(code) + else next.add(code) + return next + }) + } + + const totalAliases = materials.reduce((sum, m) => sum + m.aliases.length, 0) + + return ( +
+ {/* Header */} +
+ +
+

Material Library

+

+ Shared materials used when assigning CAD part materials to order items. + {totalAliases > 0 && ({totalAliases} aliases configured)} +

+
+ + + + +
+ + {/* Add form */} + {showAdd && ( +
+
+ + setNewName(e.target.value)} + onKeyDown={(e) => e.key === 'Enter' && newName.trim() && createMut.mutate()} + className="w-full px-3 py-1.5 border border-border-default rounded-md text-sm focus:outline-none focus:border-accent" + /> +
+
+ + setNewDesc(e.target.value)} + onKeyDown={(e) => e.key === 'Enter' && newName.trim() && createMut.mutate()} + className="w-full px-3 py-1.5 border border-border-default rounded-md text-sm focus:outline-none focus:border-accent" + /> +
+
+ + +
+
+ )} + + {/* Search */} +
+ + setSearch(e.target.value)} + className="w-full pl-9 pr-4 py-2 border border-border-default rounded-lg text-sm focus:outline-none focus:border-accent bg-surface" + /> +
+ + {/* Grouped table */} + {isLoading ? ( +
Loading...
+ ) : filtered.length === 0 ? ( +
+ {search ? 'No materials match your search.' : 'No materials yet. Add the first one above.'} +
+ ) : ( +
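+          {/* Grouping recap: getTypeCode pads schaeffler_code to six digits and keys the group on
+              the first two, e.g. 12345 -> "012345" -> "01" -> Metals; materials without a code
+              fall into the trailing "Custom" group. */}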
+ {groups.map((group) => { + const Icon = group.icon + const isCollapsed = collapsed.has(group.code) + return ( +
+ {/* Group header */} + + + {!isCollapsed && ( + <> + {/* Column header */} +
+

Name

+

Description

+

Source

+

Aliases

+

Actions

+
+ + {/* Rows */} +
+ {group.items.map((mat) => { + const aliasesExpanded = expandedAliases.has(mat.id) + return ( +
+
+ {editingId === mat.id ? ( +
+ setEditName(e.target.value)} + placeholder="Name" + className="flex-1 min-w-[140px] px-2 py-1 border border-border-default rounded text-sm focus:outline-none focus:border-accent" + /> + setEditDesc(e.target.value)} + placeholder="Description" + className="flex-1 min-w-[200px] px-2 py-1 border border-border-default rounded text-sm focus:outline-none focus:border-accent" + /> + + +
+ ) : ( + <> +
+

{mat.name}

+ {mat.schaeffler_code != null && ( +

Nr: {mat.schaeffler_code}

+ )} +
+
+

{mat.description || '—'}

+
+
+ +
+
+ +
+
+ + +
+ + )} +
+ + {/* Expandable alias section */} + {aliasesExpanded && editingId !== mat.id && ( +
+
+ {mat.aliases.length === 0 && ( + No aliases configured + )} + {mat.aliases.map((alias) => ( + + ))} +
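+                          {/* AliasPill (defined at the bottom of this file) only receives the alias
+                              string, so before deleting it fetches listAliases(materialId) and
+                              resolves the matching alias id for the delete mutation. */}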
+
+ setAliasInput((prev) => ({ ...prev, [mat.id]: e.target.value }))} + onKeyDown={(e) => { + if (e.key === 'Enter') handleAddAlias(mat.id) + }} + className="flex-1 max-w-xs px-2 py-1 border border-border-default rounded text-xs focus:outline-none focus:border-accent" + /> + +
+
+ )} +
+ ) + })} +
+ + )} +
+ ) + })} + + {/* Footer count */} +
+

+ {filtered.length} of {materials.length} material{materials.length !== 1 ? 's' : ''} + {totalAliases > 0 && ` · ${totalAliases} aliases`} +

+
+
+ )} + + {/* Wizard modal */} + setShowWizard(false)} /> +
+ ) +} + +function AliasPill({ + alias, + materialId, + onDelete, + materials, +}: { + alias: string + materialId: string + onDelete: { mutate: (id: string) => void; isPending: boolean } + materials: Material[] +}) { + // We need the alias ID for deletion - find it from the material's aliases list + // Since we only have alias strings from MaterialOut, we need to query the ID + // We'll use a lazy approach: delete by fetching aliases for this material + const handleDelete = async () => { + try { + const { listAliases: fetchAliases } = await import('../api/materials') + const aliases = await fetchAliases(materialId) + const found = aliases.find((a) => a.alias === alias) + if (found) { + onDelete.mutate(found.id) + } + } catch { + // Fallback: ignore + } + } + + return ( + + {alias} + + + ) +} + +function SourceBadge({ source }: { source: string }) { + if (source === 'schaeffler_standard') { + return ( + + Standard + + ) + } + if (source === 'cad_import') { + return ( + + CAD import + + ) + } + return ( + + Manual + + ) +} diff --git a/frontend/src/pages/NewOrder.tsx b/frontend/src/pages/NewOrder.tsx new file mode 100644 index 0000000..be19bcc --- /dev/null +++ b/frontend/src/pages/NewOrder.tsx @@ -0,0 +1,43 @@ +import { Link } from 'react-router-dom' +import { ArrowLeft, FileSpreadsheet, Package } from 'lucide-react' + +export default function NewOrderPage() { + return ( +
+
+ Back +

New Order

+
+ +
+ {/* Excel Upload */} + +
+ +
+

Upload Excel

+

+ Import order items from an Excel template file with product data and components. +

+ + + {/* Product Library */} + +
+ +
+

Product Library

+

+ Select products from the library and configure output types for rendering. +

+ +
+
+ ) +} diff --git a/frontend/src/pages/NewProductOrder.tsx b/frontend/src/pages/NewProductOrder.tsx new file mode 100644 index 0000000..a4df66e --- /dev/null +++ b/frontend/src/pages/NewProductOrder.tsx @@ -0,0 +1,899 @@ +import { useState, useMemo } from 'react' +import { useNavigate, Link } from 'react-router-dom' +import { useQuery, keepPreviousData } from '@tanstack/react-query' +import { + ArrowLeft, ArrowRight, Search, Box, Check, ShoppingCart, Trash2, + ChevronDown, ChevronRight, +} from 'lucide-react' +import { toast } from 'sonner' +import { listProducts } from '../api/products' +import { listOutputTypes } from '../api/outputTypes' +import { createOrder } from '../api/orders' +import { estimatePrice } from '../api/pricing' +import type { Product, RenderPosition } from '../api/products' +import type { OutputType } from '../api/outputTypes' + +const CATEGORIES = [ + { key: 'TRB', label: 'TRB' }, + { key: 'Kugellager', label: 'Kugellager' }, + { key: 'CRB', label: 'CRB' }, + { key: 'Gleitlager', label: 'Gleitlager' }, + { key: 'SRB_TORB', label: 'SRB/TORB' }, + { key: 'Linear_schiene', label: 'Linear' }, + { key: 'Anschlagplatten', label: 'Anschlag' }, +] + +type WizardStep = 1 | 2 | 3 + +// Maps product_id → Set of output_type_id +type OutputSelections = Record> +// Maps product_id → Set of position_id +type PositionSelections = Record> + +export default function NewProductOrderPage() { + const navigate = useNavigate() + const [step, setStep] = useState(1) + const [searchQ, setSearchQ] = useState('') + const [categoryFilter, setCategoryFilter] = useState('') + const [selectedProducts, setSelectedProducts] = useState>(new Map()) + const [outputSelections, setOutputSelections] = useState({}) + const [positionSelections, setPositionSelections] = useState({}) + const [notes, setNotes] = useState('') + const [submitting, setSubmitting] = useState(false) + + // ---- Step 1: load products with STEP files ---- + const { data: products, isLoading: productsLoading } = useQuery({ + queryKey: ['wizard-products', searchQ, categoryFilter], + queryFn: () => listProducts({ + q: searchQ, + category_key: categoryFilter, + ready_only: true, + limit: 200, + }), + }) + + // ---- Step 2: load all output types (we'll filter client-side per product category) ---- + const { data: allOutputTypes } = useQuery({ + queryKey: ['wizard-output-types'], + queryFn: () => listOutputTypes(false), + enabled: step >= 2, + }) + + function initPositionsForProduct(product: Product) { + if ((product.render_positions?.length ?? 0) > 0) { + // Default: all positions selected + setPositionSelections((ps) => ({ + ...ps, + [product.id]: new Set(product.render_positions!.map((p) => p.id)), + })) + } + } + + function toggleProduct(product: Product) { + const willSelect = !selectedProducts.has(product.id) + setSelectedProducts((prev) => { + const next = new Map(prev) + if (next.has(product.id)) { + next.delete(product.id) + } else { + next.set(product.id, product) + } + return next + }) + if (willSelect) { + initPositionsForProduct(product) + } + } + + const allFilteredSelected = + (products?.length ?? 0) > 0 && (products ?? []).every((p) => selectedProducts.has(p.id)) + + function selectAllFiltered() { + const toInit = (products ?? []).filter((p) => !selectedProducts.has(p.id)) + setSelectedProducts((prev) => { + const next = new Map(prev) + ;(products ?? 
[]).forEach((p) => next.set(p.id, p)) + return next + }) + toInit.forEach(initPositionsForProduct) + } + + function deselectAllFiltered() { + setSelectedProducts((prev) => { + const next = new Map(prev) + ;(products ?? []).forEach((p) => next.delete(p.id)) + return next + }) + } + + function getCompatibleOutputTypes(categoryKey: string | null): OutputType[] { + if (!allOutputTypes) return [] + return allOutputTypes.filter((ot) => + ot.compatible_categories.length === 0 || + (categoryKey && ot.compatible_categories.includes(categoryKey)) + ) + } + + function toggleOutputType(productId: string, outputTypeId: string) { + setOutputSelections((prev) => { + const set = new Set(prev[productId] || []) + if (set.has(outputTypeId)) { + set.delete(outputTypeId) + } else { + set.add(outputTypeId) + } + return { ...prev, [productId]: set } + }) + } + + // Union of all output types compatible with at least one selected product + const globalOutputTypes = useMemo(() => { + if (!allOutputTypes || selectedProducts.size === 0) return [] + const seenIds = new Set() + const result: OutputType[] = [] + for (const product of selectedProducts.values()) { + for (const ot of getCompatibleOutputTypes(product.category_key)) { + if (!seenIds.has(ot.id)) { + seenIds.add(ot.id) + result.push(ot) + } + } + } + return result + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [selectedProducts, allOutputTypes]) + + function toggleOutputTypeGlobal(otId: string) { + let compatibleCount = 0 + let selectedCount = 0 + for (const [productId, product] of selectedProducts) { + const compatible = getCompatibleOutputTypes(product.category_key) + if (!compatible.some((ot) => ot.id === otId)) continue + compatibleCount++ + if (outputSelections[productId]?.has(otId)) selectedCount++ + } + if (compatibleCount === 0) return + const shouldSelect = selectedCount < compatibleCount + setOutputSelections((prev) => { + const next = { ...prev } + for (const [productId, product] of selectedProducts) { + const compatible = getCompatibleOutputTypes(product.category_key) + if (!compatible.some((ot) => ot.id === otId)) continue + const set = new Set(prev[productId] || []) + if (shouldSelect) set.add(otId) + else set.delete(otId) + next[productId] = set + } + return next + }) + } + + function togglePosition(productId: string, positionId: string) { + setPositionSelections((prev) => { + const set = new Set(prev[productId] || []) + if (set.has(positionId)) set.delete(positionId) + else set.add(positionId) + return { ...prev, [productId]: set } + }) + } + + // Union of all unique position names across selected products that have positions + const globalPositionNames = useMemo(() => { + const seen = new Set() + const result: string[] = [] + for (const product of selectedProducts.values()) { + for (const pos of product.render_positions ?? []) { + if (!seen.has(pos.name)) { + seen.add(pos.name) + result.push(pos.name) + } + } + } + return result + }, [selectedProducts]) + + function togglePositionGlobal(positionName: string) { + // Count how many products have this position name and how many have it selected + let compatibleCount = 0 + let selectedCount = 0 + for (const [productId, product] of selectedProducts) { + const pos = (product.render_positions ?? 
[]).find((p) => p.name === positionName) + if (!pos) continue + compatibleCount++ + if (positionSelections[productId]?.has(pos.id)) selectedCount++ + } + if (compatibleCount === 0) return + const shouldSelect = selectedCount < compatibleCount + setPositionSelections((prev) => { + const next = { ...prev } + for (const [productId, product] of selectedProducts) { + const pos = (product.render_positions ?? []).find((p) => p.name === positionName) + if (!pos) continue + const set = new Set(prev[productId] || []) + if (shouldSelect) set.add(pos.id) + else set.delete(pos.id) + next[productId] = set + } + return next + }) + } + + // Build flat list of order lines for review (Step 3) + // Each (product, outputType, position?) triple becomes one line. + const orderLines = useMemo(() => { + const lines: Array<{ + key: string + product: Product + outputType: OutputType + position: RenderPosition | null + }> = [] + for (const [productId, product] of selectedProducts) { + const selectedOts = outputSelections[productId] + if (!selectedOts) continue + const hasPositions = (product.render_positions?.length ?? 0) > 0 + for (const otId of selectedOts) { + const ot = allOutputTypes?.find((o) => o.id === otId) + if (!ot) continue + if (hasPositions) { + const selectedPosIds = positionSelections[productId] || new Set() + if (selectedPosIds.size === 0) { + lines.push({ key: `${productId}-${otId}`, product, outputType: ot, position: null }) + } else { + for (const posId of selectedPosIds) { + const pos = product.render_positions!.find((p) => p.id === posId) + if (pos) lines.push({ key: `${productId}-${otId}-${posId}`, product, outputType: ot, position: pos }) + } + } + } else { + lines.push({ key: `${productId}-${otId}`, product, outputType: ot, position: null }) + } + } + } + return lines + }, [selectedProducts, outputSelections, positionSelections, allOutputTypes]) + + function removeLine(productId: string, outputTypeId: string, positionId: string | null) { + if (positionId) { + setPositionSelections((prev) => { + const set = new Set(prev[productId] || []) + set.delete(positionId) + return { ...prev, [productId]: set } + }) + } else { + setOutputSelections((prev) => { + const set = new Set(prev[productId] || []) + set.delete(outputTypeId) + return { ...prev, [productId]: set } + }) + } + } + + // Check that every selected product has at least one output type + const allProductsHaveOutputTypes = useMemo(() => { + for (const productId of selectedProducts.keys()) { + const set = outputSelections[productId] + if (!set || set.size === 0) return false + } + return true + }, [selectedProducts, outputSelections]) + + const totalRenderJobs = useMemo(() => { + let count = 0 + for (const set of Object.values(outputSelections)) { + count += set.size + } + return count + }, [outputSelections]) + + // Build estimate lines for pricing query + const estimateLines = useMemo(() => { + return orderLines.map((l) => ({ + product_id: l.product.id, + output_type_id: l.outputType.id, + })) + }, [orderLines]) + + const { data: priceEstimate } = useQuery({ + queryKey: ['price-estimate', estimateLines], + queryFn: () => estimatePrice(estimateLines), + enabled: estimateLines.length > 0 && step >= 2, + placeholderData: keepPreviousData, + }) + + // Helper to find per-line price from estimate breakdown + function getLinePrice(productId: string, outputTypeId: string): number | null { + if (!priceEstimate) return null + const match = priceEstimate.breakdown.find( + (b) => b.product_id === productId && b.output_type_id === outputTypeId + ) + 
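+    // Each breakdown entry carries at least { product_id, output_type_id, unit_price }
+    // (as read here); lines without a matching entry fall back to null below.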
return match?.unit_price ?? null + } + + async function handleSubmit() { + if (orderLines.length === 0) return + setSubmitting(true) + try { + const result = await createOrder({ + notes: notes || undefined, + lines: orderLines.map((l) => ({ + product_id: l.product.id, + output_type_id: l.outputType.id, + render_position_id: l.position?.id ?? null, + })), + }) + toast.success(`Draft order ${result.order_number} created — review and submit`) + navigate(`/orders/${result.id}`) + } catch (e: any) { + toast.error(e.response?.data?.detail || 'Failed to create order') + } finally { + setSubmitting(false) + } + } + + return ( +
+ {/* Header */} +
+ Back +

New Product Order

+
+ + {/* Step indicator */} +
+ {[ + { n: 1, label: 'Select Products' }, + { n: 2, label: 'Configure Outputs' }, + { n: 3, label: 'Review & Submit' }, + ].map(({ n, label }, i) => ( +
+ {i > 0 && ( +
= n ? 'var(--color-accent)' : 'var(--color-border)' }} + /> + )} +
n + ? 'bg-status-success-bg text-status-success-text' + : 'bg-surface-muted text-content-muted' + }`} + style={step === n ? { backgroundColor: 'var(--color-accent)' } : undefined} + > + + {step > n ? : n} + + {label} +
+
+ ))} +
+ + {/* ================================================================ */} + {/* STEP 1: Select Products */} + {/* ================================================================ */} + {step === 1 && ( +
+ {/* Search + filter bar */} +
+
+ + setSearchQ(e.target.value)} + className="w-full pl-9 pr-3 py-2 border border-border-default rounded-lg text-sm focus:outline-none focus:border-accent" + /> +
+ + {(products?.length ?? 0) > 0 && ( + + )} +
+ + {/* Product grid */} + {productsLoading ? ( +
Loading products...
+ ) : !products?.length ? ( +
No products with STEP files found.
+ ) : ( +
+ {products.map((p) => { + const isSelected = selectedProducts.has(p.id) + return ( +
toggleProduct(p)} + className={`card cursor-pointer transition-all overflow-hidden relative ${ + isSelected + ? 'ring-2 ring-accent shadow-md' + : 'hover:shadow-md' + }`} + > + {/* Selection checkbox overlay */} +
+ {isSelected && } +
+ + {/* Thumbnail */} +
+ {(p.render_image_url || p.thumbnail_url) ? ( + {p.name + ) : ( + + )} +
+ + {/* Info */} +
+

{p.pim_id}

+

+ {p.name || p.pim_id} +

+ {p.category_key && ( + + {CATEGORIES.find((c) => c.key === p.category_key)?.label || p.category_key} + + )} +
+
+ ) + })} +
+ )} + + {/* Sticky bottom bar */} + {selectedProducts.size > 0 && ( +
+ + + {selectedProducts.size} product{selectedProducts.size !== 1 ? 's' : ''} selected + + +
+ )} +
+ )} + + {/* ================================================================ */} + {/* STEP 2: Configure Output Types */} + {/* ================================================================ */} + {step === 2 && ( +
+

+ Select which output types to generate for each product. Only compatible types are shown. +
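+          {/* The "apply to all" toggles below are tri-state per option: the handlers count
+              compatible vs. already-selected products and use shouldSelect = selectedCount <
+              compatibleCount, so a partially selected option is first completed for every
+              compatible product and only a fully selected one is cleared again. */}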

+ + {/* Global toggles — apply to all products at once */} + {(globalOutputTypes.length > 0 || globalPositionNames.length > 0) && ( +
+

+ Apply to all products +

+ + {/* Output types row */} + {globalOutputTypes.length > 0 && ( +
+

Output Types

+
+ {globalOutputTypes.map((ot) => { + let compatibleCount = 0 + let selectedCount = 0 + for (const [productId, product] of selectedProducts) { + const compatible = getCompatibleOutputTypes(product.category_key) + if (!compatible.some((o) => o.id === ot.id)) continue + compatibleCount++ + if (outputSelections[productId]?.has(ot.id)) selectedCount++ + } + const allSel = selectedCount === compatibleCount && compatibleCount > 0 + const someSel = selectedCount > 0 && !allSel + return ( + + ) + })} +
+
+ )} + + {/* Perspectives row */} + {globalPositionNames.length > 0 && ( +
+

Perspectives

+
+ {globalPositionNames.map((posName) => { + let compatibleCount = 0 + let selectedCount = 0 + for (const [productId, product] of selectedProducts) { + const pos = (product.render_positions ?? []).find((p) => p.name === posName) + if (!pos) continue + compatibleCount++ + if (positionSelections[productId]?.has(pos.id)) selectedCount++ + } + const allSel = selectedCount === compatibleCount && compatibleCount > 0 + const someSel = selectedCount > 0 && !allSel + return ( + + ) + })} +
+
+ )} +
+ )} + +
+ {Array.from(selectedProducts.values()).map((product) => ( + toggleOutputType(product.id, otId)} + selectedPositions={positionSelections[product.id] || new Set()} + onTogglePosition={(posId) => togglePosition(product.id, posId)} + /> + ))} +
+ + {/* Bottom bar */} +
+
+ + + {selectedProducts.size} product{selectedProducts.size !== 1 ? 's' : ''} · {orderLines.length} render job{orderLines.length !== 1 ? 's' : ''} + {priceEstimate && priceEstimate.total > 0 && ( + <> · Estimated: {priceEstimate.total.toFixed(2)} + )} + +
+ +
+
+ )} + + {/* ================================================================ */} + {/* STEP 3: Review & Submit */} + {/* ================================================================ */} + {step === 3 && ( +
+ {orderLines.length === 0 ? ( +
+ No render jobs configured. Go back and select output types. +
+ ) : ( + <> +
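+            {/* One review row per (product, output type, position) combination: a product with
+                render positions fans out per selected position, e.g. 2 products x 2 output types
+                x 3 selected positions = 12 rows, while a product without positions (or with none
+                selected) contributes one row per output type. */}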
+ + + + + + + + + + + + + + {orderLines.map((line) => ( + + + + + + + + + + ))} + +
ProductOutput TypePositionRendererFormatPrice
+
+
+ {(line.product.render_image_url || line.product.thumbnail_url) ? ( + + ) : ( + + )} +
+
+

+ {line.product.name || line.product.pim_id} +

+ {line.product.pim_id} +
+
+
{line.outputType.name} + {line.position ? ( + + {line.position.name} + + ) : ( + + )} + {line.outputType.renderer}{line.outputType.output_format} + {(() => { + const price = getLinePrice(line.product.id, line.outputType.id) + return price != null ? ( + {price.toFixed(2)} + ) : ( + + ) + })()} + + +
+
+ + {/* Notes */} +
+ +