STL
diff --git a/frontend/src/pages/WorkerManagement.tsx b/frontend/src/pages/WorkerManagement.tsx
new file mode 100644
index 0000000..11cbf9a
--- /dev/null
+++ b/frontend/src/pages/WorkerManagement.tsx
@@ -0,0 +1,281 @@
+import { useState } from 'react'
+import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'
+import { toast } from 'sonner'
+import { RefreshCw, ChevronDown, ChevronRight, Cpu, Layers, Minus, Plus } from 'lucide-react'
+import {
+ getCeleryWorkers,
+ getQueueStatus,
+ scaleWorkers,
+ type CeleryWorker,
+ type ScaleRequest,
+} from '../api/worker'
+
+// ---------------------------------------------------------------------------
+// Worker card
+// ---------------------------------------------------------------------------
+
+function WorkerCard({ worker }: { worker: CeleryWorker }) {
+ const [expanded, setExpanded] = useState(false)
+ return (
+
+
+
+
+ {worker.name}
+
+
+ 0
+ ? 'bg-blue-500/20 text-blue-400'
+ : 'bg-green-500/20 text-green-400'
+ }`}
+ >
+ {worker.active_task_count > 0 ? `${worker.active_task_count} active` : 'idle'}
+
+ {worker.active_tasks.length > 0 && (
+
+ )}
+
+
+
+ {/* Queues */}
+
+ {worker.queues.map((q) => (
+
+ {q}
+
+ ))}
+
+
+ {/* Active tasks */}
+ {expanded && worker.active_tasks.length > 0 && (
+
+ {worker.active_tasks.map((t) => (
+
+ {t.name}
+
+ ))}
+
+ )}
+
+ )
+}
+
+// ---------------------------------------------------------------------------
+// Scale controls
+// ---------------------------------------------------------------------------
+
+type ScalableService = ScaleRequest['service']
+
+const SCALABLE_SERVICES: { service: ScalableService; label: string; description: string }[] = [
+ { service: 'render-worker', label: 'Render Worker', description: 'Blender renders — concurrency=1' },
+ { service: 'worker', label: 'Step Worker', description: 'STEP processing — concurrency=8' },
+ { service: 'worker-thumbnail', label: 'Thumbnail Worker', description: 'Thumbnail rendering' },
+]
+
+function ScaleControl({
+ service,
+ label,
+ description,
+}: {
+ service: ScalableService
+ label: string
+ description: string
+}) {
+ const [count, setCount] = useState(1)
+ const scaleMut = useMutation({
+ mutationFn: () => scaleWorkers({ service, count }),
+ onSuccess: (data) => toast.success(`${data.service} → ${data.count} instance(s)`),
+ onError: (e: unknown) => {
+ const detail = (e as { response?: { data?: { detail?: string } } })?.response?.data?.detail
+ toast.error(detail ?? `Failed to scale ${service}`)
+ },
+ })
+
+ return (
+
+
+
{label}
+
{description}
+
+
+
+
{count}
+
+
+
+
+ )
+}
+
+// ---------------------------------------------------------------------------
+// Queue depth bar
+// ---------------------------------------------------------------------------
+
+function QueueDepthRow({ queue, depth }: { queue: string; depth: number }) {
+ return (
+
+
{queue}
+
+
10 ? 'var(--color-red-500)' : 'var(--color-accent)',
+ }}
+ />
+
+
10 ? 'text-red-400' : 'text-content-muted'
+ }`}
+ >
+ {depth}
+
+
+ )
+}
+
+// ---------------------------------------------------------------------------
+// Main page
+// ---------------------------------------------------------------------------
+
+export default function WorkerManagement() {
+ const qc = useQueryClient()
+
+ const { data: workerData, isLoading: workersLoading } = useQuery({
+ queryKey: ['celery-workers'],
+ queryFn: getCeleryWorkers,
+ refetchInterval: 10_000,
+ })
+
+ const { data: queueData, isLoading: queuesLoading } = useQuery({
+ queryKey: ['queue-status'],
+ queryFn: getQueueStatus,
+ refetchInterval: 5_000,
+ })
+
+ function refresh() {
+ qc.invalidateQueries({ queryKey: ['celery-workers'] })
+ qc.invalidateQueries({ queryKey: ['queue-status'] })
+ }
+
+ const workers = workerData?.workers ?? []
+ const queueDepths = queueData?.queue_depths ?? {}
+
+ return (
+
+ {/* Header */}
+
+
+
Worker Management
+
+ Monitor active Celery workers and scale services up or down.
+
+
+
+
+
+ {/* Queue depths */}
+
+
+
+
Queue Depths
+
+ {queuesLoading ? (
+
+ {[0, 1, 2].map((i) => (
+
+ ))}
+
+ ) : Object.keys(queueDepths).length === 0 ? (
+ No queue data available.
+ ) : (
+
+ {Object.entries(queueDepths).map(([queue, depth]) => (
+
+ ))}
+
+ )}
+
+
+ {/* Active workers */}
+
+
+
+
+ Active Workers
+ {workers.length > 0 && (
+
+ ({workers.length})
+
+ )}
+
+
+ {workersLoading ? (
+
+ {[0, 1].map((i) => (
+
+ ))}
+
+ ) : workerData?.error ? (
+
+ Failed to fetch workers: {workerData.error}
+
+ ) : workers.length === 0 ? (
+
+ No active workers detected. Make sure Celery workers are running.
+
+ ) : (
+
+ {workers.map((w) => (
+
+ ))}
+
+ )}
+
+
+ {/* Scale controls */}
+
+ Scale Services
+
+ Adjust the number of container instances for each service via Docker Compose.
+ Changes take effect immediately but are not persisted across deployments.
+
+
+ {SCALABLE_SERVICES.map((s) => (
+
+ ))}
+
+
+
+ )
+}
diff --git a/plan.md b/plan.md
index 413dfbd..4badec7 100644
--- a/plan.md
+++ b/plan.md
@@ -1,420 +1,365 @@
-# Plan: Phase J (WebSocket) + Turntable Bug + Phase K (Asset Library)
+# Plan: Phase N — Workflow-Pipeline, 3D-Viewer Production-Modus, Worker-Management, QC-Tests
## Kontext
-Analyse des aktuellen Codestands ergab: **Phasen F, G, H, I, L sind bereits vollständig implementiert.**
+Vier offene Bereiche aus dem PLAN.md müssen abgeschlossen werden:
-| Phase | Status | Beleg |
-|-------|--------|-------|
-| F - Hash-Caching | DONE | `domains/products/cache_service.py` + migration 041 |
-| G - Billing | DONE | `domains/billing/` vollständig, WeasyPrint in Dockerfile |
-| H - Excel Sanity-Check | DONE | `domains/imports/service.py run_sanity_check()` + Upload.tsx Dialog |
-| I - Notification-Config | DONE | `notification_configs` migration 044, NotificationSettings.tsx |
-| L - Dashboard | DONE | AdminDashboard.tsx + ClientDashboard.tsx vollständig |
-| **J - WebSocket** | **FEHLT** | Kein `core/websocket.py`, alle Polls noch aktiv |
+1. **Workflow-Pipeline verdrahten**: `workflow_builder.py` enthält nur defekte Stubs. `_build_still` übergibt `order_line_id` als `step_path` an `render_still_task` → würde crashen. Der neue `still_with_exports`-Workflow (still + gltf_export + blend_export) ist nicht implementiert. Die Celery-Tasks für export_gltf/export_blend fehlen in `domains/rendering/tasks.py`.
-Zusätzlich: **Kritischer Bug in `render_blender.py`** — ffmpeg-Overlay-Befehl haengt bei endlicher Frame-Sequenz (kein `shortest=1`) -> Timeout -> Turntable-Render schlaegt fehl.
+2. **K6: 3D-Viewer Production-Modus**: `ThreeDViewer.tsx` hat keinen Mode-Toggle, Wireframe, Env-Preset oder Download-Buttons. Für Testdaten wird `POST /api/cad/{id}/generate-gltf-geometry` benötigt (trimesh STL→GLB, kein Blender nötig).
----
+3. **L3: Worker-Management UI**: `WorkerManagement.tsx` fehlt. Backend braucht `/celery-workers` (Celery inspect) und `/scale` (docker compose subprocess). Backend-Container bekommt Docker-Socket-Mount.
-## Bug Fix: Turntable ffmpeg Timeout
-
-**Root cause**: In `backend/app/services/render_blender.py:507`:
-```python
-"-filter_complex", "[1:v][0:v]overlay=0:0",
-```
-Der `lavfi color`-Quell-Stream hat keine definierte Laenge. Ohne `shortest=1` wartet ffmpeg auf
-weitere Frames vom Farb-Stream nachdem die PNG-Sequenz endet -> haengt bis Timeout (300s).
-
-**Fix**: `overlay=0:0` -> `overlay=0:0:shortest=1`
-
----
-
-## Phase J: WebSocket Backend + Frontend
-
-### Architektur (ADR-05: FastAPI nativ + Redis Pub/Sub)
-
-```
-Backend Task/Router:
- -> redis.publish(f"tenant:{tenant_id}", json.dumps(event))
-
-core/websocket.py:
- ConnectionManager: tenant_id -> set[WebSocket]
- background_task: asyncio.Task (redis subscribe loop)
-
-Frontend:
- useWebSocket() hook -> WebSocket('/api/ws')
- Empfaengt Events, invalidiert React Query caches
-```
-
-### Events die gesendet werden:
-| Event | Sender | Daten |
-|-------|--------|-------|
-| `render_complete` | step_tasks.py | order_line_id, status, thumbnail_url |
-| `render_failed` | step_tasks.py | order_line_id, error |
-| `cad_processing_complete` | step_tasks.py | cad_file_id, status |
-| `order_status_change` | orders router | order_id, new_status |
-| `queue_update` | beat task (alle 10s) | depth per queue |
+4. **M: QC-Tests**: `pytest` ist im Backend-Container nicht installiert. Dockerfile: `pip install -e ".[dev]"`. Neue Service-Tests für rendering und orders domains. 2 neue Vitest-Dateien.
---
## Betroffene Dateien
-### Neu erstellen:
-- `backend/app/core/websocket.py` -- ConnectionManager + Redis Pub/Sub Loop
-- `frontend/src/hooks/useWebSocket.ts` -- WebSocket hook mit Auto-Reconnect
-- `frontend/src/contexts/WebSocketContext.tsx` -- Context Provider
-
-### Aendern:
-- `backend/app/services/render_blender.py` -- ffmpeg shortest=1 Bug-Fix
-- `backend/app/main.py` -- WebSocket-Endpoint registrieren (`/api/ws`)
-- `backend/app/tasks/step_tasks.py` -- WebSocket-Events emittieren
-- `backend/app/domains/orders/router.py` -- Order-Status-Events emittieren
-- `backend/app/tasks/celery_app.py` -- `broadcast_queue_status` Beat-Task hinzufuegen
-- `frontend/src/App.tsx` -- WebSocketProvider wrappen
-- `frontend/src/pages/WorkerActivity.tsx` -- polling durch WS ersetzen
-- `frontend/src/pages/OrderDetail.tsx` -- polling durch WS ersetzen
-- `frontend/src/pages/Orders.tsx` -- polling reduzieren
-- `frontend/src/components/layout/Layout.tsx` -- polling reduzieren
-- `frontend/src/components/layout/NotificationCenter.tsx` -- polling durch WS ersetzen
-
-### Nach Phase J Commit -- Phase K:
-- `backend/alembic/versions/045_asset_libraries.py` -- asset_libraries Tabelle
-- `backend/app/domains/materials/models.py` -- AssetLibrary Model hinzufuegen
-- `backend/app/domains/materials/router.py` -- Asset Library CRUD + Upload
-- `render-worker/scripts/asset_library.py` -- Materialien + Node-Groups aus .blend laden
-- `render-worker/scripts/catalog_assets.py` -- Katalog aus .blend lesen
-- `render-worker/scripts/export_gltf.py` -- GLB Export mit Materialien
-- `render-worker/scripts/export_blend.py` -- .blend Export mit pack_all()
-- `backend/app/domains/rendering/workflow_builder.py` -- Asset Library Nodes
-- `frontend/src/pages/Admin.tsx` -- Asset Library Manager UI
-- `frontend/src/api/assetLibraries.ts` -- API Client
+| Datei | Änderung |
+|-------|----------|
+| `backend/app/domains/rendering/tasks.py` | 3 neue Tasks: `render_order_line_still_task`, `export_gltf_for_order_line_task`, `export_blend_for_order_line_task` |
+| `backend/app/domains/rendering/workflow_builder.py` | Stubs ersetzen durch order-line-aware Tasks, `still_with_exports` hinzufügen |
+| `backend/app/api/routers/cad.py` | `POST /{id}/generate-gltf-geometry` Endpoint |
+| `backend/app/api/routers/worker.py` | `GET /celery-workers`, `POST /scale` Endpoints |
+| `backend/Dockerfile` | `pip install -e ".[dev]"` |
+| `docker-compose.yml` | Backend + Worker: Docker-Socket + Compose-File-Mount |
+| `frontend/src/components/cad/ThreeDViewer.tsx` | Mode-Toggle, Wireframe, Env-Preset, Download-Buttons |
+| `frontend/src/pages/WorkerManagement.tsx` | NEU: Worker-Liste, Queue-Stats, Scale-Button |
+| `frontend/src/api/worker.ts` | Neue Interfaces + API-Funktionen |
+| `frontend/src/App.tsx` | Route für /workers |
+| `frontend/src/components/layout/Layout.tsx` | Sidebar-Link Workers |
+| `backend/tests/domains/test_rendering_service.py` | NEU: ≥5 Tests für Rendering-Tasks und Workflow-Builder |
+| `backend/tests/domains/test_orders_service.py` | NEU: ≥5 Tests für Orders-Endpoints |
+| `frontend/src/__tests__/pages/WorkerActivity.test.tsx` | NEU: Vitest-Tests |
+| `frontend/src/__tests__/pages/WorkerManagement.test.tsx` | NEU: Vitest-Tests |
---
## Tasks (in Reihenfolge)
-### Task 1: Bug-Fix ffmpeg Turntable Timeout [x]
-- **Datei**: `backend/app/services/render_blender.py:507`
-- **Was**: `"[1:v][0:v]overlay=0:0"` -> `"[1:v][0:v]overlay=0:0:shortest=1"`
-- **Akzeptanzkriterium**: Turntable-Render fuer Order f0436188 kann erneut gestartet werden und produziert MP4
-- **Abhaengigkeiten**: keine
+### Task 1: Backend — Neue order-line-aware Rendering-Tasks
+- **Datei**: `backend/app/domains/rendering/tasks.py`
+- **Was**: Drei neue Celery-Tasks hinzufügen (UNTER den bestehenden Tasks):
-### Task 2: WebSocket Backend -- core/websocket.py [x]
-- **Datei**: `backend/app/core/websocket.py` (neu)
-- **Was**:
- ```python
- class ConnectionManager:
- _connections: dict[str, set[WebSocket]] # tenant_id -> sockets
- async def connect(ws, tenant_id)
- def disconnect(ws, tenant_id)
- async def broadcast_to_tenant(tenant_id, event: dict)
- async def start_redis_subscriber() # asyncio background task
+ **`render_order_line_still_task(order_line_id, **params)`** — Queue `thumbnail_rendering`:
+ - Lädt OrderLine + CadFile via sync SQLAlchemy (wie `publish_asset`)
+ - Setzt `render_status = 'processing'`
+ - Ruft `render_still()` aus `app.services.render_blender` auf
+ - Setzt `render_status = 'completed'`, speichert `render_log`
+ - Bei Fehler: `render_status = 'failed'`
+ - Returns dict mit `output_path`
- def publish_event_sync(tenant_id: str, event: dict):
- # Sync version fuer Celery tasks -- redis.publish()
- ```
- - Redis Pub/Sub: subscribe auf `tenant:*` Channels
- - Bei Nachricht: alle WebSockets des Tenants benachrichtigen
- - Auto-Ping alle 30s gegen Disconnects
-- **Akzeptanzkriterium**: broadcast_to_tenant sendet an alle verbundenen WS des Tenants
-- **Abhaengigkeiten**: keine
+ **`export_gltf_for_order_line_task(order_line_id)`** — Queue `thumbnail_rendering`:
+ - Lädt OrderLine + CadFile sync
+ - Sucht STL-Cache (`{step_stem}_low.stl`)
+ - Ruft Blender subprocess mit `export_gltf.py` auf: `blender --background --python export_gltf.py -- --stl_path X --output_path Y`
+ - Lädt GLB nach MinIO `production-exports/{cad_file_id}/{order_line_id}.glb`
+ - Erstellt `MediaAsset(asset_type=gltf_production, storage_key=...)`
+ - Returns `storage_key`
-### Task 3: WebSocket Endpoint in main.py [x]
-- **Datei**: `backend/app/main.py`
-- **Was**:
- ```python
- @app.websocket("/api/ws")
- async def ws_endpoint(websocket: WebSocket, token: str = Query(...)):
- user = await verify_ws_token(token)
- await manager.connect(websocket, str(user.tenant_id))
- try:
- while True:
- await websocket.receive_text() # Keep-alive pings
- except WebSocketDisconnect:
- manager.disconnect(websocket, str(user.tenant_id))
- ```
- - Token-Auth via Query-Parameter (WS kann keinen Authorization-Header senden)
- - `verify_ws_token`: JWT decode, User laden (analog zu get_current_user)
- - `manager` als globale Instanz, gestartet im lifespan
-- **Akzeptanzkriterium**: `ws://localhost:8888/api/ws?token=<token>` oeffnet Verbindung
-- **Abhaengigkeiten**: Task 2
+ **`export_blend_for_order_line_task(order_line_id)`** — Queue `thumbnail_rendering`:
+ - Analog zu export_gltf, aber mit `export_blend.py`
+ - MediaAsset type: `blend_production`
-### Task 4: WebSocket Events in step_tasks.py [x]
-- **Datei**: `backend/app/tasks/step_tasks.py`
-- **Was**: In render_order_line_task und render_step_thumbnail nach Erfolg/Fehler:
- ```python
- from app.core.websocket import publish_event_sync
- # bei render complete:
- publish_event_sync(tenant_id, {"type": "render_complete", "order_line_id": str(line.id), "status": "completed"})
- # bei render failed:
- publish_event_sync(tenant_id, {"type": "render_failed", "order_line_id": str(line.id), "error": str(exc)})
- # bei CAD processing complete:
- publish_event_sync(tenant_id, {"type": "cad_processing_complete", "cad_file_id": str(cad_file.id), "status": "completed"})
- ```
- - tenant_id aus cad_file.tenant_id bzw. order_line -> order -> user.tenant_id laden
-- **Akzeptanzkriterium**: Render fertig -> WebSocket-Client empfaengt Event
-- **Abhaengigkeiten**: Task 2
+- **Akzeptanzkriterium**: Tasks in `domains/rendering/tasks.py` vorhanden, keine Import-Fehler
+- **Abhängigkeiten**: keine
-### Task 5: WebSocket Events in orders router [x]
-- **Datei**: `backend/app/domains/orders/router.py`
-- **Was**: Bei Order-Status-Aenderung (submit, complete, cancel):
- ```python
- from app.core.websocket import manager
- await manager.broadcast_to_tenant(
- str(current_user.tenant_id),
- {"type": "order_status_change", "order_id": str(order.id), "status": new_status}
- )
- ```
-- **Akzeptanzkriterium**: Order-Submit -> WebSocket-Event geht an alle Browser-Tabs des Tenants
-- **Abhaengigkeiten**: Task 2
-
-### Task 6: Queue-Update Beat-Task [x]
-- **Datei**: `backend/app/tasks/celery_app.py`
-- **Was**: Neuer Beat-Task alle 10s:
- ```python
- @shared_task(name="beat.broadcast_queue_status", queue="step_processing")
- def broadcast_queue_status():
- from app.core.websocket import publish_event_sync
- from redis import Redis
- r = Redis.from_url(settings.redis_url)
- depths = {
- "step_processing": r.llen("step_processing"),
- "thumbnail_rendering": r.llen("thumbnail_rendering"),
- }
- # Broadcast an alle Tenants (broadcast_all)
- r.publish("__broadcast__", json.dumps({"type": "queue_update", "depths": depths}))
- ```
- - `__broadcast__` Channel: wird an ALLE verbundenen WS gesendet (nicht tenant-spezifisch)
- - ConnectionManager subscribt auch auf `__broadcast__`
-- **Akzeptanzkriterium**: WorkerActivity-Queue-Tiefe aktualisiert alle 10s automatisch
-- **Abhaengigkeiten**: Task 2
-
-### Task 7: Frontend WebSocket Hook [x]
-- **Datei**: `frontend/src/hooks/useWebSocket.ts` (neu)
-- **Was**:
- ```typescript
- export function useWebSocketConnection() {
- // Verbindet zu ws://localhost:8888/api/ws?token=<token>
- // Auto-Reconnect: 1s, 2s, 4s, 8s, ... max 30s
- // Emittiert Events via onMessage callback
- // Pings alle 25s (keep-alive)
- // Trennt Verbindung bei Logout
- }
- ```
-- **Akzeptanzkriterium**: Verbindung bleibt offen, reconnected nach Netzwerktrennung
-- **Abhaengigkeiten**: keine
-
-### Task 8: Frontend WebSocket Context [x]
-- **Datei**: `frontend/src/contexts/WebSocketContext.tsx` (neu), `frontend/src/App.tsx` aendern
-- **Was**:
- ```typescript
- export function WebSocketProvider({ children }) {
- const queryClient = useQueryClient()
- // on 'render_complete': invalidateQueries(['orders', order_line_id])
- // on 'render_failed': invalidateQueries(['orders', order_line_id])
- // on 'cad_processing_complete': invalidateQueries(['cad-activity'])
- // on 'order_status_change': invalidateQueries(['orders'])
- // on 'queue_update': queryClient.setQueryData(['queue-status'], ...)
- }
- // App.tsx: um <WebSocketProvider> wrappen
- ```
-- **Akzeptanzkriterium**: render_complete Event -> OrderDetail aktualisiert ohne Poll-Interval
-- **Abhaengigkeiten**: Task 7
-
-### Task 9: Polling ersetzen -- WorkerActivity.tsx [x]
-- **Datei**: `frontend/src/pages/WorkerActivity.tsx`
-- **Was**:
- - `refetchInterval: 5000` entfernen -- bei `cad_processing_complete` invalidieren
- - `refetchInterval: 3000` fuer Queue-Status entfernen -- bei `queue_update` setQueryData
-- **Akzeptanzkriterium**: Keine automatischen HTTP-Requests im Network-Tab (nur WS-Frames)
-- **Abhaengigkeiten**: Task 8
-
-### Task 10: Polling ersetzen -- OrderDetail.tsx [x]
-- **Datei**: `frontend/src/pages/OrderDetail.tsx`
-- **Was**:
- - `refetchInterval: (query) => {...}` entfernen
- - Stattdessen: bei `render_complete` / `render_failed` fuer matching order_line_id -> invalidate
-- **Akzeptanzkriterium**: Render-Status in OrderDetail aktualisiert live ohne Poll
-- **Abhaengigkeiten**: Task 8
-
-### Task 11: Polling reduzieren -- Layout.tsx + NotificationCenter.tsx [x]
-- **Dateien**: `frontend/src/components/layout/Layout.tsx`, `NotificationCenter.tsx`
-- **Was**:
- - Layout: `refetchInterval: 8000` -> 60000 (1min)
- - NotificationCenter: `refetchInterval: 15_000` -> 60000; bei `order_status_change` zusaetzlich invalidieren
-- **Akzeptanzkriterium**: Signifikant weniger Poll-Requests im Network-Tab
-- **Abhaengigkeiten**: Task 8
-
-### Task 12: PLAN.md + LEARNINGS.md + Commit [x]
-- **Was**:
- - PLAN.md: Phase J als ABGESCHLOSSEN markieren, Status auf "Phase K als naechstes"
- - LEARNINGS.md: ffmpeg `shortest=1` Learning + WebSocket Auth via Query-Param Learning
- - `git commit -m "feat(J): WebSocket live-events + replace polling + fix ffmpeg turntable timeout"`
-- **Abhaengigkeiten**: Tasks 1-11
-
----
-
-## Phase K Tasks (nach Commit)
-
-### Task K1: Migration 045 + AssetLibrary Model [x]
-- **Datei**: `backend/alembic/versions/045_asset_libraries.py` (neu, autogenerate), `domains/materials/models.py`
-- **Was**:
- ```python
- class AssetLibrary(Base):
- id: UUID PK, tenant_id FK nullable, name VARCHAR(200)
- blend_file_key TEXT, # MinIO key
- catalog JSONB, # {materials: [...], node_groups: [...]}
- description TEXT, is_active BOOL, created_at TIMESTAMP
- ```
- - `render_templates.asset_library_id` FK optional (nullable)
- - `output_types.asset_library_id` FK optional (nullable)
-- **Akzeptanzkriterium**: `alembic upgrade head` erfolgreich, `asset_libraries` Tabelle in DB
-
-### Task K2: Asset Library CRUD Backend [x]
-- **Datei**: `backend/app/domains/materials/router.py` + `service.py` + `schemas.py`
-- **Was**:
- - `POST /api/asset-libraries` -- .blend Upload -> MinIO `asset-libraries/{id}.blend` -> queut Katalog-Refresh
- - `GET /api/asset-libraries` -- Liste
- - `GET /api/asset-libraries/{id}/catalog` -- Materialien + Node-Groups
- - `DELETE /api/asset-libraries/{id}` -- nur wenn nicht in Verwendung (FK-Check)
- - `AssetLibraryOut` Schema mit `catalog` field
-- **Akzeptanzkriterium**: POST + GET funktionieren, .blend in MinIO gespeichert
-
-### Task K3: Katalog-Refresh Celery Task + Blender Script [x]
-- **Datei**: `backend/app/domains/materials/tasks.py` (neu), `render-worker/scripts/catalog_assets.py` (neu)
-- **Was**:
- - Celery Task `refresh_asset_library_catalog(asset_library_id)` auf Queue `thumbnail_rendering`
- - Laedt .blend aus MinIO in tmpdir
- - Startet `blender --background --python catalog_assets.py -- <blend_path>`
- - `catalog_assets.py`: oeffnet .blend, liest alle markierten Assets:
- ```python
- import bpy, json, sys
- blend_path = sys.argv[sys.argv.index('--') + 1]
- bpy.ops.wm.open_mainfile(filepath=blend_path)
- catalog = {
- "materials": [m.name for m in bpy.data.materials if m.asset_data],
- "node_groups": [ng.name for ng in bpy.data.node_groups if ng.asset_data],
- }
- print(json.dumps(catalog))
- ```
- - Schreibt Katalog in `asset_libraries.catalog JSONB`
-- **Akzeptanzkriterium**: Nach .blend-Upload enthaelt `catalog` JSONB die Asset-Namen
-
-### Task K4: Blender Asset Library Apply Script [x]
-- **Datei**: `render-worker/scripts/asset_library.py` (neu)
-- **Was**:
- ```python
- def apply_asset_library_materials(blend_path: str, material_map: dict) -> None:
- """Laedt Materialien aus Asset-Library .blend, wendet auf Mesh-Parts an."""
- with bpy.data.libraries.load(blend_path, link=True, assets_only=True) as (src, dst):
- dst.materials = [n for n in src.materials if n in material_map.values()]
- for obj in bpy.data.objects:
- if obj.type == 'MESH':
- for slot in obj.material_slots:
- resolved = material_map.get(slot.material.name if slot.material else '')
- if resolved and resolved in bpy.data.materials:
- slot.material = bpy.data.materials[resolved]
-
- def apply_asset_library_modifiers(blend_path: str, modifier_map: dict) -> None:
- """Laedt Geometry-Node-Gruppen, wendet als Modifier an."""
- with bpy.data.libraries.load(blend_path, link=True, assets_only=True) as (src, dst):
- dst.node_groups = [n for n in src.node_groups if n in modifier_map.values()]
- for obj in bpy.data.objects:
- if obj.type == 'MESH':
- for part_name, mod_name in modifier_map.items():
- if part_name.lower() in obj.name.lower():
- mod = obj.modifiers.new(name=mod_name, type='NODES')
- mod.node_group = bpy.data.node_groups.get(mod_name)
- ```
-- **Akzeptanzkriterium**: Render mit Asset-Library zeigt korrekte Produktionsmaterialien
-
-### Task K5: export_gltf + export_blend Scripts [x]
-- **Dateien**: `render-worker/scripts/export_gltf.py` (neu), `render-worker/scripts/export_blend.py` (neu)
-- **Was**:
- - `export_gltf.py`:
- 1. STL importieren (`bpy.ops.import_mesh.stl`)
- 2. Asset Library laden via `apply_asset_library_materials` + `apply_asset_library_modifiers`
- 3. `bpy.ops.export_scene.gltf(filepath=out, export_format='GLB', export_apply=True, export_draco_mesh_compression_enable=True)`
- 4. Output nach MinIO `production-exports/{cad_file_id}/{run_id}.glb`
- 5. MediaAsset-Record mit `asset_type=gltf_production`
- - `export_blend.py`:
- 1. STL + Asset Library laden (wie export_gltf)
- 2. `bpy.ops.file.pack_all()`
- 3. `bpy.ops.wm.save_as_mainfile(filepath=out, compress=True, copy=True)`
- 4. MediaAsset-Record mit `asset_type=blend_production`
-- **Akzeptanzkriterium**: GLB-Download oeffnet sich im Three.js Viewer mit Materialien
-
-### Task K6: Workflow-Builder -- Asset Library Nodes [x]
+### Task 2: Backend — workflow_builder.py reparieren + still_with_exports
- **Datei**: `backend/app/domains/rendering/workflow_builder.py`
- **Was**:
- - Neue Celery Tasks: `apply_asset_library_materials_task`, `apply_asset_library_modifiers_task`, `export_gltf_task`, `export_blend_task`
- - Neuer Workflow-Typ `still_production`:
+
+ - `_build_still`: Nutzt `render_order_line_still_task` statt `render_still_task`
+ - `_build_turntable`: Bleibt vorerst mit `render_turntable_task` (file-path-basiert, funktioniert via legacy path)
+ - `_build_multi_angle`: Nutzt `render_order_line_still_task` mit `camera_angle` param
+ - **NEU** `_build_still_with_exports(order_line_id, params)`:
```python
- chain(
- convert_step.si(order_line_id),
+ from celery import chain, group
+ return chain(
+ render_order_line_still_task.si(order_line_id, **params),
group(
- chain(apply_asset_library_materials.si(order_line_id), render_still.si(order_line_id)),
- chain(apply_asset_library_materials.si(order_line_id), export_gltf.si(order_line_id)),
- chain(apply_asset_library_materials.si(order_line_id), export_blend.si(order_line_id)),
- ),
- generate_thumbnail.si(order_line_id),
- publish_asset.si(order_line_id),
+ export_gltf_for_order_line_task.si(order_line_id),
+ export_blend_for_order_line_task.si(order_line_id),
+ )
)
```
-- **Akzeptanzkriterium**: Dispatch eines `still_production` Workflows -> PNG + GLB + .blend erzeugt
+ - `dispatch_workflow()`: `"still_with_exports"` zu `builders` hinzufügen
-### Task K7: Asset Library Management UI [x]
-- **Dateien**: `frontend/src/api/assetLibraries.ts` (neu), `frontend/src/pages/Admin.tsx` erweitern
-- **Was**:
- - API Client: `getAssetLibraries`, `uploadAssetLibrary` (multipart), `deleteAssetLibrary`, `getAssetLibraryCatalog`
- - Admin.tsx: neues Panel "Asset Libraries" (nach Render Templates)
- - Upload-Button + Drag-Drop
- - Tabelle: Name, Materialien-Anzahl, Node-Groups-Anzahl, Aktionen
- - Katalog-Detail: Material-Badge-Liste (gruen) + Node-Group-Badge-Liste (blau)
- - OutputTypeTable: Asset-Library-Dropdown-Spalte
-- **Akzeptanzkriterium**: Admin kann .blend hochladen, Katalog sehen, OutputType zuweisen
+- **Akzeptanzkriterium**: `dispatch_workflow("still_with_exports", order_line_id)` löst keine Exception aus
+- **Abhängigkeiten**: Task 1
-### Task K8: PLAN.md + LEARNINGS.md + Commit [x]
+### Task 3: Backend — generate-gltf-geometry Endpoint (Testdaten für K6)
+- **Datei**: `backend/app/api/routers/cad.py`
+- **Was**: Neuer Endpoint `POST /api/cad/{id}/generate-gltf-geometry` (require_admin_or_pm):
+ - Prüft ob CadFile existiert + STL-Cache vorhanden (`{step_dir}/{stem}_low.stl`)
+ - Queut neuen Celery-Task `generate_gltf_geometry_task.delay(str(cad_file.id))`
+ - Returns `{"task_id": ..., "message": "GLB generation queued"}`
+
+ Neuer Task `generate_gltf_geometry_task` in `domains/rendering/tasks.py` (Queue `thumbnail_rendering`):
+ - Lädt CadFile sync, findet STL-Cache
+ - **Nutzt trimesh** (kein Blender): `import trimesh; mesh = trimesh.load(stl_path); mesh.export(glb_path)`
+ → Warum trimesh: Schnell, kein Blender nötig, läuft auf worker-Container (trimesh in pyproject.toml cad-extras)
+ - Lädt GLB nach MinIO `uploads/{cad_file_id}/geometry.glb`
+ - Erstellt/aktualisiert `MediaAsset(asset_type=gltf_geometry, storage_key=..., cad_file_id=...)`
+ → `MediaAsset` braucht `cad_file_id` FK — prüfen ob vorhanden
+
+ **Wichtig**: Prüfen ob `media_assets.cad_file_id` existiert. Falls nicht: Migration 047 notwendig.
+
+- **Akzeptanzkriterium**: `POST /api/cad/{id}/generate-gltf-geometry` gibt 202 zurück, nach Task-Ausführung existiert MediaAsset mit type=gltf_geometry
+- **Abhängigkeiten**: Task 1
+
+### Task 4: Migration 047 — media_assets.cad_file_id (wenn nötig)
+- **Datei**: `backend/alembic/versions/047_media_assets_cad_file_id.py`
+- **Was**: Nullable FK `cad_file_id UUID REFERENCES cad_files(id) ON DELETE SET NULL` auf `media_assets`
+- **Prüfen**: `grep -n "cad_file_id" backend/app/domains/media/models.py` — falls schon vorhanden: Task überspringen
+- **Akzeptanzkriterium**: `alembic upgrade head` erfolgreich
+- **Abhängigkeiten**: keine
+
+### Task 5: ThreeDViewer.tsx — Production-Modus, Wireframe, Env-Preset, Downloads
+- **Datei**: `frontend/src/components/cad/ThreeDViewer.tsx`
+- **Was**: Props erweitern + Toolbar-Erweiterung:
+
+ ```typescript
+ interface ThreeDViewerProps {
+ cadFileId: string
+ onClose: () => void
+ productionGltfUrl?: string // wenn vorhanden: Mode-Toggle anzeigen
+ downloadUrls?: { glb?: string; blend?: string }
+ }
+ ```
+
+ **Neuer State:**
+ - `mode: 'geometry' | 'production'` (default: 'geometry')
+ - `wireframe: boolean` (default: false)
+ - `envPreset: 'city' | 'studio' | 'sunset'` (default: 'city')
+
+ **Toolbar** (neu, rechts vom "Capture Angle"-Button):
+ - Mode-Toggle (nur wenn `productionGltfUrl` gesetzt): Button-Gruppe "Geometry | Production"
+ - Wireframe-Toggle: Button
+ - Env-Preset-Dropdown: `