feat(N): workflow pipeline, 3D viewer, worker management, QC tests

- workflow_builder.py: fix broken stubs, add render_order_line_still_task
  (resolves step_path from DB instead of passing order_line_id as step_path)
- domains/rendering/tasks.py: add render_order_line_still_task,
  export_gltf_for_order_line_task, export_blend_for_order_line_task,
  generate_gltf_geometry_task (trimesh STL→GLB, no Blender needed)
- tasks/step_tasks.py: add generate_gltf_geometry_task for CadFile GLB export
- cad router: POST /{id}/generate-gltf-geometry endpoint (admin/PM)
- worker router: GET /celery-workers + POST /scale (docker compose subprocess)
- Dockerfile: pip install -e "[dev]" to enable pytest
- docker-compose.yml: docker socket + compose file mount on backend
- ThreeDViewer.tsx: mode toggle (geometry/production), wireframe, env presets,
  download buttons (GLB + .blend)
- CadPreview.tsx: load gltf_geometry/gltf_production/blend_production assets
  from MediaAsset table and pass URLs to ThreeDViewer
- ProductDetail.tsx: "View 3D" button → /cad/:id, "Generate GLB" button
- media router/service: cad_file_id filter on GET /api/media
- WorkerManagement.tsx: new page with worker status, queue depth, scale controls
- App.tsx + Layout.tsx: /workers route + sidebar link (admin/PM)
- tests: test_rendering_service.py, test_orders_service.py (backend)
- tests: WorkerActivity.test.tsx, WorkerManagement.test.tsx (frontend)

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-06 22:56:53 +01:00
parent 208eb21988
commit a70cb55d01
24 changed files with 1828 additions and 448 deletions
+98
View File
@@ -356,6 +356,104 @@ async def cancel_task(task_id: str, user: User = Depends(require_admin_or_pm)):
return {"revoked": task_id}
# ---------------------------------------------------------------------------
# Worker management — list workers + scale
# ---------------------------------------------------------------------------
class ScaleRequest(BaseModel):
    """Request body for POST /scale: which Compose service to scale, and to how many replicas."""

    service: str  # one of "render-worker" | "worker" | "worker-thumbnail" (allow-listed in the endpoint)
    count: int  # desired replica count, 0-20 (range enforced in the endpoint, not here)
@router.get("/celery-workers")
async def get_celery_workers(user: User = Depends(require_admin_or_pm)):
    """List active Celery workers with their queues and active task counts."""
    import asyncio

    from app.tasks.celery_app import celery_app

    def _snapshot() -> dict:
        # Broker inspection blocks on network I/O; it runs off the event loop below.
        try:
            inspector = celery_app.control.inspect(timeout=2.0)
            return {
                "active_queues": inspector.active_queues() or {},
                "active": inspector.active() or {},
                "stats": inspector.stats() or {},
            }
        except Exception as exc:  # broker unreachable, timeout, etc. — report, don't 500
            return {"error": str(exc)}

    snapshot = await asyncio.to_thread(_snapshot)
    if "error" in snapshot:
        return {"workers": [], "error": snapshot["error"]}

    active_by_worker = snapshot.get("active", {})
    stats_by_worker = snapshot.get("stats", {})

    workers = []
    # One entry per worker that reported its consumed queues.
    for name, queue_info in snapshot.get("active_queues", {}).items():
        running = active_by_worker.get(name, [])
        workers.append(
            {
                "name": name,
                "queues": [q.get("name") for q in (queue_info or [])],
                "active_task_count": len(running),
                "active_tasks": [
                    {"name": task.get("name"), "id": task.get("id")} for task in running
                ],
                "total_tasks_processed": stats_by_worker.get(name, {}).get("total", {}),
            }
        )
    return {"workers": workers}
@router.post("/scale", status_code=http_status.HTTP_202_ACCEPTED)
async def scale_workers(
    body: ScaleRequest,
    user: User = Depends(require_admin_or_pm),
):
    """Scale a Compose service (render-worker, worker, worker-thumbnail) up or down.

    Requires the docker socket and compose file to be accessible inside the container
    (see docker-compose.yml COMPOSE_PROJECT_DIR env var).
    """
    import asyncio
    import os
    import subprocess

    from fastapi import HTTPException

    # Allow-list prevents scaling arbitrary services; name is interpolated into
    # the error message below, so it stays as-is.
    ALLOWED_SERVICES = {"render-worker", "worker", "worker-thumbnail"}
    if body.service not in ALLOWED_SERVICES:
        raise HTTPException(400, detail=f"service must be one of {ALLOWED_SERVICES}")
    if not (0 <= body.count <= 20):
        raise HTTPException(400, detail="count must be between 0 and 20")

    compose_file = os.path.join(
        os.environ.get("COMPOSE_PROJECT_DIR", "/compose"), "docker-compose.yml"
    )
    # List-form argv (shell=False) — service/count are validated above, and
    # nothing here passes through a shell.
    command = [
        "docker", "compose",
        "-f", compose_file,
        "up",
        "--scale", f"{body.service}={body.count}",
        "--no-recreate",
        "-d",
    ]

    def _run_compose() -> subprocess.CompletedProcess:
        # Blocking subprocess call; executed off the event loop below.
        return subprocess.run(command, capture_output=True, text=True, timeout=120)

    try:
        result = await asyncio.to_thread(_run_compose)
    except subprocess.TimeoutExpired:
        raise HTTPException(504, detail="Scale operation timed out")

    if result.returncode != 0:
        raise HTTPException(
            500,
            detail=f"docker compose scale failed: {result.stderr[-500:]}",
        )
    return {"service": body.service, "count": body.count, "status": "scaling"}
# ---------------------------------------------------------------------------
# Render health check
# ---------------------------------------------------------------------------