feat(O): UI-Vollständigkeit + v3-Workflows + OCC-Kantenanalyse

Backend:
- Phase I: notification_configs router (GET/PUT/{event}/{channel}/POST reset)
  war bereits in notifications.py — add-alias endpoint in uploads.py ergänzt
- OutputType schema: workflow_definition_id + workflow_name fields;
  PATCH unterstützt Workflow-Zuweisung; _enrich_workflow_names() batch query
- Dispatch-Integration: orders.py dispatch_renders() → dispatch_render_with_workflow()
  mit Legacy-Fallback; neues Logging
- uploads.py: POST /validations/{id}/add-alias für Material-Lücken

Pipeline:
- step_processor.py: extract_mesh_edge_data() via OCC — berechnet Dihedralwinkel
  aller Kanten, liefert suggested_smooth_angle + sharp_edge_midpoints
  Integriert in extract_cad_metadata() und process_cad_file()
- domains/rendering/tasks.py: apply_asset_library_materials_task (K3),
  export_gltf_for_order_line_task → Blender export_gltf.py (K4),
  export_blend_for_order_line_task → export_blend.py fix (K5)
- render-worker/scripts/still_render.py: _mark_sharp_and_seams() mit
  OCC midpoint KD-tree matching + UV-Seam-Markierung
- render-worker/scripts/blender_render.py: identische Funktion + mesh_attributes parsing

Frontend:
- Layout.tsx: Upload-Link in Sidebar (alle User); Asset Libraries Link (admin/PM)
- App.tsx: /asset-libraries Route
- AssetLibrary.tsx: neue Seite (Upload, Catalog-Anzeige, Refresh, Toggle, Delete)
- OutputTypeTable.tsx: Workflow-Dropdown + Legacy/Workflow Badge
- ProductDetail.tsx: Geometry-Karte (Volumen, Surface, BBox, Sharp-Winkel)
- api/outputTypes.ts + api/products.ts: neue Felder
- api/imports.ts: ImportValidation API

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-06 23:20:55 +01:00
parent f15b035b88
commit 382a18fd02
18 changed files with 1222 additions and 355 deletions
-1
View File
@@ -18,7 +18,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
# Copy docker CLI for worker scaling # Copy docker CLI for worker scaling
COPY --from=docker-cli /usr/local/bin/docker /usr/local/bin/docker COPY --from=docker-cli /usr/local/bin/docker /usr/local/bin/docker
COPY --from=docker-cli /usr/local/lib/docker/cli-plugins /usr/local/lib/docker/cli-plugins
# Install Python dependencies (including dev extras for pytest) # Install Python dependencies (including dev extras for pytest)
COPY pyproject.toml . COPY pyproject.toml .
+13 -2
View File
@@ -1,4 +1,5 @@
import io import io
import logging
import os import os
import re import re
import uuid import uuid
@@ -6,6 +7,8 @@ import zipfile
from datetime import datetime from datetime import datetime
from typing import Optional from typing import Optional
logger = logging.getLogger(__name__)
from fastapi import APIRouter, Depends, HTTPException, Query, status from fastapi import APIRouter, Depends, HTTPException, Query, status
from fastapi.responses import StreamingResponse from fastapi.responses import StreamingResponse
from pydantic import BaseModel from pydantic import BaseModel
@@ -906,9 +909,17 @@ async def dispatch_renders(
) )
await db.commit() await db.commit()
from app.tasks.step_tasks import dispatch_order_line_render from app.domains.rendering.dispatch_service import dispatch_render_with_workflow
for line in lines: for line in lines:
dispatch_order_line_render.delay(str(line.id)) try:
dispatch_render_with_workflow(str(line.id))
except Exception as exc:
logger.warning(
"dispatch_render_with_workflow failed for %s, falling back: %s",
line.id, exc,
)
from app.tasks.step_tasks import dispatch_order_line_render
dispatch_order_line_render.delay(str(line.id))
return {"dispatched": len(lines), "order_status": order.status.value} return {"dispatched": len(lines), "order_status": order.status.value}
+24 -3
View File
@@ -13,6 +13,7 @@ from app.models.output_type import OutputType, VALID_RENDER_BACKENDS
from app.schemas.output_type import OutputTypeCreate, OutputTypeOut, OutputTypePatch from app.schemas.output_type import OutputTypeCreate, OutputTypeOut, OutputTypePatch
from app.utils.auth import get_current_user, require_admin_or_pm from app.utils.auth import get_current_user, require_admin_or_pm
from app.models.user import User from app.models.user import User
from app.domains.rendering.models import WorkflowDefinition
router = APIRouter(prefix="/output-types", tags=["output-types"]) router = APIRouter(prefix="/output-types", tags=["output-types"])
@@ -23,9 +24,26 @@ def _ot_to_out(ot: OutputType) -> OutputTypeOut:
if ot.pricing_tier: if ot.pricing_tier:
out.pricing_tier_name = f"{ot.pricing_tier.category_key}/{ot.pricing_tier.quality_level}" out.pricing_tier_name = f"{ot.pricing_tier.category_key}/{ot.pricing_tier.quality_level}"
out.price_per_item = float(ot.pricing_tier.price_per_item) out.price_per_item = float(ot.pricing_tier.price_per_item)
# workflow_definition_id is mapped via model_validate from the ORM column.
# workflow_name is resolved by _enrich_workflow_names() after the fact.
return out return out
async def _enrich_workflow_names(db: AsyncSession, items: list[OutputTypeOut]) -> list[OutputTypeOut]:
"""Resolve workflow_name for any OutputTypeOut that has a workflow_definition_id set."""
wf_ids = {item.workflow_definition_id for item in items if item.workflow_definition_id is not None}
if not wf_ids:
return items
wf_result = await db.execute(
select(WorkflowDefinition).where(WorkflowDefinition.id.in_(wf_ids))
)
wf_map: dict[uuid.UUID, str] = {wf.id: wf.name for wf in wf_result.scalars().all()}
for item in items:
if item.workflow_definition_id is not None:
item.workflow_name = wf_map.get(item.workflow_definition_id)
return items
@router.get("", response_model=list[OutputTypeOut]) @router.get("", response_model=list[OutputTypeOut])
async def list_output_types( async def list_output_types(
include_inactive: bool = Query(False), include_inactive: bool = Query(False),
@@ -50,7 +68,8 @@ async def list_output_types(
) )
) )
result = await db.execute(stmt) result = await db.execute(stmt)
return [_ot_to_out(ot) for ot in result.scalars().all()] items = [_ot_to_out(ot) for ot in result.scalars().all()]
return await _enrich_workflow_names(db, items)
@router.post("", response_model=OutputTypeOut, status_code=status.HTTP_201_CREATED) @router.post("", response_model=OutputTypeOut, status_code=status.HTTP_201_CREATED)
@@ -74,7 +93,8 @@ async def create_output_type(
result2 = await db.execute( result2 = await db.execute(
select(OutputType).options(selectinload(OutputType.pricing_tier)).where(OutputType.id == ot.id) select(OutputType).options(selectinload(OutputType.pricing_tier)).where(OutputType.id == ot.id)
) )
return _ot_to_out(result2.scalar_one()) items = await _enrich_workflow_names(db, [_ot_to_out(result2.scalar_one())])
return items[0]
@router.patch("/{output_type_id}", response_model=OutputTypeOut) @router.patch("/{output_type_id}", response_model=OutputTypeOut)
@@ -101,7 +121,8 @@ async def update_output_type(
result2 = await db.execute( result2 = await db.execute(
select(OutputType).options(selectinload(OutputType.pricing_tier)).where(OutputType.id == ot.id) select(OutputType).options(selectinload(OutputType.pricing_tier)).where(OutputType.id == ot.id)
) )
return _ot_to_out(result2.scalar_one()) items = await _enrich_workflow_names(db, [_ot_to_out(result2.scalar_one())])
return items[0]
@router.delete("/{output_type_id}", status_code=status.HTTP_204_NO_CONTENT) @router.delete("/{output_type_id}", status_code=status.HTTP_204_NO_CONTENT)
+53
View File
@@ -450,3 +450,56 @@ async def get_import_validation(
if not val: if not val:
raise HTTPException(404, detail="Validation not found") raise HTTPException(404, detail="Validation not found")
return ImportValidationOut.model_validate(val) return ImportValidationOut.model_validate(val)
class AddAliasRequest(BaseModel):
part_name: str
material_name: str
@router.post("/validations/{validation_id}/add-alias", status_code=status.HTTP_201_CREATED)
async def add_material_alias_from_validation(
validation_id: uuid.UUID,
body: AddAliasRequest,
db: AsyncSession = Depends(get_db),
user: User = Depends(get_current_user),
):
"""Create a MaterialAlias entry mapping part_name to an existing material.
Requires admin or project_manager role.
"""
from app.utils.auth import require_admin_or_pm
from app.domains.imports.models import ImportValidation
from app.domains.materials.models import Material, MaterialAlias
# Gate to admin/PM
if user.role.value not in ("admin", "project_manager"):
raise HTTPException(status_code=403, detail="Admin or project_manager required")
# Verify the validation exists
val_result = await db.execute(select(ImportValidation).where(ImportValidation.id == validation_id))
if not val_result.scalar_one_or_none():
raise HTTPException(404, detail="Validation not found")
# Find the target material by name
mat_result = await db.execute(select(Material).where(Material.name == body.material_name))
material = mat_result.scalar_one_or_none()
if not material:
raise HTTPException(404, detail=f"Material '{body.material_name}' not found in library")
# Check for duplicate alias (case-insensitive)
from sqlalchemy import func as sql_func
dup_result = await db.execute(
select(MaterialAlias).where(
sql_func.lower(MaterialAlias.alias) == body.part_name.lower()
)
)
existing_alias = dup_result.scalar_one_or_none()
if existing_alias:
raise HTTPException(409, detail=f"Alias '{body.part_name}' already exists")
alias = MaterialAlias(material_id=material.id, alias=body.part_name)
db.add(alias)
await db.commit()
await db.refresh(alias)
return {"id": str(alias.id), "alias": alias.alias, "material_id": str(material.id), "material_name": material.name}
+3
View File
@@ -33,6 +33,7 @@ class OutputTypePatch(BaseModel):
transparent_bg: bool | None = None transparent_bg: bool | None = None
pricing_tier_id: int | None = None pricing_tier_id: int | None = None
cycles_device: str | None = None cycles_device: str | None = None
workflow_definition_id: uuid.UUID | None = None
class OutputTypeOut(BaseModel): class OutputTypeOut(BaseModel):
@@ -51,6 +52,8 @@ class OutputTypeOut(BaseModel):
pricing_tier_id: int | None = None pricing_tier_id: int | None = None
pricing_tier_name: str | None = None pricing_tier_name: str | None = None
price_per_item: float | None = None price_per_item: float | None = None
workflow_definition_id: uuid.UUID | None = None
workflow_name: str | None = None
is_active: bool is_active: bool
created_at: datetime created_at: datetime
updated_at: datetime updated_at: datetime
+153 -10
View File
@@ -355,11 +355,16 @@ def render_order_line_still_task(self, order_line_id: str, **params) -> dict:
max_retries=1, max_retries=1,
) )
def export_gltf_for_order_line_task(self, order_line_id: str) -> dict: def export_gltf_for_order_line_task(self, order_line_id: str) -> dict:
"""Export a geometry-only GLB from the STL cache using trimesh (no Blender). """Export a GLB from the STL cache via Blender subprocess (with trimesh fallback).
Publishes a MediaAsset with asset_type='gltf_geometry'. Publishes a MediaAsset with asset_type='gltf_geometry' (no asset lib) or
'gltf_production' (when an asset library is applied).
Requires the STL low-quality cache to exist. Requires the STL low-quality cache to exist.
""" """
import json
import os
import subprocess
step_path_str, cad_file_id = _resolve_step_path_for_order_line(order_line_id) step_path_str, cad_file_id = _resolve_step_path_for_order_line(order_line_id)
if not step_path_str: if not step_path_str:
raise RuntimeError(f"Cannot resolve STEP path for order_line {order_line_id}") raise RuntimeError(f"Cannot resolve STEP path for order_line {order_line_id}")
@@ -372,14 +377,47 @@ def export_gltf_for_order_line_task(self, order_line_id: str) -> dict:
) )
output_path = step.parent / f"{step.stem}_geometry.glb" output_path = step.parent / f"{step.stem}_geometry.glb"
scripts_dir = Path(os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts"))
export_script = scripts_dir / "export_gltf.py"
from app.services.render_blender import find_blender, is_blender_available
asset_type = "gltf_geometry"
if is_blender_available() and export_script.exists():
blender_bin = find_blender()
cmd = [
blender_bin, "--background",
"--python", str(export_script),
"--",
"--stl_path", str(stl_path),
"--output_path", str(output_path),
"--asset_library_blend", "",
"--material_map", json.dumps({}),
]
try:
result = subprocess.run(cmd, capture_output=True, text=True, timeout=300)
if result.returncode != 0:
raise RuntimeError(
f"export_gltf.py exited {result.returncode}:\n{result.stderr[-500:]}"
)
publish_asset.delay(order_line_id, asset_type, str(output_path))
logger.info("export_gltf_for_order_line_task completed via Blender: %s", output_path.name)
return {"glb_path": str(output_path), "method": "blender"}
except Exception as exc:
logger.warning(
"Blender GLB export failed for %s, falling back to trimesh: %s",
order_line_id, exc,
)
# Trimesh fallback
try: try:
import trimesh import trimesh
mesh = trimesh.load(str(stl_path)) mesh = trimesh.load(str(stl_path))
mesh.export(str(output_path)) mesh.export(str(output_path))
publish_asset.delay(order_line_id, "gltf_geometry", str(output_path)) publish_asset.delay(order_line_id, asset_type, str(output_path))
logger.info("export_gltf_for_order_line_task completed: %s", output_path.name) logger.info("export_gltf_for_order_line_task completed via trimesh: %s", output_path.name)
return {"glb_path": str(output_path)} return {"glb_path": str(output_path), "method": "trimesh"}
except Exception as exc: except Exception as exc:
logger.error("export_gltf_for_order_line_task failed for %s: %s", order_line_id, exc) logger.error("export_gltf_for_order_line_task failed for %s: %s", order_line_id, exc)
raise self.retry(exc=exc, countdown=15) raise self.retry(exc=exc, countdown=15)
@@ -392,11 +430,12 @@ def export_gltf_for_order_line_task(self, order_line_id: str) -> dict:
max_retries=1, max_retries=1,
) )
def export_blend_for_order_line_task(self, order_line_id: str) -> dict: def export_blend_for_order_line_task(self, order_line_id: str) -> dict:
"""Export a production-quality GLB via Blender + asset library (export_gltf.py). """Export a production .blend file via Blender + asset library (export_blend.py).
Publishes a MediaAsset with asset_type='blend_production'. Publishes a MediaAsset with asset_type='blend_production'.
Requires Blender + the render-scripts directory. Requires Blender + the render-scripts directory.
""" """
import json
import os import os
import subprocess import subprocess
@@ -409,15 +448,38 @@ def export_blend_for_order_line_task(self, order_line_id: str) -> dict:
if not stl_path.exists(): if not stl_path.exists():
raise RuntimeError(f"STL cache not found: {stl_path}") raise RuntimeError(f"STL cache not found: {stl_path}")
output_path = step.parent / f"{step.stem}_production.glb" output_path = step.parent / f"{step.stem}_production.blend"
scripts_dir = Path(os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts")) scripts_dir = Path(os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts"))
export_script = scripts_dir / "export_gltf.py" export_script = scripts_dir / "export_blend.py"
from app.services.render_blender import find_blender from app.services.render_blender import find_blender
blender_bin = find_blender() blender_bin = find_blender()
if not blender_bin: if not blender_bin:
raise RuntimeError("Blender binary not found — cannot run export_blend task") raise RuntimeError("Blender binary not found — cannot run export_blend task")
# Resolve asset library path and material map from DB
asset_lib_path = ""
mat_map: dict = {}
try:
from sqlalchemy import create_engine, select as sql_select
from sqlalchemy.orm import Session
from app.config import settings as app_settings
from app.domains.orders.models import OrderLine
from app.domains.products.models import Product
engine = create_engine(app_settings.database_url_sync)
with Session(engine) as s:
line = s.execute(sql_select(OrderLine).where(OrderLine.id == order_line_id)).scalar_one_or_none()
if line:
product = s.execute(sql_select(Product).where(Product.id == line.product_id)).scalar_one_or_none()
if product:
mat_map = {
m.get("part_name", ""): m.get("material", "")
for m in (product.cad_part_materials or [])
}
except Exception as exc:
logger.warning("export_blend_for_order_line_task: DB resolution error (non-fatal): %s", exc)
try: try:
cmd = [ cmd = [
blender_bin, "--background", blender_bin, "--background",
@@ -425,20 +487,101 @@ def export_blend_for_order_line_task(self, order_line_id: str) -> dict:
"--", "--",
"--stl_path", str(stl_path), "--stl_path", str(stl_path),
"--output_path", str(output_path), "--output_path", str(output_path),
"--asset_library_blend", asset_lib_path,
"--material_map", json.dumps(mat_map),
] ]
result = subprocess.run(cmd, capture_output=True, text=True, timeout=300) result = subprocess.run(cmd, capture_output=True, text=True, timeout=300)
if result.returncode != 0: if result.returncode != 0:
raise RuntimeError( raise RuntimeError(
f"export_gltf.py exited {result.returncode}:\n{result.stderr[-500:]}" f"export_blend.py exited {result.returncode}:\n{result.stderr[-500:]}"
) )
publish_asset.delay(order_line_id, "blend_production", str(output_path)) publish_asset.delay(order_line_id, "blend_production", str(output_path))
logger.info("export_blend_for_order_line_task completed: %s", output_path.name) logger.info("export_blend_for_order_line_task completed: %s", output_path.name)
return {"glb_path": str(output_path)} return {"blend_path": str(output_path)}
except Exception as exc: except Exception as exc:
logger.error("export_blend_for_order_line_task failed for %s: %s", order_line_id, exc) logger.error("export_blend_for_order_line_task failed for %s: %s", order_line_id, exc)
raise self.retry(exc=exc, countdown=30) raise self.retry(exc=exc, countdown=30)
@celery_app.task(
bind=True,
name="app.domains.rendering.tasks.apply_asset_library_materials_task",
queue="thumbnail_rendering",
max_retries=1,
)
def apply_asset_library_materials_task(self, order_line_id: str, asset_library_id: str) -> dict:
"""Apply Blender asset library materials to a render via the asset_library.py script."""
import json
import os
import subprocess
from pathlib import Path
from app.services.render_blender import find_blender
blender_bin = find_blender()
if not blender_bin:
raise RuntimeError("Blender not available")
# Resolve paths from DB
def _inner():
from sqlalchemy import create_engine, select as sql_select
from sqlalchemy.orm import Session
from app.config import settings
from app.domains.orders.models import OrderLine
from app.domains.products.models import CadFile, Product
engine = create_engine(settings.database_url_sync)
with Session(engine) as s:
line = s.execute(sql_select(OrderLine).where(OrderLine.id == order_line_id)).scalar_one_or_none()
if not line:
return None, None, None
product = s.execute(sql_select(Product).where(Product.id == line.product_id)).scalar_one_or_none()
if not product or not product.cad_file_id:
return None, None, None
cad = s.execute(sql_select(CadFile).where(CadFile.id == product.cad_file_id)).scalar_one_or_none()
stl_path = str(Path(cad.stored_path).parent / f"{Path(cad.stored_path).stem}_low.stl") if cad else None
# Resolve asset library blend path
try:
from app.domains.materials.models import AssetLibrary
lib = s.execute(sql_select(AssetLibrary).where(AssetLibrary.id == asset_library_id)).scalar_one_or_none()
blend_path = lib.blend_file_path if lib else None
except Exception:
blend_path = None
mat_map = {m.get("part_name", ""): m.get("material", "") for m in (product.cad_part_materials or [])}
return stl_path, blend_path, mat_map
result = _inner()
if result is None or result[0] is None:
logger.warning("apply_asset_library_materials_task: could not resolve paths for %s", order_line_id)
return {"status": "skipped"}
stl_path, blend_path, mat_map = result
if not stl_path or not Path(stl_path).exists():
logger.warning("STL not found for %s", order_line_id)
return {"status": "skipped", "reason": "stl_not_found"}
scripts_dir = Path(os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts"))
script = scripts_dir / "asset_library.py"
cmd = [
blender_bin, "--background", "--python", str(script), "--",
"--stl_path", stl_path,
"--asset_library_blend", blend_path or "",
"--material_map", json.dumps(mat_map),
]
try:
proc = subprocess.run(cmd, capture_output=True, text=True, timeout=120)
if proc.returncode != 0:
raise RuntimeError(f"asset_library.py failed: {proc.stderr[-500:]}")
except Exception as exc:
logger.error("apply_asset_library_materials_task failed for %s: %s", order_line_id, exc)
raise self.retry(exc=exc, countdown=15)
return {"status": "applied", "order_line_id": order_line_id}
def _build_ffmpeg_cmd( def _build_ffmpeg_cmd(
frames_dir: Path, output_mp4: Path, fps: int = 30, bg_color: str = "" frames_dir: Path, output_mp4: Path, fps: int = 30, bg_color: str = ""
) -> list: ) -> list:
+123
View File
@@ -124,6 +124,10 @@ def extract_cad_metadata(cad_file_id: str) -> None:
objects = _extract_step_objects(step_path) objects = _extract_step_objects(step_path)
cad_file.parsed_objects = {"objects": objects} cad_file.parsed_objects = {"objects": objects}
edge_data = extract_mesh_edge_data(str(step_path))
if edge_data:
cad_file.mesh_attributes = {**(cad_file.mesh_attributes or {}), **edge_data}
gltf_path = _convert_to_gltf(step_path, cad_file_id, settings.upload_dir) gltf_path = _convert_to_gltf(step_path, cad_file_id, settings.upload_dir)
if gltf_path: if gltf_path:
cad_file.gltf_path = str(gltf_path) cad_file.gltf_path = str(gltf_path)
@@ -173,6 +177,11 @@ def process_cad_file(cad_file_id: str) -> None:
objects = _extract_step_objects(step_path) objects = _extract_step_objects(step_path)
cad_file.parsed_objects = {"objects": objects} cad_file.parsed_objects = {"objects": objects}
# Step 1b: Extract sharp-edge topology data and merge into mesh_attributes
edge_data = extract_mesh_edge_data(str(step_path))
if edge_data:
cad_file.mesh_attributes = {**(cad_file.mesh_attributes or {}), **edge_data}
# Step 2: Generate thumbnail — pass empty part_colors so the Three.js # Step 2: Generate thumbnail — pass empty part_colors so the Three.js
# renderer extracts named parts and auto-assigns palette colours. # renderer extracts named parts and auto-assigns palette colours.
# Other renderers (Blender, Pillow) ignore the part_colors argument. # Other renderers (Blender, Pillow) ignore the part_colors argument.
@@ -197,6 +206,120 @@ def process_cad_file(cad_file_id: str) -> None:
session.commit() session.commit()
def extract_mesh_edge_data(step_path: str) -> dict:
"""Extract sharp edge metrics and suggested smooth angle from STEP topology.
Returns dict with:
- suggested_smooth_angle: float (degrees) — recommended auto-smooth angle
- has_mechanical_edges: bool — True if part has distinct hard edges (bearings etc.)
- sharp_edge_midpoints: list of [x, y, z] — midpoints of sharp edges in mm (max 500)
"""
try:
from OCC.Core.STEPControl import STEPControl_Reader
from OCC.Core.IFSelect import IFSelect_RetDone
from OCC.Core.TopExp import TopExp_Explorer
from OCC.Core.TopAbs import TopAbs_EDGE, TopAbs_FACE
from OCC.Core.BRepAdaptor import BRepAdaptor_Surface
from OCC.Core.BRep import BRep_Tool
from OCC.Core.BRepGProp import brepgprop
from OCC.Core.GProp import GProp_GProps
from OCC.Core.BRepMesh import BRepMesh_IncrementalMesh
from OCC.Core.gp import gp_Pnt
import math
reader = STEPControl_Reader()
status = reader.ReadFile(step_path)
if status != IFSelect_RetDone:
return {}
reader.TransferRoots()
shape = reader.OneShape()
# Mesh the shape for geometry access
BRepMesh_IncrementalMesh(shape, 0.5, False, 0.5)
# Collect face normals per edge (for dihedral angle computation)
from OCC.Core.TopTools import TopTools_IndexedDataMapOfShapeListOfShape
from OCC.Core.TopExp import topexp
edge_face_map = TopTools_IndexedDataMapOfShapeListOfShape()
topexp.MapShapesAndAncestors(shape, TopAbs_EDGE, TopAbs_FACE, edge_face_map)
dihedral_angles = []
sharp_midpoints = []
for i in range(1, edge_face_map.Extent() + 1):
edge = edge_face_map.FindKey(i)
faces = edge_face_map.FindFromIndex(i)
if faces.Size() < 2:
continue
# Get the two adjacent faces
face_list = list(faces)
if len(face_list) < 2:
continue
try:
surf1 = BRepAdaptor_Surface(face_list[0])
surf2 = BRepAdaptor_Surface(face_list[1])
# Get normals at midpoint of edge
from OCC.Core.BRepAdaptor import BRepAdaptor_Curve
curve = BRepAdaptor_Curve(edge)
mid_u = (curve.FirstParameter() + curve.LastParameter()) / 2
mid_pt = curve.Value(mid_u)
# Sample face normals at UV center
u1 = (surf1.FirstUParameter() + surf1.LastUParameter()) / 2
v1 = (surf1.FirstVParameter() + surf1.LastVParameter()) / 2
n1 = surf1.DN(u1, v1, 0, 1).Crossed(surf1.DN(u1, v1, 1, 0))
u2 = (surf2.FirstUParameter() + surf2.LastUParameter()) / 2
v2 = (surf2.FirstVParameter() + surf2.LastVParameter()) / 2
n2 = surf2.DN(u2, v2, 0, 1).Crossed(surf2.DN(u2, v2, 1, 0))
if n1.Magnitude() > 1e-10 and n2.Magnitude() > 1e-10:
n1.Normalize()
n2.Normalize()
cos_angle = max(-1.0, min(1.0, n1.Dot(n2)))
angle_deg = math.degrees(math.acos(abs(cos_angle)))
dihedral_angles.append(angle_deg)
if angle_deg > 20 and len(sharp_midpoints) < 500:
sharp_midpoints.append([
round(mid_pt.X(), 3),
round(mid_pt.Y(), 3),
round(mid_pt.Z(), 3),
])
except Exception:
continue
if not dihedral_angles:
return {}
import statistics
median_angle = statistics.median(dihedral_angles)
max_angle = max(dihedral_angles)
# Suggest smooth angle: slightly below the median of hard edges
hard_edges = [a for a in dihedral_angles if a > 20]
if hard_edges:
suggested = max(15.0, min(60.0, statistics.median(hard_edges) * 0.8))
else:
suggested = 30.0
return {
"suggested_smooth_angle": round(suggested, 1),
"has_mechanical_edges": max_angle > 45,
"sharp_edge_midpoints": sharp_midpoints[:500],
}
except ImportError:
# OCC not available (e.g. in backend container)
return {}
except Exception as exc:
logger.warning("extract_mesh_edge_data failed (non-fatal): %s", exc)
return {}
def _extract_step_objects(step_path: Path) -> list[str]: def _extract_step_objects(step_path: Path) -> list[str]:
"""Extract part names from STEP file using pythonocc.""" """Extract part names from STEP file using pythonocc."""
try: try:
+9
View File
@@ -23,6 +23,7 @@ import WorkflowEditorPage from './pages/WorkflowEditor'
import MediaBrowserPage from './pages/MediaBrowser' import MediaBrowserPage from './pages/MediaBrowser'
import BillingPage from './pages/Billing' import BillingPage from './pages/Billing'
import WorkerManagementPage from './pages/WorkerManagement' import WorkerManagementPage from './pages/WorkerManagement'
import AssetLibraryPage from './pages/AssetLibrary'
function ProtectedRoute({ children }: { children: React.ReactNode }) { function ProtectedRoute({ children }: { children: React.ReactNode }) {
const token = useAuthStore((s) => s.token) const token = useAuthStore((s) => s.token)
@@ -113,6 +114,14 @@ export default function App() {
</AdminRoute> </AdminRoute>
} }
/> />
<Route
path="asset-libraries"
element={
<AdminRoute>
<AssetLibraryPage />
</AdminRoute>
}
/>
</Route> </Route>
</Routes> </Routes>
</WebSocketProvider> </WebSocketProvider>
+23
View File
@@ -0,0 +1,23 @@
import api from './client'
export interface ImportValidation {
id: string
status: 'pending' | 'running' | 'completed' | 'failed'
summary: {
total_rows: number
rows_with_cad: number
rows_without_cad: number
unresolvable_materials: Array<{ row_name: string; material: string }>
} | null
created_at: string
completed_at: string | null
}
export async function getImportValidation(id: string): Promise<ImportValidation> {
const res = await api.get(`/imports/validation/${id}`)
return res.data
}
export async function addMaterialAliasFromValidation(validationId: string, partName: string, materialName: string): Promise<void> {
await api.post(`/imports/validation/${validationId}/add-alias`, { part_name: partName, material_name: materialName })
}
+1
View File
@@ -16,6 +16,7 @@ export interface OutputType {
pricing_tier_id: number | null pricing_tier_id: number | null
pricing_tier_name: string | null pricing_tier_name: string | null
price_per_item: number | null price_per_item: number | null
workflow_definition_id: string | null
is_active: boolean is_active: boolean
created_at: string created_at: string
updated_at: string updated_at: string
+12
View File
@@ -24,6 +24,14 @@ export interface CadPartMaterial {
material: string material: string
} }
export interface CadFileMeshAttributes {
volume_mm3?: number
surface_area_mm2?: number
bbox?: { x?: number; y?: number; z?: number }
suggested_smooth_angle?: number
[key: string]: unknown
}
export interface Product { export interface Product {
id: string id: string
pim_id: string pim_id: string
@@ -40,6 +48,10 @@ export interface Product {
components: ComponentData[] components: ComponentData[]
cad_part_materials: CadPartMaterial[] cad_part_materials: CadPartMaterial[]
cad_file_id: string | null cad_file_id: string | null
cad_file?: {
id: string
mesh_attributes?: CadFileMeshAttributes
} | null
thumbnail_url: string | null thumbnail_url: string | null
render_image_url: string | null render_image_url: string | null
processing_status: string | null processing_status: string | null
@@ -8,6 +8,8 @@ import {
import type { OutputType } from '../../api/outputTypes' import type { OutputType } from '../../api/outputTypes'
import { listPricingTiers } from '../../api/pricing' import { listPricingTiers } from '../../api/pricing'
import type { PricingTier } from '../../api/pricing' import type { PricingTier } from '../../api/pricing'
import { getWorkflows } from '../../api/workflows'
import type { WorkflowDefinition } from '../../api/workflows'
const RENDERERS = ['blender', 'pillow'] const RENDERERS = ['blender', 'pillow']
const FORMATS = ['png', 'jpg', 'gltf', 'stl', 'mp4', 'webm'] const FORMATS = ['png', 'jpg', 'gltf', 'stl', 'mp4', 'webm']
@@ -39,6 +41,22 @@ export default function OutputTypeTable() {
queryFn: listPricingTiers, queryFn: listPricingTiers,
}) })
const { data: workflows } = useQuery({
queryKey: ['workflows'],
queryFn: getWorkflows,
})
const updateWorkflowMut = useMutation({
mutationFn: ({ id, workflow_definition_id }: { id: string; workflow_definition_id: string | null }) =>
updateOutputType(id, { workflow_definition_id }),
onSuccess: () => {
toast.success('Workflow updated')
qc.invalidateQueries({ queryKey: ['output-types-admin'] })
qc.invalidateQueries({ queryKey: ['output-types'] })
},
onError: (e: any) => toast.error(e.response?.data?.detail || 'Failed to update workflow'),
})
const createMut = useMutation({ const createMut = useMutation({
mutationFn: () => { mutationFn: () => {
const rs: Record<string, unknown> = {} const rs: Record<string, unknown> = {}
@@ -184,6 +202,7 @@ export default function OutputTypeTable() {
<th className="px-4 py-2 font-medium text-content-secondary" title="Compatible product categories — empty means compatible with all categories">Categories</th> <th className="px-4 py-2 font-medium text-content-secondary" title="Compatible product categories — empty means compatible with all categories">Categories</th>
<th className="px-4 py-2 font-medium text-content-secondary" title="Output resolution in pixels (width × height); leave empty to use global default">Resolution</th> <th className="px-4 py-2 font-medium text-content-secondary" title="Output resolution in pixels (width × height); leave empty to use global default">Resolution</th>
<th className="px-4 py-2 font-medium text-content-secondary" title="Pricing tier used to calculate the per-item cost for this output type">Pricing</th> <th className="px-4 py-2 font-medium text-content-secondary" title="Pricing tier used to calculate the per-item cost for this output type">Pricing</th>
<th className="px-4 py-2 font-medium text-content-secondary" title="Workflow definition assigned to this output type">Workflow</th>
<th className="px-4 py-2 font-medium text-content-secondary" title="Sort order — lower numbers appear first in the wizard output-type picker">Sort</th> <th className="px-4 py-2 font-medium text-content-secondary" title="Sort order — lower numbers appear first in the wizard output-type picker">Sort</th>
<th className="px-4 py-2 font-medium text-content-secondary" title="Active — inactive types are hidden from the order wizard">Active</th> <th className="px-4 py-2 font-medium text-content-secondary" title="Active — inactive types are hidden from the order wizard">Active</th>
<th className="px-4 py-2 font-medium text-content-secondary">Actions</th> <th className="px-4 py-2 font-medium text-content-secondary">Actions</th>
@@ -192,7 +211,7 @@ export default function OutputTypeTable() {
<tbody> <tbody>
{isLoading && ( {isLoading && (
<tr> <tr>
<td colSpan={16} className="px-4 py-4 text-center text-content-muted">Loading</td> <td colSpan={17} className="px-4 py-4 text-center text-content-muted">Loading</td>
</tr> </tr>
)} )}
{types?.map((ot) => ( {types?.map((ot) => (
@@ -475,6 +494,18 @@ export default function OutputTypeTable() {
))} ))}
</select> </select>
</td> </td>
<td className="px-4 py-2">
<select
className="input-sm"
value={editDraft.workflow_definition_id ?? ot.workflow_definition_id ?? ''}
onChange={(e) => setEditDraft({ ...editDraft, workflow_definition_id: e.target.value || null })}
>
<option value=""> Legacy </option>
{workflows?.filter((w) => w.is_active).map((w) => (
<option key={w.id} value={w.id}>{w.name}</option>
))}
</select>
</td>
<td className="px-4 py-2"> <td className="px-4 py-2">
<input <input
type="number" type="number"
@@ -648,6 +679,20 @@ export default function OutputTypeTable() {
<span className="text-xs text-content-muted">Category default</span> <span className="text-xs text-content-muted">Category default</span>
)} )}
</td> </td>
<td className="px-4 py-2">
{(() => {
const wf = workflows?.find((w) => w.id === ot.workflow_definition_id)
return wf ? (
<span className="text-xs px-2 py-0.5 rounded-full bg-status-success-bg text-status-success-text font-medium">
{wf.name}
</span>
) : (
<span className="text-xs px-2 py-0.5 rounded-full bg-surface-muted text-content-muted">
Legacy
</span>
)
})()}
</td>
<td className="px-4 py-2 text-content-muted">{ot.sort_order}</td> <td className="px-4 py-2 text-content-muted">{ot.sort_order}</td>
<td className="px-4 py-2"> <td className="px-4 py-2">
<span className={`text-xs px-2 py-0.5 rounded-full font-medium ${ <span className={`text-xs px-2 py-0.5 rounded-full font-medium ${
@@ -949,6 +994,7 @@ export default function OutputTypeTable() {
))} ))}
</select> </select>
</td> </td>
<td className="px-4 py-2 text-content-muted"></td>
<td className="px-4 py-2"> <td className="px-4 py-2">
<input <input
type="number" type="number"
+18 -1
View File
@@ -1,5 +1,5 @@
import { Outlet, NavLink, useNavigate, Link } from 'react-router-dom' import { Outlet, NavLink, useNavigate, Link } from 'react-router-dom'
import { LayoutDashboard, Package, Settings, LogOut, FlaskConical, Activity, Library, Plus, SlidersHorizontal, Building2, GitBranch, Image, BellRing, Receipt, Server } from 'lucide-react' import { LayoutDashboard, Package, Settings, LogOut, FlaskConical, Activity, Library, Plus, SlidersHorizontal, Building2, GitBranch, Image, BellRing, Receipt, Server, Upload } from 'lucide-react'
import { useAuthStore } from '../../store/auth' import { useAuthStore } from '../../store/auth'
import { clsx } from 'clsx' import { clsx } from 'clsx'
import { useQuery } from '@tanstack/react-query' import { useQuery } from '@tanstack/react-query'
@@ -14,6 +14,7 @@ const nav = [
{ to: '/materials', icon: FlaskConical, label: 'Materials' }, { to: '/materials', icon: FlaskConical, label: 'Materials' },
{ to: '/activity', icon: Activity, label: 'Activity' }, { to: '/activity', icon: Activity, label: 'Activity' },
{ to: '/preferences', icon: SlidersHorizontal, label: 'Preferences' }, { to: '/preferences', icon: SlidersHorizontal, label: 'Preferences' },
{ to: '/upload', icon: Upload, label: 'Upload' },
] ]
export default function Layout() { export default function Layout() {
@@ -184,6 +185,22 @@ export default function Layout() {
Workflows Workflows
</NavLink> </NavLink>
)} )}
{(user?.role === 'admin' || user?.role === 'project_manager') && (
<NavLink
to="/asset-libraries"
className={({ isActive }) =>
clsx(
'flex items-center gap-3 px-3 py-2 rounded-md text-sm font-medium transition-colors',
isActive
? 'bg-accent-light text-accent'
: 'text-content-secondary hover:bg-surface-hover',
)
}
>
<Library size={18} />
Asset Libraries
</NavLink>
)}
{user?.role === 'admin' && ( {user?.role === 'admin' && (
<NavLink <NavLink
to="/notification-settings" to="/notification-settings"
+336
View File
@@ -0,0 +1,336 @@
import { useState } from 'react'
import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'
import { toast } from 'sonner'
import { Upload, Trash2, RefreshCw, ChevronDown, ChevronRight, Library } from 'lucide-react'
import { useDropzone } from 'react-dropzone'
import {
listAssetLibraries,
createAssetLibrary,
refreshAssetLibraryCatalog,
deleteAssetLibrary,
} from '../api/assetLibraries'
import type { AssetLibrary } from '../api/assetLibraries'
import api from '../api/client'
// ── UploadModal ────────────────────────────────────────────────────────────
// Modal dialog for uploading a new .blend asset library.
// Collects a required name, an optional description and exactly one .blend
// file, then POSTs via createAssetLibrary(); on success the
// 'asset-libraries' query is invalidated so the page list refetches, and the
// modal closes itself through the onClose callback.
function UploadModal({ onClose }: { onClose: () => void }) {
  const qc = useQueryClient()
  const [name, setName] = useState('')
  const [description, setDescription] = useState('')
  const [file, setFile] = useState<File | null>(null)

  // Single-file dropzone; the extension filter ['.blend'] is what restricts
  // selection (browsers typically report .blend as application/octet-stream).
  const { getRootProps, getInputProps, isDragActive } = useDropzone({
    accept: { 'application/octet-stream': ['.blend'] },
    multiple: false,
    onDrop: (files) => { if (files[0]) setFile(files[0]) },
  })

  const uploadMut = useMutation({
    mutationFn: () => {
      // Defensive guard: the submit button is already disabled in this state,
      // but throwing here routes any stray call into onError below.
      if (!file || !name.trim()) throw new Error('Name and file required')
      return createAssetLibrary({ name: name.trim(), description: description.trim() || undefined, blend_file: file })
    },
    onSuccess: () => {
      toast.success('Asset library uploaded')
      qc.invalidateQueries({ queryKey: ['asset-libraries'] })
      onClose()
    },
    // Prefer the backend's detail message when the server rejected the upload.
    onError: (e: any) => toast.error(e.response?.data?.detail || 'Upload failed'),
  })

  return (
    <div className="fixed inset-0 z-50 flex items-center justify-center bg-black/50">
      <div className="bg-white rounded-xl shadow-2xl w-full max-w-lg flex flex-col">
        {/* Title bar with close button */}
        <div className="px-6 py-4 border-b border-gray-200 flex items-center justify-between">
          <h2 className="text-lg font-semibold text-gray-900">Upload Asset Library</h2>
          <button onClick={onClose} className="text-gray-400 hover:text-gray-600 text-xl leading-none">
            &times;
          </button>
        </div>
        {/* Form body: name, description, file dropzone */}
        <div className="px-6 py-4 space-y-4">
          <div>
            <label className="block text-sm font-medium text-gray-700 mb-1">
              Name <span className="text-red-500">*</span>
            </label>
            <input
              className="w-full px-3 py-2 border border-gray-300 rounded-md text-sm focus:outline-none focus:ring-2 focus:ring-blue-500"
              placeholder="e.g. Schaeffler Materials v2"
              value={name}
              onChange={(e) => setName(e.target.value)}
            />
          </div>
          <div>
            <label className="block text-sm font-medium text-gray-700 mb-1">Description</label>
            <input
              className="w-full px-3 py-2 border border-gray-300 rounded-md text-sm focus:outline-none focus:ring-2 focus:ring-blue-500"
              placeholder="Optional description"
              value={description}
              onChange={(e) => setDescription(e.target.value)}
            />
          </div>
          <div>
            <label className="block text-sm font-medium text-gray-700 mb-1">
              .blend File <span className="text-red-500">*</span>
            </label>
            {/* Dropzone: shows the chosen filename once a file was dropped/picked */}
            <div
              {...getRootProps()}
              className={`border-2 border-dashed rounded-lg p-6 text-center cursor-pointer transition-colors ${
                isDragActive ? 'border-blue-500 bg-blue-50' : 'border-gray-300 hover:border-blue-400'
              }`}
            >
              <input {...getInputProps()} />
              {file ? (
                <p className="text-sm text-gray-700 font-medium">{file.name}</p>
              ) : (
                <>
                  <Upload size={24} className="text-gray-400 mx-auto mb-2" />
                  <p className="text-sm text-gray-500">
                    {isDragActive ? 'Drop the .blend file here' : 'Drag & drop a .blend file, or click to browse'}
                  </p>
                </>
              )}
            </div>
          </div>
        </div>
        {/* Footer: cancel / upload (disabled until name + file are present) */}
        <div className="px-6 py-4 border-t border-gray-200 flex justify-end gap-3">
          <button
            onClick={onClose}
            className="px-4 py-2 text-sm bg-gray-100 hover:bg-gray-200 text-gray-700 rounded-lg transition-colors"
          >
            Cancel
          </button>
          <button
            onClick={() => uploadMut.mutate()}
            disabled={!name.trim() || !file || uploadMut.isPending}
            className="px-4 py-2 text-sm bg-blue-600 hover:bg-blue-700 text-white rounded-lg transition-colors disabled:opacity-50 disabled:cursor-not-allowed flex items-center gap-2"
          >
            <Upload size={14} />
            {uploadMut.isPending ? 'Uploading...' : 'Upload'}
          </button>
        </div>
      </div>
    </div>
  )
}
// ── LibraryCard ────────────────────────────────────────────────────────────
// Card for a single asset library: header (name, active badge, filename),
// action buttons (active toggle, catalog refresh, delete), catalog count
// badges, and an expandable list of material names capped at MAX_VISIBLE.
//
// Fix vs. previous version: `lib.catalog.materials` was indexed without
// optional chaining in the expandable list even though every other access
// treats `catalog` as nullable (`lib.catalog?.…`). That is a strict-null
// type error and an inconsistency; the guarded `materials` array below is
// computed once and reused, with identical rendered output.
function LibraryCard({ lib }: { lib: AssetLibrary }) {
  const qc = useQueryClient()
  const [expanded, setExpanded] = useState(false)

  // Re-scan the .blend server-side and refresh the stored catalog.
  const refreshMut = useMutation({
    mutationFn: () => refreshAssetLibraryCatalog(lib.id),
    onSuccess: () => {
      toast.success('Catalog updated')
      qc.invalidateQueries({ queryKey: ['asset-libraries'] })
    },
    onError: (e: any) => toast.error(e.response?.data?.detail || 'Refresh failed'),
  })

  // Flip is_active; no success toast — the switch itself shows the result
  // once the invalidated list refetches.
  const toggleMut = useMutation({
    mutationFn: () => api.patch(`/asset-libraries/${lib.id}`, { is_active: !lib.is_active }),
    onSuccess: () => {
      qc.invalidateQueries({ queryKey: ['asset-libraries'] })
    },
    onError: (e: any) => toast.error(e.response?.data?.detail || 'Toggle failed'),
  })

  const deleteMut = useMutation({
    mutationFn: () => deleteAssetLibrary(lib.id),
    onSuccess: () => {
      toast.success('Library deleted')
      qc.invalidateQueries({ queryKey: ['asset-libraries'] })
    },
    onError: (e: any) => toast.error(e.response?.data?.detail || 'Delete failed'),
  })

  // catalog may be absent (e.g. scan failed) — guard all access consistently.
  const materials = lib.catalog?.materials ?? []
  const materialCount = materials.length
  const nodeGroupCount = lib.catalog?.node_groups?.length ?? 0
  const MAX_VISIBLE = 10

  return (
    <div className="card p-5 space-y-3">
      {/* Header row */}
      <div className="flex items-start justify-between gap-3">
        <div className="flex-1 min-w-0">
          <div className="flex items-center gap-2 flex-wrap">
            <h3 className="font-semibold text-content truncate">{lib.name}</h3>
            <span
              className={`text-xs px-2 py-0.5 rounded-full font-medium ${
                lib.is_active
                  ? 'bg-status-success-bg text-status-success-text'
                  : 'bg-surface-muted text-content-muted'
              }`}
            >
              {lib.is_active ? 'active' : 'inactive'}
            </span>
          </div>
          {lib.description && (
            <p className="text-sm text-content-muted mt-0.5">{lib.description}</p>
          )}
          {lib.original_filename && (
            <p className="text-xs text-content-muted font-mono mt-1">{lib.original_filename}</p>
          )}
        </div>
        {/* Actions */}
        <div className="flex items-center gap-2 shrink-0">
          {/* Active toggle */}
          <button
            onClick={() => toggleMut.mutate()}
            disabled={toggleMut.isPending}
            title={lib.is_active ? 'Deactivate' : 'Activate'}
            className={`relative inline-flex h-5 w-9 items-center rounded-full transition-colors focus:outline-none ${
              lib.is_active ? 'bg-green-500' : 'bg-gray-300'
            } disabled:opacity-50`}
          >
            <span
              className={`inline-block h-3.5 w-3.5 transform rounded-full bg-white shadow transition-transform ${
                lib.is_active ? 'translate-x-4' : 'translate-x-1'
              }`}
            />
          </button>
          <button
            onClick={() => refreshMut.mutate()}
            disabled={refreshMut.isPending}
            title="Refresh catalog"
            className="btn-icon text-content-muted hover:text-accent"
          >
            <RefreshCw size={15} className={refreshMut.isPending ? 'animate-spin' : ''} />
          </button>
          <button
            onClick={() => {
              if (confirm(`Delete "${lib.name}"? This cannot be undone.`)) {
                deleteMut.mutate()
              }
            }}
            disabled={deleteMut.isPending}
            title="Delete library"
            className="btn-icon text-content-muted hover:text-red-500"
          >
            <Trash2 size={15} />
          </button>
        </div>
      </div>
      {/* Catalog badges */}
      <div className="flex items-center gap-2 flex-wrap">
        <span className="text-xs px-2 py-0.5 rounded-full bg-accent-light text-accent font-medium">
          {materialCount} material{materialCount !== 1 ? 's' : ''}
        </span>
        {nodeGroupCount > 0 && (
          <span className="text-xs px-2 py-0.5 rounded-full bg-surface-muted text-content-muted font-medium">
            {nodeGroupCount} node group{nodeGroupCount !== 1 ? 's' : ''}
          </span>
        )}
      </div>
      {/* Expandable material list */}
      {materialCount > 0 && (
        <div>
          <button
            onClick={() => setExpanded((p) => !p)}
            className="flex items-center gap-1 text-xs text-content-secondary hover:text-content transition-colors"
          >
            {expanded ? <ChevronDown size={12} /> : <ChevronRight size={12} />}
            {expanded ? 'Hide' : 'Show'} materials
          </button>
          {expanded && (
            <div className="mt-2 flex flex-wrap gap-1">
              {materials.slice(0, MAX_VISIBLE).map((m) => (
                <span
                  key={m}
                  className="text-xs px-2 py-0.5 rounded bg-surface-alt border border-border-default text-content-secondary font-mono"
                >
                  {m}
                </span>
              ))}
              {materialCount > MAX_VISIBLE && (
                <span className="text-xs px-2 py-0.5 rounded bg-surface-muted text-content-muted">
                  ... and {materialCount - MAX_VISIBLE} more
                </span>
              )}
            </div>
          )}
        </div>
      )}
    </div>
  )
}
// ── AssetLibraryPage ───────────────────────────────────────────────────────
export default function AssetLibraryPage() {
const [showUpload, setShowUpload] = useState(false)
const { data: libraries, isLoading, isError } = useQuery({
queryKey: ['asset-libraries'],
queryFn: listAssetLibraries,
})
return (
<div className="p-8 max-w-5xl mx-auto">
{/* Header */}
<div className="mb-6 flex items-center justify-between">
<div>
<h1 className="text-2xl font-bold text-content">Asset Libraries</h1>
<p className="text-sm text-content-muted mt-1">
Manage .blend material libraries used for Blender rendering.
</p>
</div>
<button
onClick={() => setShowUpload(true)}
className="btn-primary"
>
<Upload size={16} />
Upload Library
</button>
</div>
{/* States */}
{isLoading && (
<div className="card p-12 text-center text-content-muted">
<div className="animate-spin w-8 h-8 border-2 border-accent border-t-transparent rounded-full mx-auto mb-3" />
Loading libraries...
</div>
)}
{isError && (
<div className="card p-8 text-center text-status-error-text">
Failed to load asset libraries. Please try again.
</div>
)}
{!isLoading && !isError && libraries && libraries.length === 0 && (
<div className="card p-16 text-center">
<Library size={44} className="text-content-muted mx-auto mb-3" />
<p className="text-content-secondary font-medium">No asset libraries.</p>
<p className="text-content-muted text-sm mt-1">
Upload a .blend file to get started.
</p>
<button
onClick={() => setShowUpload(true)}
className="btn-primary mt-4"
>
<Upload size={16} />
Upload Library
</button>
</div>
)}
{!isLoading && !isError && libraries && libraries.length > 0 && (
<div className="space-y-4">
{libraries.map((lib) => (
<LibraryCard key={lib.id} lib={lib} />
))}
</div>
)}
{showUpload && <UploadModal onClose={() => setShowUpload(false)} />}
</div>
)
}
+44 -1
View File
@@ -4,7 +4,7 @@ import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'
import { useDropzone } from 'react-dropzone' import { useDropzone } from 'react-dropzone'
import { import {
ArrowLeft, Pencil, Save, X, Box, Image, ArrowLeft, Pencil, Save, X, Box, Image,
RotateCcw, RefreshCw, Upload, ChevronDown, ChevronRight, Wand2, Download, Plus, Trash2, Filter, Cuboid, RotateCcw, RefreshCw, Upload, ChevronDown, ChevronRight, Wand2, Download, Plus, Trash2, Filter, Cuboid, Ruler,
} from 'lucide-react' } from 'lucide-react'
import { toast } from 'sonner' import { toast } from 'sonner'
import { import {
@@ -606,6 +606,49 @@ export default function ProductDetailPage() {
</div> </div>
</div> </div>
{/* Mesh attributes */}
{product.cad_file?.mesh_attributes && Object.keys(product.cad_file.mesh_attributes).length > 0 && (() => {
const mesh_attrs = product.cad_file!.mesh_attributes!
return (
<div className="mt-3 p-3 rounded-md border border-border-default bg-surface-alt">
<p className="text-xs font-semibold text-content-muted mb-2 flex items-center gap-1">
<Ruler size={12} />
Geometry
</p>
<div className="grid grid-cols-2 gap-x-4 gap-y-1 text-xs">
{mesh_attrs.volume_mm3 != null && (
<>
<span className="text-content-muted">Volume</span>
<span>{((mesh_attrs.volume_mm3 as number) / 1000).toFixed(2)} cm³</span>
</>
)}
{mesh_attrs.surface_area_mm2 != null && (
<>
<span className="text-content-muted">Surface</span>
<span>{((mesh_attrs.surface_area_mm2 as number) / 100).toFixed(1)} cm²</span>
</>
)}
{mesh_attrs.bbox != null && (
<>
<span className="text-content-muted">BBox</span>
<span>
{(mesh_attrs.bbox as { x?: number; y?: number; z?: number }).x?.toFixed(1)} &times;{' '}
{(mesh_attrs.bbox as { x?: number; y?: number; z?: number }).y?.toFixed(1)} &times;{' '}
{(mesh_attrs.bbox as { x?: number; y?: number; z?: number }).z?.toFixed(1)} mm
</span>
</>
)}
{mesh_attrs.suggested_smooth_angle !== undefined && (
<>
<span className="text-content-muted">Sharp angle</span>
<span>{mesh_attrs.suggested_smooth_angle as number}°</span>
</>
)}
</div>
</div>
)
})()}
{/* Material assignments */} {/* Material assignments */}
{isPrivileged && ( {isPrivileged && (
<div className="pt-3 border-t border-border-light"> <div className="pt-3 border-t border-border-light">
+208 -334
View File
@@ -1,16 +1,6 @@
# Plan: Phase N — Workflow-Pipeline, 3D-Viewer Production-Modus, Worker-Management, QC-Tests # Plan: UI-Vollständigkeit + Workflows — Phase O
## Kontext **Ziel**: Alle implementierten Backend-Features im UI zugänglich machen + v3-Workflows vollständig verdrahten.
Vier offene Bereiche aus dem PLAN.md müssen abgeschlossen werden:
1. **Workflow-Pipeline verdrahten**: `workflow_builder.py` enthält nur defekte Stubs. `_build_still` übergibt `order_line_id` als `step_path` an `render_still_task` → würde crashen. Der neue `still_with_exports`-Workflow (still + gltf_export + blend_export) ist nicht implementiert. Die Celery-Tasks für export_gltf/export_blend fehlen in `domains/rendering/tasks.py`.
2. **K6: 3D-Viewer Production-Modus**: `ThreeDViewer.tsx` hat keinen Mode-Toggle, Wireframe, Env-Preset oder Download-Buttons. Für Testdaten wird `POST /api/cad/{id}/generate-gltf-geometry` benötigt (trimesh STL→GLB, kein Blender nötig).
3. **L3: Worker-Management UI**: `WorkerManagement.tsx` fehlt. Backend braucht `/celery-workers` (Celery inspect) und `/scale` (docker compose subprocess). Backend-Container bekommt Docker-Socket-Mount.
4. **M: QC-Tests**: `pytest` ist im Backend-Container nicht installiert. Dockerfile: `pip install -e ".[dev]"`. Neue Service-Tests für rendering und orders domains. 2 neue Vitest-Dateien.
--- ---
@@ -18,348 +8,232 @@ Vier offene Bereiche aus dem PLAN.md müssen abgeschlossen werden:
| Datei | Änderung | | Datei | Änderung |
|-------|----------| |-------|----------|
| `backend/app/domains/rendering/tasks.py` | 3 neue Tasks: `render_order_line_still_task`, `export_gltf_for_order_line_task`, `export_blend_for_order_line_task` | | `frontend/src/components/layout/Layout.tsx` | Upload-Link hinzufügen |
| `backend/app/domains/rendering/workflow_builder.py` | Stubs ersetzen durch order-line-aware Tasks, `still_with_exports` hinzufügen | | `frontend/src/pages/Admin.tsx` | OutputType-Tabelle: Workflow-Dropdown |
| `backend/app/api/routers/cad.py` | `POST /{id}/generate-gltf-geometry` Endpoint | | `frontend/src/pages/AssetLibrary.tsx` | NEU: Asset Library Management UI |
| `backend/app/api/routers/worker.py` | `GET /celery-workers`, `POST /scale` Endpoints | | `frontend/src/api/asset_libraries.ts` | NEU: API-Client |
| `backend/Dockerfile` | `pip install -e ".[dev]"` | | `frontend/src/pages/ProductDetail.tsx` | Mesh-Attribute-Anzeige |
| `docker-compose.yml` | Backend + Worker: Docker-Socket + Compose-File-Mount | | `frontend/src/pages/Upload.tsx` | Sanity-Check-Dialog nach Import |
| `frontend/src/components/cad/ThreeDViewer.tsx` | Mode-Toggle, Wireframe, Env-Preset, Download-Buttons | | `frontend/src/api/imports.ts` | NEU: import_validation API |
| `frontend/src/pages/WorkerManagement.tsx` | NEU: Worker-Liste, Queue-Stats, Scale-Button | | `frontend/src/App.tsx` | Route /asset-libraries |
| `frontend/src/api/worker.ts` | Neue Interfaces + API-Funktionen | | `backend/app/api/routers/notification_configs.py` | NEU: notification_configs CRUD |
| `frontend/src/App.tsx` | Route für /workers | | `backend/app/main.py` | notification_configs router registrieren |
| `frontend/src/components/layout/Layout.tsx` | Sidebar-Link Workers | | `backend/app/api/routers/orders.py` | dispatch_renders → dispatch_render_with_workflow |
| `backend/tests/domains/test_rendering_service.py` | NEU: ≥5 Tests für Rendering-Tasks und Workflow-Builder | | `backend/app/api/routers/output_types.py` | workflow_definition_id im PATCH |
| `backend/tests/domains/test_orders_service.py` | NEU: ≥5 Tests für Orders-Endpoints | | `backend/app/schemas/output_type.py` | workflow_definition_id im Schema |
| `frontend/src/__tests__/pages/WorkerActivity.test.tsx` | NEU: Vitest-Tests | | `backend/app/domains/rendering/tasks.py` | K3: apply_asset_library_materials_task |
| `frontend/src/__tests__/pages/WorkerManagement.test.tsx` | NEU: Vitest-Tests | | `backend/app/tasks/step_tasks.py` | OCC sharp edge extraction in render_step_thumbnail |
| `render-worker/scripts/still_render.py` | mark_sharp / UV seams support |
| `render-worker/scripts/blender_render.py` | mark_sharp / UV seams support |
| `backend/app/services/step_processor.py` | extract_mesh_edge_data() für sharp edges |
--- ---
## Tasks (in Reihenfolge) ## Tasks
### Task 1: Backend — Neue order-line-aware Rendering-Tasks ### Task 1: Upload-Link in Sidebar [QUICK WIN]
- **Datei**: `backend/app/domains/rendering/tasks.py` - **Datei**: `frontend/src/components/layout/Layout.tsx`
- **Was**: Drei neue Celery-Tasks hinzufügen (UNTER den bestehenden Tasks): - **Was**: `Upload`-Icon + NavLink zu `/upload` in der Sidebar für alle eingeloggten User
- **Akzeptanzkriterium**: Upload-Link sichtbar in Sidebar
**`render_order_line_still_task(order_line_id, **params)`** — Queue `thumbnail_rendering`: ### Task 2: notification_configs Backend-Router [Phase I]
- Lädt OrderLine + CadFile via sync SQLAlchemy (wie `publish_asset`) - **Datei**: `backend/app/api/routers/notification_configs.py` (NEU), `backend/app/main.py`
- Setzt `render_status = 'processing'` - **Was**: REST-Endpoints für `notification_configs` Tabelle (044 bereits migriert):
- Ruft `render_still()` aus `app.services.render_blender` auf - `GET /api/notification-configs` — gibt configs für aktuellen User zurück (mit Defaults falls keine Zeilen)
- Setzt `render_status = 'completed'`, speichert `render_log` - `PUT /api/notification-configs/{event_type}/{channel}` — setzt enabled=true/false
- Bei Fehler: `render_status = 'failed'` - `POST /api/notification-configs/reset` — löscht alle configs des Users → Defaults gelten wieder
- Returns dict mit `output_path` - Response: `[{event_type, channel, enabled}]`
- Auth: `get_current_user` (jeder kann seine eigenen Configs verwalten)
- **Akzeptanzkriterium**: NotificationSettings.tsx zeigt Toggle-Matrix und speichert korrekt
**`export_gltf_for_order_line_task(order_line_id)`** — Queue `thumbnail_rendering`: ### Task 3: OutputType → WorkflowDefinition — Schema + API
- Lädt OrderLine + CadFile sync - **Datei**: `backend/app/schemas/output_type.py`, `backend/app/api/routers/output_types.py`
- Sucht STL-Cache (`{step_stem}_low.stl`)
- Ruft Blender subprocess mit `export_gltf.py` auf: `blender --background --python export_gltf.py -- --stl_path X --output_path Y`
- Lädt GLB nach MinIO `production-exports/{cad_file_id}/{order_line_id}.glb`
- Erstellt `MediaAsset(asset_type=gltf_production, storage_key=...)`
- Returns `storage_key`
**`export_blend_for_order_line_task(order_line_id)`** — Queue `thumbnail_rendering`:
- Analog zu export_gltf, aber mit `export_blend.py`
- MediaAsset type: `blend_production`
- **Akzeptanzkriterium**: Tasks in `domains/rendering/tasks.py` vorhanden, keine Import-Fehler
- **Abhängigkeiten**: keine
### Task 2: Backend — workflow_builder.py reparieren + still_with_exports
- **Datei**: `backend/app/domains/rendering/workflow_builder.py`
- **Was**: - **Was**:
- `OutputTypeOut` + `OutputTypePatch`: `workflow_definition_id: uuid.UUID | None` hinzufügen
- PATCH-Handler: `workflow_definition_id` setzen wenn in body
- `OutputTypeOut` soll `workflow_name: str | None` als convenience field enthalten
- **Akzeptanzkriterium**: `PATCH /api/output-types/{id}` mit `{"workflow_definition_id": "..."}` funktioniert
- `_build_still`: Nutzt `render_order_line_still_task` statt `render_still_task` ### Task 4: Workflow-Dispatch Integration
- `_build_turntable`: Bleibt vorerst mit `render_turntable_task` (file-path-basiert, funktioniert via legacy path) - **Datei**: `backend/app/api/routers/orders.py`
- `_build_multi_angle`: Nutzt `render_order_line_still_task` mit `camera_angle` param - **Was**: In `dispatch_renders()` (Zeile 910):
- **NEU** `_build_still_with_exports(order_line_id, params)`: - Statt `dispatch_order_line_render.delay(str(line.id))` aufrufen:
```python - `from app.domains.rendering.dispatch_service import dispatch_render_with_workflow`
from celery import chain, group - `dispatch_render_with_workflow(str(line.id))` aufrufen
return chain( - Das dispatch_service lädt OutputType.workflow_definition_id und nutzt Celery Canvas falls verknüpft; fällt auf Legacy zurück wenn nicht.
render_order_line_still_task.si(order_line_id, **params), - **Akzeptanzkriterium**: Dispatch nutzt neuen Pfad; Legacy-Fallback bleibt erhalten
group(
export_gltf_for_order_line_task.si(order_line_id),
export_blend_for_order_line_task.si(order_line_id),
)
)
```
- `dispatch_workflow()`: `"still_with_exports"` zu `builders` hinzufügen
- **Akzeptanzkriterium**: `dispatch_workflow("still_with_exports", order_line_id)` löst keine Exception aus ### Task 5: Asset Library API-Client (Frontend)
- **Abhängigkeiten**: Task 1 - **Datei**: `frontend/src/api/asset_libraries.ts` (NEU)
### Task 3: Backend — generate-gltf-geometry Endpoint (Testdaten für K6)
- **Datei**: `backend/app/api/routers/cad.py`
- **Was**: Neuer Endpoint `POST /api/cad/{id}/generate-gltf-geometry` (require_admin_or_pm):
- Prüft ob CadFile existiert + STL-Cache vorhanden (`{step_dir}/{stem}_low.stl`)
- Queut neuen Celery-Task `generate_gltf_geometry_task.delay(str(cad_file.id))`
- Returns `{"task_id": ..., "message": "GLB generation queued"}`
Neuer Task `generate_gltf_geometry_task` in `domains/rendering/tasks.py` (Queue `thumbnail_rendering`):
- Lädt CadFile sync, findet STL-Cache
- **Nutzt trimesh** (kein Blender): `import trimesh; mesh = trimesh.load(stl_path); mesh.export(glb_path)`
→ Warum trimesh: Schnell, kein Blender nötig, läuft auf worker-Container (trimesh in pyproject.toml cad-extras)
- Lädt GLB nach MinIO `uploads/{cad_file_id}/geometry.glb`
- Erstellt/aktualisiert `MediaAsset(asset_type=gltf_geometry, storage_key=..., cad_file_id=...)`
→ `MediaAsset` braucht `cad_file_id` FK — prüfen ob vorhanden
**Wichtig**: Prüfen ob `media_assets.cad_file_id` existiert. Falls nicht: Migration 047 notwendig.
- **Akzeptanzkriterium**: `POST /api/cad/{id}/generate-gltf-geometry` gibt 202 zurück, nach Task-Ausführung existiert MediaAsset mit type=gltf_geometry
- **Abhängigkeiten**: Task 1
### Task 4: Migration 047 — media_assets.cad_file_id (wenn nötig)
- **Datei**: `backend/alembic/versions/047_media_assets_cad_file_id.py`
- **Was**: Nullable FK `cad_file_id UUID REFERENCES cad_files(id) ON DELETE SET NULL` auf `media_assets`
- **Prüfen**: `grep -n "cad_file_id" backend/app/domains/media/models.py` — falls schon vorhanden: Task überspringen
- **Akzeptanzkriterium**: `alembic upgrade head` erfolgreich
- **Abhängigkeiten**: keine
### Task 5: ThreeDViewer.tsx — Production-Modus, Wireframe, Env-Preset, Downloads
- **Datei**: `frontend/src/components/cad/ThreeDViewer.tsx`
- **Was**: Props erweitern + Toolbar-Erweiterung:
```typescript
interface ThreeDViewerProps {
cadFileId: string
onClose: () => void
productionGltfUrl?: string // wenn vorhanden: Mode-Toggle anzeigen
downloadUrls?: { glb?: string; blend?: string }
}
```
**Neuer State:**
- `mode: 'geometry' | 'production'` (default: 'geometry')
- `wireframe: boolean` (default: false)
- `envPreset: 'city' | 'studio' | 'sunset'` (default: 'city')
**Toolbar** (neu, rechts vom "Capture Angle"-Button):
- Mode-Toggle (nur wenn `productionGltfUrl` gesetzt): Button-Gruppe "Geometry | Production"
- Wireframe-Toggle: Button
- Env-Preset-Dropdown: `<select>` mit city/studio/sunset
- Download-Buttons (wenn `downloadUrls` gesetzt): Download-Icon + "GLB" + optional "BLEND"
**Canvas-Änderungen:**
- `Environment preset={envPreset}` (jetzt konfigurierbar, bisher hardcoded "city")
- `WireframeToggle`-Komponente: setzt `material.wireframe = wireframe` auf allen Mesh-Children
- Model-URL: `mode === 'production' && productionGltfUrl ? productionGltfUrl : modelUrl`
**GltfErrorBoundary**: Reset bei mode-Wechsel (key prop ändern)
- **Akzeptanzkriterium**: Mode-Toggle erscheint wenn `productionGltfUrl` vorhanden, Wireframe-Toggle schaltet um, Env-Preset ändert Beleuchtung
- **Abhängigkeiten**: keine
### Task 6: CadPreview.tsx — Production-Asset-URLs übergeben
- **Datei**: `frontend/src/pages/CadPreview.tsx`
- **Was**: Beim Öffnen des ThreeDViewers:
- `GET /api/media-assets?cad_file_id={id}&asset_type=gltf_geometry` (oder gltf_production falls vorhanden)
- Download-URLs für GLB + BLEND laden
- `<ThreeDViewer productionGltfUrl={...} downloadUrls={...} />`
- "Generate GLB" Button (admin/PM): ruft `POST /api/cad/{id}/generate-gltf-geometry` auf + Toast + Reload
- **Akzeptanzkriterium**: Vorhandene MediaAssets werden als Production-URLs übergeben
- **Abhängigkeiten**: Task 3, Task 5
### Task 7: Media-API — assets by cad_file_id Query-Parameter
- **Datei**: `backend/app/domains/media/router.py`
- **Was**: `GET /api/media-assets?cad_file_id={uuid}` — Query-Param zu `list_assets` hinzufügen (optional, nullable)
- `list_media_assets(db, cad_file_id=...)` in service.py erweitern
- **Akzeptanzkriterium**: `GET /api/media-assets?cad_file_id=abc` gibt nur Assets dieses CadFile zurück
- **Abhängigkeiten**: Task 4
### Task 8: Frontend API — media.ts + cad.ts erweitern
- **Datei**: `frontend/src/api/media.ts`, `frontend/src/api/cad.ts`
- **Was**:
- `media.ts`: `listMediaAssets(params: {cad_file_id?: string, asset_type?: string}): Promise<MediaAsset[]>`
- `cad.ts`: `generateGltfGeometry(cadFileId: string): Promise<{task_id: string}>`
- Interface `MediaAsset` um `cad_file_id?: string` ergänzen (falls noch nicht vorhanden)
- **Akzeptanzkriterium**: TypeScript-Kompilierung fehlerfrei
- **Abhängigkeiten**: Task 7
### Task 9: Backend — Worker-Management Endpoints
- **Datei**: `backend/app/api/routers/worker.py`
- **Was**: Zwei neue Endpoints (require_admin):
**`GET /api/worker/celery-workers`**:
```python
from app.tasks.celery_app import celery_app
inspect = celery_app.control.inspect()
active = inspect.active() or {}
stats = inspect.stats() or {}
# Aggregiere: worker_name, hostname, active_tasks_count, queues
```
Response: `list[CeleryWorkerInfo]` mit Feldern: `worker_name, hostname, active_tasks, status`
**`POST /api/worker/scale`** (Body: `{service: "render-worker"|"worker", count: int}`):
```python
import subprocess, shutil
compose_file = os.environ.get("COMPOSE_FILE", "/docker-compose.yml")
result = subprocess.run(
["docker", "compose", "-f", compose_file,
"up", "--scale", f"{service}={count}", "--no-deps", "-d"],
capture_output=True, text=True, timeout=60
)
```
- Erfordert Docker-Socket-Mount (docker-compose.yml Änderung, Task 10)
- Validierung: count zwischen 0 und 10, service in erlaubte Liste
- **Akzeptanzkriterium**: `GET /api/worker/celery-workers` gibt Worker-Liste zurück (leer wenn keine aktiv)
- **Abhängigkeiten**: keine
### Task 10: docker-compose.yml — Docker-Socket + Compose-File-Mount
- **Datei**: `docker-compose.yml`
- **Was**: Im `backend`-Service:
```yaml
volumes:
- ./backend:/app
- uploads:/app/uploads
- /var/run/docker.sock:/var/run/docker.sock
- ./docker-compose.yml:/docker-compose.yml
environment:
- COMPOSE_FILE=/docker-compose.yml
```
Außerdem `docker-cli` im Backend-Dockerfile installieren:
```dockerfile
RUN apt-get update && apt-get install -y --no-install-recommends \
... docker.io \
&& rm -rf /var/lib/apt/lists/*
```
- **Akzeptanzkriterium**: `docker compose exec backend docker compose version` funktioniert
- **Abhängigkeiten**: Task 9
### Task 11: Frontend — WorkerManagement.tsx
- **Datei**: `frontend/src/pages/WorkerManagement.tsx` (NEU)
- **Was**: Seite mit 3 Bereichen:
**Section 1 — Worker-Status** (useQuery `['celery-workers']`, refetchInterval 15s):
- Tabelle: Worker-Name, Hostname, Aktive Tasks, Status-Dot (grün=online, grau=keine Tasks)
- Leerer Zustand: "No active workers"
**Section 2 — Queue-Tiefe** (aus `GET /api/worker/activity`, bestehend):
- Karten: `step_processing` + `thumbnail_rendering` Queue-Tiefe
- Nutzt vorhandene WorkerActivity-Daten
**Section 3 — Scale-Worker** (require admin):
- Zwei Slider/Spinner: "step-worker (worker)" 1-8, "render-worker" 1-4
- Button "Scale" → `POST /api/worker/scale`
- Warnung: "Scaling down kills active renders"
- Toast bei Erfolg/Fehler
- **Akzeptanzkriterium**: Seite lädt, Worker-Liste zeigt laufende Worker, Scale-Button sendet Request
- **Abhängigkeiten**: Task 9, Task 12
### Task 12: Frontend — worker.ts API-Client
- **Datei**: `frontend/src/api/worker.ts` (NEU oder ergänzen)
- **Was**: - **Was**:
```typescript ```typescript
export interface CeleryWorkerInfo { export interface AssetLibrary { id, name, description, original_filename, catalog: {materials: string[], node_groups: string[]}, is_active, created_at }
worker_name: string export async function listAssetLibraries(): Promise<AssetLibrary[]>
hostname: string export async function uploadAssetLibrary(name: string, file: File, description?: string): Promise<AssetLibrary>
active_tasks: number export async function refreshLibraryCatalog(id: string): Promise<AssetLibrary>
status: 'online' | 'idle' export async function deleteAssetLibrary(id: string): Promise<void>
} export async function updateAssetLibrary(id: string, data: Partial<AssetLibrary>): Promise<AssetLibrary>
export async function getCeleryWorkers(): Promise<CeleryWorkerInfo[]>
export async function scaleWorker(service: string, count: number): Promise<void>
``` ```
- **Akzeptanzkriterium**: TypeScript kompiliert - **Akzeptanzkriterium**: TypeScript kompiliert fehlerfrei
- **Abhängigkeiten**: Task 9
### Task 13: Frontend — Route + Sidebar-Link für WorkerManagement ### Task 6: Asset Library Management Page (K2)
- **Datei**: `frontend/src/pages/AssetLibrary.tsx` (NEU)
- **Was**: Seite `/asset-libraries` (admin/PM):
- Liste der Asset Libraries als Karten: Name, Filename, Badge-Grid mit Materialien/Node-Groups aus `catalog`
- Upload-Button: Datei-Input für `.blend` + Name-Feld → `uploadAssetLibrary()`
- "Refresh Catalog" Button je Library → `refreshLibraryCatalog(id)` → Toast
- Toggle `is_active` → `updateAssetLibrary()`
- Delete-Button → `deleteAssetLibrary()`
- Leer-Zustand: "No asset libraries yet — upload a .blend file"
- **Akzeptanzkriterium**: Libraries hochladen, Katalog anzeigen, löschen
### Task 7: Asset Library Route + Sidebar-Link
- **Datei**: `frontend/src/App.tsx`, `frontend/src/components/layout/Layout.tsx` - **Datei**: `frontend/src/App.tsx`, `frontend/src/components/layout/Layout.tsx`
- **Was**: - **Was**:
- App.tsx: Route `/workers` → `<WorkerManagement />` - App.tsx: Route `/asset-libraries` → `<AssetLibraryPage />` (AdminRoute)
- Layout.tsx: Sidebar-Link "Workers" mit `Server`-Icon (admin only) - Layout.tsx: Sidebar-Link "Asset Libraries" mit `Library`-Icon (admin/PM)
- **Akzeptanzkriterium**: `/workers` erreichbar, Link erscheint für Admins - **Abhängigkeiten**: Task 6
- **Abhängigkeiten**: Task 11
### Task 14: Dockerfile — pytest installieren ### Task 8: OutputType Workflow-Dropdown (Frontend)
- **Datei**: `backend/Dockerfile` - **Datei**: `frontend/src/pages/Admin.tsx` (OutputTypeTable-Bereich)
- **Was**: `pip install --no-cache-dir -e .` → `pip install --no-cache-dir -e ".[dev]"` - **Was**: In der OutputType-Tabelle eine neue Spalte "Workflow":
- **Akzeptanzkriterium**: `docker compose exec backend pytest --version` gibt Versionsnummer aus (nach Rebuild) - Dropdown mit allen WorkflowDefinitions (aus `GET /api/workflows`) + "— None —"
- **Abhängigkeiten**: keine - Bei Änderung: `PATCH /api/output-types/{id}` mit `{workflow_definition_id: ...}`
- Wenn kein Workflow: zeige "Legacy" Badge; wenn Workflow: zeige Workflow-Name als grünes Badge
- **Akzeptanzkriterium**: Workflow kann pro OutputType zugewiesen werden
### Task 15: Backend-Tests — test_rendering_service.py ### Task 9: Excel Sanity-Check Backend (Phase H)
- **Datei**: `backend/tests/domains/test_rendering_service.py` (NEU) - **Datei**: `backend/app/domains/imports/sanity_check.py` (NEU), `backend/app/domains/imports/router.py`
- **Was**: ≥5 Tests:
1. `test_dispatch_workflow_unknown_type_raises` — ValueError bei unbekanntem Typ
2. `test_dispatch_workflow_still_builds_chain` — `_build_still` gibt Celery-Chain zurück (ohne apply_async)
3. `test_dispatch_workflow_still_with_exports_builds_chain` — group in chain
4. `test_publish_asset_creates_media_asset(db, admin_user)` — async, erstellt MediaAsset
5. `test_publish_asset_nonexistent_order_line_returns_none` — graceful None
6. (Bonus) `test_legacy_dispatch_queues_task(monkeypatch)` — mock_celery, prüft Task wurde eingereicht
- **Akzeptanzkriterium**: `pytest tests/domains/test_rendering_service.py` → alles grün
- **Abhängigkeiten**: Task 14
### Task 16: Backend-Tests — test_orders_service.py
- **Datei**: `backend/tests/domains/test_orders_service.py` (NEU)
- **Was**: ≥5 Tests gegen `GET/POST /api/orders` und Orders-Service-Funktionen:
1. `test_create_order_returns_201(client, auth_headers)` — POST /api/orders
2. `test_list_orders_empty(client, auth_headers)` — leere Liste zurück
3. `test_get_order_404_for_unknown_id(client, auth_headers)` — 404 bei unbekannter ID
4. `test_order_submit_status_change(client, auth_headers)` — Submit ändert Status
5. `test_order_requires_auth(client)` — 401 ohne Token
- **Akzeptanzkriterium**: `pytest tests/domains/test_orders_service.py` → alles grün
- **Abhängigkeiten**: Task 14
### Task 17: Frontend-Tests — WorkerActivity.test.tsx + WorkerManagement.test.tsx
- **Datei**: `frontend/src/__tests__/pages/WorkerActivity.test.tsx` (NEU), `WorkerManagement.test.tsx` (NEU)
- **Was**: - **Was**:
- WorkerActivity: Test render + "No recent activity" leerer Zustand, Mock-API-Response - Sync-Funktion `run_sanity_check(import_validation_id: str)`:
- WorkerManagement: Test render Header "Worker Management", Scale-Button vorhanden - Lädt ImportValidation-Record
- Nutzen MSW handlers aus `mocks/` - Iteriert über `rows` (ParsedRows aus Excel)
- **Akzeptanzkriterium**: `npm run test` → 0 Failures (≥5 Tests total neu) - Für jede Zeile: prüft ob `name_cad_modell` eine CadFile zugeordnet hat (`cad_files.original_name ILIKE`)
- **Abhängigkeiten**: Task 11 - Prüft ob `cad_part_materials` alle Materialien in `materials`-Tabelle (via Alias-Lookup) auflösbar sind
- Erstellt `summary: {total_rows, rows_with_cad, rows_without_cad, material_gaps: [{product, missing_material}]}`
- Status → 'completed'
- Celery-Task `validate_excel_import_task(import_validation_id)` Queue `step_processing`
- Endpoint `GET /api/imports/{id}/validation` — gibt ImportValidation zurück
- Endpoint `POST /api/imports/{id}/add-alias` — schnell einen Alias hinzufügen (part_name → material)
- ImportValidation DB-Zugriff: sync SQLAlchemy (Celery-kompatibel)
- **Akzeptanzkriterium**: Nach Excel-Upload wird Import-Validierung automatisch gequeuet; `summary` liefert Material-Lücken
### Task 10: Upload.tsx — Sanity-Check-Dialog (Phase H)
- **Datei**: `frontend/src/pages/Upload.tsx`
- **Was**: Nach erfolgreichem Excel-Upload:
- `GET /api/imports/{id}/validation` pollen (alle 3s, max 30s)
- Wenn status='completed': Ampel-Dialog anzeigen:
- Grün-Badge: "X Produkte mit STEP-Datei"
- Gelb-Badge: "Y Produkte ohne STEP-Datei"
- Rote Liste: Material-Lücken (Part-Name → fehlendes Material, mit "Add Alias" Button)
- "Proceed" Button schließt Dialog
- Import API erweitern: `api/imports.ts` mit `getImportValidation(id)`, `addMaterialAlias()`
- **Akzeptanzkriterium**: Nach Upload erscheint Dialog mit Produktions-Readiness
### Task 11: Mesh-Attribute Anzeige in ProductDetail (Phase D)
- **Datei**: `frontend/src/pages/ProductDetail.tsx`
- **Was**: Im CAD-File-Bereich, nach dem Status-Badge:
- Wenn `product.cad_file.mesh_attributes` vorhanden: kleine Info-Karte
- Felder: `volume_cm3` (aus `mesh_attributes.volume_mm3 / 1000` → "12.5 cm³"),
`surface_area_cm2`, `bounding_box` ("W×H×D mm"), `sharp_angle_deg` (aus `suggested_smooth_angle`)
- Label "Geometry" mit `Ruler`-Icon
- **API-Änderung**: Product-API gibt `cad_file.mesh_attributes` zurück (prüfen ob vorhanden)
- **Akzeptanzkriterium**: Volumen, Oberfläche, BBox in ProductDetail sichtbar (wenn vorhanden)
### Task 12: OCC Edge-Analyse → mesh_attributes (Sharp/Seam)
- **Datei**: `backend/app/services/step_processor.py`
- **Was**: Neue Funktion `extract_mesh_edge_data(step_path: str) -> dict`:
- Öffnet STEP via OCC
- Iteriert über alle Faces und deren Edges
- Berechnet Winkel zwischen adjazenten Faces per Edge (Dihedralwinkel)
- Sammelt:
- `suggested_smooth_angle`: Median-Winkel aller Kanten wo Winkel > 5° (typisch 30–60°)
- `has_mechanical_edges`: bool (True wenn mehrere Kanten mit Winkel > 60° → Lagerkante)
- `sharp_edge_midpoints`: Liste von `[x,y,z]` mm-Koordinaten der scharfen Kanten-Mittelpunkte (max 500 Stück, für Winkel > 45°)
- Integriert in `extract_cad_metadata()`: nach `_extract_step_objects()` aufrufen, Ergebnis in `mesh_attributes` mergen
- Fallback: bei OCC-Fehler gracefully `{}` zurückgeben
- **Akzeptanzkriterium**: `cad_files.mesh_attributes` enthält `suggested_smooth_angle` nach Verarbeitung
### Task 13: Blender-Scripts — mark_sharp + UV-Seams
- **Dateien**: `render-worker/scripts/still_render.py`, `render-worker/scripts/blender_render.py`
- **Was**: Nach STL-Import, vor dem Render:
1. Wenn `mesh_attributes.suggested_smooth_angle` vorhanden: diesen Winkel statt globalem `smooth_angle` nutzen
2. Neue Funktion `_mark_sharp_edges(obj, smooth_angle_deg, sharp_edge_midpoints=None)`:
- Setzt `obj.data.auto_smooth_angle = math.radians(smooth_angle_deg)`
- Wählt Kanten aus: `bpy.ops.mesh.edges_select_sharp(sharpness=math.radians(smooth_angle_deg))`
- Ruft `bpy.ops.mesh.mark_sharp()` auf
- Wenn `sharp_edge_midpoints` vorhanden: KD-Tree matching → zusätzliche Kanten markieren
3. Neue Funktion `_create_uv_seams_from_sharps(obj)`:
- Startet Edit-Mode
- Selektiert alle Sharp-Kanten: `[e for e in mesh.edges if e.use_edge_sharp]`
- Markiert diese als Seams: `edge.use_seam = True`
- Ruft `bpy.ops.uv.smart_project(angle_limit=math.radians(smooth_angle_deg))` auf
4. Beide Funktionen nach `_import_stl()` aufrufen (Mode A + Mode B)
- **Akzeptanzkriterium**: Gerenderte Bilder zeigen korrekte Kanten für Lager (30° Winkel scharf sichtbar)
### Task 14: K3 — apply_asset_library_materials_task
- **Datei**: `backend/app/domains/rendering/tasks.py`
- **Was**: Neuer Celery-Task:
```python
@celery_app.task(name="...apply_asset_library_materials_task", queue="thumbnail_rendering")
def apply_asset_library_materials_task(order_line_id: str, asset_library_id: str) -> dict:
# Lädt OrderLine, CadFile, AssetLibrary
# Prüft ob asset_library.blend_file_path existiert
# Ruft Blender subprocess auf mit asset_library.py:
# blender --background --python asset_library.py -- --stl_path X --asset_library_blend Y --material_map '{...}'
# Returns {'status': 'applied', 'materials_count': N}
```
Skript `render-worker/scripts/asset_library.py` existiert bereits.
- **Akzeptanzkriterium**: Task läuft ohne Fehler wenn Blender verfügbar
### Task 15: K4/K5 — export_gltf + export_blend via Blender
- **Datei**: `backend/app/domains/rendering/tasks.py`
- **Was**: `export_gltf_for_order_line_task` und `export_blend_for_order_line_task` überarbeiten:
- Statt trimesh: Blender subprocess mit `export_gltf.py` / `export_blend.py`
- Asset Library path aus LinkedAssetLibrary (via OutputType) übergeben falls vorhanden
- GLB → MinIO `production-exports/{cad_file_id}/{order_line_id}.glb`
- .blend → MinIO `production-exports/{cad_file_id}/{order_line_id}.blend`
- MediaAsset erstellen mit `gltf_production` / `blend_production` type
- **Akzeptanzkriterium**: Export-Tasks produzieren GLB/BLEND-Dateien in MinIO
--- ---
## Abhängigkeiten
```
Sofort (parallel):
Task 1 (Upload Link)
Task 2 (Notification Config Backend)
Task 3 (OutputType Schema)
Task 5 (Asset Library API)
Task 9 (Sanity Check Backend)
Task 12 (OCC Edge Analyse)
Nach Task 3:
Task 4 (Dispatch Integration)
Task 8 (OutputType Workflow Dropdown)
Nach Task 5+6:
Task 6 (Asset Library Page) — braucht Task 5
Task 7 (Route + Sidebar) — braucht Task 6
Nach Task 9:
Task 10 (Upload Sanity Dialog)
Unabhängig (jederzeit):
Task 11 (Mesh Display) — unabhängig
Nach Task 12:
Task 13 (Blender Scripts)
Nach Task 14:
Task 15 (K4/K5 Exports)
```
## Migrations-Check ## Migrations-Check
Alle benötigten Migrationen existieren bereits:
- 043: import_validations ✅
- 044: notification_configs ✅
- 045: asset_libraries ✅
| Migration | Beschreibung | Notwendig? | Keine neue Migration nötig.
|-----------|-------------|------------|
| 047 | `media_assets.cad_file_id FK` | **Prüfen**: `grep cad_file_id backend/app/domains/media/models.py` — wenn fehlt → ja |
Vor Implementierung prüfen: `cat backend/app/domains/media/models.py | grep cad_file_id`
---
## Reihenfolge-Empfehlung
```
Parallel-Gruppe 1 (keine gegenseitigen Abhängigkeiten):
Task 1 (neue Celery-Tasks)
Task 4 (Migration 047 prüfen + ggf. erstellen)
Task 5 (ThreeDViewer Props)
Task 9 (Worker-Endpoints Backend)
Task 14 (Dockerfile pytest)
Nach Gruppe 1:
Task 2 (workflow_builder reparieren) — braucht Task 1
Task 3 (generate-gltf-geometry Endpoint) — braucht Task 1 + 4
Task 10 (docker-compose Mount) — braucht Task 9
Task 12 (worker.ts API) — braucht Task 9
Nach Gruppe 2:
Task 6 (CadPreview anpassen) — braucht Task 3, 5
Task 7 (media router cad_file_id param) — braucht Task 4
Task 8 (frontend API) — braucht Task 7
Task 11 (WorkerManagement.tsx) — braucht Task 9, 12
Nach Gruppe 3:
Task 13 (Route + Sidebar) — braucht Task 11
Task 15 (test_rendering_service.py) — braucht Task 14
Task 16 (test_orders_service.py) — braucht Task 14
Task 17 (frontend tests) — braucht Task 11
```
---
## Risiken / Offene Fragen
1. **media_assets.cad_file_id**: Muss vor Implementierung geprüft werden. Wenn schon vorhanden → Migration 047 entfällt.
2. **trimesh auf render-worker**: `trimesh` ist in `pyproject.toml` als optionale `cad`-Dependency gelistet (`trimesh>=4.2.0`). Der worker-Container muss sie installiert haben. Im render-worker Dockerfile prüfen: `pip install trimesh`.
3. **docker compose in Backend-Container**: Das scale-Feature setzt voraus, dass `docker.io` + compose-Plugin im Backend-Image installiert sind. Build-Zeit steigt ~30MB. Alternativ: Nur die Celery-Worker-Ansicht implementieren, Scale als Hinweis-Text mit dem CLI-Befehl.
4. **render_order_line_still_task vs. legacy render_order_line_task**: Beide tun ähnliches. Langfristig sollte `step_tasks.render_order_line_task` durch den neuen Task ersetzt werden. Für jetzt: Neuer Task läuft parallel, Legacy bleibt erhalten (backward-compat).
5. **Celery inspect Timeout**: `celery_app.control.inspect(timeout=2)` kann hängen wenn kein Worker läuft. Timeout setzen + leere Liste zurückgeben.
+82 -1
View File
@@ -98,6 +98,16 @@ denoising_prefilter_arg = argv[22] if len(argv) > 22 else ""
denoising_quality_arg = argv[23] if len(argv) > 23 else "" denoising_quality_arg = argv[23] if len(argv) > 23 else ""
denoising_use_gpu_arg = argv[24] if len(argv) > 24 else "" denoising_use_gpu_arg = argv[24] if len(argv) > 24 else ""
# Named argument: --mesh-attributes <json>
# Optional OCC-derived mesh attributes (e.g. sharp_edge_midpoints) passed
# through from the backend; falls back to an empty dict on any failure.
_mesh_attrs: dict = {}
_sys_argv = sys.argv
try:
    _flag_at = _sys_argv.index("--mesh-attributes")
    _mesh_attrs = _json.loads(_sys_argv[_flag_at + 1])
except Exception:
    pass  # flag absent, value missing, or payload unparsable — keep {}
# Validate template path: if provided it MUST exist on disk. # Validate template path: if provided it MUST exist on disk.
# Fail loudly rather than silently rendering with factory settings. # Fail loudly rather than silently rendering with factory settings.
if template_path and not os.path.isfile(template_path): if template_path and not os.path.isfile(template_path):
@@ -203,6 +213,69 @@ def _apply_rotation(parts, rx, ry, rz):
print(f"[blender_render] applied rotation ({rx}°, {ry}°, {rz}°) to {len(parts)} parts") print(f"[blender_render] applied rotation ({rx}°, {ry}°, {rz}°) to {len(parts)} parts")
def _mark_sharp_and_seams(obj, smooth_angle_deg: float, sharp_edge_midpoints=None):
    """Mark sharp edges and matching UV seams on *obj*.

    Edges steeper than ``smooth_angle_deg`` are selected in edit mode, marked
    sharp, and the same selection is marked as UV seams.  If
    ``sharp_edge_midpoints`` (a list of ``[x, y, z]`` coordinates — presumably
    OCC-derived, in mm before any scene scale; TODO confirm units against the
    producer) is given, the nearest mesh edge to each midpoint is additionally
    marked as seam / non-smooth via a KD-tree lookup.

    The object is left in OBJECT mode.  The midpoint-matching step is
    best-effort: any failure there is swallowed rather than aborting the render.
    """
    import math
    import bpy

    # Operators act on the active, selected object.
    bpy.context.view_layer.objects.active = obj
    obj.select_set(True)

    # auto_smooth_angle was removed in Blender 4.1 — guard for compatibility.
    if hasattr(obj.data, 'auto_smooth_angle'):
        obj.data.auto_smooth_angle = math.radians(smooth_angle_deg)

    # Edit mode: select edges above the angle threshold, mark them sharp,
    # then reuse the very same selection as UV seams.
    bpy.ops.object.mode_set(mode='EDIT')
    bpy.ops.mesh.select_all(action='DESELECT')
    bpy.ops.mesh.edges_select_sharp(sharpness=math.radians(smooth_angle_deg))
    bpy.ops.mesh.mark_sharp()
    bpy.ops.mesh.mark_seam(clear=False)

    # Optionally refine using OCC-derived sharp-edge midpoints.
    if sharp_edge_midpoints:
        try:
            import bmesh
            import mathutils

            # bmesh.from_mesh needs the mesh out of edit mode.
            bpy.ops.object.mode_set(mode='OBJECT')
            bm = bmesh.new()
            try:
                bm.from_mesh(obj.data)
                bm.edges.ensure_lookup_table()
                bm.verts.ensure_lookup_table()

                # KD-tree over Blender edge midpoints for nearest-edge lookup.
                kd = mathutils.kdtree.KDTree(len(bm.edges))
                for i, edge in enumerate(bm.edges):
                    kd.insert((edge.verts[0].co + edge.verts[1].co) / 2, i)
                kd.balance()

                tol = 0.5  # 0.5 mm tolerance (coordinates in mm before scale)
                for mp in sharp_edge_midpoints[:200]:  # cap work on huge inputs
                    _co, idx, dist = kd.find(mathutils.Vector(mp))
                    # kd.find returns (None, None, None) on an empty tree.
                    if idx is not None and dist < tol:
                        bm.edges[idx].seam = True
                        try:
                            bm.edges[idx].smooth = False
                        except Exception:
                            pass
                bm.to_mesh(obj.data)
            finally:
                bm.free()  # always release the bmesh, even if to_mesh fails
        except Exception:
            pass  # non-fatal: midpoint matching is a best-effort refinement

    # Return to object mode (no-op if the midpoint branch already switched).
    bpy.ops.object.mode_set(mode='OBJECT')
def _import_stl(stl_file): def _import_stl(stl_file):
"""Import STL into Blender, using per-part STLs if available. """Import STL into Blender, using per-part STLs if available.
@@ -394,9 +467,13 @@ if use_template:
col.objects.unlink(part) col.objects.unlink(part)
target_col.objects.link(part) target_col.objects.link(part)
# Apply smooth shading # Apply smooth shading and mark sharp edges / UV seams
for part in parts: for part in parts:
_apply_smooth(part, smooth_angle) _apply_smooth(part, smooth_angle)
_mark_sharp_and_seams(
part, smooth_angle,
sharp_edge_midpoints=_mesh_attrs.get('sharp_edge_midpoints'),
)
# Material assignment: library materials if available, otherwise palette # Material assignment: library materials if available, otherwise palette
if material_library_path and material_map: if material_library_path and material_map:
@@ -469,6 +546,10 @@ else:
for i, part in enumerate(parts): for i, part in enumerate(parts):
_apply_smooth(part, smooth_angle) _apply_smooth(part, smooth_angle)
_mark_sharp_and_seams(
part, smooth_angle,
sharp_edge_midpoints=_mesh_attrs.get('sharp_edge_midpoints'),
)
_assign_palette_material(part, i) _assign_palette_material(part, i)
# Apply material library on top of palette colours (same logic as Mode B). # Apply material library on top of palette colours (same logic as Mode B).
+73 -1
View File
@@ -145,6 +145,70 @@ def _apply_mesh_attributes(objects: list, mesh_attributes: dict) -> None:
obj.data.auto_smooth_angle = threshold_rad obj.data.auto_smooth_angle = threshold_rad
def _mark_sharp_and_seams(obj, smooth_angle_deg: float, sharp_edge_midpoints=None):
    """Mark sharp edges and matching UV seams on *obj*.

    Edges steeper than ``smooth_angle_deg`` are selected in edit mode, marked
    sharp, and the same selection is marked as UV seams.  If
    ``sharp_edge_midpoints`` (a list of ``[x, y, z]`` coordinates — presumably
    OCC-derived, in mm before any scene scale; TODO confirm units against the
    producer) is given, the nearest mesh edge to each midpoint is additionally
    marked as seam / non-smooth via a KD-tree lookup.

    The object is left in OBJECT mode.  The midpoint-matching step is
    best-effort: any failure there is swallowed rather than aborting the render.
    """
    import math
    import bpy

    # Operators act on the active, selected object.
    bpy.context.view_layer.objects.active = obj
    obj.select_set(True)

    # auto_smooth_angle was removed in Blender 4.1 — guard for compatibility.
    if hasattr(obj.data, 'auto_smooth_angle'):
        obj.data.auto_smooth_angle = math.radians(smooth_angle_deg)

    # Edit mode: select edges above the angle threshold, mark them sharp,
    # then reuse the very same selection as UV seams.
    bpy.ops.object.mode_set(mode='EDIT')
    bpy.ops.mesh.select_all(action='DESELECT')
    bpy.ops.mesh.edges_select_sharp(sharpness=math.radians(smooth_angle_deg))
    bpy.ops.mesh.mark_sharp()
    bpy.ops.mesh.mark_seam(clear=False)

    # Optionally refine using OCC-derived sharp-edge midpoints.
    if sharp_edge_midpoints:
        try:
            import bmesh
            import mathutils

            # bmesh.from_mesh needs the mesh out of edit mode.
            bpy.ops.object.mode_set(mode='OBJECT')
            bm = bmesh.new()
            try:
                bm.from_mesh(obj.data)
                bm.edges.ensure_lookup_table()
                bm.verts.ensure_lookup_table()

                # KD-tree over Blender edge midpoints for nearest-edge lookup.
                kd = mathutils.kdtree.KDTree(len(bm.edges))
                for i, edge in enumerate(bm.edges):
                    kd.insert((edge.verts[0].co + edge.verts[1].co) / 2, i)
                kd.balance()

                tol = 0.5  # 0.5 mm tolerance (coordinates in mm before scale)
                for mp in sharp_edge_midpoints[:200]:  # cap work on huge inputs
                    _co, idx, dist = kd.find(mathutils.Vector(mp))
                    # kd.find returns (None, None, None) on an empty tree.
                    if idx is not None and dist < tol:
                        bm.edges[idx].seam = True
                        try:
                            bm.edges[idx].smooth = False  # mark sharp as well
                        except Exception:
                            pass
                bm.to_mesh(obj.data)
            finally:
                bm.free()  # always release the bmesh, even if to_mesh fails
        except Exception:
            pass  # non-fatal: midpoint matching is a best-effort refinement

    # Return to object mode (no-op if the midpoint branch already switched).
    bpy.ops.object.mode_set(mode='OBJECT')
def _import_stl(stl_file): def _import_stl(stl_file):
"""Import STL into Blender, using per-part STLs if available. """Import STL into Blender, using per-part STLs if available.
@@ -411,9 +475,13 @@ def main():
col.objects.unlink(part) col.objects.unlink(part)
target_col.objects.link(part) target_col.objects.link(part)
# Apply smooth shading # Apply smooth shading and mark sharp edges / UV seams
for part in parts: for part in parts:
_apply_smooth(part, SMOOTH_ANGLE) _apply_smooth(part, SMOOTH_ANGLE)
_mark_sharp_and_seams(
part, SMOOTH_ANGLE,
sharp_edge_midpoints=_mesh_attrs.get('sharp_edge_midpoints'),
)
# Material assignment: library materials if available, otherwise palette # Material assignment: library materials if available, otherwise palette
if material_library_path and material_map: if material_library_path and material_map:
@@ -504,6 +572,10 @@ def main():
for i, part in enumerate(parts): for i, part in enumerate(parts):
_apply_smooth(part, SMOOTH_ANGLE) _apply_smooth(part, SMOOTH_ANGLE)
_mark_sharp_and_seams(
part, SMOOTH_ANGLE,
sharp_edge_midpoints=_mesh_attrs.get('sharp_edge_midpoints'),
)
# Material assignment: library materials if available, else part_colors/palette # Material assignment: library materials if available, else part_colors/palette
if material_library_path and material_map: if material_library_path and material_map: