feat(O): UI-Vollständigkeit + v3-Workflows + OCC-Kantenanalyse

Backend:
- Phase I: notification_configs router (GET/PUT/{event}/{channel}/POST reset)
  war bereits in notifications.py — add-alias endpoint in uploads.py ergänzt
- OutputType schema: workflow_definition_id + workflow_name fields;
  PATCH unterstützt Workflow-Zuweisung; _enrich_workflow_names() batch query
- Dispatch-Integration: orders.py dispatch_renders() → dispatch_render_with_workflow()
  mit Legacy-Fallback; neues Logging
- uploads.py: POST /validations/{id}/add-alias für Material-Lücken

Pipeline:
- step_processor.py: extract_mesh_edge_data() via OCC — berechnet Dihedralwinkel
  aller Kanten, liefert suggested_smooth_angle + sharp_edge_midpoints
  Integriert in extract_cad_metadata() und process_cad_file()
- domains/rendering/tasks.py: apply_asset_library_materials_task (K3),
  export_gltf_for_order_line_task → Blender export_gltf.py (K4),
  export_blend_for_order_line_task → export_blend.py fix (K5)
- render-worker/scripts/still_render.py: _mark_sharp_and_seams() mit
  OCC midpoint KD-tree matching + UV-Seam-Markierung
- render-worker/scripts/blender_render.py: identische Funktion + mesh_attributes parsing

Frontend:
- Layout.tsx: Upload-Link in Sidebar (alle User); Asset Libraries Link (admin/PM)
- App.tsx: /asset-libraries Route
- AssetLibrary.tsx: neue Seite (Upload, Catalog-Anzeige, Refresh, Toggle, Delete)
- OutputTypeTable.tsx: Workflow-Dropdown + Legacy/Workflow Badge
- ProductDetail.tsx: Geometry-Karte (Volumen, Surface, BBox, Sharp-Winkel)
- api/outputTypes.ts + api/products.ts: neue Felder
- api/imports.ts: ImportValidation API

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-06 23:20:55 +01:00
parent f15b035b88
commit 382a18fd02
18 changed files with 1222 additions and 355 deletions
-1
View File
@@ -18,7 +18,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
# Copy docker CLI for worker scaling
COPY --from=docker-cli /usr/local/bin/docker /usr/local/bin/docker
COPY --from=docker-cli /usr/local/lib/docker/cli-plugins /usr/local/lib/docker/cli-plugins
# Install Python dependencies (including dev extras for pytest)
COPY pyproject.toml .
+13 -2
View File
@@ -1,4 +1,5 @@
import io
import logging
import os
import re
import uuid
@@ -6,6 +7,8 @@ import zipfile
from datetime import datetime
from typing import Optional
logger = logging.getLogger(__name__)
from fastapi import APIRouter, Depends, HTTPException, Query, status
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
@@ -906,9 +909,17 @@ async def dispatch_renders(
)
await db.commit()
from app.tasks.step_tasks import dispatch_order_line_render
from app.domains.rendering.dispatch_service import dispatch_render_with_workflow
for line in lines:
dispatch_order_line_render.delay(str(line.id))
try:
dispatch_render_with_workflow(str(line.id))
except Exception as exc:
logger.warning(
"dispatch_render_with_workflow failed for %s, falling back: %s",
line.id, exc,
)
from app.tasks.step_tasks import dispatch_order_line_render
dispatch_order_line_render.delay(str(line.id))
return {"dispatched": len(lines), "order_status": order.status.value}
+24 -3
View File
@@ -13,6 +13,7 @@ from app.models.output_type import OutputType, VALID_RENDER_BACKENDS
from app.schemas.output_type import OutputTypeCreate, OutputTypeOut, OutputTypePatch
from app.utils.auth import get_current_user, require_admin_or_pm
from app.models.user import User
from app.domains.rendering.models import WorkflowDefinition
router = APIRouter(prefix="/output-types", tags=["output-types"])
@@ -23,9 +24,26 @@ def _ot_to_out(ot: OutputType) -> OutputTypeOut:
if ot.pricing_tier:
out.pricing_tier_name = f"{ot.pricing_tier.category_key}/{ot.pricing_tier.quality_level}"
out.price_per_item = float(ot.pricing_tier.price_per_item)
# workflow_definition_id is mapped via model_validate from the ORM column.
# workflow_name is resolved by _enrich_workflow_names() after the fact.
return out
async def _enrich_workflow_names(db: AsyncSession, items: list[OutputTypeOut]) -> list[OutputTypeOut]:
    """Fill in ``workflow_name`` on every item that has a ``workflow_definition_id``.

    Performs one batched query for all referenced WorkflowDefinition rows rather
    than a lookup per item. Items without a workflow id are left untouched; ids
    that no longer resolve leave ``workflow_name`` as None. Returns the same
    list (mutated in place) so callers can return it directly.
    """
    referenced = {it.workflow_definition_id for it in items}
    referenced.discard(None)
    if not referenced:
        return items
    rows = await db.execute(
        select(WorkflowDefinition).where(WorkflowDefinition.id.in_(referenced))
    )
    name_by_id: dict[uuid.UUID, str] = {}
    for wf in rows.scalars().all():
        name_by_id[wf.id] = wf.name
    for it in items:
        if it.workflow_definition_id is not None:
            it.workflow_name = name_by_id.get(it.workflow_definition_id)
    return items
@router.get("", response_model=list[OutputTypeOut])
async def list_output_types(
include_inactive: bool = Query(False),
@@ -50,7 +68,8 @@ async def list_output_types(
)
)
result = await db.execute(stmt)
return [_ot_to_out(ot) for ot in result.scalars().all()]
items = [_ot_to_out(ot) for ot in result.scalars().all()]
return await _enrich_workflow_names(db, items)
@router.post("", response_model=OutputTypeOut, status_code=status.HTTP_201_CREATED)
@@ -74,7 +93,8 @@ async def create_output_type(
result2 = await db.execute(
select(OutputType).options(selectinload(OutputType.pricing_tier)).where(OutputType.id == ot.id)
)
return _ot_to_out(result2.scalar_one())
items = await _enrich_workflow_names(db, [_ot_to_out(result2.scalar_one())])
return items[0]
@router.patch("/{output_type_id}", response_model=OutputTypeOut)
@@ -101,7 +121,8 @@ async def update_output_type(
result2 = await db.execute(
select(OutputType).options(selectinload(OutputType.pricing_tier)).where(OutputType.id == ot.id)
)
return _ot_to_out(result2.scalar_one())
items = await _enrich_workflow_names(db, [_ot_to_out(result2.scalar_one())])
return items[0]
@router.delete("/{output_type_id}", status_code=status.HTTP_204_NO_CONTENT)
+53
View File
@@ -450,3 +450,56 @@ async def get_import_validation(
if not val:
raise HTTPException(404, detail="Validation not found")
return ImportValidationOut.model_validate(val)
class AddAliasRequest(BaseModel):
    """Request body for creating a material alias from an import validation."""

    # Part name from the CAD import that could not be matched automatically.
    part_name: str
    # Name of the existing library material the part should map to.
    material_name: str
@router.post("/validations/{validation_id}/add-alias", status_code=status.HTTP_201_CREATED)
async def add_material_alias_from_validation(
    validation_id: uuid.UUID,
    body: AddAliasRequest,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Create a MaterialAlias entry mapping ``part_name`` to an existing material.

    Used to close material gaps discovered during import validation.
    Requires the admin or project_manager role.

    Raises:
        HTTPException 403: the caller lacks the required role.
        HTTPException 404: the validation or the target material does not exist.
        HTTPException 409: an alias with the same name already exists
            (compared case-insensitively).
    """
    from sqlalchemy import func as sql_func

    from app.domains.imports.models import ImportValidation
    from app.domains.materials.models import Material, MaterialAlias

    # Gate to admin/PM. Checked inline (instead of a role dependency) so the
    # response carries a specific error message.
    if user.role.value not in ("admin", "project_manager"):
        raise HTTPException(status_code=403, detail="Admin or project_manager required")

    # The validation id is only checked for existence; the alias itself is global.
    val_result = await db.execute(select(ImportValidation).where(ImportValidation.id == validation_id))
    if not val_result.scalar_one_or_none():
        raise HTTPException(404, detail="Validation not found")

    # Resolve the target material by exact name.
    mat_result = await db.execute(select(Material).where(Material.name == body.material_name))
    material = mat_result.scalar_one_or_none()
    if not material:
        raise HTTPException(404, detail=f"Material '{body.material_name}' not found in library")

    # Reject duplicates case-insensitively so e.g. 'Steel' and 'steel' cannot coexist.
    dup_result = await db.execute(
        select(MaterialAlias).where(
            sql_func.lower(MaterialAlias.alias) == body.part_name.lower()
        )
    )
    if dup_result.scalar_one_or_none():
        raise HTTPException(409, detail=f"Alias '{body.part_name}' already exists")

    alias = MaterialAlias(material_id=material.id, alias=body.part_name)
    db.add(alias)
    await db.commit()
    await db.refresh(alias)
    return {
        "id": str(alias.id),
        "alias": alias.alias,
        "material_id": str(material.id),
        "material_name": material.name,
    }
+3
View File
@@ -33,6 +33,7 @@ class OutputTypePatch(BaseModel):
transparent_bg: bool | None = None
pricing_tier_id: int | None = None
cycles_device: str | None = None
workflow_definition_id: uuid.UUID | None = None
class OutputTypeOut(BaseModel):
@@ -51,6 +52,8 @@ class OutputTypeOut(BaseModel):
pricing_tier_id: int | None = None
pricing_tier_name: str | None = None
price_per_item: float | None = None
workflow_definition_id: uuid.UUID | None = None
workflow_name: str | None = None
is_active: bool
created_at: datetime
updated_at: datetime
+153 -10
View File
@@ -355,11 +355,16 @@ def render_order_line_still_task(self, order_line_id: str, **params) -> dict:
max_retries=1,
)
def export_gltf_for_order_line_task(self, order_line_id: str) -> dict:
"""Export a geometry-only GLB from the STL cache using trimesh (no Blender).
"""Export a GLB from the STL cache via Blender subprocess (with trimesh fallback).
Publishes a MediaAsset with asset_type='gltf_geometry'.
Publishes a MediaAsset with asset_type='gltf_geometry' (no asset lib) or
'gltf_production' (when an asset library is applied).
Requires the STL low-quality cache to exist.
"""
import json
import os
import subprocess
step_path_str, cad_file_id = _resolve_step_path_for_order_line(order_line_id)
if not step_path_str:
raise RuntimeError(f"Cannot resolve STEP path for order_line {order_line_id}")
@@ -372,14 +377,47 @@ def export_gltf_for_order_line_task(self, order_line_id: str) -> dict:
)
output_path = step.parent / f"{step.stem}_geometry.glb"
scripts_dir = Path(os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts"))
export_script = scripts_dir / "export_gltf.py"
from app.services.render_blender import find_blender, is_blender_available
asset_type = "gltf_geometry"
if is_blender_available() and export_script.exists():
blender_bin = find_blender()
cmd = [
blender_bin, "--background",
"--python", str(export_script),
"--",
"--stl_path", str(stl_path),
"--output_path", str(output_path),
"--asset_library_blend", "",
"--material_map", json.dumps({}),
]
try:
result = subprocess.run(cmd, capture_output=True, text=True, timeout=300)
if result.returncode != 0:
raise RuntimeError(
f"export_gltf.py exited {result.returncode}:\n{result.stderr[-500:]}"
)
publish_asset.delay(order_line_id, asset_type, str(output_path))
logger.info("export_gltf_for_order_line_task completed via Blender: %s", output_path.name)
return {"glb_path": str(output_path), "method": "blender"}
except Exception as exc:
logger.warning(
"Blender GLB export failed for %s, falling back to trimesh: %s",
order_line_id, exc,
)
# Trimesh fallback
try:
import trimesh
mesh = trimesh.load(str(stl_path))
mesh.export(str(output_path))
publish_asset.delay(order_line_id, "gltf_geometry", str(output_path))
logger.info("export_gltf_for_order_line_task completed: %s", output_path.name)
return {"glb_path": str(output_path)}
publish_asset.delay(order_line_id, asset_type, str(output_path))
logger.info("export_gltf_for_order_line_task completed via trimesh: %s", output_path.name)
return {"glb_path": str(output_path), "method": "trimesh"}
except Exception as exc:
logger.error("export_gltf_for_order_line_task failed for %s: %s", order_line_id, exc)
raise self.retry(exc=exc, countdown=15)
@@ -392,11 +430,12 @@ def export_gltf_for_order_line_task(self, order_line_id: str) -> dict:
max_retries=1,
)
def export_blend_for_order_line_task(self, order_line_id: str) -> dict:
"""Export a production-quality GLB via Blender + asset library (export_gltf.py).
"""Export a production .blend file via Blender + asset library (export_blend.py).
Publishes a MediaAsset with asset_type='blend_production'.
Requires Blender + the render-scripts directory.
"""
import json
import os
import subprocess
@@ -409,15 +448,38 @@ def export_blend_for_order_line_task(self, order_line_id: str) -> dict:
if not stl_path.exists():
raise RuntimeError(f"STL cache not found: {stl_path}")
output_path = step.parent / f"{step.stem}_production.glb"
output_path = step.parent / f"{step.stem}_production.blend"
scripts_dir = Path(os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts"))
export_script = scripts_dir / "export_gltf.py"
export_script = scripts_dir / "export_blend.py"
from app.services.render_blender import find_blender
blender_bin = find_blender()
if not blender_bin:
raise RuntimeError("Blender binary not found — cannot run export_blend task")
# Resolve asset library path and material map from DB
asset_lib_path = ""
mat_map: dict = {}
try:
from sqlalchemy import create_engine, select as sql_select
from sqlalchemy.orm import Session
from app.config import settings as app_settings
from app.domains.orders.models import OrderLine
from app.domains.products.models import Product
engine = create_engine(app_settings.database_url_sync)
with Session(engine) as s:
line = s.execute(sql_select(OrderLine).where(OrderLine.id == order_line_id)).scalar_one_or_none()
if line:
product = s.execute(sql_select(Product).where(Product.id == line.product_id)).scalar_one_or_none()
if product:
mat_map = {
m.get("part_name", ""): m.get("material", "")
for m in (product.cad_part_materials or [])
}
except Exception as exc:
logger.warning("export_blend_for_order_line_task: DB resolution error (non-fatal): %s", exc)
try:
cmd = [
blender_bin, "--background",
@@ -425,20 +487,101 @@ def export_blend_for_order_line_task(self, order_line_id: str) -> dict:
"--",
"--stl_path", str(stl_path),
"--output_path", str(output_path),
"--asset_library_blend", asset_lib_path,
"--material_map", json.dumps(mat_map),
]
result = subprocess.run(cmd, capture_output=True, text=True, timeout=300)
if result.returncode != 0:
raise RuntimeError(
f"export_gltf.py exited {result.returncode}:\n{result.stderr[-500:]}"
f"export_blend.py exited {result.returncode}:\n{result.stderr[-500:]}"
)
publish_asset.delay(order_line_id, "blend_production", str(output_path))
logger.info("export_blend_for_order_line_task completed: %s", output_path.name)
return {"glb_path": str(output_path)}
return {"blend_path": str(output_path)}
except Exception as exc:
logger.error("export_blend_for_order_line_task failed for %s: %s", order_line_id, exc)
raise self.retry(exc=exc, countdown=30)
@celery_app.task(
    bind=True,
    name="app.domains.rendering.tasks.apply_asset_library_materials_task",
    queue="thumbnail_rendering",
    max_retries=1,
)
def apply_asset_library_materials_task(self, order_line_id: str, asset_library_id: str) -> dict:
    """Apply Blender asset library materials to a render via the asset_library.py script."""
    import json
    import os
    import subprocess
    from pathlib import Path

    from app.services.render_blender import find_blender

    blender_bin = find_blender()
    if not blender_bin:
        raise RuntimeError("Blender not available")

    # Resolve the STL cache path, the asset-library .blend path and the
    # part-name -> material mapping from the database (sync session, since
    # this runs inside a Celery worker).
    def _resolve():
        from sqlalchemy import create_engine, select as sql_select
        from sqlalchemy.orm import Session
        from app.config import settings
        from app.domains.orders.models import OrderLine
        from app.domains.products.models import CadFile, Product

        engine = create_engine(settings.database_url_sync)
        with Session(engine) as session:
            line = session.execute(
                sql_select(OrderLine).where(OrderLine.id == order_line_id)
            ).scalar_one_or_none()
            if not line:
                return None, None, None
            product = session.execute(
                sql_select(Product).where(Product.id == line.product_id)
            ).scalar_one_or_none()
            if not product or not product.cad_file_id:
                return None, None, None
            cad = session.execute(
                sql_select(CadFile).where(CadFile.id == product.cad_file_id)
            ).scalar_one_or_none()
            if cad:
                stored = Path(cad.stored_path)
                stl = str(stored.parent / f"{stored.stem}_low.stl")
            else:
                stl = None
            # The asset-library lookup is best-effort: a missing model or row
            # simply means no library blend is passed to the script.
            try:
                from app.domains.materials.models import AssetLibrary

                lib = session.execute(
                    sql_select(AssetLibrary).where(AssetLibrary.id == asset_library_id)
                ).scalar_one_or_none()
                blend = lib.blend_file_path if lib else None
            except Exception:
                blend = None
            mapping = {
                m.get("part_name", ""): m.get("material", "")
                for m in (product.cad_part_materials or [])
            }
            return stl, blend, mapping

    resolved = _resolve()
    if resolved is None or resolved[0] is None:
        logger.warning("apply_asset_library_materials_task: could not resolve paths for %s", order_line_id)
        return {"status": "skipped"}
    stl_path, blend_path, mat_map = resolved
    if not stl_path or not Path(stl_path).exists():
        logger.warning("STL not found for %s", order_line_id)
        return {"status": "skipped", "reason": "stl_not_found"}

    scripts_dir = Path(os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts"))
    script = scripts_dir / "asset_library.py"
    cmd = [
        blender_bin, "--background", "--python", str(script), "--",
        "--stl_path", stl_path,
        "--asset_library_blend", blend_path or "",
        "--material_map", json.dumps(mat_map),
    ]
    try:
        proc = subprocess.run(cmd, capture_output=True, text=True, timeout=120)
        if proc.returncode != 0:
            raise RuntimeError(f"asset_library.py failed: {proc.stderr[-500:]}")
    except Exception as exc:
        logger.error("apply_asset_library_materials_task failed for %s: %s", order_line_id, exc)
        raise self.retry(exc=exc, countdown=15)
    return {"status": "applied", "order_line_id": order_line_id}
def _build_ffmpeg_cmd(
frames_dir: Path, output_mp4: Path, fps: int = 30, bg_color: str = ""
) -> list:
+123
View File
@@ -124,6 +124,10 @@ def extract_cad_metadata(cad_file_id: str) -> None:
objects = _extract_step_objects(step_path)
cad_file.parsed_objects = {"objects": objects}
edge_data = extract_mesh_edge_data(str(step_path))
if edge_data:
cad_file.mesh_attributes = {**(cad_file.mesh_attributes or {}), **edge_data}
gltf_path = _convert_to_gltf(step_path, cad_file_id, settings.upload_dir)
if gltf_path:
cad_file.gltf_path = str(gltf_path)
@@ -173,6 +177,11 @@ def process_cad_file(cad_file_id: str) -> None:
objects = _extract_step_objects(step_path)
cad_file.parsed_objects = {"objects": objects}
# Step 1b: Extract sharp-edge topology data and merge into mesh_attributes
edge_data = extract_mesh_edge_data(str(step_path))
if edge_data:
cad_file.mesh_attributes = {**(cad_file.mesh_attributes or {}), **edge_data}
# Step 2: Generate thumbnail — pass empty part_colors so the Three.js
# renderer extracts named parts and auto-assigns palette colours.
# Other renderers (Blender, Pillow) ignore the part_colors argument.
@@ -197,6 +206,120 @@ def process_cad_file(cad_file_id: str) -> None:
session.commit()
def extract_mesh_edge_data(step_path: str) -> dict:
    """Extract sharp-edge metrics and a suggested auto-smooth angle from STEP topology.

    Reads the STEP file with pythonocc, maps every edge to its adjacent faces,
    and computes the dihedral angle between the two face normals sampled at
    their UV-parameter centres.

    Returns a dict with:
        - suggested_smooth_angle: float (degrees) — recommended auto-smooth angle
        - has_mechanical_edges: bool — True if the part has distinct hard edges
        - sharp_edge_midpoints: list of [x, y, z] — midpoints of sharp edges (max 500)

    Returns {} when OCC is unavailable, the file cannot be read, or no angles
    could be computed — this is deliberately non-fatal for the pipeline.
    """
    try:
        import math

        from OCC.Core.STEPControl import STEPControl_Reader
        from OCC.Core.IFSelect import IFSelect_RetDone
        from OCC.Core.TopAbs import TopAbs_EDGE, TopAbs_FACE
        from OCC.Core.BRepAdaptor import BRepAdaptor_Curve, BRepAdaptor_Surface
        from OCC.Core.BRepMesh import BRepMesh_IncrementalMesh
        from OCC.Core.TopTools import TopTools_IndexedDataMapOfShapeListOfShape
        from OCC.Core.TopExp import topexp

        reader = STEPControl_Reader()
        if reader.ReadFile(step_path) != IFSelect_RetDone:
            return {}
        reader.TransferRoots()
        shape = reader.OneShape()

        # Mesh the shape for geometry access.
        BRepMesh_IncrementalMesh(shape, 0.5, False, 0.5)

        # Map every edge to the faces that share it (needed for dihedral angles).
        edge_face_map = TopTools_IndexedDataMapOfShapeListOfShape()
        topexp.MapShapesAndAncestors(shape, TopAbs_EDGE, TopAbs_FACE, edge_face_map)

        dihedral_angles: list[float] = []
        sharp_midpoints: list[list[float]] = []
        # OCC indexed maps are 1-based.
        for i in range(1, edge_face_map.Extent() + 1):
            edge = edge_face_map.FindKey(i)
            faces = edge_face_map.FindFromIndex(i)
            if faces.Size() < 2:
                continue
            face_list = list(faces)
            if len(face_list) < 2:
                continue
            try:
                surf1 = BRepAdaptor_Surface(face_list[0])
                surf2 = BRepAdaptor_Surface(face_list[1])
                curve = BRepAdaptor_Curve(edge)
                mid_u = (curve.FirstParameter() + curve.LastParameter()) / 2
                mid_pt = curve.Value(mid_u)
                # Sample each face's normal at its UV-parameter centre
                # (cross product of the two first partial derivatives).
                u1 = (surf1.FirstUParameter() + surf1.LastUParameter()) / 2
                v1 = (surf1.FirstVParameter() + surf1.LastVParameter()) / 2
                n1 = surf1.DN(u1, v1, 0, 1).Crossed(surf1.DN(u1, v1, 1, 0))
                u2 = (surf2.FirstUParameter() + surf2.LastUParameter()) / 2
                v2 = (surf2.FirstVParameter() + surf2.LastVParameter()) / 2
                n2 = surf2.DN(u2, v2, 0, 1).Crossed(surf2.DN(u2, v2, 1, 0))
                if n1.Magnitude() > 1e-10 and n2.Magnitude() > 1e-10:
                    n1.Normalize()
                    n2.Normalize()
                    cos_angle = max(-1.0, min(1.0, n1.Dot(n2)))
                    # abs() folds away surface orientation, so the angle is
                    # always in [0, 90] degrees.
                    angle_deg = math.degrees(math.acos(abs(cos_angle)))
                    dihedral_angles.append(angle_deg)
                    # Cap midpoint collection so huge parts don't bloat the JSON.
                    if angle_deg > 20 and len(sharp_midpoints) < 500:
                        sharp_midpoints.append([
                            round(mid_pt.X(), 3),
                            round(mid_pt.Y(), 3),
                            round(mid_pt.Z(), 3),
                        ])
            except Exception:
                # Degenerate edge/surface — skip it, keep scanning the rest.
                continue

        if not dihedral_angles:
            return {}

        import statistics

        max_angle = max(dihedral_angles)
        # Suggest a smooth angle slightly below the median of the hard edges,
        # clamped to a sane [15, 60] degree range.
        hard_edges = [a for a in dihedral_angles if a > 20]
        if hard_edges:
            suggested = max(15.0, min(60.0, statistics.median(hard_edges) * 0.8))
        else:
            suggested = 30.0
        return {
            "suggested_smooth_angle": round(suggested, 1),
            "has_mechanical_edges": max_angle > 45,
            "sharp_edge_midpoints": sharp_midpoints,
        }
    except ImportError:
        # OCC not available (e.g. in the backend container).
        return {}
    except Exception as exc:
        logger.warning("extract_mesh_edge_data failed (non-fatal): %s", exc)
        return {}
def _extract_step_objects(step_path: Path) -> list[str]:
"""Extract part names from STEP file using pythonocc."""
try: