chore: snapshot workflow migration progress

This commit is contained in:
2026-04-12 11:49:04 +02:00
parent 0cd02513d5
commit 3e810c74a3
163 changed files with 31774 additions and 2753 deletions
+86 -15
View File
@@ -3,10 +3,12 @@ import uuid
from datetime import datetime, timedelta
from typing import Any, Optional
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, update as sql_update, func, case, distinct, and_, extract
from pydantic import BaseModel
from pydantic import BaseModel, ValidationError
from app.database import get_db
from app.core.render_paths import resolve_result_path, result_path_to_storage_key
from app.models.user import User
from app.models.system_setting import SystemSetting
from app.models.cad_file import CadFile, ProcessingStatus
@@ -27,7 +29,7 @@ SETTINGS_DEFAULTS: dict[str, str] = {
"blender_eevee_samples": "64",
"thumbnail_format": "jpg",
"blender_smooth_angle": "30",
"cycles_device": "auto",
"cycles_device": "gpu",
"render_backend": "celery",
"blender_max_concurrent_renders": "3",
"product_thumbnail_priority": '["latest_render","cad_thumbnail"]',
@@ -63,7 +65,7 @@ class SettingsOut(BaseModel):
blender_eevee_samples: int = 64
thumbnail_format: str = "jpg"
blender_smooth_angle: int = 30
cycles_device: str = "auto"
cycles_device: str = "gpu"
render_backend: str = "celery"
blender_max_concurrent_renders: int = 3
product_thumbnail_priority: str = '["latest_render","cad_thumbnail"]'
@@ -225,9 +227,9 @@ def _settings_to_out(raw: dict[str, str]) -> SettingsOut:
smtp_password=raw.get("smtp_password", ""),
smtp_from_address=raw.get("smtp_from_address", ""),
scene_linear_deflection=float(raw.get("scene_linear_deflection", "0.1")),
scene_angular_deflection=float(raw.get("scene_angular_deflection", "0.5")),
scene_angular_deflection=float(raw.get("scene_angular_deflection", "0.1")),
render_linear_deflection=float(raw.get("render_linear_deflection", "0.03")),
render_angular_deflection=float(raw.get("render_angular_deflection", "0.2")),
render_angular_deflection=float(raw.get("render_angular_deflection", "0.05")),
gltf_scale_factor=float(raw.get("gltf_scale_factor", "0.001")),
gltf_smooth_normals=raw.get("gltf_smooth_normals", "true") == "true",
viewer_max_distance=float(raw.get("viewer_max_distance", "50")),
@@ -680,7 +682,10 @@ async def seed_workflows(
):
"""Create the standard workflow definitions if they do not already exist."""
from app.domains.rendering.models import WorkflowDefinition
from app.domains.rendering.workflow_config_utils import build_preset_workflow_config
from app.domains.rendering.workflow_config_utils import (
build_preset_workflow_config,
build_workflow_blueprint_config,
)
STANDARD_WORKFLOWS = [
{
@@ -697,6 +702,13 @@ async def seed_workflows(
{"render_engine": "eevee", "samples": 64, "resolution": [1920, 1080]},
),
},
{
"name": "Still Image — Graph",
"config": build_preset_workflow_config(
"still_graph",
{"render_engine": "cycles", "samples": 256, "resolution": [1920, 1080]},
),
},
{
"name": "Turntable Animation",
"config": build_preset_workflow_config(
@@ -711,6 +723,18 @@ async def seed_workflows(
{"render_engine": "cycles", "samples": 128, "angles": [0, 45, 90]},
),
},
{
"name": "CAD Intake Blueprint",
"config": build_workflow_blueprint_config("cad_intake"),
},
{
"name": "Order Rendering Blueprint",
"config": build_workflow_blueprint_config("order_rendering"),
},
{
"name": "Still Graph Blueprint",
"config": build_workflow_blueprint_config("still_graph_reference"),
},
]
existing_result = await db.execute(select(WorkflowDefinition))
@@ -730,6 +754,57 @@ async def seed_workflows(
return {"created": created, "message": f"Created {created} workflow definition(s)"}
@router.post("/settings/backfill-workflows", status_code=status.HTTP_200_OK)
async def backfill_workflows(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Rewrite persisted legacy workflow configs into canonical DAG form."""
    from app.domains.rendering.models import WorkflowDefinition
    from app.domains.rendering.workflow_config_utils import (
        canonicalize_workflow_config,
        workflow_config_requires_canonicalization,
    )
    from app.domains.rendering.workflow_schema import WorkflowConfig

    rows = await db.execute(
        select(WorkflowDefinition).order_by(WorkflowDefinition.created_at)
    )
    definitions = rows.scalars().all()

    rewritten: list[dict[str, str]] = []  # successfully canonicalized definitions
    rejected: list[dict[str, str]] = []  # definitions whose canonical form failed validation

    for definition in definitions:
        # Leave configs alone when they are already in canonical DAG form.
        if not workflow_config_requires_canonicalization(definition.config):
            continue
        try:
            canonical = canonicalize_workflow_config(definition.config)
            # Validate before persisting so a bad rewrite never reaches the DB.
            WorkflowConfig.model_validate(canonical)
        except (ValidationError, ValueError) as exc:
            rejected.append(
                {
                    "id": str(definition.id),
                    "name": definition.name,
                    "error": str(exc),
                }
            )
            continue
        definition.config = canonical
        # JSON columns are mutated in place; mark them dirty explicitly.
        flag_modified(definition, "config")
        rewritten.append({"id": str(definition.id), "name": definition.name})

    await db.commit()
    return {
        "scanned": len(definitions),
        "updated": len(rewritten),
        "invalid": rejected,
        "workflows": rewritten,
        "message": f"Canonicalized {len(rewritten)} workflow definition(s)",
    }
@router.get("/settings/renderer-status")
async def renderer_status(
admin: User = Depends(require_global_admin),
@@ -756,13 +831,10 @@ async def import_existing_media_assets(
created = 0
skipped = 0
from app.config import settings as _app_settings
def _normalize_key(path: str) -> str:
"""Strip UPLOAD_DIR prefix to store relative storage keys."""
key = str(path)
prefix = str(_app_settings.upload_dir).rstrip("/") + "/"
return key[len(prefix):] if key.startswith(prefix) else key
"""Normalize mixed legacy/canonical paths to a stable relative storage key."""
key = result_path_to_storage_key(path)
return key or str(path)
# 1. CadFiles with thumbnail_path
await db.execute(text("SET LOCAL app.current_tenant_id = 'bypass'"))
@@ -843,7 +915,6 @@ async def purge_render_media(
"""
import logging
from pathlib import Path
from app.config import settings
from app.core.storage import get_storage
from app.domains.media.models import MediaAsset, MediaAssetType
@@ -865,8 +936,8 @@ async def purge_render_media(
# Delete backing file
key = asset.storage_key
try:
candidate = Path(key) if Path(key).is_absolute() else Path(settings.upload_dir) / key
if candidate.exists():
candidate = resolve_result_path(key)
if candidate is not None and candidate.exists():
freed_bytes += candidate.stat().st_size
candidate.unlink()
deleted_files += 1
+37 -9
View File
@@ -13,6 +13,9 @@ from sqlalchemy import select
from sqlalchemy.orm import selectinload
from app.database import get_db
from app.core.render_paths import resolve_result_path
from app.config import settings
from app.domains.media.models import MediaAsset, MediaAssetType
from app.models.cad_file import CadFile, ProcessingStatus
from app.models.order import Order
from app.models.order_item import OrderItem
@@ -191,6 +194,38 @@ async def _get_cad_file(cad_id: uuid.UUID, db: AsyncSession) -> CadFile:
return cad
async def _resolve_gltf_path(cad: CadFile, db: AsyncSession) -> Path | None:
    """Resolve the best available GLTF/GLB path for a CAD file.

    Prefer the legacy cad_files.gltf_path for compatibility, but fall back to
    the canonical media_assets.gltf_geometry record written by the newer export
    pipeline.
    """
    # 1) Legacy column on cad_files wins when its file is still on disk.
    if cad.gltf_path:
        candidate = resolve_result_path(cad.gltf_path) or Path(cad.gltf_path)
        if candidate.exists():
            return candidate

    # 2) Newest non-archived gltf_geometry media asset for this CAD file.
    query = (
        select(MediaAsset)
        .where(
            MediaAsset.cad_file_id == cad.id,
            MediaAsset.asset_type == MediaAssetType.gltf_geometry,
            MediaAsset.is_archived == False,  # noqa: E712
        )
        .order_by(MediaAsset.created_at.desc())
    )
    newest = (await db.execute(query)).scalars().first()
    if newest and newest.storage_key:
        resolved = resolve_result_path(newest.storage_key)
        if resolved is None:
            # Unrecognized key shape: treat it as relative to the upload root.
            resolved = Path(settings.upload_dir) / newest.storage_key.lstrip("/")
        if resolved.exists():
            return resolved
    return None
@router.get("/{id}/thumbnail")
async def get_thumbnail(
id: uuid.UUID,
@@ -228,20 +263,13 @@ async def get_model(
):
"""Serve the glTF file for a CAD file."""
cad = await _get_cad_file(id, db)
if not cad.gltf_path:
gltf_path = await _resolve_gltf_path(cad, db)
if gltf_path is None:
raise HTTPException(
status_code=404,
detail="glTF model not yet generated for this CAD file",
)
gltf_path = Path(cad.gltf_path)
if not gltf_path.exists():
raise HTTPException(
status_code=404,
detail="glTF file missing from storage",
)
# glTF files may be either .gltf (JSON) or .glb (binary)
suffix = gltf_path.suffix.lower()
if suffix == ".glb":
+5 -19
View File
@@ -30,6 +30,7 @@ from app.schemas.order_line import OrderLineCreate, OrderLineOut
from app.schemas.product import ProductOut
from app.schemas.output_type import OutputTypeOut
from app.services.order_service import generate_order_number
from app.core.render_paths import resolve_result_path, result_path_to_public_url
from app.utils.auth import get_current_user, require_admin_or_pm, require_pm_or_above
router = APIRouter(prefix="/orders", tags=["orders"])
@@ -41,13 +42,7 @@ def _is_privileged(user: User) -> bool:
def _result_path_to_url(result_path: str) -> str | None:
"""Convert an internal result_path to a servable static URL."""
if "/renders/" in result_path:
idx = result_path.index("/renders/")
return result_path[idx:]
if "/thumbnails/" in result_path:
idx = result_path.index("/thumbnails/")
return result_path[idx:]
return None
return result_path_to_public_url(result_path, require_exists=True)
def _build_line_out(line: OrderLine) -> OrderLineOut:
@@ -1544,15 +1539,6 @@ async def download_renders(
if not lines:
raise HTTPException(404, detail="No completed renders found for this order")
from app.config import settings as app_settings
def _resolve_path(p: str) -> str:
"""Translate container-relative paths to backend filesystem paths."""
# Flamenco worker mounts the uploads volume at /shared, backend at /app/uploads
if p.startswith("/shared/"):
return app_settings.upload_dir + p[len("/shared"):]
return p
buf = io.BytesIO()
# Track names used to avoid duplicates
name_counts: dict[str, int] = {}
@@ -1561,8 +1547,8 @@ async def download_renders(
for line in lines:
if not line.result_path:
continue
fs_path = _resolve_path(line.result_path)
if not os.path.isfile(fs_path):
resolved_path = resolve_result_path(line.result_path)
if resolved_path is None or not resolved_path.is_file():
continue
# Build a meaningful filename
product_name = (line.product.name or line.product.pim_id or "product") if line.product else "product"
@@ -1587,7 +1573,7 @@ async def download_renders(
name_counts[base_name] = 0
archive_name = base_name
zf.write(fs_path, archive_name)
zf.write(resolved_path, archive_name)
if not zf.infolist():
raise HTTPException(404, detail="No render files found on disk")
+128 -11
View File
@@ -12,6 +12,7 @@ from app.models.order_line import OrderLine
from app.models.output_type import (
OUTPUT_TYPE_ARTIFACT_KINDS,
OUTPUT_TYPE_WORKFLOW_FAMILIES,
OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES,
OutputType,
VALID_RENDER_BACKENDS,
)
@@ -21,12 +22,19 @@ from app.models.user import User
from app.domains.rendering.models import WorkflowDefinition
from app.domains.rendering.output_type_contracts import (
apply_invocation_overrides_to_render_settings,
build_output_type_contract_catalog,
build_output_type_invocation_profile,
derive_supported_artifact_kinds_from_workflow_config,
infer_output_type_artifact_kind,
infer_workflow_family_from_config,
InvalidInvocationOverridesError,
merge_output_type_invocation_overrides,
normalize_invocation_overrides,
resolve_output_type_invocation_overrides,
validate_and_normalize_invocation_overrides,
validate_output_type_contract,
)
from app.domains.rendering.schemas import OutputTypeContractCatalogOut, OutputTypeInvocationProfileOut
router = APIRouter(prefix="/output-types", tags=["output-types"])
@@ -34,6 +42,34 @@ router = APIRouter(prefix="/output-types", tags=["output-types"])
def _ot_to_out(ot: OutputType) -> OutputTypeOut:
"""Convert an OutputType ORM instance to OutputTypeOut with pricing convenience fields."""
out = OutputTypeOut.model_validate(ot)
resolved_invocation_overrides = resolve_output_type_invocation_overrides(
ot.render_settings,
getattr(ot, "invocation_overrides", None),
artifact_kind=ot.artifact_kind,
is_animation=ot.is_animation,
)
out.invocation_overrides = resolved_invocation_overrides
out.render_settings = apply_invocation_overrides_to_render_settings(
ot.render_settings,
resolved_invocation_overrides,
)
out.invocation_profile = OutputTypeInvocationProfileOut.model_validate(
build_output_type_invocation_profile(
renderer=ot.renderer,
render_backend=ot.render_backend,
workflow_family=ot.workflow_family,
artifact_kind=ot.artifact_kind,
output_format=ot.output_format,
is_animation=ot.is_animation,
workflow_definition_id=ot.workflow_definition_id,
workflow_rollout_mode=getattr(ot, "workflow_rollout_mode", "legacy_only"),
transparent_bg=ot.transparent_bg,
cycles_device=ot.cycles_device,
material_override=ot.material_override,
render_settings=ot.render_settings,
invocation_overrides=getattr(ot, "invocation_overrides", None),
)
)
if ot.pricing_tier:
out.pricing_tier_name = f"{ot.pricing_tier.category_key}/{ot.pricing_tier.quality_level}"
out.price_per_item = float(ot.pricing_tier.price_per_item)
@@ -62,6 +98,7 @@ async def _validate_output_type_workflow_link(
*,
workflow_definition_id: uuid.UUID | None,
workflow_family: str,
artifact_kind: str,
) -> None:
if workflow_definition_id is None:
return
@@ -86,6 +123,17 @@ async def _validate_output_type_workflow_link(
),
)
supported_artifact_kinds = derive_supported_artifact_kinds_from_workflow_config(workflow_definition.config)
if artifact_kind not in supported_artifact_kinds:
supported = ", ".join(supported_artifact_kinds) if supported_artifact_kinds else "none"
raise HTTPException(
400,
detail=(
f"Workflow artifact mismatch: output type expects '{artifact_kind}', "
f"but workflow '{workflow_definition.name}' supports [{supported}]"
),
)
def _ensure_output_type_contract_is_valid(
*,
@@ -105,6 +153,23 @@ def _ensure_output_type_contract_is_valid(
raise HTTPException(400, detail=str(exc)) from exc
def _normalize_explicit_invocation_overrides(
    raw: dict | None,
    *,
    artifact_kind: str,
    is_animation: bool,
) -> dict:
    """Validate caller-supplied invocation overrides, mapping domain errors to HTTP 400."""
    try:
        normalized = validate_and_normalize_invocation_overrides(
            raw,
            artifact_kind=artifact_kind,
            is_animation=is_animation,
            # Explicit payloads must not smuggle unknown keys past validation.
            reject_unknown_keys=True,
        )
    except InvalidInvocationOverridesError as exc:
        raise HTTPException(400, detail=str(exc)) from exc
    return normalized
@router.get("", response_model=list[OutputTypeOut])
async def list_output_types(
include_inactive: bool = Query(False),
@@ -133,6 +198,13 @@ async def list_output_types(
return await _enrich_workflow_names(db, items)
@router.get("/contract-catalog", response_model=OutputTypeContractCatalogOut)
async def get_output_type_contract_catalog(
    user: User = Depends(get_current_user),
):
    """Return the output-type contract catalog for any authenticated user.

    The catalog is built from static contract definitions, so no database
    session is needed here — only authentication via ``get_current_user``.
    """
    return OutputTypeContractCatalogOut.model_validate(build_output_type_contract_catalog())
@router.post("", response_model=OutputTypeOut, status_code=status.HTTP_201_CREATED)
async def create_output_type(
body: OutputTypeCreate,
@@ -146,25 +218,39 @@ async def create_output_type(
400,
detail=f"Invalid workflow_family. Choose: {', '.join(sorted(OUTPUT_TYPE_WORKFLOW_FAMILIES))}",
)
if body.workflow_rollout_mode not in OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES:
raise HTTPException(
400,
detail=f"Invalid workflow_rollout_mode. Choose: {', '.join(sorted(OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES))}",
)
existing = await db.execute(select(OutputType).where(OutputType.name == body.name))
if existing.scalar_one_or_none():
raise HTTPException(409, detail=f"Output type '{body.name}' already exists")
data = body.model_dump()
explicit_invocation = normalize_invocation_overrides(body.invocation_overrides)
if not explicit_invocation:
explicit_invocation = normalize_invocation_overrides(body.render_settings)
data["invocation_overrides"] = explicit_invocation
data["render_settings"] = apply_invocation_overrides_to_render_settings(
body.render_settings,
explicit_invocation,
)
data["artifact_kind"] = data.get("artifact_kind") or infer_output_type_artifact_kind(
body.output_format,
body.is_animation,
body.workflow_family,
)
explicit_invocation = _normalize_explicit_invocation_overrides(
body.invocation_overrides,
artifact_kind=data["artifact_kind"],
is_animation=body.is_animation,
)
if not explicit_invocation:
explicit_invocation = normalize_invocation_overrides(body.render_settings)
data["invocation_overrides"] = resolve_output_type_invocation_overrides(
body.render_settings,
explicit_invocation,
artifact_kind=data["artifact_kind"],
is_animation=body.is_animation,
)
data["render_settings"] = apply_invocation_overrides_to_render_settings(
body.render_settings,
data["invocation_overrides"],
)
if data["artifact_kind"] not in OUTPUT_TYPE_ARTIFACT_KINDS:
raise HTTPException(
400,
@@ -180,7 +266,10 @@ async def create_output_type(
db,
workflow_definition_id=body.workflow_definition_id,
workflow_family=body.workflow_family,
artifact_kind=data["artifact_kind"],
)
if body.workflow_definition_id is None:
data["workflow_rollout_mode"] = "legacy_only"
ot = OutputType(**data)
db.add(ot)
@@ -214,6 +303,11 @@ async def update_output_type(
400,
detail=f"Invalid workflow_family. Choose: {', '.join(sorted(OUTPUT_TYPE_WORKFLOW_FAMILIES))}",
)
if "workflow_rollout_mode" in data and data["workflow_rollout_mode"] not in OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES:
raise HTTPException(
400,
detail=f"Invalid workflow_rollout_mode. Choose: {', '.join(sorted(OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES))}",
)
candidate_workflow_family = data.get("workflow_family", ot.workflow_family)
candidate_workflow_definition_id = data.get("workflow_definition_id", ot.workflow_definition_id)
@@ -226,16 +320,25 @@ async def update_output_type(
if render_settings_supplied or invocation_supplied:
candidate_render_settings = data.get("render_settings", ot.render_settings)
if invocation_supplied:
candidate_invocation_overrides = normalize_invocation_overrides(data.get("invocation_overrides"))
candidate_invocation_overrides = _normalize_explicit_invocation_overrides(
data.get("invocation_overrides"),
artifact_kind=candidate_artifact_kind,
is_animation=candidate_is_animation,
)
else:
candidate_invocation_overrides = merge_output_type_invocation_overrides(
candidate_render_settings,
None,
)
data["invocation_overrides"] = candidate_invocation_overrides
data["render_settings"] = apply_invocation_overrides_to_render_settings(
data["invocation_overrides"] = resolve_output_type_invocation_overrides(
candidate_render_settings,
candidate_invocation_overrides,
artifact_kind=candidate_artifact_kind,
is_animation=candidate_is_animation,
)
data["render_settings"] = apply_invocation_overrides_to_render_settings(
candidate_render_settings,
data["invocation_overrides"],
)
should_recompute_artifact_kind = (
@@ -263,12 +366,26 @@ async def update_output_type(
output_format=candidate_output_format,
is_animation=candidate_is_animation,
)
if render_settings_supplied or invocation_supplied or should_recompute_artifact_kind:
data["invocation_overrides"] = resolve_output_type_invocation_overrides(
data.get("render_settings", ot.render_settings),
data.get("invocation_overrides", ot.invocation_overrides),
artifact_kind=candidate_artifact_kind,
is_animation=candidate_is_animation,
)
data["render_settings"] = apply_invocation_overrides_to_render_settings(
data.get("render_settings", ot.render_settings),
data["invocation_overrides"],
)
await _validate_output_type_workflow_link(
db,
workflow_definition_id=candidate_workflow_definition_id,
workflow_family=candidate_workflow_family,
artifact_kind=candidate_artifact_kind,
)
if candidate_workflow_definition_id is None:
data["workflow_rollout_mode"] = "legacy_only"
for field_name, value in data.items():
setattr(ot, field_name, value)
+9 -18
View File
@@ -16,6 +16,11 @@ from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload, joinedload
from app.config import settings
from app.core.render_paths import (
resolve_result_path,
resolve_public_asset_url,
result_path_to_public_url,
)
from app.database import get_db
from app.models.cad_file import CadFile, ProcessingStatus
from app.models.material import Material
@@ -829,24 +834,12 @@ VIDEO_EXTENSIONS = {".mp4", ".webm", ".avi", ".mov"}
def _result_path_to_url(result_path: str) -> str | None:
"""Convert an internal result_path to a servable static URL."""
# Flamenco / shared renders: /shared/renders/X/file.jpg → /renders/X/file.jpg
if "/renders/" in result_path:
idx = result_path.index("/renders/")
return result_path[idx:]
# Celery renders stored as thumbnails: /app/uploads/thumbnails/X.png → /thumbnails/X.png
if "/thumbnails/" in result_path:
idx = result_path.index("/thumbnails/")
return result_path[idx:]
return None
return result_path_to_public_url(result_path, require_exists=False)
def _resolve_disk_path(url: str) -> Path | None:
"""Given a servable URL like /renders/X/file.jpg, resolve to disk path."""
if url.startswith("/renders/"):
return Path(settings.upload_dir) / "renders" / url[len("/renders/"):]
if url.startswith("/thumbnails/"):
return Path(settings.upload_dir) / "thumbnails" / url[len("/thumbnails/"):]
return None
return resolve_public_asset_url(url)
@router.get("/{product_id}/renders")
@@ -983,9 +976,8 @@ async def download_product_renders(
raise HTTPException(404, detail="No completed renders found for the selected lines")
def _resolve_path(p: str) -> str:
if p.startswith("/shared/"):
return settings.upload_dir + p[len("/shared"):]
return p
resolved = resolve_result_path(p)
return str(resolved) if resolved is not None else p
def _safe(s: str) -> str:
return re.sub(r"[^\w\-.]", "_", s).strip("_")
@@ -1147,4 +1139,3 @@ async def delete_render_position(
raise HTTPException(404, detail="Render position not found")
await db.delete(pos)
await db.commit()
+34 -1
View File
@@ -1,17 +1,20 @@
"""Render Templates API — CRUD + .blend file upload/download + material library."""
import json
import uuid
import shutil
from datetime import datetime
from pathlib import Path
from typing import Any
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form, status
from fastapi.responses import FileResponse
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, update as sql_update, delete as sql_delete
from pydantic import BaseModel
from pydantic import BaseModel, TypeAdapter, ValidationError
from app.database import get_db
from app.config import settings as app_settings
from app.domains.rendering.workflow_node_registry import WorkflowNodeFieldDefinition
from app.models.user import User
from app.models.render_template import RenderTemplate
from app.models.output_type import OutputType
@@ -46,6 +49,7 @@ class RenderTemplateOut(BaseModel):
lighting_only: bool
shadow_catcher_enabled: bool
camera_orbit: bool
workflow_input_schema: list[WorkflowNodeFieldDefinition]
is_active: bool
created_at: str
updated_at: str
@@ -62,6 +66,7 @@ class RenderTemplateUpdate(BaseModel):
lighting_only: bool | None = None
shadow_catcher_enabled: bool | None = None
camera_orbit: bool | None = None
workflow_input_schema: list[WorkflowNodeFieldDefinition] | None = None
is_active: bool | None = None
@@ -72,6 +77,29 @@ class MaterialLibraryInfo(BaseModel):
path: str | None = None
# Reusable validator for the list-of-field-definitions payload shape.
_workflow_input_schema_adapter = TypeAdapter(list[WorkflowNodeFieldDefinition])


def _normalize_workflow_input_schema(schema: Any) -> list[dict[str, Any]]:
    """Validate a workflow input schema payload and dump it to plain JSON dicts."""
    # Treat the usual "empty" form values as an empty schema.
    if schema in (None, "", "null"):
        return []
    try:
        fields = _workflow_input_schema_adapter.validate_python(schema)
    except ValidationError as exc:
        raise HTTPException(status_code=422, detail={"workflow_input_schema": exc.errors()}) from exc
    return [item.model_dump(mode="json") for item in fields]
def _parse_form_workflow_input_schema(raw_schema: str | None) -> list[dict[str, Any]]:
    """Decode a JSON-encoded form field and normalize it into schema dicts."""
    if raw_schema in (None, "", "null"):
        return []
    try:
        decoded = json.loads(raw_schema)
    except json.JSONDecodeError as exc:
        raise HTTPException(status_code=422, detail="workflow_input_schema must be valid JSON") from exc
    return _normalize_workflow_input_schema(decoded)
def _to_out(t: RenderTemplate) -> dict:
ot_name = None
if t.output_type:
@@ -94,6 +122,7 @@ def _to_out(t: RenderTemplate) -> dict:
"lighting_only": t.lighting_only,
"shadow_catcher_enabled": t.shadow_catcher_enabled,
"camera_orbit": t.camera_orbit,
"workflow_input_schema": t.workflow_input_schema or [],
"is_active": t.is_active,
"created_at": t.created_at.isoformat() if t.created_at else "",
"updated_at": t.updated_at.isoformat() if t.updated_at else "",
@@ -126,6 +155,7 @@ async def create_render_template(
lighting_only: bool = Form(False),
shadow_catcher_enabled: bool = Form(False),
camera_orbit: bool = Form(True),
workflow_input_schema: str | None = Form(None),
user: User = Depends(require_admin_or_pm),
db: AsyncSession = Depends(get_db),
):
@@ -182,6 +212,7 @@ async def create_render_template(
lighting_only=lighting_only,
shadow_catcher_enabled=shadow_catcher_enabled,
camera_orbit=camera_orbit,
workflow_input_schema=_parse_form_workflow_input_schema(workflow_input_schema),
)
db.add(tmpl)
await db.flush()
@@ -224,6 +255,8 @@ async def update_render_template(
# Normalise empty strings to None for nullable fields
if "category_key" in updates and updates["category_key"] in ("", "null"):
updates["category_key"] = None
if "workflow_input_schema" in updates:
updates["workflow_input_schema"] = _normalize_workflow_input_schema(updates["workflow_input_schema"])
# Handle M2M output_type_ids
new_ot_ids: list[str] | None = updates.pop("output_type_ids", None)
+15 -1
View File
@@ -519,6 +519,12 @@ async def trigger_gpu_probe(current_user: User = Depends(require_global_admin)):
return {"task_id": str(result.id), "queued": True}
@router.post("/gpu-probe", status_code=http_status.HTTP_202_ACCEPTED)
async def trigger_gpu_probe_legacy_alias(current_user: User = Depends(require_global_admin)):
    """Backward-compatible alias used by the current admin frontend."""
    # Delegates to the canonical probe endpoint so the queueing logic lives in one place.
    return await trigger_gpu_probe(current_user)
@router.get("/probe/gpu/result")
async def get_gpu_probe_result(
current_user: User = Depends(require_global_admin),
@@ -535,6 +541,15 @@ async def get_gpu_probe_result(
return json.loads(setting.value)
@router.get("/gpu-probe")
async def get_gpu_probe_result_legacy_alias(
    current_user: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Backward-compatible alias used by the current admin frontend."""
    # Delegates to the canonical probe-result endpoint; no extra logic here.
    return await get_gpu_probe_result(current_user, db)
# ---------------------------------------------------------------------------
# Render health check
# ---------------------------------------------------------------------------
@@ -733,4 +748,3 @@ async def update_worker_config(
enabled=cfg.enabled,
updated_at=cfg.updated_at.isoformat(),
)
+1
View File
@@ -75,6 +75,7 @@ class Settings(BaseSettings):
# Redis / Celery
redis_url: str = "redis://localhost:6379/0"
workflow_shadow_render_queue: str = "asset_pipeline_light"
@model_validator(mode="after")
def normalize_runtime_hosts(self) -> "Settings":
+1 -1
View File
@@ -39,7 +39,7 @@ class RenderConfig(BaseModel):
blender_eevee_samples: int = 64
thumbnail_format: str = "jpg"
blender_smooth_angle: int = 30
cycles_device: str = "auto"
cycles_device: str = "gpu"
render_backend: str = "celery"
product_thumbnail_priority: list[str] = Field(
default_factory=lambda: ["latest_render", "cad_thumbnail"]
+194
View File
@@ -0,0 +1,194 @@
from __future__ import annotations
import os
from pathlib import Path
from app.config import settings
# setgid + group-writable, so shared render workers can write into the tree.
SHARED_DIR_MODE = 0o2775


def _managed_directory_chain(path: Path) -> list[Path]:
    """Return upload-root-relative directories that should share writable perms."""
    target = path.resolve(strict=False)
    root = Path(settings.upload_dir).resolve(strict=False)
    # Paths outside the managed upload tree are returned untouched.
    if target != root and root not in target.parents:
        return [path]
    try:
        parts = target.relative_to(root).parts
    except ValueError:
        return [path]
    # Accumulate root, root/a, root/a/b, ... down to the target itself.
    chain: list[Path] = [root]
    for part in parts:
        chain.append(chain[-1] / part)
    return chain
def _normalize_directory_mode(path: Path, *, mode: int = SHARED_DIR_MODE) -> None:
    """Best-effort chmod of *path* to the shared directory mode."""
    try:
        existing = path.stat().st_mode & 0o7777
    except OSError:
        # Missing or unreadable path: nothing to normalize.
        return
    if existing == mode:
        return
    try:
        os.chmod(path, mode)
    except OSError:
        # Best-effort only: callers still get the path, but existing root-owned
        # trees can be repaired when the process has sufficient permissions.
        return
def ensure_group_writable_dir(path: str | Path, *, mode: int = SHARED_DIR_MODE) -> Path:
    """Create a directory and normalize upload-tree permissions for shared workers."""
    target = Path(path)
    # Create and repair every managed ancestor, not only the leaf directory.
    for member in _managed_directory_chain(target):
        member.mkdir(parents=True, exist_ok=True)
        _normalize_directory_mode(member, mode=mode)
    return target
def resolve_public_asset_url(url: str | None) -> Path | None:
    """Resolve a public static asset URL like /renders/... to a local disk path."""
    if not url:
        return None
    cleaned = url.replace("\\", "/")
    # Only the two public prefixes map into the upload tree; anything else is opaque.
    for prefix in ("/renders/", "/thumbnails/"):
        if cleaned.startswith(prefix):
            return Path(settings.upload_dir) / prefix.strip("/") / cleaned[len(prefix):]
    return None
def resolve_result_path(result_path: str | None) -> Path | None:
    """Resolve stored result_path variants to a local disk path.

    Supports canonical /app/uploads/... paths, legacy /shared/... paths, public
    URLs, and bare storage keys such as renders/<id>/file.png.
    """
    if not result_path:
        return None
    cleaned = result_path.replace("\\", "/")

    # Container-mount variants: keep everything after the mount marker.
    for mount_marker in ("/uploads/", "/shared/"):
        if mount_marker in cleaned:
            tail = cleaned.split(mount_marker, 1)[1].lstrip("/")
            return Path(settings.upload_dir) / tail

    # Public static URLs (/renders/..., /thumbnails/...).
    from_url = resolve_public_asset_url(cleaned)
    if from_url is not None:
        return from_url

    # Bare storage keys rooted at a known upload subtree.
    key = cleaned.lstrip("/")
    if key.startswith(("renders/", "thumbnails/", "exports/", "usd/", "step_files/")):
        return Path(settings.upload_dir) / key

    # Last resort: trust absolute paths as-is.
    if Path(cleaned).is_absolute():
        return Path(cleaned)
    return None
def result_path_to_storage_key(result_path: str | None) -> str | None:
    """Normalize stored paths to a canonical relative storage key when possible."""
    if not result_path:
        return None
    cleaned = result_path.replace("\\", "/")

    # Prefer disk resolution: paths under upload_dir yield their relative key.
    resolved = resolve_result_path(result_path)
    if resolved is not None:
        try:
            return resolved.relative_to(Path(settings.upload_dir)).as_posix()
        except ValueError:
            # Absolute path outside the upload tree; fall through to heuristics.
            pass

    candidate = cleaned.lstrip("/")
    if candidate.startswith(("renders/", "thumbnails/", "exports/", "usd/", "step_files/")):
        return candidate
    return cleaned
def result_path_to_public_url(
    result_path: str | None,
    *,
    require_exists: bool = False,
) -> str | None:
    """Convert internal result paths to a servable public URL.

    Returns only /renders/... or /thumbnails/... URLs. Non-public internal paths
    like step_files/renders stay hidden from API/UI callers.

    Args:
        result_path: Any stored path variant (canonical, legacy mount, public
            URL, or bare storage key).
        require_exists: When True, return a URL only if its backing file is
            present on disk.

    Returns:
        A public URL string, or None when the path is not publicly servable.
    """
    if not result_path:
        return None
    disk_path = resolve_result_path(result_path)
    if require_exists:
        # Fail fast when the caller demands an existing backing file.
        if disk_path is None or not disk_path.is_file():
            return None
    normalized = result_path.replace("\\", "/")
    # Fast path: the stored string already embeds a public marker.
    for marker in ("/renders/", "/thumbnails/"):
        if marker in normalized:
            idx = normalized.index(marker)
            public_url = normalized[idx:]
            # Re-check existence against the URL-derived path, which may differ
            # from disk_path when the marker appears mid-string.
            candidate = resolve_public_asset_url(public_url)
            if require_exists and (candidate is None or not candidate.is_file()):
                return None
            return public_url
    # Otherwise derive the URL from the resolved disk location, if any.
    if disk_path is None:
        return None
    try:
        relative = disk_path.relative_to(Path(settings.upload_dir))
    except ValueError:
        # Outside the upload tree: never publicly servable.
        return None
    relative_str = relative.as_posix()
    if relative_str.startswith(("renders/", "thumbnails/")):
        if require_exists and not disk_path.is_file():
            return None
        return f"/{relative_str}"
    return None
def build_order_line_step_render_path(
step_path: str | Path,
order_line_id: str,
filename: str,
*,
ensure_exists: bool = False,
) -> Path:
"""Build a unique per-order-line render-worker artifact path beside the STEP file."""
artifact_dir = Path(step_path).parent / "renders" / str(order_line_id)
if ensure_exists:
ensure_group_writable_dir(artifact_dir)
return artifact_dir / filename
def build_order_line_export_path(
    order_line_id: str,
    filename: str,
    *,
    ensure_exists: bool = False,
) -> Path:
    """Build a unique per-order-line export artifact path under the shared upload root."""
    upload_root = Path(settings.upload_dir)
    export_dir = upload_root.joinpath("exports", str(order_line_id))
    if ensure_exists:
        ensure_group_writable_dir(export_dir)
    return export_dir / filename
+15 -18
View File
@@ -1,13 +1,11 @@
from __future__ import annotations
from typing import TYPE_CHECKING, AsyncGenerator, Optional
from typing import AsyncGenerator, Optional
from starlette.requests import Request
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, async_sessionmaker
from sqlalchemy.orm import DeclarativeBase
from sqlalchemy import text
from app.config import settings
if TYPE_CHECKING:
from starlette.requests import Request
engine = create_async_engine(
settings.database_url,
echo=False,
@@ -27,22 +25,21 @@ class Base(DeclarativeBase):
pass
async def get_db(request: "Request | None" = None) -> AsyncGenerator[AsyncSession, None]:
async def get_db(request: Request) -> AsyncGenerator[AsyncSession, None]:
async with AsyncSessionLocal() as session:
# Auto-apply RLS context if TenantContextMiddleware populated request.state
if request is not None:
tenant_id = getattr(request.state, "tenant_id", None)
role = getattr(request.state, "role", None)
if tenant_id:
# global_admin and legacy admin bypass RLS to see all tenants
_bypass_roles = {"global_admin", "admin"}
if role in _bypass_roles:
await session.execute(text("SET LOCAL app.current_tenant_id = 'bypass'"))
else:
await session.execute(
text("SET LOCAL app.current_tenant_id = :tid"),
{"tid": tenant_id},
)
tenant_id = getattr(request.state, "tenant_id", None)
role = getattr(request.state, "role", None)
if tenant_id:
# global_admin and legacy admin bypass RLS to see all tenants
_bypass_roles = {"global_admin", "admin"}
if role in _bypass_roles:
await session.execute(text("SET LOCAL app.current_tenant_id = 'bypass'"))
else:
await session.execute(
text("SET LOCAL app.current_tenant_id = :tid"),
{"tid": tenant_id},
)
try:
yield session
finally:
@@ -0,0 +1,51 @@
from __future__ import annotations
from pathlib import Path
from typing import Any
from app.config import settings
def asset_library_dir() -> Path:
    """Root directory that holds uploaded asset-library .blend files."""
    upload_root = Path(settings.upload_dir)
    return upload_root / "asset-libraries"
def list_asset_library_blends() -> list[Path]:
    """Return existing asset-library .blend files, newest first.

    Ordering key is (mtime, name) descending, so the most recently modified
    file — ties broken by name — comes first. Missing directory yields [].
    """
    directory = asset_library_dir()
    if not directory.is_dir():
        return []
    blends = [entry for entry in directory.glob("*.blend") if entry.is_file()]
    blends.sort(key=lambda entry: (entry.stat().st_mtime, entry.name), reverse=True)
    return blends
def resolve_asset_library_blend_path(
    *,
    blend_file_path: str | None = None,
    asset_library_id: Any | None = None,
) -> str | None:
    """Resolve the best available .blend path for an asset library.

    Tries, in order: the explicitly configured path (if it exists on disk),
    the canonical uploads/asset-libraries/<id>.blend location, and finally the
    newest .blend found under uploads/asset-libraries. Returns None when no
    candidate exists.
    """
    if blend_file_path:
        explicit = Path(blend_file_path)
        if explicit.is_file():
            return str(explicit)
    if asset_library_id:
        canonical = asset_library_dir() / f"{asset_library_id}.blend"
        if canonical.is_file():
            return str(canonical)
    fallbacks = list_asset_library_blends()
    if fallbacks:
        return str(fallbacks[0])
    return None
+15 -1
View File
@@ -8,6 +8,7 @@ import subprocess
import uuid
from pathlib import Path
from app.domains.materials.library_paths import resolve_asset_library_blend_path
from app.tasks.celery_app import celery_app
logger = logging.getLogger(__name__)
@@ -43,7 +44,20 @@ def refresh_asset_library_catalog(self, asset_library_id: str) -> None:
if not lib:
logger.warning("AssetLibrary %s not found", asset_library_id)
return
blend_path = lib.blend_file_path
resolved_path = resolve_asset_library_blend_path(
blend_file_path=lib.blend_file_path,
asset_library_id=lib.id,
)
if resolved_path and resolved_path != lib.blend_file_path:
logger.warning(
"AssetLibrary %s path repaired from %s to %s before catalog refresh",
asset_library_id,
lib.blend_file_path,
resolved_path,
)
lib.blend_file_path = resolved_path
db.commit()
blend_path = resolved_path or lib.blend_file_path
engine.dispose()
if not blend_path or not Path(blend_path).exists():
+14 -41
View File
@@ -10,6 +10,7 @@ from sqlalchemy import select, func
from sqlalchemy.ext.asyncio import AsyncSession
from app.database import get_db
from app.core.render_paths import resolve_result_path
from app.domains.auth.models import User
from app.domains.media.models import MediaAsset, MediaAssetType
from app.domains.media.schemas import MediaAssetOut, MediaAssetBrowseItem, MediaAssetBrowseResponse
@@ -19,6 +20,10 @@ from app.utils.auth import get_current_user
router = APIRouter(prefix="/api/media", tags=["media"], redirect_slashes=False)
def _resolve_asset_candidate(key: str):
return resolve_result_path(key)
async def _resolve_thumbnails_bulk(db: AsyncSession, assets: list) -> None:
"""Resolve thumbnail_url for assets using the same priority as product pages.
@@ -275,15 +280,8 @@ async def thumbnail_asset(
raise HTTPException(404, "Not a previewable asset")
key = asset.storage_key
from app.config import settings
candidate = Path(key) if Path(key).is_absolute() else Path(settings.upload_dir) / key
if not candidate.exists() and "/shared/renders/" in key:
parts = key.split("/")
if len(parts) >= 2:
remapped = Path(settings.upload_dir) / "renders" / parts[-2] / parts[-1]
if remapped.exists():
candidate = remapped
if candidate.exists():
candidate = _resolve_asset_candidate(key)
if candidate is not None and candidate.exists():
return FileResponse(
str(candidate), media_type=mime,
headers={"Cache-Control": "max-age=86400, public"},
@@ -314,22 +312,8 @@ async def download_asset(
mime = asset.mime_type or "application/octet-stream"
# Local file path (absolute or relative to UPLOAD_DIR)
from app.config import settings
candidate = Path(key)
if not candidate.is_absolute():
candidate = Path(settings.upload_dir) / key
# Legacy path remapping: /shared/renders/{uuid}/{file} → UPLOAD_DIR/renders/{uuid}/{file}
if not candidate.exists() and "/shared/renders/" in key:
import logging
parts = key.split("/")
if len(parts) >= 2:
remapped = Path(settings.upload_dir) / "renders" / parts[-2] / parts[-1]
if remapped.exists():
logging.getLogger(__name__).warning(
"Remapped legacy path %s%s", key, remapped
)
candidate = remapped
if candidate.exists():
candidate = _resolve_asset_candidate(key)
if candidate is not None and candidate.exists():
ext = candidate.suffix.lstrip(".")
fname = f"{asset.asset_type.value}_{asset_id}.{ext or 'bin'}"
return FileResponse(
@@ -395,11 +379,8 @@ async def zip_download(
fname = base
try:
# Check absolute path first (local filesystem)
candidate = Path(key)
if not candidate.is_absolute():
from app.config import settings
candidate = Path(settings.upload_dir) / key
if candidate.exists():
candidate = _resolve_asset_candidate(key)
if candidate is not None and candidate.exists():
data = candidate.read_bytes()
else:
data = storage.download_bytes(key)
@@ -440,7 +421,7 @@ async def batch_delete_assets(
):
"""Permanently delete multiple MediaAsset records."""
from app.utils.auth import require_global_admin
require_global_admin(_user)
await require_global_admin(_user)
deleted = 0
for aid in asset_ids:
@@ -461,23 +442,15 @@ async def cleanup_orphaned_assets(
"""
import logging
from pathlib import Path
from app.config import settings
from app.core.storage import get_storage
logger = logging.getLogger(__name__)
storage = get_storage()
def _file_exists(key: str) -> bool:
candidate = Path(key) if Path(key).is_absolute() else Path(settings.upload_dir) / key
if candidate.exists():
candidate = _resolve_asset_candidate(key)
if candidate is not None and candidate.exists():
return True
# Legacy path remapping
if "/shared/renders/" in key:
parts = key.split("/")
if len(parts) >= 2:
remapped = Path(settings.upload_dir) / "renders" / parts[-2] / parts[-1]
if remapped.exists():
return True
# Check MinIO
try:
storage.download_bytes(key)
+9 -4
View File
@@ -5,7 +5,7 @@ to create notification rows in the audit_log table.
"""
import logging
import uuid
from datetime import datetime
from datetime import datetime, timezone
from sqlalchemy import create_engine, select
from sqlalchemy.orm import Session
@@ -23,6 +23,11 @@ CHANNEL_ALERT = "alert" # admin-only infrastructure issues
_engine = None
def _utcnow_naive() -> datetime:
"""Return UTC as a naive datetime for legacy TIMESTAMP WITHOUT TIME ZONE columns."""
return datetime.now(timezone.utc).replace(tzinfo=None)
def _get_engine():
global _engine
if _engine is None:
@@ -53,7 +58,7 @@ async def emit_notification(
details=details,
notification=True,
channel=channel,
timestamp=datetime.utcnow(),
timestamp=_utcnow_naive(),
)
db.add(entry)
await db.commit()
@@ -85,7 +90,7 @@ def emit_notification_sync(
details=details,
notification=True,
channel=channel,
timestamp=datetime.utcnow(),
timestamp=_utcnow_naive(),
)
session.add(entry)
session.commit()
@@ -149,7 +154,7 @@ def emit_batch_render_notification_sync(order_id: str) -> None:
},
notification=True,
channel=CHANNEL_NOTIFICATION,
timestamp=datetime.utcnow(),
timestamp=_utcnow_naive(),
)
session.add(entry)
session.commit()
+8 -3
View File
@@ -1,5 +1,5 @@
"""Order service — order number generation and business logic."""
from datetime import datetime
from datetime import datetime, timezone
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, func, create_engine, update as sql_update
from sqlalchemy.orm import Session
@@ -9,9 +9,14 @@ import logging
logger = logging.getLogger(__name__)
def _utcnow_naive() -> datetime:
"""Return UTC as a naive datetime for legacy TIMESTAMP WITHOUT TIME ZONE columns."""
return datetime.now(timezone.utc).replace(tzinfo=None)
async def generate_order_number(db: AsyncSession) -> str:
"""Generate next sequential order number: SA-2026-XXXXX."""
year = datetime.utcnow().year
year = datetime.now(timezone.utc).year
prefix = f"SA-{year}-"
# Use MAX to find the highest existing sequence number this year.
@@ -68,7 +73,7 @@ def check_order_completion(order_id: str) -> bool:
return False
# Auto-advance to completed
now = datetime.utcnow()
now = _utcnow_naive()
session.execute(
sql_update(Order)
.where(Order.id == order_id)
@@ -13,8 +13,25 @@ from app.core.pipeline_logger import PipelineLogger
logger = logging.getLogger(__name__)
def _usd_cache_hit_refresh_reason(cad_file, usd_asset, usd_render_path) -> str | None:
    """Reuse the runtime freshness checks before accepting a USD cache hit."""
    # Deferred import: workflow_runtime_services is only needed at call time.
    from app.domains.rendering.workflow_runtime_services import (
        _usd_master_refresh_reason as _refresh_reason,
    )

    return _refresh_reason(cad_file, usd_asset=usd_asset, usd_render_path=usd_render_path)
@celery_app.task(bind=True, name="app.tasks.step_tasks.generate_gltf_geometry_task", queue="asset_pipeline", max_retries=1)
def generate_gltf_geometry_task(self, cad_file_id: str):
def generate_gltf_geometry_task(
self,
cad_file_id: str,
workflow_run_id: str | None = None,
workflow_node_id: str | None = None,
**_: object,
):
"""Export a geometry GLB directly from STEP via OCC (no STL intermediary).
Pipeline:
@@ -94,10 +111,10 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
_current_hash = _compute_step_hash(str(step_path_str))
_cache_hit_asset_id = None
# Composite cache key includes deflection settings so changing them invalidates cache
# v3: removed BRepBuilderAPI_Transform, writer handles mm→m from STEP unit metadata
# Composite cache key includes deflection settings so changing them invalidates cache.
# v5: occurrence-aware part-key stamping for repeated leaf meshes changed.
effective_cache_key = (
f"v3:{_current_hash}:{linear_deflection}:{angular_deflection}:{tessellation_engine}"
f"v5:{_current_hash}:{linear_deflection}:{angular_deflection}:{tessellation_engine}"
if _current_hash else None
)
@@ -112,6 +129,9 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
if stored_key == effective_cache_key:
_asset_disk_path = _Path(app_settings.upload_dir) / existing_geo.storage_key
if _asset_disk_path.exists():
if cad_file.gltf_path != str(_asset_disk_path):
cad_file.gltf_path = str(_asset_disk_path)
session.commit()
logger.info("[CACHE] cache key match — skipping geometry GLB tessellation for %s", cad_file_id)
pl.step_done("export_glb_geometry", result={"cached": True, "asset_id": str(existing_geo.id)})
_cache_hit_asset_id = str(existing_geo.id)
@@ -133,6 +153,20 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
generate_usd_master_task.delay(cad_file_id)
except Exception:
logger.debug("Could not queue generate_usd_master_task from cache-hit path (non-fatal)")
try:
from app.domains.rendering.tasks import _update_workflow_run_status
_update_workflow_run_status(
cad_file_id,
"completed",
workflow_run_id=workflow_run_id,
workflow_node_id=workflow_node_id,
)
except Exception:
logger.exception(
"Failed to update workflow state for cached GLB export %s",
cad_file_id,
)
return {"cached": True, "asset_id": _cache_hit_asset_id}
step = _Path(step_path_str)
@@ -219,6 +253,9 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
existing.render_config = {"cache_key": effective_cache_key}
if product_id:
existing.product_id = _uuid.UUID(product_id)
cad_file = _sess.get(CadFile, _uuid.UUID(cad_file_id))
if cad_file is not None:
cad_file.gltf_path = str(output_path)
_sess.commit()
asset_id = str(existing.id)
else:
@@ -232,12 +269,26 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
render_config={"cache_key": effective_cache_key},
)
_sess.add(asset)
cad_file = _sess.get(CadFile, _uuid.UUID(cad_file_id))
if cad_file is not None:
cad_file.gltf_path = str(output_path)
_sess.commit()
asset_id = str(asset.id)
_eng2.dispose()
pl.step_done("export_glb_geometry", result={"glb_path": str(output_path), "asset_id": asset_id})
logger.info("generate_gltf_geometry_task: MediaAsset %s created for cad %s", asset_id, cad_file_id)
try:
from app.domains.rendering.tasks import _update_workflow_run_status
_update_workflow_run_status(
cad_file_id,
"completed",
workflow_run_id=workflow_run_id,
workflow_node_id=workflow_node_id,
)
except Exception:
logger.exception("Failed to update workflow state for GLB export %s", cad_file_id)
# Auto-chain USD master export so the canonical scene is always up to date
try:
@@ -346,6 +397,33 @@ def generate_usd_master_task(self, cad_file_id: str) -> dict:
angular_deflection = float(sys_settings.get("render_angular_deflection", "0.05"))
sharp_threshold = float(sys_settings.get("sharp_edge_threshold", "20.0"))
scripts_dir = _Path(_os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts"))
script_path = scripts_dir / "export_step_to_usd.py"
materials_helper_path = scripts_dir / "_blender_materials.py"
if not script_path.exists():
err = f"export_step_to_usd.py not found at {script_path}"
pl.step_error("usd_master", err, None)
raise RuntimeError(err)
# Cache must include the active render-script revision. Otherwise
# material resolution fixes never invalidate previously generated USD masters.
script_fingerprint = "unknown"
try:
import hashlib as _hashlib_script
_script_hash = _hashlib_script.sha256()
for candidate in (script_path, materials_helper_path):
if not candidate.exists():
continue
_script_hash.update(candidate.read_bytes())
script_fingerprint = _script_hash.hexdigest()[:12]
except Exception as exc:
logger.warning(
"[USD_MASTER] failed to fingerprint render scripts, falling back to legacy cache key: %s",
exc,
)
# Hash-based cache check: skip tessellation if file and settings haven't changed
from app.domains.products.cache_service import compute_step_hash as _compute_step_hash_usd
_current_hash_usd = _compute_step_hash_usd(str(step_path))
@@ -357,7 +435,7 @@ def generate_usd_master_task(self, cad_file_id: str) -> dict:
_json.dumps(material_map, sort_keys=True).encode()
).hexdigest()[:12] if material_map else "none"
effective_cache_key = (
f"{_current_hash_usd}:{linear_deflection}:{angular_deflection}:{sharp_threshold}:{_mat_hash}"
f"{_current_hash_usd}:{linear_deflection}:{angular_deflection}:{sharp_threshold}:{_mat_hash}:{script_fingerprint}"
if _current_hash_usd else None
)
@@ -372,9 +450,21 @@ def generate_usd_master_task(self, cad_file_id: str) -> dict:
if stored_key == effective_cache_key:
_usd_disk_path = _Path(app_settings.upload_dir) / existing_usd.storage_key
if _usd_disk_path.exists():
logger.info("[CACHE] cache key match — skipping USD master tessellation for %s", cad_file_id)
pl.step_done("usd_master", result={"cached": True, "asset_id": str(existing_usd.id)})
_cache_hit_asset_id = str(existing_usd.id)
refresh_reason = _usd_cache_hit_refresh_reason(
cad_file,
existing_usd,
_usd_disk_path,
)
if refresh_reason is None:
logger.info("[CACHE] cache key match — skipping USD master tessellation for %s", cad_file_id)
pl.step_done("usd_master", result={"cached": True, "asset_id": str(existing_usd.id)})
_cache_hit_asset_id = str(existing_usd.id)
else:
logger.info(
"[CACHE] USD cache key matched for %s but asset is stale (%s) — rebuilding",
cad_file_id,
refresh_reason,
)
else:
logger.info("[CACHE] cache key match but USD asset missing on disk — re-running tessellation for %s", cad_file_id)
else:
@@ -396,13 +486,6 @@ def generate_usd_master_task(self, cad_file_id: str) -> dict:
raise RuntimeError(err)
output_path = step_path.parent / f"{step_path.stem}_master.usd"
scripts_dir = _Path(_os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts"))
script_path = scripts_dir / "export_step_to_usd.py"
if not script_path.exists():
err = f"export_step_to_usd.py not found at {script_path}"
pl.step_error("usd_master", err, None)
raise RuntimeError(err)
cmd = [
_sys.executable, str(script_path),
@@ -31,7 +31,13 @@ def _bbox_from_step_cadquery(step_path: str) -> dict | None:
@celery_app.task(bind=True, name="app.tasks.step_tasks.process_step_file", queue="step_processing")
def process_step_file(self, cad_file_id: str):
def process_step_file(
self,
cad_file_id: str,
workflow_run_id: str | None = None,
workflow_node_id: str | None = None,
**_: object,
):
"""Process a STEP file: extract objects, generate thumbnail, convert to glTF.
After processing completes, auto-populate cad_part_materials from Excel
@@ -122,10 +128,24 @@ def process_step_file(self, cad_file_id: str):
r.delete(lock_key) # always release on completion or unhandled error
pl.step_done("process_step_file")
try:
from app.domains.rendering.tasks import _update_workflow_run_status
# Queue thumbnail rendering on the dedicated single-concurrency worker
from app.domains.pipeline.tasks.render_thumbnail import render_step_thumbnail
render_step_thumbnail.delay(cad_file_id)
_update_workflow_run_status(
cad_file_id,
"completed",
workflow_run_id=workflow_run_id,
workflow_node_id=workflow_node_id,
)
except Exception:
logger.exception("Failed to update workflow state for process_step_file %s", cad_file_id)
# Legacy flow still auto-queues thumbnail generation here.
# Graph-mode workflows dispatch explicit thumbnail save/render nodes instead.
if workflow_run_id is None:
from app.domains.pipeline.tasks.render_thumbnail import render_step_thumbnail
render_step_thumbnail.delay(cad_file_id)
def _auto_populate_materials_for_cad(cad_file_id: str, tenant_id: str | None = None) -> None:
@@ -8,6 +8,7 @@ import logging
from datetime import datetime
from app.tasks.celery_app import celery_app
from app.core.render_paths import ensure_group_writable_dir
from app.core.task_logs import log_task_event
from app.core.pipeline_logger import PipelineLogger
@@ -149,7 +150,7 @@ def render_order_line_task(self, order_line_id: str):
product_name = render_invocation.product_name
ot_name = render_invocation.output_type_name
output_path = render_invocation.output_path
_Path(output_path).parent.mkdir(parents=True, exist_ok=True)
ensure_group_writable_dir(_Path(output_path).parent)
render_width = render_invocation.width
render_height = render_invocation.height
render_engine = render_invocation.engine
@@ -19,6 +19,247 @@ logger = logging.getLogger(__name__)
_THUMBNAIL_SAMPLE_CAP = 64
def _resolve_thumbnail_render_context(session, cad) -> dict[str, object]:
    """Reuse workflow material/USD resolution for CAD thumbnails when possible.

    Builds optional keyword arguments for the thumbnail renderer:
    ``part_names_ordered`` (from the parsed STEP object list),
    ``material_library_path`` + ``material_map`` (when both resolve), and
    ``usd_path`` (when a fresh USD master exists on disk). Returns an empty
    dict when nothing resolves; all lookup failures are logged and swallowed
    so thumbnail rendering can fall back to the plain render path.
    """
    context: dict[str, object] = {}
    if not cad:
        return context
    # Ordered part names: take parsed_objects["objects"], keep non-empty strings.
    parsed_objects = cad.parsed_objects if isinstance(cad.parsed_objects, dict) else {}
    raw_part_names = parsed_objects.get("objects") if isinstance(parsed_objects, dict) else None
    if isinstance(raw_part_names, list):
        part_names_ordered = [
            str(part_name).strip()
            for part_name in raw_part_names
            if isinstance(part_name, str) and part_name.strip()
        ]
        if part_names_ordered:
            context["part_names_ordered"] = part_names_ordered
    try:
        # Deferred imports: heavier domain modules, only needed on this path.
        from sqlalchemy import select
        from app.core.render_paths import resolve_result_path
        from app.domains.media.models import MediaAsset, MediaAssetType
        from app.domains.products.models import Product
        from app.domains.rendering.workflow_runtime_services import (
            _build_effective_material_lookup,
            _usd_master_refresh_reason,
        )
        from app.services.material_service import resolve_material_map
        from app.services.template_service import get_material_library_path_for_session
        # Pick the best product for this CAD file: active first, then most
        # recently updated/created.
        product = session.execute(
            select(Product)
            .where(Product.cad_file_id == cad.id)
            .order_by(Product.is_active.desc(), Product.updated_at.desc(), Product.created_at.desc())
            .limit(1)
        ).scalar_one_or_none()
        material_library_path = get_material_library_path_for_session(session)
        materials_source = product.cad_part_materials or [] if product else []
        raw_material_map = _build_effective_material_lookup(cad, materials_source)
        # Only ship material info when both a library path and a resolvable
        # material map are available.
        if material_library_path and raw_material_map:
            material_map = resolve_material_map(raw_material_map)
            if material_map:
                context["material_library_path"] = material_library_path
                context["material_map"] = material_map
        # Latest USD master asset for this CAD file, if any.
        usd_asset = session.execute(
            select(MediaAsset)
            .where(
                MediaAsset.cad_file_id == cad.id,
                MediaAsset.asset_type == MediaAssetType.usd_master,
            )
            .order_by(MediaAsset.created_at.desc())
            .limit(1)
        ).scalar_one_or_none()
        if usd_asset:
            usd_path = resolve_result_path(usd_asset.storage_key)
            # Accept the USD only when the freshness check has no objection
            # and the file is actually present on disk.
            refresh_reason = _usd_master_refresh_reason(
                cad,
                usd_asset=usd_asset,
                usd_render_path=usd_path,
            )
            if refresh_reason is None and usd_path and usd_path.exists():
                context["usd_path"] = usd_path
    except Exception:
        logger.exception("Failed to resolve thumbnail render context for cad %s", getattr(cad, "id", None))
    return context
def _render_thumbnail_core(
    *,
    cad_file_id: str,
    workflow_run_id: str | None = None,
    workflow_node_id: str | None = None,
    renderer: str | None = None,
    render_engine: str | None = None,
    samples: int | None = None,
    width: int | None = None,
    height: int | None = None,
    transparent_bg: bool | None = None,
    include_postprocess: bool,
    queue_legacy_glb_follow_up: bool,
) -> None:
    """Render a CAD thumbnail with optional legacy post-processing.

    Shared core behind the legacy ``render_step_thumbnail`` task
    (``include_postprocess=True``) and the graph-workflow thumbnail task
    (``include_postprocess=False``).

    Phases (every phase is best-effort except the render itself, which
    re-raises so the calling Celery task can retry):
      1. persist ``step_file_hash`` once, if missing
      2. resolve optional material/USD render context
      3. render via ``regenerate_cad_thumbnail`` under capped samples
      4. (optional) bbox + sharp-edge extraction into ``mesh_attributes``
      5. material auto-population, WebSocket broadcast, optional legacy GLB
         follow-up, workflow-run status update
    """
    pl = PipelineLogger(task_id=None)
    pl.step_start("render_step_thumbnail", {"cad_file_id": cad_file_id})
    logger.info("Rendering thumbnail for CAD file: %s", cad_file_id)
    from app.core.tenant_context import resolve_tenant_id_for_cad
    tenant_id = resolve_tenant_id_for_cad(cad_file_id)
    # ── Pre-render: compute and save the STEP hash (cache-key input) ──────
    try:
        from app.models.cad_file import CadFile
        from app.domains.products.cache_service import compute_step_hash
        with _pipeline_session(tenant_id) as session:
            cad = session.get(CadFile, cad_file_id)
            if cad and cad.stored_path and not cad.step_file_hash:
                cad.step_file_hash = compute_step_hash(cad.stored_path)
                session.commit()
                logger.info("Saved step_file_hash for %s: %s", cad_file_id, cad.step_file_hash[:12])
    except Exception:
        logger.warning("step_file_hash computation failed for %s (non-fatal)", cad_file_id)
    # ── Resolve material/USD context; empty dict means plain fallback render.
    render_context: dict[str, object] = {}
    try:
        from app.models.cad_file import CadFile
        with _pipeline_session(tenant_id) as session:
            cad = session.get(CadFile, cad_file_id)
            render_context = _resolve_thumbnail_render_context(session, cad)
    except Exception:
        logger.warning("thumbnail render context resolution failed for %s; using fallback render path", cad_file_id)
    # ── Render — the only phase allowed to propagate failure to the caller.
    try:
        from app.services.step_processor import regenerate_cad_thumbnail
        pl.info("render_step_thumbnail", "Calling regenerate_cad_thumbnail")
        with _capped_thumbnail_samples():
            success = regenerate_cad_thumbnail(
                cad_file_id,
                part_colors={},
                renderer=renderer,
                render_engine=render_engine,
                samples=samples,
                width=width,
                height=height,
                transparent_bg=transparent_bg,
                **render_context,
            )
        if not success:
            raise RuntimeError("regenerate_cad_thumbnail returned False")
    except Exception as exc:
        pl.step_error("render_step_thumbnail", f"Thumbnail render failed: {exc}", exc)
        logger.error("Thumbnail render failed for %s: %s", cad_file_id, exc)
        raise
    resolved_tenant_id: str | None = None
    if include_postprocess:
        # ── Post-render: bbox + sharp edges merged into mesh_attributes ──
        try:
            from app.models.cad_file import CadFile
            from app.domains.rendering.workflow_runtime_services import resolve_cad_bbox
            with _pipeline_session(tenant_id) as session:
                cad = session.get(CadFile, cad_file_id)
                if not cad:
                    logger.warning("CadFile %s not found in post-render phase", cad_file_id)
                else:
                    step_path = cad.stored_path
                    attrs = cad.mesh_attributes or {}
                    # Bounding box: only computed once (skipped when
                    # dimensions_mm already present).
                    if step_path and not attrs.get("dimensions_mm"):
                        step_file = Path(step_path)
                        glb_path = step_file.parent / f"{step_file.stem}_thumbnail.glb"
                        bbox_data = resolve_cad_bbox(step_path, glb_path=str(glb_path)).bbox_data
                        if bbox_data:
                            cad.mesh_attributes = {**attrs, **bbox_data}
                            attrs = cad.mesh_attributes
                            dims = bbox_data["dimensions_mm"]
                            logger.info(
                                "bbox for %s: %s×%s×%s mm",
                                cad_file_id,
                                dims["x"],
                                dims["y"],
                                dims["z"],
                            )
                    # Sharp edges: also once-only, non-fatal on failure.
                    if step_path and "sharp_edge_pairs" not in attrs:
                        try:
                            from app.services.step_processor import extract_mesh_edge_data
                            edge_data = extract_mesh_edge_data(step_path)
                            if edge_data:
                                cad.mesh_attributes = {**attrs, **edge_data}
                                n_pairs = len(edge_data.get("sharp_edge_pairs", []))
                                logger.info(
                                    "Sharp edge data extracted for %s: %s sharp edges",
                                    cad_file_id,
                                    n_pairs,
                                )
                        except Exception:
                            logger.exception(
                                "Sharp edge extraction failed for %s (non-fatal)",
                                cad_file_id,
                            )
                session.commit()
                resolved_tenant_id = str(cad.tenant_id) if cad.tenant_id else None
        except Exception:
            logger.exception("Post-render processing failed for %s (non-fatal)", cad_file_id)
    try:
        from app.domains.pipeline.tasks.extract_metadata import _auto_populate_materials_for_cad
        _auto_populate_materials_for_cad(cad_file_id, tenant_id=tenant_id)
    except Exception:
        logger.exception(
            "Auto material population failed for cad_file %s (non-fatal)",
            cad_file_id,
        )
    try:
        # NOTE(review): resolved_tenant_id is only set when the post-process
        # phase ran and found the CadFile, so graph-mode renders skip this
        # broadcast — confirm that is intended.
        if resolved_tenant_id:
            from app.core.websocket import publish_event_sync
            publish_event_sync(
                resolved_tenant_id,
                {
                    "type": "cad_processing_complete",
                    "cad_file_id": cad_file_id,
                    "status": "completed",
                },
            )
    except Exception:
        logger.debug("WebSocket publish for CAD complete skipped (non-fatal)")
    if queue_legacy_glb_follow_up:
        # Legacy pipeline auto-chains the geometry GLB export; graph workflows
        # dispatch explicit nodes instead.
        try:
            from app.domains.pipeline.tasks.export_glb import generate_gltf_geometry_task
            generate_gltf_geometry_task.delay(cad_file_id)
            pl.info("render_step_thumbnail", f"Queued generate_gltf_geometry_task for {cad_file_id}")
        except Exception:
            logger.debug("Could not queue generate_gltf_geometry_task (non-fatal)")
    pl.step_done("render_step_thumbnail")
    try:
        from app.domains.rendering.tasks import _update_workflow_run_status
        _update_workflow_run_status(
            cad_file_id,
            "completed",
            workflow_run_id=workflow_run_id,
            workflow_node_id=workflow_node_id,
        )
    except Exception:
        logger.exception("Failed to update workflow state for thumbnail render %s", cad_file_id)
@contextmanager
def _capped_thumbnail_samples():
"""Temporarily cap render samples for thumbnail renders.
@@ -73,123 +314,88 @@ def _pipeline_session(tenant_id: str | None = None):
@celery_app.task(bind=True, name="app.tasks.step_tasks.render_step_thumbnail", queue="asset_pipeline")
def render_step_thumbnail(self, cad_file_id: str):
def render_step_thumbnail(
self,
cad_file_id: str,
workflow_run_id: str | None = None,
workflow_node_id: str | None = None,
renderer: str | None = None,
render_engine: str | None = None,
samples: int | None = None,
width: int | None = None,
height: int | None = None,
transparent_bg: bool | None = None,
**_: object,
):
"""Render the thumbnail for a freshly-processed STEP file.
Runs on the dedicated asset_pipeline queue (concurrency=1) so the
blender-renderer service is never overwhelmed by concurrent requests.
On success, also auto-populates materials and marks the CadFile as completed.
"""
pl = PipelineLogger(task_id=self.request.id)
pl.step_start("render_step_thumbnail", {"cad_file_id": cad_file_id})
logger.info(f"Rendering thumbnail for CAD file: {cad_file_id}")
from app.core.tenant_context import resolve_tenant_id_for_cad
_tenant_id = resolve_tenant_id_for_cad(cad_file_id)
# ── Pre-render: compute hash ──────────────────────────────────────────
try:
from app.models.cad_file import CadFile
from app.domains.products.cache_service import compute_step_hash
with _pipeline_session(_tenant_id) as session:
cad = session.get(CadFile, cad_file_id)
if cad and cad.stored_path and not cad.step_file_hash:
cad.step_file_hash = compute_step_hash(cad.stored_path)
session.commit()
logger.info(f"Saved step_file_hash for {cad_file_id}: {cad.step_file_hash[:12]}")
except Exception:
logger.warning(f"step_file_hash computation failed for {cad_file_id} (non-fatal)")
# ── Render thumbnail (with capped samples for 512x512) ──────────────
try:
from app.services.step_processor import regenerate_cad_thumbnail
pl.info("render_step_thumbnail", "Calling regenerate_cad_thumbnail")
with _capped_thumbnail_samples():
success = regenerate_cad_thumbnail(cad_file_id, part_colors={})
if not success:
raise RuntimeError("regenerate_cad_thumbnail returned False")
_render_thumbnail_core(
cad_file_id=cad_file_id,
workflow_run_id=workflow_run_id,
workflow_node_id=workflow_node_id,
renderer=renderer,
render_engine=render_engine,
samples=samples,
width=width,
height=height,
transparent_bg=transparent_bg,
include_postprocess=True,
queue_legacy_glb_follow_up=workflow_run_id is None,
)
except Exception as exc:
pl.step_error("render_step_thumbnail", f"Thumbnail render failed: {exc}", exc)
logger.error(f"Thumbnail render failed for {cad_file_id}: {exc}")
raise self.retry(exc=exc, countdown=30, max_retries=2)
# ── Post-render: bbox + sharp edges + materials (single session) ──────
@celery_app.task(bind=True, name="app.tasks.step_tasks.render_graph_thumbnail", queue="asset_pipeline")
def render_graph_thumbnail(
self,
cad_file_id: str,
workflow_run_id: str | None = None,
workflow_node_id: str | None = None,
renderer: str | None = None,
render_engine: str | None = None,
samples: int | None = None,
width: int | None = None,
height: int | None = None,
transparent_bg: bool | None = None,
**_: object,
):
"""Render a CAD thumbnail for graph workflows without legacy follow-up side effects."""
try:
from app.models.cad_file import CadFile
from app.domains.rendering.workflow_runtime_services import resolve_cad_bbox
with _pipeline_session(_tenant_id) as session:
cad = session.get(CadFile, cad_file_id)
if not cad:
logger.warning(f"CadFile {cad_file_id} not found in post-render phase")
else:
step_path = cad.stored_path
attrs = cad.mesh_attributes or {}
# Bounding box extraction
if step_path and not attrs.get("dimensions_mm"):
_step = Path(step_path)
_glb = _step.parent / f"{_step.stem}_thumbnail.glb"
bbox_data = resolve_cad_bbox(step_path, glb_path=str(_glb)).bbox_data
if bbox_data:
cad.mesh_attributes = {**attrs, **bbox_data}
attrs = cad.mesh_attributes
dims = bbox_data["dimensions_mm"]
logger.info(f"bbox for {cad_file_id}: {dims['x']}×{dims['y']}×{dims['z']} mm")
# Sharp edge extraction (PCurve-based, runs on render-worker with OCP)
if step_path and "sharp_edge_pairs" not in attrs:
try:
from app.services.step_processor import extract_mesh_edge_data
edge_data = extract_mesh_edge_data(step_path)
if edge_data:
cad.mesh_attributes = {**attrs, **edge_data}
n_pairs = len(edge_data.get("sharp_edge_pairs", []))
logger.info(f"Sharp edge data extracted for {cad_file_id}: {n_pairs} sharp edges")
except Exception:
logger.exception(f"Sharp edge extraction failed for {cad_file_id} (non-fatal)")
session.commit()
# WebSocket broadcast
_tid = str(cad.tenant_id) if cad.tenant_id else None
except Exception:
logger.exception(f"Post-render processing failed for {cad_file_id} (non-fatal)")
_tid = None
# Auto-populate materials
try:
from app.domains.pipeline.tasks.extract_metadata import _auto_populate_materials_for_cad
_auto_populate_materials_for_cad(cad_file_id, tenant_id=_tenant_id)
except Exception:
logger.exception(f"Auto material population failed for cad_file {cad_file_id} (non-fatal)")
# Broadcast WebSocket event
try:
if _tid:
from app.core.websocket import publish_event_sync
publish_event_sync(_tid, {
"type": "cad_processing_complete",
"cad_file_id": cad_file_id,
"status": "completed",
})
except Exception:
logger.debug("WebSocket publish for CAD complete skipped (non-fatal)")
# Auto-generate geometry GLB
try:
from app.domains.pipeline.tasks.export_glb import generate_gltf_geometry_task
generate_gltf_geometry_task.delay(cad_file_id)
pl.info("render_step_thumbnail", f"Queued generate_gltf_geometry_task for {cad_file_id}")
except Exception:
logger.debug("Could not queue generate_gltf_geometry_task (non-fatal)")
pl.step_done("render_step_thumbnail")
_render_thumbnail_core(
cad_file_id=cad_file_id,
workflow_run_id=workflow_run_id,
workflow_node_id=workflow_node_id,
renderer=renderer,
render_engine=render_engine,
samples=samples,
width=width,
height=height,
transparent_bg=transparent_bg,
include_postprocess=False,
queue_legacy_glb_follow_up=False,
)
except Exception as exc:
raise self.retry(exc=exc, countdown=30, max_retries=2)
@celery_app.task(bind=True, name="app.tasks.step_tasks.regenerate_thumbnail", queue="asset_pipeline")
def regenerate_thumbnail(self, cad_file_id: str, part_colors: dict):
def regenerate_thumbnail(
self,
cad_file_id: str,
part_colors: dict,
renderer: str | None = None,
render_engine: str | None = None,
samples: int | None = None,
width: int | None = None,
height: int | None = None,
transparent_bg: bool | None = None,
):
"""Regenerate thumbnail with per-part colours."""
pl = PipelineLogger(task_id=self.request.id)
pl.step_start("regenerate_thumbnail", {"cad_file_id": cad_file_id})
@@ -200,11 +406,40 @@ def regenerate_thumbnail(self, cad_file_id: str, part_colors: dict):
_tenant_id = resolve_tenant_id_for_cad(cad_file_id)
try:
from app.services.step_processor import regenerate_cad_thumbnail
from app.services.step_processor import MissingCadResourceError, regenerate_cad_thumbnail
render_context: dict[str, object] = {}
try:
from app.models.cad_file import CadFile
with _pipeline_session(_tenant_id) as session:
cad = session.get(CadFile, cad_file_id)
render_context = _resolve_thumbnail_render_context(session, cad)
except Exception:
logger.warning(
"thumbnail render context resolution failed for %s during regeneration; using fallback render path",
cad_file_id,
)
with _capped_thumbnail_samples():
success = regenerate_cad_thumbnail(cad_file_id, part_colors)
success = regenerate_cad_thumbnail(
cad_file_id,
part_colors,
renderer=renderer,
render_engine=render_engine,
samples=samples,
width=width,
height=height,
transparent_bg=transparent_bg,
**render_context,
)
if not success:
raise RuntimeError("regenerate_cad_thumbnail returned False")
except MissingCadResourceError as exc:
pl.warning("regenerate_thumbnail", f"Skipping stale thumbnail regeneration: {exc}")
logger.warning("Skipping thumbnail regeneration for %s: %s", cad_file_id, exc)
pl.step_done("regenerate_thumbnail")
return
except Exception as exc:
pl.step_error("regenerate_thumbnail", f"Thumbnail regeneration failed: {exc}", exc)
logger.error(f"Thumbnail regeneration failed for {cad_file_id}: {exc}")
+158 -10
View File
@@ -16,6 +16,8 @@ import logging
logger = logging.getLogger(__name__)
_WORKFLOW_ROLLOUT_MODES = {"legacy_only", "shadow", "graph"}
def _build_rollout_signal(
*,
@@ -39,6 +41,13 @@ def _build_rollout_signal(
}
def _normalize_workflow_rollout_mode(value: str | None) -> str:
normalized = (value or "legacy_only").strip().lower()
if normalized in _WORKFLOW_ROLLOUT_MODES:
return normalized
return "legacy_only"
def dispatch_render_with_workflow(order_line_id: str) -> dict:
"""Dispatch a render for the given order line.
@@ -54,12 +63,19 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
from app.config import settings
from app.domains.orders.models import OrderLine
from app.domains.rendering.models import OutputType, WorkflowDefinition
from app.domains.rendering.output_type_contracts import (
derive_supported_artifact_kinds_from_workflow_config,
)
from app.domains.rendering.workflow_config_utils import (
canonicalize_workflow_config,
extract_runtime_workflow,
get_workflow_execution_mode,
)
from app.domains.rendering.workflow_executor import prepare_workflow_context
from app.domains.rendering.workflow_executor import (
WorkflowTaskSubmissionError,
prepare_workflow_context,
submit_prepared_workflow_tasks,
)
from app.domains.rendering.workflow_graph_runtime import (
execute_graph_workflow,
find_unsupported_graph_nodes,
@@ -150,7 +166,41 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
)
return legacy_result
execution_mode = get_workflow_execution_mode(canonical_config, default="legacy")
supported_artifact_kinds = derive_supported_artifact_kinds_from_workflow_config(canonical_config)
output_type_artifact_kind = getattr(output_type, "artifact_kind", None)
if output_type_artifact_kind and output_type_artifact_kind not in supported_artifact_kinds:
supported = ", ".join(supported_artifact_kinds) if supported_artifact_kinds else "none"
logger.warning(
"order_line %s: workflow_definition_id %s is incompatible with output_type %s artifact_kind %s; "
"falling back to legacy dispatch",
order_line_id,
wf_def.id,
output_type.id,
output_type_artifact_kind,
)
legacy_result = _legacy_dispatch(order_line_id)
legacy_result.update(
_build_rollout_signal(
gate_status="workflow_contract_mismatch",
ready=False,
reasons=[
"Linked workflow does not produce the artifact kind required by the output type; legacy dispatch remains authoritative.",
f"Expected artifact kind: {output_type_artifact_kind}. Supported by workflow: [{supported}].",
],
workflow_def_id=wf_def.id,
output_type_id=output_type.id,
)
)
return legacy_result
configured_execution_mode = get_workflow_execution_mode(canonical_config, default="legacy")
workflow_rollout_mode = _normalize_workflow_rollout_mode(
getattr(output_type, "workflow_rollout_mode", None)
)
legacy_runtime_gate_status = "workflow_legacy_runtime"
legacy_runtime_reasons = [
"Workflow definition is active, but execution still uses the legacy runtime path."
]
def _prepare_graph_context(target_mode: str):
workflow_context = prepare_workflow_context(
@@ -175,7 +225,38 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
session.commit()
return run
if execution_mode == "graph":
if configured_execution_mode in {"graph", "shadow"} and workflow_rollout_mode == "legacy_only":
logger.info(
"order_line %s: workflow_definition_id %s is graph-capable but output_type %s is pinned to legacy_only rollout",
order_line_id,
wf_def.id,
output_type.id,
)
legacy_result = _legacy_dispatch(order_line_id)
legacy_result["workflow_rollout_mode"] = workflow_rollout_mode
legacy_result["configured_execution_mode"] = configured_execution_mode
legacy_result.update(
_build_rollout_signal(
gate_status="rollout_legacy_only",
ready=False,
reasons=[
"Output type rollout mode is pinned to legacy_only; legacy dispatch remains authoritative.",
f"Linked workflow stays attached in configured execution mode '{configured_execution_mode}' until rollout is promoted.",
],
workflow_def_id=wf_def.id,
output_type_id=output_type.id,
)
)
return legacy_result
if workflow_rollout_mode in {"graph", "shadow"} and configured_execution_mode not in {"graph", "shadow"}:
legacy_runtime_gate_status = "rollout_requires_graph_workflow"
legacy_runtime_reasons = [
f"Output type rollout mode '{workflow_rollout_mode}' requires a workflow configured for graph or shadow execution.",
f"Linked workflow is still configured for '{configured_execution_mode}', so legacy runtime remains authoritative.",
]
if configured_execution_mode in {"graph", "shadow"} and workflow_rollout_mode == "graph":
try:
workflow_context = _prepare_graph_context("graph")
except Exception as exc:
@@ -225,13 +306,44 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
return legacy_result
try:
dispatch_result = execute_graph_workflow(session, workflow_context)
dispatch_result = execute_graph_workflow(
session,
workflow_context,
dispatch_tasks=False,
)
session.commit()
submit_prepared_workflow_tasks(dispatch_result)
except Exception as exc:
session.rollback()
session.add(run)
mark_workflow_run_failed(run, str(exc))
session.commit()
if isinstance(exc, WorkflowTaskSubmissionError) and exc.submitted_task_ids:
logger.exception(
"order_line %s: graph workflow submission partially failed after %d task(s); "
"not falling back to legacy to avoid duplicate renders",
order_line_id,
len(exc.submitted_task_ids),
)
return {
"backend": "workflow_graph",
"execution_mode": "graph",
"workflow_run_id": str(run.id),
"workflow_rollout_mode": workflow_rollout_mode,
"configured_execution_mode": configured_execution_mode,
"submission_status": "partial_failure",
"submitted_task_ids": exc.submitted_task_ids,
**_build_rollout_signal(
gate_status="graph_submission_failed",
ready=False,
reasons=[
"Graph workflow task submission failed after some tasks were already queued.",
f"Submission error: {exc}.",
],
workflow_def_id=wf_def.id,
output_type_id=output_type.id,
),
}
logger.exception(
"order_line %s: graph workflow execution via definition %s failed, falling back to legacy dispatch",
order_line_id,
@@ -257,6 +369,8 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
"workflow_run_id": str(run.id),
"celery_task_id": dispatch_result.task_ids[0] if dispatch_result.task_ids else None,
"task_ids": dispatch_result.task_ids,
"workflow_rollout_mode": workflow_rollout_mode,
"configured_execution_mode": configured_execution_mode,
}
result.update(
_build_rollout_signal(
@@ -267,10 +381,10 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
workflow_def_id=wf_def.id,
output_type_id=output_type.id,
)
)
)
return result
if execution_mode == "shadow":
if configured_execution_mode in {"graph", "shadow"} and workflow_rollout_mode == "shadow":
legacy_result = _legacy_dispatch(order_line_id)
try:
@@ -330,13 +444,43 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
return legacy_result
try:
dispatch_result = execute_graph_workflow(session, workflow_context)
dispatch_result = execute_graph_workflow(
session,
workflow_context,
dispatch_tasks=False,
)
session.commit()
submit_prepared_workflow_tasks(dispatch_result)
except Exception as exc:
session.rollback()
session.add(run)
mark_workflow_run_failed(run, str(exc))
session.commit()
if isinstance(exc, WorkflowTaskSubmissionError) and exc.submitted_task_ids:
logger.exception(
"order_line %s: shadow workflow submission partially failed after %d task(s); "
"legacy dispatch remains authoritative",
order_line_id,
len(exc.submitted_task_ids),
)
legacy_result["execution_mode"] = "shadow"
legacy_result["shadow_status"] = "partial_failure"
legacy_result["shadow_error"] = str(exc)
legacy_result["shadow_workflow_run_id"] = str(run.id)
legacy_result["shadow_submitted_task_ids"] = exc.submitted_task_ids
legacy_result.update(
_build_rollout_signal(
gate_status="shadow_submission_failed",
ready=False,
reasons=[
"Shadow workflow task submission failed after some tasks were already queued.",
f"Submission error: {exc}.",
],
workflow_def_id=wf_def.id,
output_type_id=output_type.id,
)
)
return legacy_result
logger.exception(
"order_line %s: shadow workflow execution via definition %s failed; legacy dispatch remains authoritative",
order_line_id,
@@ -364,6 +508,8 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
legacy_result["shadow_status"] = "dispatched"
legacy_result["shadow_workflow_run_id"] = str(run.id)
legacy_result["shadow_task_ids"] = dispatch_result.task_ids
legacy_result["workflow_rollout_mode"] = workflow_rollout_mode
legacy_result["configured_execution_mode"] = configured_execution_mode
legacy_result.update(
_build_rollout_signal(
gate_status="pending_shadow_verdict",
@@ -375,7 +521,7 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
workflow_def_id=wf_def.id,
output_type_id=output_type.id,
)
)
)
return legacy_result
workflow_type, params = extract_runtime_workflow(canonical_config)
@@ -519,12 +665,14 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
"execution_mode": "legacy",
"workflow_run_id": str(run.id),
"celery_task_id": celery_task_id,
"workflow_rollout_mode": workflow_rollout_mode,
"configured_execution_mode": configured_execution_mode,
}
result.update(
_build_rollout_signal(
gate_status="workflow_legacy_runtime",
gate_status=legacy_runtime_gate_status,
ready=False,
reasons=["Workflow definition is active, but execution still uses the legacy runtime path."],
reasons=legacy_runtime_reasons,
workflow_def_id=wf_def.id,
output_type_id=output_type.id,
)
+30 -2
View File
@@ -1,5 +1,6 @@
import uuid
from datetime import datetime
from typing import Any
from sqlalchemy import String, DateTime, Boolean, Text, Integer, Float, ForeignKey, Table, Column
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.dialects.postgresql import UUID, JSONB
@@ -15,6 +16,17 @@ render_template_output_types = Table(
)
VALID_RENDER_BACKENDS = {"celery"}
OUTPUT_TYPE_WORKFLOW_FAMILIES = {"cad_file", "order_line"}
OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES = {"legacy_only", "shadow", "graph"}
OUTPUT_TYPE_ARTIFACT_KINDS = {
"still_image",
"turntable_video",
"model_export",
"thumbnail_image",
"blend_asset",
"package",
"custom",
}
class OutputType(Base):
@@ -23,14 +35,21 @@ class OutputType(Base):
id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
name: Mapped[str] = mapped_column(String(200), unique=True, nullable=False)
description: Mapped[str | None] = mapped_column(Text, nullable=True)
renderer: Mapped[str] = mapped_column(String(50), nullable=False, default="threejs")
renderer: Mapped[str] = mapped_column(String(50), nullable=False, default="blender")
render_settings: Mapped[dict] = mapped_column(JSONB, nullable=False, default=dict)
output_format: Mapped[str] = mapped_column(String(20), nullable=False, default="png")
sort_order: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
compatible_categories: Mapped[list] = mapped_column(JSONB, default=list, server_default="[]")
render_backend: Mapped[str] = mapped_column(String(20), nullable=False, default="auto", server_default="auto")
render_backend: Mapped[str] = mapped_column(String(20), nullable=False, default="celery", server_default="auto")
is_animation: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false")
transparent_bg: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false")
workflow_family: Mapped[str] = mapped_column(
String(20), nullable=False, default="order_line", server_default="order_line"
)
artifact_kind: Mapped[str] = mapped_column(
String(50), nullable=False, default="still_image", server_default="still_image"
)
invocation_overrides: Mapped[dict] = mapped_column(JSONB, nullable=False, default=dict, server_default="{}")
cycles_device: Mapped[str | None] = mapped_column(String(10), nullable=True, default=None)
pricing_tier_id: Mapped[int | None] = mapped_column(
Integer, ForeignKey("pricing_tiers.id", ondelete="SET NULL"), nullable=True, index=True
@@ -49,6 +68,9 @@ class OutputType(Base):
workflow_definition_id: Mapped[uuid.UUID | None] = mapped_column(
UUID(as_uuid=True), ForeignKey("workflow_definitions.id", ondelete="SET NULL"), nullable=True
)
workflow_rollout_mode: Mapped[str] = mapped_column(
String(20), nullable=False, default="legacy_only", server_default="legacy_only"
)
order_lines: Mapped[list["OrderLine"]] = relationship("OrderLine", back_populates="output_type")
pricing_tier: Mapped["PricingTier | None"] = relationship("PricingTier", back_populates="output_types")
@@ -70,6 +92,12 @@ class RenderTemplate(Base):
lighting_only: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false")
shadow_catcher_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false")
camera_orbit: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, server_default="true")
workflow_input_schema: Mapped[list[dict[str, Any]]] = mapped_column(
JSONB,
nullable=False,
default=list,
server_default="[]",
)
is_active: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, server_default="true")
tenant_id: Mapped[uuid.UUID | None] = mapped_column(
UUID(as_uuid=True), ForeignKey("tenants.id"), nullable=True, index=True
@@ -3,6 +3,12 @@ from __future__ import annotations
from collections.abc import Mapping
from typing import Any, Literal
from app.core.process_steps import StepName
from app.domains.rendering.models import (
OUTPUT_TYPE_ARTIFACT_KINDS,
OUTPUT_TYPE_WORKFLOW_FAMILIES,
OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES,
)
from app.domains.rendering.workflow_config_utils import canonicalize_workflow_config
from app.domains.rendering.workflow_node_registry import get_node_definition
@@ -22,6 +28,11 @@ OutputTypeArtifactKind = Literal[
_MODEL_EXPORT_FORMATS = {"gltf", "glb", "stl", "obj", "usd", "usdz"}
_VIDEO_FORMATS = {"mp4", "webm", "mov"}
_IMAGE_FORMATS = {"png", "jpg", "jpeg", "webp"}
_BLEND_FORMATS = {"blend"}
_OUTPUT_FORMATS_BY_FAMILY: dict[OutputTypeWorkflowFamily, set[str]] = {
"cad_file": {*_IMAGE_FORMATS, *_MODEL_EXPORT_FORMATS},
"order_line": {*_IMAGE_FORMATS, *_VIDEO_FORMATS, *_BLEND_FORMATS},
}
_ARTIFACT_KINDS_BY_FAMILY: dict[OutputTypeWorkflowFamily, set[OutputTypeArtifactKind]] = {
"cad_file": {"thumbnail_image", "model_export", "package", "custom"},
"order_line": {"still_image", "turntable_video", "blend_asset", "package", "custom"},
@@ -42,6 +53,83 @@ INVOCATION_OVERRIDE_KEYS = (
"denoising_quality",
"denoising_use_gpu",
)
# Render overrides accepted for any static (single-frame) render.
_STATIC_RENDER_OVERRIDE_KEYS = (
    "width",
    "height",
    "engine",
    "samples",
    "bg_color",
    "noise_threshold",
    "denoiser",
    "denoising_input_passes",
    "denoising_prefilter",
    "denoising_quality",
    "denoising_use_gpu",
)
# Additional overrides that only make sense for animated (turntable) renders.
_ANIMATION_OVERRIDE_KEYS = (
    "frame_count",
    "fps",
    "turntable_axis",
)
# Accepted values for the 'turntable_axis' override.
# NOTE(review): constant name has a typo ("TURNABLE"); renaming would touch
# other call sites, so it is left as-is here.
_TURNABLE_AXES = {"world_x", "world_y", "world_z"}
# The *_DISPLAY_ORDER tuples fix a stable presentation order; the catalog
# builder filters them against the model-layer validity sets before exposing.
_WORKFLOW_FAMILY_DISPLAY_ORDER: tuple[OutputTypeWorkflowFamily, ...] = ("order_line", "cad_file")
_WORKFLOW_ROLLOUT_DISPLAY_ORDER: tuple[str, ...] = ("legacy_only", "shadow", "graph")
_ARTIFACT_KIND_DISPLAY_ORDER: tuple[OutputTypeArtifactKind, ...] = (
    "still_image",
    "turntable_video",
    "model_export",
    "thumbnail_image",
    "blend_asset",
    "package",
    "custom",
)
_OUTPUT_FORMAT_DISPLAY_ORDER: tuple[str, ...] = (
    "png",
    "jpg",
    "jpeg",
    "webp",
    "mp4",
    "webm",
    "mov",
    "gltf",
    "glb",
    "stl",
    "obj",
    "usd",
    "usdz",
    "blend",
)
# Default output format advertised per artifact kind in the contract catalog.
_DEFAULT_OUTPUT_FORMAT_BY_ARTIFACT_KIND: dict[OutputTypeArtifactKind, str] = {
    "still_image": "png",
    "turntable_video": "mp4",
    "model_export": "gltf",
    "thumbnail_image": "png",
    "blend_asset": "blend",
    "package": "png",
    "custom": "png",
}
# Parameter-ownership buckets reported by the catalog: keys owned by the
# output-type profile itself ...
_OUTPUT_TYPE_PROFILE_KEYS: tuple[str, ...] = (
    "transparent_bg",
    "cycles_device",
    "material_override",
)
# ... keys owned by the render-template runtime ...
_TEMPLATE_RUNTIME_KEYS: tuple[str, ...] = (
    "target_collection",
    "lighting_only",
    "shadow_catcher",
    "camera_orbit",
    "template_inputs",
)
# ... and workflow steps whose node definitions contribute per-step keys.
_WORKFLOW_NODE_PARAMETER_OWNERSHIP_STEPS: tuple[StepName, ...] = (
    StepName.RESOLVE_TEMPLATE,
    StepName.BLENDER_STILL,
    StepName.BLENDER_TURNTABLE,
    StepName.EXPORT_BLEND,
)
class InvalidInvocationOverridesError(ValueError):
    """Raised when an invocation-override key or value fails validation."""

    pass
def list_allowed_artifact_kinds_for_family(
@@ -55,6 +143,79 @@ def list_allowed_artifact_kinds_for_family(
return tuple(sorted(allowed))
def list_allowed_output_formats_for_family(workflow_family: str) -> tuple[str, ...]:
    """Return the sorted output formats permitted for a workflow family.

    Blank or unrecognized families fall back to the ``order_line`` family.
    """
    family_key = (workflow_family or "order_line").strip().lower()
    if family_key != "cad_file":
        family_key = "order_line"
    return tuple(sorted(_OUTPUT_FORMATS_BY_FAMILY[family_key]))
def build_output_type_contract_catalog() -> dict[str, Any]:
    """Assemble the static catalog describing output-type contract options.

    Each list keeps the module's display order, filtered down to the values
    that the models layer declares as valid, plus per-family allow-lists,
    per-artifact-kind override keys/default formats, and the
    parameter-ownership buckets.
    """
    families = [f for f in _WORKFLOW_FAMILY_DISPLAY_ORDER if f in OUTPUT_TYPE_WORKFLOW_FAMILIES]
    rollout_modes = [
        m for m in _WORKFLOW_ROLLOUT_DISPLAY_ORDER if m in OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES
    ]
    kinds = [k for k in _ARTIFACT_KIND_DISPLAY_ORDER if k in OUTPUT_TYPE_ARTIFACT_KINDS]

    kinds_by_family: dict[str, list[str]] = {}
    formats_by_family: dict[str, list[str]] = {}
    for family in families:
        family_kinds = set(list_allowed_artifact_kinds_for_family(family))
        kinds_by_family[family] = [k for k in kinds if k in family_kinds]
        family_formats = set(list_allowed_output_formats_for_family(family))
        formats_by_family[family] = [
            fmt for fmt in _OUTPUT_FORMAT_DISPLAY_ORDER if fmt in family_formats
        ]

    override_keys_by_kind: dict[str, list[str]] = {}
    default_format_by_kind: dict[str, str] = {}
    for kind in kinds:
        override_keys_by_kind[kind] = list(
            list_allowed_invocation_override_keys_for_artifact_kind(
                kind,
                is_animation=kind == "turntable_video",
            )
        )
        default_format_by_kind[kind] = _DEFAULT_OUTPUT_FORMAT_BY_ARTIFACT_KIND[kind]

    node_keys_by_step: dict[str, list[str]] = {}
    for step in _WORKFLOW_NODE_PARAMETER_OWNERSHIP_STEPS:
        definition = get_node_definition(step.value)
        if definition is not None:
            node_keys_by_step[step.value] = [field.key for field in definition.fields]

    return {
        "workflow_families": families,
        "workflow_rollout_modes": rollout_modes,
        "artifact_kinds": kinds,
        "allowed_artifact_kinds_by_family": kinds_by_family,
        "allowed_output_formats_by_family": formats_by_family,
        "allowed_invocation_override_keys_by_artifact_kind": override_keys_by_kind,
        "default_output_format_by_artifact_kind": default_format_by_kind,
        "parameter_ownership": {
            "output_type_profile_keys": list(_OUTPUT_TYPE_PROFILE_KEYS),
            "template_runtime_keys": list(_TEMPLATE_RUNTIME_KEYS),
            "workflow_node_keys_by_step": node_keys_by_step,
        },
    }
def infer_output_type_artifact_kind(
output_format: str | None,
is_animation: bool,
@@ -65,6 +226,8 @@ def infer_output_type_artifact_kind(
if is_animation or normalized_format in _VIDEO_FORMATS:
return "turntable_video"
if normalized_format in _BLEND_FORMATS:
return "blend_asset"
if normalized_format in _MODEL_EXPORT_FORMATS:
return "model_export"
if normalized_family == "cad_file" and normalized_format in _IMAGE_FORMATS:
@@ -91,6 +254,14 @@ def validate_output_type_contract(
f"'{workflow_family}'. Allowed: {allowed}"
)
allowed_output_formats = list_allowed_output_formats_for_family(normalized_family)
if normalized_format and normalized_format not in allowed_output_formats:
allowed = ", ".join(allowed_output_formats)
raise ValueError(
f"Output format '{output_format}' is not allowed for workflow_family "
f"'{workflow_family}'. Allowed: {allowed}"
)
if normalized_family == "cad_file" and is_animation:
raise ValueError("CAD-file workflows do not support animated output types")
@@ -114,6 +285,20 @@ def validate_output_type_contract(
f"({', '.join(sorted(_MODEL_EXPORT_FORMATS))})"
)
if normalized_artifact == "blend_asset":
if is_animation:
raise ValueError("Artifact kind 'blend_asset' does not support is_animation=true")
if normalized_format and normalized_format not in _BLEND_FORMATS:
raise ValueError(
"Artifact kind 'blend_asset' requires a blend output_format "
f"({', '.join(sorted(_BLEND_FORMATS))})"
)
if normalized_format in _BLEND_FORMATS and normalized_artifact != "blend_asset":
raise ValueError(
f"Output format '{output_format}' requires artifact kind 'blend_asset'"
)
def infer_workflow_family_from_config(config: dict) -> ResolvedWorkflowFamily | None:
normalized = canonicalize_workflow_config(config)
@@ -121,6 +306,7 @@ def infer_workflow_family_from_config(config: dict) -> ResolvedWorkflowFamily |
definition.family
for node in normalized.get("nodes", [])
if (definition := get_node_definition(node.get("step"))) is not None
if definition.family in {"cad_file", "order_line"}
}
if not families:
return None
@@ -129,14 +315,329 @@ def infer_workflow_family_from_config(config: dict) -> ResolvedWorkflowFamily |
return next(iter(families))
def derive_workflow_terminal_node_ids(config: dict[str, Any]) -> tuple[str, ...]:
    """Return the sorted ids of workflow nodes that feed no outgoing edge.

    An empty node list yields an empty tuple.
    """
    normalized = canonicalize_workflow_config(config)
    nodes = normalized.get("nodes", [])
    if not nodes:
        return ()
    candidate_ids: set[str] = set()
    for node in nodes:
        node_id = node.get("id")
        if node_id not in (None, ""):
            candidate_ids.add(str(node_id))
    # Any node that appears as an edge source has a downstream consumer.
    for edge in normalized.get("edges", []):
        source = edge.get("from")
        if source not in (None, ""):
            candidate_ids.discard(str(source))
    return tuple(sorted(candidate_ids))
def derive_supported_artifact_kinds_from_workflow_config(
    config: dict[str, Any],
) -> tuple[OutputTypeArtifactKind, ...]:
    """Derive the artifact kinds a workflow config can produce at its terminal nodes.

    Returns a sorted tuple of artifact kinds. An unparseable config or one
    without nodes yields an empty tuple. Terminal OUTPUT_SAVE nodes are
    classified by whichever render step (still vs turntable) feeds them.
    """
    try:
        normalized = canonicalize_workflow_config(config)
    except Exception:
        # Broken configs simply advertise no supported kinds.
        return ()
    nodes = normalized.get("nodes", [])
    if not nodes:
        return ()
    nodes_by_id = {
        str(node.get("id")): node
        for node in nodes
        if node.get("id") not in (None, "")
    }
    incoming_by_target: dict[str, set[str]] = {}
    for edge in normalized.get("edges", []):
        source = edge.get("from")
        target = edge.get("to")
        if source in (None, "") or target in (None, ""):
            continue
        incoming_by_target.setdefault(str(target), set()).add(str(source))
    cache: dict[str, set[str]] = {}

    def _collect_upstream_steps(node_id: str) -> set[str]:
        """Collect the step names of *node_id* and every transitive upstream node.

        Iterative DFS with a visited set: the previous recursive version
        could recurse forever (RecursionError) on a cyclic edge list because
        the memo cache was only populated after the recursion unwound.
        """
        cached = cache.get(node_id)
        if cached is not None:
            return set(cached)
        steps: set[str] = set()
        visited: set[str] = set()
        stack = [node_id]
        while stack:
            current = stack.pop()
            if current in visited:
                continue
            visited.add(current)
            memoized = cache.get(current)
            if memoized is not None:
                # Reuse a previously computed transitive closure.
                steps.update(memoized)
                continue
            node = nodes_by_id.get(current)
            if node is not None and node.get("step"):
                steps.add(str(node["step"]))
            stack.extend(incoming_by_target.get(current, ()))
        cache[node_id] = set(steps)
        return steps

    def _derive_node_artifact_kinds(node_id: str) -> set[OutputTypeArtifactKind]:
        """Map one terminal node to the artifact kinds it emits (possibly none)."""
        node = nodes_by_id.get(node_id)
        if node is None:
            return set()
        step = str(node.get("step") or "")
        if step in {StepName.BLENDER_STILL.value}:
            return {"still_image"}
        if step in {StepName.BLENDER_TURNTABLE.value}:
            return {"turntable_video"}
        if step in {StepName.EXPORT_BLEND.value}:
            return {"blend_asset"}
        if step in {
            StepName.OCC_GLB_EXPORT.value,
            StepName.STL_CACHE_GENERATE.value,
        }:
            return {"model_export"}
        if step == StepName.THUMBNAIL_SAVE.value:
            return {"thumbnail_image"}
        if step != StepName.OUTPUT_SAVE.value:
            return set()
        # OUTPUT_SAVE inherits its kind from the render step feeding it.
        upstream_steps = _collect_upstream_steps(node_id)
        has_still = StepName.BLENDER_STILL.value in upstream_steps
        has_turntable = StepName.BLENDER_TURNTABLE.value in upstream_steps
        if has_still and has_turntable:
            # Ambiguous save node: refuse to claim either kind.
            return set()
        if has_turntable:
            return {"turntable_video"}
        if has_still:
            return {"still_image"}
        return set()

    supported: set[OutputTypeArtifactKind] = set()
    for terminal_id in derive_workflow_terminal_node_ids(normalized):
        supported.update(_derive_node_artifact_kinds(terminal_id))
    return tuple(sorted(supported))
def workflow_supports_artifact_kind(
    config: dict[str, Any],
    artifact_kind: str,
) -> bool:
    """Return True when the workflow config can produce ``artifact_kind``."""
    wanted = (artifact_kind or "").strip().lower()
    if not wanted:
        return False
    supported = derive_supported_artifact_kinds_from_workflow_config(config)
    return wanted in supported
def list_allowed_invocation_override_keys_for_artifact_kind(
    artifact_kind: str,
    *,
    is_animation: bool = False,
) -> tuple[str, ...]:
    """Return the invocation-override keys permitted for an artifact kind.

    Unknown kinds fall back on ``is_animation`` to pick between the static
    and animated key sets.
    """
    kind = (artifact_kind or "").strip().lower()
    animated_keys = _STATIC_RENDER_OVERRIDE_KEYS + _ANIMATION_OVERRIDE_KEYS
    if kind == "still_image" or kind == "thumbnail_image":
        return _STATIC_RENDER_OVERRIDE_KEYS
    if kind == "turntable_video":
        return animated_keys
    if kind == "model_export" or kind == "blend_asset":
        # Exports carry no render overrides at all.
        return ()
    if kind == "package" or kind == "custom":
        return INVOCATION_OVERRIDE_KEYS
    return animated_keys if is_animation else _STATIC_RENDER_OVERRIDE_KEYS
def _normalize_positive_int_override(key: str, value: Any) -> int:
if isinstance(value, bool):
raise InvalidInvocationOverridesError(f"Invocation override '{key}' must be a positive integer")
try:
normalized = int(str(value).strip()) if isinstance(value, str) else int(value)
except (TypeError, ValueError) as exc:
raise InvalidInvocationOverridesError(
f"Invocation override '{key}' must be a positive integer"
) from exc
if normalized <= 0:
raise InvalidInvocationOverridesError(f"Invocation override '{key}' must be greater than zero")
return normalized
def _normalize_string_override(key: str, value: Any) -> str:
if not isinstance(value, str):
raise InvalidInvocationOverridesError(f"Invocation override '{key}' must be a string")
normalized = value.strip()
if not normalized:
raise InvalidInvocationOverridesError(f"Invocation override '{key}' must not be blank")
return normalized
def _normalize_noise_threshold_override(value: Any) -> str:
if isinstance(value, bool):
raise InvalidInvocationOverridesError("Invocation override 'noise_threshold' must be a string or number")
if isinstance(value, (int, float)):
return str(value)
if isinstance(value, str) and value.strip():
return value.strip()
raise InvalidInvocationOverridesError("Invocation override 'noise_threshold' must be a string or number")
def _normalize_gpu_toggle_override(value: Any) -> str:
if isinstance(value, bool):
return "1" if value else "0"
if isinstance(value, int) and value in {0, 1}:
return str(value)
if isinstance(value, str):
normalized = value.strip().lower()
if normalized in {"1", "true", "enabled", "yes"}:
return "1"
if normalized in {"0", "false", "disabled", "no"}:
return "0"
raise InvalidInvocationOverridesError(
"Invocation override 'denoising_use_gpu' must be one of: 1, 0, true, false"
)
def _normalize_invocation_override_value(key: str, value: Any) -> int | str:
    """Dispatch to the per-key normalizer for an invocation override value."""
    if key in {"width", "height", "samples", "frame_count", "fps"}:
        return _normalize_positive_int_override(key, value)
    if key == "noise_threshold":
        return _normalize_noise_threshold_override(value)
    if key == "denoising_use_gpu":
        return _normalize_gpu_toggle_override(value)
    if key == "turntable_axis":
        axis = _normalize_string_override(key, value).lower()
        if axis in _TURNABLE_AXES:
            return axis
        raise InvalidInvocationOverridesError(
            "Invocation override 'turntable_axis' must be one of: world_x, world_y, world_z"
        )
    # Everything else is a free-form (non-blank) string override.
    return _normalize_string_override(key, value)
def validate_and_normalize_invocation_overrides(
raw: Mapping[str, Any] | None,
*,
artifact_kind: str | None = None,
is_animation: bool = False,
reject_unknown_keys: bool = False,
) -> dict[str, Any]:
if raw is None:
return {}
if not isinstance(raw, Mapping):
raise InvalidInvocationOverridesError("invocation_overrides must be an object")
normalized: dict[str, Any] = {}
unknown_keys: list[str] = []
for key, value in raw.items():
key_name = str(key)
if key_name not in INVOCATION_OVERRIDE_KEYS:
if reject_unknown_keys:
unknown_keys.append(key_name)
continue
if value in (None, ""):
continue
normalized[key_name] = _normalize_invocation_override_value(key_name, value)
if unknown_keys:
supported = ", ".join(INVOCATION_OVERRIDE_KEYS)
raise InvalidInvocationOverridesError(
f"Unsupported invocation override keys: {', '.join(sorted(unknown_keys))}. Supported: {supported}"
)
if artifact_kind is not None:
allowed_keys = set(
list_allowed_invocation_override_keys_for_artifact_kind(
artifact_kind,
is_animation=is_animation,
)
)
disallowed = sorted(key for key in normalized if key not in allowed_keys)
if disallowed:
raise InvalidInvocationOverridesError(
f"Invocation overrides not allowed for artifact kind '{artifact_kind}': {', '.join(disallowed)}"
)
return normalized
def resolve_output_type_invocation_overrides(
    render_settings: Mapping[str, Any] | None,
    invocation_overrides: Mapping[str, Any] | None,
    *,
    artifact_kind: str,
    is_animation: bool = False,
) -> dict[str, Any]:
    """Merge stored render settings with explicit overrides, then keep only
    the keys permitted for *artifact_kind* (and animation mode)."""
    allowed = frozenset(
        list_allowed_invocation_override_keys_for_artifact_kind(
            artifact_kind,
            is_animation=is_animation,
        )
    )
    merged = merge_output_type_invocation_overrides(render_settings, invocation_overrides)
    return {name: value for name, value in merged.items() if name in allowed}
def build_output_type_invocation_profile(
    *,
    renderer: str,
    render_backend: str,
    workflow_family: str,
    artifact_kind: str,
    output_format: str | None,
    is_animation: bool,
    workflow_definition_id: Any = None,
    workflow_rollout_mode: str = "legacy_only",
    transparent_bg: bool = False,
    cycles_device: str | None = None,
    material_override: str | None = None,
    render_settings: Mapping[str, Any] | None = None,
    invocation_overrides: Mapping[str, Any] | None = None,
) -> dict[str, Any]:
    """Assemble the serializable invocation profile for an output type.

    When *artifact_kind* is falsy it is inferred from the output format,
    animation flag, and workflow family.
    """
    kind = artifact_kind or infer_output_type_artifact_kind(
        output_format,
        is_animation,
        workflow_family,
    )
    overrides = resolve_output_type_invocation_overrides(
        render_settings,
        invocation_overrides,
        artifact_kind=kind,
        is_animation=is_animation,
    )
    allowed_keys = list(
        list_allowed_invocation_override_keys_for_artifact_kind(
            kind,
            is_animation=is_animation,
        )
    )
    profile: dict[str, Any] = {
        "renderer": renderer,
        "render_backend": render_backend,
        "workflow_family": workflow_family,
        "artifact_kind": kind,
        # Output format is normalized to a lowercase, trimmed string.
        "output_format": (output_format or "").strip().lower(),
        "is_animation": bool(is_animation),
        "workflow_definition_id": workflow_definition_id,
        "workflow_rollout_mode": workflow_rollout_mode,
        "transparent_bg": bool(transparent_bg),
        "cycles_device": cycles_device,
        "material_override": material_override,
        "allowed_override_keys": allowed_keys,
        "invocation_overrides": overrides,
    }
    return profile
def normalize_invocation_overrides(raw: Mapping[str, Any] | None) -> dict[str, Any]:
    """Best-effort normalization of invocation overrides.

    Unlike validate_and_normalize_invocation_overrides, this never raises:
    non-mapping input yields {}, unknown keys are ignored, and values that
    fail normalization are dropped.

    Fix: the previous version stored the raw value first
    (``normalized[key] = value``) and only then attempted normalization, so
    when ``_normalize_invocation_override_value`` raised, the unvalidated raw
    value leaked into the result. Invalid values are now skipped entirely.
    """
    if not isinstance(raw, Mapping):
        return {}
    normalized: dict[str, Any] = {}
    for key in INVOCATION_OVERRIDE_KEYS:
        value = raw.get(key)
        if value in (None, ""):
            continue
        try:
            normalized[key] = _normalize_invocation_override_value(key, value)
        except InvalidInvocationOverridesError:
            # Best-effort: drop values that cannot be normalized.
            continue
    return normalized
+143 -3
View File
@@ -1,22 +1,27 @@
import uuid
from datetime import datetime
from pydantic import BaseModel
from pydantic import BaseModel, Field
class OutputTypeCreate(BaseModel):
name: str
description: str | None = None
renderer: str = "threejs"
renderer: str = "blender"
render_settings: dict = {}
output_format: str = "png"
sort_order: int = 0
is_active: bool = True
compatible_categories: list[str] = []
render_backend: str = "auto"
render_backend: str = "celery"
is_animation: bool = False
transparent_bg: bool = False
pricing_tier_id: int | None = None
cycles_device: str | None = None
workflow_family: str = "order_line"
artifact_kind: str | None = None
invocation_overrides: dict = {}
workflow_definition_id: uuid.UUID | None = None
workflow_rollout_mode: str = "legacy_only"
material_override: str | None = None
@@ -32,12 +37,43 @@ class OutputTypePatch(BaseModel):
render_backend: str | None = None
is_animation: bool | None = None
transparent_bg: bool | None = None
workflow_family: str | None = None
artifact_kind: str | None = None
invocation_overrides: dict | None = None
pricing_tier_id: int | None = None
cycles_device: str | None = None
workflow_definition_id: uuid.UUID | None = None
workflow_rollout_mode: str | None = None
material_override: str | None = None
# Resolved invocation profile returned for an output type: renderer/backend
# identity, artifact classification, and the effective invocation overrides.
class OutputTypeInvocationProfileOut(BaseModel):
    renderer: str
    render_backend: str
    workflow_family: str
    artifact_kind: str
    output_format: str
    is_animation: bool
    workflow_definition_id: uuid.UUID | None = None
    workflow_rollout_mode: str = "legacy_only"
    transparent_bg: bool
    cycles_device: str | None = None
    material_override: str | None = None
    # Keys permitted as overrides for this artifact kind.
    allowed_override_keys: list[str] = Field(default_factory=list)
    # Effective override values (subset of allowed_override_keys).
    invocation_overrides: dict = Field(default_factory=dict)
# Catalog of the valid workflow families, rollout modes, artifact kinds and
# the per-family / per-kind allow-lists that constrain output-type editing.
class OutputTypeContractCatalogOut(BaseModel):
    workflow_families: list[str] = Field(default_factory=list)
    workflow_rollout_modes: list[str] = Field(default_factory=list)
    artifact_kinds: list[str] = Field(default_factory=list)
    allowed_artifact_kinds_by_family: dict[str, list[str]] = Field(default_factory=dict)
    allowed_output_formats_by_family: dict[str, list[str]] = Field(default_factory=dict)
    allowed_invocation_override_keys_by_artifact_kind: dict[str, list[str]] = Field(default_factory=dict)
    default_output_format_by_artifact_kind: dict[str, str] = Field(default_factory=dict)
    # NOTE(review): value shape is heterogeneous (dict or list[str]) — confirm
    # the intended schema with the producer of this payload.
    parameter_ownership: dict[str, dict | list[str]] = Field(default_factory=dict)
class OutputTypeOut(BaseModel):
id: uuid.UUID
name: str
@@ -50,13 +86,18 @@ class OutputTypeOut(BaseModel):
render_backend: str
is_animation: bool
transparent_bg: bool
workflow_family: str
artifact_kind: str
invocation_overrides: dict
cycles_device: str | None = None
pricing_tier_id: int | None = None
pricing_tier_name: str | None = None
price_per_item: float | None = None
workflow_definition_id: uuid.UUID | None = None
workflow_rollout_mode: str
workflow_name: str | None = None
material_override: str | None = None
invocation_profile: OutputTypeInvocationProfileOut | None = None
is_active: bool
created_at: datetime
updated_at: datetime
@@ -159,11 +200,28 @@ class WorkflowDefinitionOut(BaseModel):
name: str
output_type_id: uuid.UUID | None
config: dict
family: str | None = None
supported_artifact_kinds: list[str] = Field(default_factory=list)
rollout_summary: "WorkflowRolloutSummaryOut" = Field(
default_factory=lambda: WorkflowRolloutSummaryOut()
)
is_active: bool
created_at: datetime
model_config = {"from_attributes": True}
# Request payload for preflighting a draft workflow config against a context.
class WorkflowDraftPreflightRequest(BaseModel):
    context_id: str
    config: dict
    # Optional: existing workflow the draft belongs to.
    workflow_id: uuid.UUID | None = None
# Request payload for dispatching a draft workflow config against a context.
class WorkflowDraftDispatchRequest(BaseModel):
    context_id: str
    config: dict
    # Optional: existing workflow the draft belongs to.
    workflow_id: uuid.UUID | None = None
class WorkflowNodeResultOut(BaseModel):
id: uuid.UUID
node_name: str
@@ -190,6 +248,38 @@ class WorkflowRunOut(BaseModel):
model_config = {"from_attributes": True}
# Summary of the most recent run considered for rollout reporting.
class WorkflowRolloutLatestRunOut(BaseModel):
    workflow_run_id: uuid.UUID
    execution_mode: str
    status: str
    created_at: datetime
    completed_at: datetime | None = None
# Output type linked to a workflow, as surfaced in rollout summaries.
class WorkflowRolloutLinkedOutputTypeOut(BaseModel):
    id: uuid.UUID
    name: str
    is_active: bool
    artifact_kind: str
    workflow_rollout_mode: str
# Aggregated rollout state for a workflow definition: linked output types,
# blocking contracts, and the latest run / gate verdicts.
class WorkflowRolloutSummaryOut(BaseModel):
    linked_output_type_count: int = 0
    active_output_type_count: int = 0
    linked_output_type_names: list[str] = Field(default_factory=list)
    linked_output_types: list[WorkflowRolloutLinkedOutputTypeOut] = Field(default_factory=list)
    rollout_modes: list[str] = Field(default_factory=list)
    # True when contract violations prevent promoting this workflow.
    has_blocking_contracts: bool = False
    blocking_reasons: list[str] = Field(default_factory=list)
    latest_run: WorkflowRolloutLatestRunOut | None = None
    latest_shadow_run: WorkflowRolloutLatestRunOut | None = None
    # Most recent rollout-gate evaluation, if any.
    latest_rollout_gate_verdict: str | None = None
    latest_rollout_ready: bool | None = None
    latest_rollout_status: str | None = None
    latest_rollout_reasons: list[str] = Field(default_factory=list)
class WorkflowComparisonArtifactOut(BaseModel):
path: str | None
storage_key: str | None
@@ -208,8 +298,58 @@ class WorkflowRunComparisonOut(BaseModel):
execution_mode: str
status: str
summary: str
rollout_gate_verdict: str
workflow_rollout_ready: bool
workflow_rollout_status: str
rollout_reasons: list[str] = []
rollout_thresholds: dict[str, float] = Field(default_factory=dict)
authoritative_output: WorkflowComparisonArtifactOut
observer_output: WorkflowComparisonArtifactOut
exact_match: bool | None
dimensions_match: bool | None
mean_pixel_delta: float | None
# Single issue found during workflow preflight (per-node or global).
class WorkflowPreflightIssueOut(BaseModel):
    severity: str
    code: str
    message: str
    # Set when the issue is tied to a specific graph node / step.
    node_id: str | None = None
    step: str | None = None
# Per-node preflight report: execution kind, support status, and issues.
class WorkflowPreflightNodeOut(BaseModel):
    node_id: str
    step: str
    label: str | None = None
    execution_kind: str
    supported: bool
    status: str
    issues: list[WorkflowPreflightIssueOut] = []
# Full preflight result for a workflow draft: resolved context, dispatch
# eligibility, and the per-node breakdown.
class WorkflowPreflightOut(BaseModel):
    workflow_id: uuid.UUID | None = None
    context_id: str
    context_kind: str | None = None
    expected_context_kind: str
    execution_mode: str
    # Whether the graph may be dispatched as-is.
    graph_dispatch_allowed: bool
    summary: str
    # IDs resolved from context_id, when the context maps to these entities.
    resolved_order_line_id: uuid.UUID | None = None
    resolved_cad_file_id: uuid.UUID | None = None
    unsupported_node_ids: list[str] = []
    issues: list[WorkflowPreflightIssueOut] = []
    nodes: list[WorkflowPreflightNodeOut] = []
# Selectable order-line context option (value + display strings).
class WorkflowOrderLineContextOptionOut(BaseModel):
    value: uuid.UUID
    label: str
    meta: str
# Order-line context options grouped under their parent order.
class WorkflowOrderLineContextGroupOut(BaseModel):
    order_id: uuid.UUID
    order_label: str
    options: list[WorkflowOrderLineContextOptionOut] = []
File diff suppressed because it is too large Load Diff
@@ -0,0 +1,146 @@
from __future__ import annotations
import json
import re
from collections import defaultdict
from typing import Any, Iterable, Mapping
# Custom-property names checked (in order) for a combined marker value —
# either a JSON object or a "key=value" string (see _parse_marker_text).
_MARKER_PROP_NAMES = (
    "hartomat_template_input",
    "hartomat.template_input",
    "template_input",
    "schaeffler_template_input",
)
# Property names holding only the marker key (paired with a value property).
_MARKER_KEY_PROP_NAMES = (
    "hartomat_template_input_key",
    "hartomat.template_input_key",
    "template_input_key",
    "schaeffler_template_input_key",
)
# Property names holding only the marker value (paired with a key property).
_MARKER_VALUE_PROP_NAMES = (
    "hartomat_template_input_value",
    "hartomat.template_input_value",
    "template_input_value",
    "schaeffler_template_input_value",
)
# Fallback: patterns that extract key/value markers embedded in object names,
# e.g. "template_input__color__red", "template-input:color=red", "ti::a::b".
_NAME_PATTERNS = (
    re.compile(r"template_input__(?P<key>[^_]+)__(?P<value>[^_]+)", re.IGNORECASE),
    re.compile(r"template-input:(?P<key>[^=]+)=(?P<value>.+)", re.IGNORECASE),
    re.compile(r"ti::(?P<key>[^:]+)::(?P<value>.+)", re.IGNORECASE),
)
def _normalize_marker_token(value: Any) -> str | None:
if value is None:
return None
if isinstance(value, bool):
return "true" if value else "false"
text = str(value).strip()
return text or None
def _parse_marker_text(text: str) -> tuple[str, str] | None:
    """Parse a marker string into a ``(key, value)`` pair.

    Accepts either a JSON object with "key"/"value" entries or a plain
    "key=value" string; returns None when neither form yields both parts.
    """
    cleaned = text.strip()
    if not cleaned:
        return None

    candidates: list[tuple[Any, Any]] = []
    if cleaned.startswith("{"):
        try:
            payload = json.loads(cleaned)
        except Exception:
            payload = None
        if isinstance(payload, dict):
            candidates.append((payload.get("key"), payload.get("value")))
    if "=" in cleaned:
        # Split on the first "=" only, so values may contain "=".
        candidates.append(tuple(cleaned.split("=", 1)))

    for raw_key, raw_value in candidates:
        key = _normalize_marker_token(raw_key)
        value = _normalize_marker_token(raw_value)
        if key and value:
            return key, value
    return None
def extract_template_input_marker(
    *,
    name: str | None = None,
    props: Mapping[str, Any] | None = None,
) -> tuple[str, str] | None:
    """Extract a ``(key, value)`` template-input marker from an object.

    Resolution order: combined marker property, separate key/value
    properties, then patterns embedded in the object name. Returns None
    when no source produces both a key and a value.
    """
    raw_props = props or {}

    # 1) Combined marker in a single property (JSON object or "key=value").
    for prop_name in _MARKER_PROP_NAMES:
        token = _normalize_marker_token(raw_props.get(prop_name))
        if not token:
            continue
        parsed = _parse_marker_text(token)
        if parsed is not None:
            return parsed

    # 2) Separate key / value properties: first non-empty token of each wins.
    key = next(
        (
            token
            for token in (
                _normalize_marker_token(raw_props.get(prop_name))
                for prop_name in _MARKER_KEY_PROP_NAMES
            )
            if token
        ),
        None,
    )
    value = next(
        (
            token
            for token in (
                _normalize_marker_token(raw_props.get(prop_name))
                for prop_name in _MARKER_VALUE_PROP_NAMES
            )
            if token
        ),
        None,
    )
    if key and value:
        return key, value

    # 3) Marker encoded in the object name.
    candidate_name = (name or "").strip()
    if candidate_name:
        for pattern in _NAME_PATTERNS:
            match = pattern.search(candidate_name)
            if match is None:
                continue
            marker_key = _normalize_marker_token(match.group("key"))
            marker_value = _normalize_marker_token(match.group("value"))
            if marker_key and marker_value:
                return marker_key, marker_value
    return None
def suggest_workflow_input_schema(
    markers: Iterable[tuple[str, str]],
) -> list[dict[str, Any]]:
    """Derive a workflow input schema from collected ``(key, value)`` markers.

    Groups values per normalized key; a key whose values are exactly
    {"true", "false"} becomes a boolean field, every other key becomes a
    select field with one option per distinct value. Fields are emitted in
    sorted key order.
    """
    values_by_key: dict[str, set[str]] = defaultdict(set)
    for raw_key, raw_value in markers:
        key = _normalize_marker_token(raw_key)
        value = _normalize_marker_token(raw_value)
        if key and value:
            values_by_key[key].add(value)

    schema: list[dict[str, Any]] = []
    for key in sorted(values_by_key):
        options = sorted(values_by_key[key])
        if not options:
            continue
        label = key.replace("_", " ").strip().title()
        if len(options) == 2 and set(options) == {"false", "true"}:
            schema.append(
                {
                    "key": key,
                    "label": label,
                    "type": "boolean",
                    "section": "Template Inputs",
                    # Sorted order puts "false" first, so this default is
                    # False whenever both literals were observed.
                    "default": options[0] == "true",
                }
            )
        else:
            schema.append(
                {
                    "key": key,
                    "label": label,
                    "type": "select",
                    "section": "Template Inputs",
                    "default": options[0],
                    "options": [
                        {"value": option, "label": option.replace("_", " ").title()}
                        for option in options
                    ],
                }
            )
    return schema
@@ -18,6 +18,7 @@ def dispatch_workflow(
params = params or {}
builders = {
"still": _build_still,
"still_graph": _build_still,
"turntable": _build_turntable,
"multi_angle": _build_multi_angle,
"still_with_exports": _build_still_with_exports,
@@ -17,7 +17,7 @@ from app.domains.orders.models import OrderLine
from app.domains.rendering.models import WorkflowRun
from app.domains.rendering.schemas import WorkflowComparisonArtifactOut, WorkflowRunComparisonOut
ROLLOUT_PASS_MAX_MEAN_PIXEL_DELTA = 0.0
ROLLOUT_PASS_MAX_MEAN_PIXEL_DELTA = 1e-6
ROLLOUT_WARN_MAX_MEAN_PIXEL_DELTA = 0.02
@@ -217,6 +217,7 @@ def _find_shadow_file(order_line: OrderLine, workflow_run: WorkflowRun) -> str |
upload_root = Path(settings.upload_dir)
candidate_roots.append(upload_root / "renders" / str(order_line.id))
candidate_roots.append(upload_root / "step_files" / "renders" / str(order_line.id))
candidate_roots.append(upload_root / "step_files" / "renders")
seen_roots: set[Path] = set()
@@ -258,6 +259,13 @@ async def build_workflow_run_comparison(
authoritative_output = _build_artifact(authoritative_path)
observer_output = _build_artifact(observer_path)
rollout_gate = evaluate_rollout_gate(
authoritative_output=authoritative_output,
observer_output=observer_output,
exact_match=None,
dimensions_match=None,
mean_pixel_delta=None,
)
if not authoritative_output.exists:
status = "missing_authoritative"
@@ -283,9 +291,9 @@ async def build_workflow_run_comparison(
if exact_match:
status = "matched"
summary = "Observer output matches the authoritative legacy output byte-for-byte."
elif mean_pixel_delta == 0.0 and dimensions_match:
elif mean_pixel_delta is not None and mean_pixel_delta <= ROLLOUT_PASS_MAX_MEAN_PIXEL_DELTA and dimensions_match:
status = "matched"
summary = "Observer output matches the authoritative legacy output visually, but file metadata differs."
summary = "Observer output matches the authoritative legacy output within the visual pass threshold."
else:
status = "different"
if dimensions_match is False:
@@ -294,6 +302,13 @@ async def build_workflow_run_comparison(
summary = "Observer output differs from the authoritative output."
else:
summary = "Observer output differs from the authoritative output and could not be pixel-compared."
rollout_gate = evaluate_rollout_gate(
authoritative_output=authoritative_output,
observer_output=observer_output,
exact_match=exact_match,
dimensions_match=dimensions_match,
mean_pixel_delta=mean_pixel_delta,
)
return WorkflowRunComparisonOut(
workflow_run_id=workflow_run.id,
@@ -302,6 +317,14 @@ async def build_workflow_run_comparison(
execution_mode=workflow_run.execution_mode,
status=status,
summary=summary,
rollout_gate_verdict=str(rollout_gate["verdict"]),
workflow_rollout_ready=bool(rollout_gate["workflow_rollout_ready"]),
workflow_rollout_status=str(rollout_gate["workflow_rollout_status"]),
rollout_reasons=[str(reason) for reason in rollout_gate["reasons"]],
rollout_thresholds={
str(key): float(value)
for key, value in dict(rollout_gate["thresholds"]).items()
},
authoritative_output=authoritative_output.to_schema(),
observer_output=observer_output.to_schema(),
exact_match=exact_match,
@@ -21,6 +21,10 @@ _PRESET_TYPES = {
_EXECUTION_MODES = {"legacy", "graph", "shadow"}
_WORKFLOW_BLUEPRINTS = {"cad_intake", "order_rendering", "still_graph_reference"}
_WORKFLOW_STARTERS = {"cad_file", "order_line"}
_WORKFLOW_STARTER_BLUEPRINTS = {
"starter_cad_intake": "cad_file",
"starter_order_rendering": "order_line",
}
_NODE_TYPE_TO_STEP: dict[str, str] = {
"inputNode": StepName.RESOLVE_STEP_PATH.value,
@@ -72,7 +76,7 @@ def _extract_render_params_from_nodes(nodes: list[dict[str, Any]], step: StepNam
def _build_order_line_still_graph_nodes(render_params: dict[str, Any]) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]:
graph_render_params = deepcopy(render_params)
graph_render_params.setdefault("use_custom_render_settings", True)
graph_render_params.setdefault("use_custom_render_settings", False)
nodes = [
_make_node("setup", StepName.ORDER_LINE_SETUP, 0, 160, label="Order Line Setup"),
@@ -222,6 +226,7 @@ def build_preset_workflow_config(
"ui": {
"preset": preset_type,
"execution_mode": "graph" if preset_type == "still_graph" else "legacy",
"family": "order_line",
},
}
@@ -235,6 +240,7 @@ def build_workflow_blueprint_config(blueprint: str) -> dict[str, Any]:
_make_node("resolve_step", StepName.RESOLVE_STEP_PATH, 0, 180, label="Resolve STEP Path"),
_make_node("extract_objects", StepName.OCC_OBJECT_EXTRACT, 220, 180, label="Extract STEP Objects"),
_make_node("export_glb", StepName.OCC_GLB_EXPORT, 440, 180, label="Export GLB"),
_make_node("bbox", StepName.GLB_BBOX, 660, 120, label="Compute Bounding Box"),
_make_node("stl_cache", StepName.STL_CACHE_GENERATE, 660, 300, label="Generate STL Cache"),
_make_node(
"blender_thumb",
@@ -260,9 +266,11 @@ def build_workflow_blueprint_config(blueprint: str) -> dict[str, Any]:
edges = [
{"from": "resolve_step", "to": "extract_objects"},
{"from": "extract_objects", "to": "export_glb"},
{"from": "export_glb", "to": "bbox"},
{"from": "export_glb", "to": "stl_cache"},
{"from": "export_glb", "to": "blender_thumb"},
{"from": "export_glb", "to": "threejs_thumb"},
{"from": "bbox", "to": "threejs_thumb"},
{"from": "blender_thumb", "to": "save_blender_thumb"},
{"from": "threejs_thumb", "to": "save_threejs_thumb"},
]
@@ -329,6 +337,7 @@ def build_workflow_blueprint_config(blueprint: str) -> dict[str, Any]:
"ui": {
"preset": "custom",
"execution_mode": "graph" if blueprint == "still_graph_reference" else "legacy",
"family": "cad_file" if blueprint == "cad_intake" else "order_line",
"blueprint": blueprint,
},
}
@@ -356,6 +365,7 @@ def build_starter_workflow_config(family: str = "order_line") -> dict[str, Any]:
"ui": {
"preset": "custom",
"execution_mode": "legacy",
"family": family,
"blueprint": blueprint,
},
}
@@ -385,6 +395,7 @@ def _build_legacy_custom_render_fallback_config(params: dict[str, Any] | None =
"ui": {
"preset": "custom",
"execution_mode": "legacy",
"family": "order_line",
"blueprint": "starter_order_rendering",
},
}
@@ -480,9 +491,16 @@ def canonicalize_workflow_config(raw: dict[str, Any]) -> dict[str, Any]:
canonical["ui"].update(merged_ui)
return canonical
if blueprint == "still_graph_reference":
if blueprint in _WORKFLOW_BLUEPRINTS:
merged_ui = dict(normalized["ui"])
canonical = build_workflow_blueprint_config("still_graph_reference")
canonical = build_workflow_blueprint_config(blueprint)
merged_ui.setdefault("execution_mode", canonical["ui"]["execution_mode"])
canonical["ui"].update(merged_ui)
return canonical
if blueprint in _WORKFLOW_STARTER_BLUEPRINTS:
merged_ui = dict(normalized["ui"])
canonical = build_starter_workflow_config(_WORKFLOW_STARTER_BLUEPRINTS[blueprint])
merged_ui.setdefault("execution_mode", canonical["ui"]["execution_mode"])
canonical["ui"].update(merged_ui)
return canonical
@@ -25,7 +25,7 @@ from collections import deque
from dataclasses import dataclass, field
from typing import Literal
from app.domains.rendering.workflow_schema import WorkflowConfig, WorkflowNode
from app.domains.rendering.workflow_schema import WorkflowConfig, WorkflowEdge, WorkflowNode
from app.core.process_steps import StepName
logger = logging.getLogger(__name__)
@@ -40,6 +40,17 @@ class WorkflowContext:
execution_mode: WorkflowExecutionMode
workflow_run_id: uuid.UUID | None = None
ordered_nodes: list[WorkflowNode] = field(default_factory=list)
edges: list[WorkflowEdge] = field(default_factory=list)
@dataclass(slots=True)
class WorkflowTaskDispatchSpec:
node_id: str
task_name: str
args: list[str]
kwargs: dict
task_id: str
queue: str | None = None
@dataclass(slots=True)
@@ -48,6 +59,38 @@ class WorkflowDispatchResult:
task_ids: list[str]
node_task_ids: dict[str, str]
skipped_node_ids: list[str]
task_specs: list[WorkflowTaskDispatchSpec] = field(default_factory=list)
class WorkflowTaskSubmissionError(RuntimeError):
def __init__(self, message: str, *, submitted_task_ids: list[str] | None = None) -> None:
super().__init__(message)
self.submitted_task_ids = list(submitted_task_ids or [])
def submit_prepared_workflow_tasks(dispatch_result: WorkflowDispatchResult) -> None:
"""Submit pre-built Celery tasks after DB state has been committed."""
from app.tasks.celery_app import celery_app
submitted_task_ids: list[str] = []
for spec in dispatch_result.task_specs:
task_options: dict[str, str] = {"task_id": spec.task_id}
if spec.queue:
task_options["queue"] = spec.queue
try:
celery_app.send_task(
spec.task_name,
args=spec.args,
kwargs=spec.kwargs,
**task_options,
)
except Exception as exc:
raise WorkflowTaskSubmissionError(
f"Failed to submit workflow task for node '{spec.node_id}': {exc}",
submitted_task_ids=submitted_task_ids,
) from exc
submitted_task_ids.append(spec.task_id)
# ---------------------------------------------------------------------------
@@ -65,7 +108,7 @@ STEP_TASK_MAP: dict[StepName, str] = {
StepName.STL_CACHE_GENERATE: "app.tasks.step_tasks.process_step_file",
# ── Thumbnail generation ─────────────────────────────────────────────
StepName.BLENDER_RENDER: "app.tasks.step_tasks.render_step_thumbnail",
StepName.THUMBNAIL_SAVE: "app.tasks.step_tasks.render_step_thumbnail",
StepName.THUMBNAIL_SAVE: "app.tasks.step_tasks.render_graph_thumbnail",
# ── Order line stills & turntables ──────────────────────────────────
StepName.BLENDER_STILL: "app.domains.rendering.tasks.render_order_line_still_task",
StepName.BLENDER_TURNTABLE: "app.domains.rendering.tasks.render_turntable_task",
@@ -98,6 +141,7 @@ def prepare_workflow_context(
execution_mode=execution_mode,
workflow_run_id=workflow_run_id,
ordered_nodes=ordered_nodes,
edges=list(config.edges),
)
@@ -12,12 +12,19 @@ from sqlalchemy import select
from sqlalchemy.orm import Session, selectinload
from app.config import settings
from app.core.render_paths import build_order_line_export_path, build_order_line_step_render_path
from app.core.process_steps import StepName
from app.domains.products.models import CadFile
from app.domains.rendering.models import WorkflowNodeResult, WorkflowRun
from app.domains.rendering.workflow_executor import STEP_TASK_MAP, WorkflowContext, WorkflowDispatchResult
from app.domains.rendering.workflow_executor import (
STEP_TASK_MAP,
WorkflowContext,
WorkflowDispatchResult,
WorkflowTaskDispatchSpec,
)
from app.domains.rendering.workflow_node_registry import get_node_definition
from app.domains.rendering.workflow_runtime_services import (
_resolve_render_output_extension,
AutoPopulateMaterialsResult,
BBoxResolutionResult,
MaterialResolutionResult,
@@ -25,6 +32,7 @@ from app.domains.rendering.workflow_runtime_services import (
TemplateResolutionResult,
auto_populate_materials_for_cad,
build_order_line_render_invocation,
extract_template_input_overrides,
prepare_order_line_render_context,
resolve_cad_bbox,
resolve_order_line_material_map,
@@ -89,11 +97,13 @@ _STILL_TASK_KEYS = {
"material_override",
"render_engine",
"resolution",
"template_inputs",
}
_TURNTABLE_TASK_KEYS = {
"output_name",
"engine",
"render_engine",
"samples",
"smooth_angle",
"cycles_device",
@@ -119,6 +129,8 @@ _TURNTABLE_TASK_KEYS = {
"focal_length_mm",
"sensor_width_mm",
"material_override",
"template_inputs",
"duration_s",
}
_THUMBNAIL_TASK_KEYS = {
@@ -144,13 +156,62 @@ _AUTHORITATIVE_RENDER_SETTING_KEYS = {
"denoising_prefilter",
"denoising_quality",
"denoising_use_gpu",
"camera_orbit",
"focal_length_mm",
"sensor_width_mm",
"bg_color",
}
def _inspect_active_worker_queues(timeout: float = 1.0) -> set[str]:
from app.tasks.celery_app import celery_app
try:
inspect_result = celery_app.control.inspect(timeout=timeout)
active_queues = inspect_result.active_queues() or {}
except Exception as exc:
logger.info("[WORKFLOW] Could not inspect active Celery queues: %s", exc)
return set()
queue_names: set[str] = set()
for queues in active_queues.values():
for queue in queues or []:
if not isinstance(queue, dict):
continue
name = queue.get("name")
if isinstance(name, str) and name.strip():
queue_names.add(name.strip())
return queue_names
def _resolve_shadow_render_queue(
*,
workflow_context: WorkflowContext,
node,
active_queue_names: set[str],
) -> str | None:
if workflow_context.execution_mode != "shadow":
return None
if node.step not in {
StepName.BLENDER_STILL,
StepName.BLENDER_TURNTABLE,
StepName.EXPORT_BLEND,
}:
return None
preferred_queue = (settings.workflow_shadow_render_queue or "").strip()
if not preferred_queue or preferred_queue == "asset_pipeline":
return None
if preferred_queue in active_queue_names:
return preferred_queue
logger.info(
"[WORKFLOW] Preferred shadow render queue %s unavailable for node %s; using default routing",
preferred_queue,
node.id,
)
return None
def _filter_graph_render_overrides(step: StepName, params: dict[str, Any]) -> dict[str, Any]:
normalized = dict(params)
use_custom_render_settings = bool(normalized.pop("use_custom_render_settings", False))
@@ -186,6 +247,8 @@ def find_unsupported_graph_nodes(workflow_context: WorkflowContext) -> list[str]
def execute_graph_workflow(
session: Session,
workflow_context: WorkflowContext,
*,
dispatch_tasks: bool = True,
) -> WorkflowDispatchResult:
if workflow_context.workflow_run_id is None:
raise ValueError("workflow_context.workflow_run_id is required for graph execution")
@@ -201,6 +264,12 @@ def execute_graph_workflow(
task_ids: list[str] = []
node_task_ids: dict[str, str] = {}
skipped_node_ids: list[str] = []
task_specs: list[WorkflowTaskDispatchSpec] = []
active_queue_names = (
_inspect_active_worker_queues()
if workflow_context.execution_mode == "shadow"
else set()
)
for node in workflow_context.ordered_nodes:
node_result = node_results.get(node.id)
@@ -326,8 +395,6 @@ def execute_graph_workflow(
skipped_node_ids.append(node.id)
continue
from app.tasks.celery_app import celery_app
task_kwargs = _build_task_kwargs(
session=session,
workflow_context=workflow_context,
@@ -335,12 +402,42 @@ def execute_graph_workflow(
node=node,
)
result = celery_app.send_task(
task_name,
args=[workflow_context.context_id],
kwargs=task_kwargs,
target_queue = _resolve_shadow_render_queue(
workflow_context=workflow_context,
node=node,
active_queue_names=active_queue_names,
)
metadata["task_id"] = result.id
if dispatch_tasks:
from app.tasks.celery_app import celery_app
if target_queue:
result = celery_app.send_task(
task_name,
args=[workflow_context.context_id],
kwargs=task_kwargs,
queue=target_queue,
)
else:
result = celery_app.send_task(
task_name,
args=[workflow_context.context_id],
kwargs=task_kwargs,
)
task_id = result.id
else:
task_id = str(uuid.uuid4())
task_specs.append(
WorkflowTaskDispatchSpec(
node_id=node.id,
task_name=task_name,
args=[workflow_context.context_id],
kwargs=dict(task_kwargs),
task_id=task_id,
queue=target_queue,
)
)
metadata["task_id"] = task_id
metadata["task_queue"] = target_queue or "asset_pipeline"
if definition is not None:
metadata["execution_kind"] = definition.execution_kind
metadata["attempt_count"] = 1
@@ -360,15 +457,15 @@ def execute_graph_workflow(
node_result.duration_s = None
state.node_outputs[node.id] = dict(metadata)
session.flush()
task_ids.append(result.id)
node_task_ids[node.id] = result.id
task_ids.append(task_id)
node_task_ids[node.id] = task_id
logger.info(
"[WORKFLOW] Dispatched node %r (step=%s, mode=%s, run=%s) -> Celery task %s",
node.id,
node.step,
workflow_context.execution_mode,
workflow_context.workflow_run_id,
result.id,
task_id,
)
continue
@@ -397,6 +494,7 @@ def execute_graph_workflow(
task_ids=task_ids,
node_task_ids=node_task_ids,
skipped_node_ids=skipped_node_ids,
task_specs=task_specs,
)
@@ -466,8 +564,15 @@ def _serialize_template_result(result: TemplateResolutionResult) -> dict[str, An
"material_map_count": len(result.material_map or {}),
"use_materials": result.use_materials,
"override_material": result.override_material,
"target_collection": result.target_collection,
"lighting_only": result.lighting_only,
"shadow_catcher": result.shadow_catcher,
"camera_orbit": result.camera_orbit,
"category_key": result.category_key,
"output_type_id": result.output_type_id,
"workflow_input_schema": result.workflow_input_schema,
"template_inputs": result.template_inputs,
"template_input_count": len(result.template_inputs or {}),
}
@@ -597,13 +702,17 @@ def _predict_task_output_metadata(
order_line_id = str(state.setup.order_line.id)
if node.step == StepName.BLENDER_STILL:
output_dir = step_path.parent / "renders"
output_filename = f"line_{order_line_id}.png"
output_extension = _resolve_render_output_extension(state.setup.order_line)
if output_extension not in {"png", "jpg", "webp"}:
output_extension = "png"
output_filename = f"line_{order_line_id}.{output_extension}"
if output_name_suffix:
output_filename = f"line_{order_line_id}_{output_name_suffix}.png"
output_filename = f"line_{order_line_id}_{output_name_suffix}.{output_extension}"
return {
"artifact_role": "render_output",
"predicted_output_path": str(output_dir / output_filename),
"predicted_output_path": str(
build_order_line_step_render_path(step_path, order_line_id, output_filename)
),
"predicted_asset_type": "still",
"publish_asset_enabled": bool(task_kwargs.get("publish_asset_enabled", True)),
"graph_authoritative_output_enabled": bool(
@@ -618,9 +727,10 @@ def _predict_task_output_metadata(
output_filename = f"{step_path.stem}_production.blend"
if output_name_suffix:
output_filename = f"{step_path.stem}_production_{output_name_suffix}.blend"
predicted_output_path = str(build_order_line_export_path(order_line_id, output_filename))
return {
"artifact_role": "blend_export",
"predicted_output_path": str(step_path.parent / output_filename),
"predicted_output_path": predicted_output_path,
"predicted_asset_type": "blend_production",
"publish_asset_enabled": bool(task_kwargs.get("publish_asset_enabled", True)),
"graph_authoritative_output_enabled": bool(
@@ -641,7 +751,9 @@ def _predict_task_output_metadata(
if isinstance(output_dir, str) and output_dir.strip():
predicted_output_path = str(Path(output_dir) / f"{output_name}.mp4")
else:
predicted_output_path = str(step_path.parent / "renders" / f"{output_name}.mp4")
predicted_output_path = str(
build_order_line_step_render_path(step_path, order_line_id, f"{output_name}.mp4")
)
return {
"artifact_role": "turntable_output",
"predicted_output_path": predicted_output_path,
@@ -733,6 +845,30 @@ def _resolve_thumbnail_request(
return None
def _normalize_turntable_task_kwargs(task_kwargs: dict[str, Any]) -> dict[str, Any]:
normalized = dict(task_kwargs)
raw_duration = normalized.get("duration_s")
if raw_duration in (None, ""):
return normalized
try:
duration_s = float(raw_duration)
except (TypeError, ValueError):
return normalized
try:
fps = int(float(normalized.get("fps", 0)))
except (TypeError, ValueError):
return normalized
if duration_s <= 0 or fps <= 0:
return normalized
normalized["duration_s"] = duration_s
normalized["frame_count"] = max(1, int(round(duration_s * fps)))
return normalized
def _build_task_kwargs(
*,
session: Session,
@@ -751,6 +887,7 @@ def _build_task_kwargs(
template_context=state.template,
position_context=resolve_render_position_context(session, state.setup.order_line),
material_context=state.materials,
artifact_kind_override=_artifact_kind_override_for_step(node.step),
)
render_defaults = render_invocation.task_defaults()
@@ -774,6 +911,15 @@ def _build_task_kwargs(
}.items()
if key in _TURNTABLE_TASK_KEYS
}
task_kwargs = _normalize_turntable_task_kwargs(task_kwargs)
if state.setup is not None and state.setup.is_ready and state.setup.cad_file is not None:
task_kwargs["output_dir"] = str(
build_order_line_step_render_path(
state.setup.cad_file.stored_path,
str(state.setup.order_line.id),
"turntable.mp4",
).parent
)
elif node.step == StepName.THUMBNAIL_SAVE:
thumbnail_request = _resolve_thumbnail_request(workflow_context, state, node.id) or {}
task_kwargs = {
@@ -787,7 +933,7 @@ def _build_task_kwargs(
task_kwargs["workflow_run_id"] = str(workflow_context.workflow_run_id)
task_kwargs["workflow_node_id"] = node.id
if workflow_context.execution_mode == "graph" and node.step in {
if workflow_context.execution_mode in {"graph", "shadow"} and node.step in {
StepName.BLENDER_STILL,
StepName.EXPORT_BLEND,
StepName.BLENDER_TURNTABLE,
@@ -798,19 +944,23 @@ def _build_task_kwargs(
step=StepName.OUTPUT_SAVE,
direction="downstream",
)
connected_notify_node_ids = _connected_node_ids_by_step(
workflow_context,
node_id=node.id,
step=StepName.NOTIFY,
direction="downstream",
)
if connected_output_node_ids:
task_kwargs["publish_asset_enabled"] = False
task_kwargs["graph_authoritative_output_enabled"] = True
task_kwargs["graph_output_node_ids"] = connected_output_node_ids
if connected_notify_node_ids:
task_kwargs["emit_legacy_notifications"] = True
task_kwargs["graph_notify_node_ids"] = connected_notify_node_ids
if workflow_context.execution_mode == "graph":
task_kwargs["graph_authoritative_output_enabled"] = True
else:
task_kwargs["observer_output_enabled"] = True
if workflow_context.execution_mode == "graph":
connected_notify_node_ids = _connected_node_ids_by_step(
workflow_context,
node_id=node.id,
step=StepName.NOTIFY,
direction="downstream",
)
if connected_notify_node_ids:
task_kwargs["emit_legacy_notifications"] = True
task_kwargs["graph_notify_node_ids"] = connected_notify_node_ids
if workflow_context.execution_mode == "shadow":
task_kwargs["publish_asset_enabled"] = False
task_kwargs["emit_events"] = False
@@ -819,6 +969,16 @@ def _build_task_kwargs(
return task_kwargs
def _artifact_kind_override_for_step(step: StepName) -> str | None:
    """Return the artifact-kind label forced for render-producing steps.

    Steps without a dedicated artifact kind yield ``None`` so the caller
    falls back to its default classification.
    """
    overrides = {
        StepName.BLENDER_TURNTABLE: "turntable_video",
        StepName.BLENDER_STILL: "still_image",
        StepName.EXPORT_BLEND: "blend_asset",
    }
    return overrides.get(step)
def _execute_order_line_setup(
*,
session: Session,
@@ -857,12 +1017,25 @@ def _execute_resolve_template(
node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
del node
del workflow_context, node_params
del workflow_context
if state.setup is None or not state.setup.is_ready:
if state.setup is not None and state.setup.status == "skip":
return _serialize_setup_result(state.setup), "skipped", state.setup.reason
raise WorkflowGraphRuntimeError("resolve_template requires a ready order_line_setup result")
result = resolve_order_line_template_context(session, state.setup)
result = resolve_order_line_template_context(
session,
state.setup,
template_id_override=node_params.get("template_id_override"),
material_library_path_override=node_params.get("material_library_path"),
require_template=bool(node_params.get("require_template", False)),
disable_materials=bool(node_params.get("disable_materials", False)),
target_collection_override=node_params.get("target_collection"),
material_replace_mode=node_params.get("material_replace_mode"),
lighting_only_mode=node_params.get("lighting_only_mode"),
shadow_catcher_mode=node_params.get("shadow_catcher_mode"),
camera_orbit_mode=node_params.get("camera_orbit_mode"),
template_input_overrides=extract_template_input_overrides(node_params),
)
state.template = result
return _serialize_template_result(result), "completed", None
@@ -876,7 +1049,7 @@ def _execute_material_map_resolve(
node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
del node
del session, workflow_context, node_params
del session, workflow_context
if state.setup is None or not state.setup.is_ready:
if state.setup is not None and state.setup.status == "skip":
return _serialize_setup_result(state.setup), "skipped", state.setup.reason
@@ -895,6 +1068,8 @@ def _execute_material_map_resolve(
state.setup.materials_source,
material_library=material_library,
template=template,
material_override=node_params.get("material_override"),
disable_materials=bool(node_params.get("disable_materials", False)),
)
state.materials = result
return _serialize_material_result(result), "completed", None
@@ -909,26 +1084,45 @@ def _execute_auto_populate_materials(
node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
del node
del node_params
if state.setup is None or state.setup.cad_file is None:
if state.setup is not None and state.setup.status == "skip":
return _serialize_setup_result(state.setup), "skipped", state.setup.reason
raise WorkflowGraphRuntimeError("auto_populate_materials requires a resolved cad_file")
shadow_mode = workflow_context.execution_mode == "shadow"
persist_updates = bool(node_params.get("persist_updates", not shadow_mode))
if shadow_mode:
persist_updates = False
refresh_material_source = bool(node_params.get("refresh_material_source", True))
include_populated_products = bool(node_params.get("include_populated_products", False))
if shadow_mode:
result = auto_populate_materials_for_cad(
session,
str(state.setup.cad_file.id),
persist_updates=False,
include_populated_products=include_populated_products,
)
else:
result = auto_populate_materials_for_cad(session, str(state.setup.cad_file.id))
result = auto_populate_materials_for_cad(
session,
str(state.setup.cad_file.id),
persist_updates=persist_updates,
include_populated_products=include_populated_products,
)
state.auto_populate = result
if not shadow_mode and state.setup.order_line is not None and state.setup.order_line.product is not None:
if (
persist_updates
and refresh_material_source
and not shadow_mode
and state.setup.order_line is not None
and state.setup.order_line.product is not None
):
session.refresh(state.setup.order_line.product)
state.setup.materials_source = state.setup.order_line.product.cad_part_materials or []
payload = _serialize_auto_populate_result(result)
payload["shadow_mode"] = shadow_mode
payload["persist_updates"] = persist_updates
payload["refresh_material_source"] = refresh_material_source
payload["include_populated_products"] = include_populated_products
return payload, "completed", None
@@ -949,17 +1143,31 @@ def _execute_glb_bbox(
step_path = state.setup.cad_file.stored_path
glb_path = node_params.get("glb_path")
if glb_path is None and state.setup.glb_reuse_path is not None:
source_preference = str(node_params.get("source_preference") or "auto")
if glb_path is None and source_preference != "step_only" and state.setup.glb_reuse_path is not None:
glb_path = str(state.setup.glb_reuse_path)
elif glb_path is None:
elif glb_path is None and source_preference != "step_only":
step_file = Path(step_path)
fallback_glb = step_file.parent / f"{step_file.stem}_thumbnail.glb"
if fallback_glb.exists():
glb_path = str(fallback_glb)
if source_preference == "glb_only" and not glb_path:
payload = {
"bbox_data": None,
"has_bbox": False,
"source_kind": "none",
"step_path": step_path,
"glb_path": None,
"source_preference": source_preference,
}
return payload, "failed", "glb_only requested but no GLB artifact is available"
result = resolve_cad_bbox(step_path, glb_path=glb_path)
state.bbox = result
return _serialize_bbox_result(result), "completed", None
payload = _serialize_bbox_result(result)
payload["source_preference"] = source_preference
return payload, "completed", None
def _execute_resolve_step_path(
@@ -1069,7 +1277,7 @@ def _execute_output_save(
node,
node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
del session, node_params
del session
if state.setup is None or state.setup.order_line is None:
raise WorkflowGraphRuntimeError("output_save requires an order_line_setup result")
@@ -1085,19 +1293,42 @@ def _execute_output_save(
"shadow_mode": workflow_context.execution_mode == "shadow",
}
upstream_artifacts = _connected_upstream_artifacts(workflow_context, state, node.id)
expected_artifact_role = str(node_params.get("expected_artifact_role") or "").strip() or None
require_upstream_artifact = bool(node_params.get("require_upstream_artifact", False))
if expected_artifact_role is not None:
upstream_artifacts = [
artifact for artifact in upstream_artifacts if artifact.get("artifact_role") == expected_artifact_role
]
if workflow_context.execution_mode == "shadow":
payload["publication_mode"] = "shadow_observer_only"
elif any(artifact["publish_asset_enabled"] for artifact in upstream_artifacts):
payload["publication_mode"] = "deferred_to_render_task"
else:
payload["publication_mode"] = "awaiting_graph_authoritative_save"
payload["expected_artifact_role"] = expected_artifact_role
payload["require_upstream_artifact"] = require_upstream_artifact
if upstream_artifacts:
payload["artifact_count"] = len(upstream_artifacts)
payload["upstream_artifacts"] = upstream_artifacts
elif require_upstream_artifact:
payload["artifact_count"] = 0
return payload, "failed", "No upstream render artifact is connected to this output node"
if state.template is not None and state.template.template is not None:
payload["template_name"] = state.template.template.name
if state.materials is not None:
payload["material_map_count"] = len(state.materials.material_map or {})
deferred_handoff_node_ids = [
str(artifact.get("node_id"))
for artifact in upstream_artifacts
if artifact.get("task_id")
]
if deferred_handoff_node_ids:
payload["handoff_state"] = "armed"
payload["handoff_node_ids"] = deferred_handoff_node_ids
payload["handoff_node_count"] = len(deferred_handoff_node_ids)
return payload, "pending", None
return payload, "completed", None
@@ -1109,7 +1340,7 @@ def _execute_notify(
node,
node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
del session, node_params
del session
if state.setup is None or state.setup.order_line is None:
raise WorkflowGraphRuntimeError("notify requires an order_line_setup result")
@@ -1121,8 +1352,10 @@ def _execute_notify(
payload: dict[str, Any] = {
"order_line_id": str(state.setup.order_line.id),
"shadow_mode": workflow_context.execution_mode == "shadow",
"channel": "audit_log",
"channel": str(node_params.get("channel") or "audit_log"),
}
require_armed_render = bool(node_params.get("require_armed_render", False))
payload["require_armed_render"] = require_armed_render
if workflow_context.execution_mode == "shadow":
payload["notification_mode"] = "shadow_suppressed"
@@ -1136,12 +1369,15 @@ def _execute_notify(
]
if not armed_node_ids:
payload["notification_mode"] = "not_armed"
if require_armed_render:
return payload, "failed", "No graph render task is configured for notification handoff"
return payload, "skipped", "No graph render task is configured for notification handoff"
payload["notification_mode"] = "deferred_to_render_task"
payload["armed_node_ids"] = armed_node_ids
payload["armed_node_count"] = len(armed_node_ids)
return payload, "completed", None
payload["handoff_state"] = "armed"
return payload, "pending", None
_BRIDGE_EXECUTORS = {
@@ -10,7 +10,17 @@ from app.core.process_steps import StepName
StepCategory = Literal["input", "processing", "rendering", "output"]
FieldType = Literal["number", "select", "boolean", "text"]
ExecutionKind = Literal["native", "bridge"]
WorkflowNodeFamily = Literal["cad_file", "order_line"]
WorkflowNodeFamily = Literal["cad_file", "order_line", "shared"]
TextFormat = Literal[
"plain",
"uuid",
"absolute_path",
"absolute_blend_path",
"absolute_glb_path",
"float_string",
"hex_color",
"safe_filename_suffix",
]
class WorkflowNodeFieldOption(BaseModel):
@@ -30,6 +40,9 @@ class WorkflowNodeFieldDefinition(BaseModel):
step: float | None = None
unit: str | None = None
options: list[WorkflowNodeFieldOption] = []
allow_blank: bool = True
max_length: int | None = None
text_format: TextFormat = "plain"
class WorkflowNodeDefinition(BaseModel):
@@ -65,6 +78,9 @@ def _field(
step: float | None = None,
unit: str | None = None,
options: list[tuple[str | int | float | bool, str]] | None = None,
allow_blank: bool = True,
max_length: int | None = None,
text_format: TextFormat = "plain",
) -> WorkflowNodeFieldDefinition:
return WorkflowNodeFieldDefinition(
key=key,
@@ -81,6 +97,9 @@ def _field(
WorkflowNodeFieldOption(value=value, label=option_label)
for value, option_label in (options or [])
],
allow_blank=allow_blank,
max_length=max_length,
text_format=text_format,
)
@@ -169,7 +188,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"cad_file",
"cad.export_glb",
"processing",
"Convert STEP geometry into GLB for previews and downstream rendering.",
"Convert STEP geometry into GLB for previews and downstream rendering. Uses the system tessellation profile; this node does not expose per-node overrides yet.",
node_type="processNode",
icon="refresh-cw",
execution_kind="bridge",
@@ -181,10 +200,10 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
_definition(
StepName.GLB_BBOX,
"Compute Bounding Box",
"order_line",
"shared",
"geometry.compute_bbox",
"processing",
"Compute the model bounding box from the exported GLB for framing decisions.",
"Compute the model bounding box from a prepared GLB artifact for framing decisions in either CAD-intake or order-line workflows.",
node_type="processNode",
icon="layers",
execution_kind="bridge",
@@ -196,10 +215,24 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
description="Optional absolute path to a specific GLB file. Leave empty to reuse the prepared preview/export artifact automatically.",
section="Inputs",
default="",
text_format="absolute_glb_path",
),
_field(
"source_preference",
"Source Preference",
"select",
description="Prefer a prepared GLB, force STEP fallback, or fail when no GLB artifact is available.",
section="Inputs",
default="auto",
options=[
("auto", "Auto"),
("step_only", "STEP Only"),
("glb_only", "GLB Only"),
],
),
],
input_contract={"context": "order_line", "requires": ["glb_preview"]},
output_contract={"context": "order_line", "provides": ["bbox"]},
input_contract={"requires": ["glb_preview"]},
output_contract={"provides": ["bbox"]},
artifact_roles_consumed=["glb_preview"],
artifact_roles_produced=["bbox"],
),
@@ -213,6 +246,25 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
node_type="processNode",
icon="layers",
execution_kind="bridge",
defaults={"disable_materials": False, "material_override": ""},
fields=[
_field(
"disable_materials",
"Disable Materials",
"boolean",
description="Bypass template and alias-based material mapping for this node.",
section="Materials",
default=False,
),
_field(
"material_override",
"Material Override",
"text",
description="Optional material name forced onto every detected part before rendering.",
section="Materials",
default="",
),
],
input_contract={"context": "order_line", "requires": ["order_line_context", "cad_materials"]},
output_contract={"context": "order_line", "provides": ["material_assignments"]},
artifact_roles_consumed=["order_line_context", "cad_materials"],
@@ -228,6 +280,37 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
node_type="processNode",
icon="layers",
execution_kind="bridge",
defaults={
"persist_updates": True,
"refresh_material_source": True,
"include_populated_products": False,
},
fields=[
_field(
"persist_updates",
"Persist Updates",
"boolean",
description="Write discovered part-material mappings back to product records in graph mode.",
section="Behavior",
default=True,
),
_field(
"refresh_material_source",
"Refresh Material Source",
"boolean",
description="Reload product material mappings into the workflow context after persistence.",
section="Behavior",
default=True,
),
_field(
"include_populated_products",
"Rewrite Populated Products",
"boolean",
description="Also rebuild material mappings for products that already have non-empty assignments.",
section="Behavior",
default=False,
),
],
input_contract={"context": "order_line", "requires": ["cad_materials"]},
output_contract={"context": "order_line", "provides": ["material_catalog_updates"]},
artifact_roles_consumed=["cad_materials"],
@@ -306,7 +389,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"cad_file",
"media.save_thumbnail",
"output",
"Persist the generated thumbnail back onto the CAD file record.",
"Persist the generated thumbnail back onto the CAD file record. Rendering settings are supplied by the connected upstream thumbnail request node.",
node_type="outputNode",
icon="download",
execution_kind="bridge",
@@ -360,6 +443,113 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
node_type="processNode",
icon="layers",
execution_kind="bridge",
defaults={
"template_id_override": "",
"material_library_path": "",
"require_template": False,
"disable_materials": False,
"target_collection": "",
"material_replace_mode": "inherit",
"lighting_only_mode": "inherit",
"shadow_catcher_mode": "inherit",
"camera_orbit_mode": "inherit",
},
fields=[
_field(
"template_id_override",
"Template ID Override",
"text",
description="Optional render-template UUID to force for this workflow node instead of category/output-type resolution.",
section="Template",
default="",
text_format="uuid",
),
_field(
"require_template",
"Require Template",
"boolean",
description="Fail this node when no active render template can be resolved.",
section="Template",
default=False,
),
_field(
"material_library_path",
"Material Library Path",
"text",
description="Optional absolute .blend path used instead of the active asset library.",
section="Materials",
default="",
text_format="absolute_blend_path",
),
_field(
"disable_materials",
"Disable Materials",
"boolean",
description="Resolve the template but skip material-map generation for downstream nodes.",
section="Materials",
default=False,
),
_field(
"target_collection",
"Target Collection Override",
"text",
description="Optional collection name override applied after template resolution. Leave blank to inherit from the template.",
section="Template Overrides",
default="",
),
_field(
"material_replace_mode",
"Material Replace",
"select",
description="Override whether template material replacement is active for downstream nodes.",
section="Template Overrides",
default="inherit",
options=[
("inherit", "Inherit Template"),
("enabled", "Force Enabled"),
("disabled", "Force Disabled"),
],
),
_field(
"lighting_only_mode",
"Lighting Only",
"select",
description="Override the template lighting-only flag for downstream nodes.",
section="Template Overrides",
default="inherit",
options=[
("inherit", "Inherit Template"),
("enabled", "Force Enabled"),
("disabled", "Force Disabled"),
],
),
_field(
"shadow_catcher_mode",
"Shadow Catcher",
"select",
description="Override the template shadow-catcher flag for downstream nodes.",
section="Template Overrides",
default="inherit",
options=[
("inherit", "Inherit Template"),
("enabled", "Force Enabled"),
("disabled", "Force Disabled"),
],
),
_field(
"camera_orbit_mode",
"Camera Orbit",
"select",
description="Override whether turntable renders orbit the camera or rotate the object.",
section="Template Overrides",
default="inherit",
options=[
("inherit", "Inherit Template"),
("enabled", "Force Camera Orbit"),
("disabled", "Force Object Rotation"),
],
),
],
input_contract={"context": "order_line", "requires": ["order_line_context"]},
output_contract={
"context": "order_line",
@@ -372,6 +562,8 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"use_materials",
"override_material",
"category_key",
"workflow_input_schema",
"template_inputs",
],
},
artifact_roles_consumed=["order_line_context"],
@@ -384,6 +576,8 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"use_materials",
"override_material",
"category_key",
"workflow_input_schema",
"template_inputs",
],
),
_definition(
@@ -420,7 +614,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"select",
description="Force CPU, GPU, or automatic device selection.",
section="Render",
default="auto",
default="gpu",
options=_CYCLES_DEVICE_OPTIONS,
),
_field(
@@ -451,6 +645,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
description="Optional Cycles adaptive sampling threshold, for example 0.01.",
section="Denoising",
default="",
text_format="float_string",
),
_field(
"denoiser",
@@ -606,7 +801,11 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
defaults={
"use_custom_render_settings": False,
"fps": 24,
"frame_count": 120,
"duration_s": 5,
"turntable_degrees": 360,
"turntable_axis": "world_z",
"camera_orbit": True,
"rotation_z": 0,
},
fields=[
@@ -664,8 +863,20 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
description="Optional hex color used during FFmpeg compositing, for example #FFFFFF.",
section="Output",
default="",
text_format="hex_color",
),
_field("fps", "FPS", "number", section="Animation", default=24, min=1, max=120, step=1),
_field(
"frame_count",
"Frame Count",
"number",
description="Explicit total frame count for the rendered turntable clip.",
section="Animation",
default=120,
min=1,
max=7200,
step=1,
),
_field(
"duration_s",
"Duration",
@@ -818,6 +1029,32 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
node_type="outputNode",
icon="download",
execution_kind="bridge",
defaults={"expected_artifact_role": "", "require_upstream_artifact": False},
fields=[
_field(
"expected_artifact_role",
"Expected Artifact Role",
"select",
description="Restrict this node to a specific upstream render artifact type.",
section="Output",
default="",
options=[
("", "Any Connected Artifact"),
("render_output", "Still Output"),
("turntable_output", "Turntable Output"),
("blend_export", "Blend Export"),
("thumbnail_output", "Thumbnail Output"),
],
),
_field(
"require_upstream_artifact",
"Require Upstream Artifact",
"boolean",
description="Fail the node when no matching upstream artifact is connected.",
section="Output",
default=False,
),
],
input_contract={
"context": "order_line",
"requires": ["order_line_context"],
@@ -833,7 +1070,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"order_line",
"media.export_blend",
"output",
"Persist the generated .blend file as a downloadable media asset.",
"Persist the generated .blend file as a downloadable media asset. Only the optional filename suffix is workflow-configurable today.",
node_type="outputNode",
icon="download",
defaults={"output_name_suffix": ""},
@@ -845,6 +1082,8 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
description="Optional suffix appended to the generated `.blend` filename.",
section="Output",
default="",
text_format="safe_filename_suffix",
max_length=64,
),
],
execution_kind="bridge",
@@ -859,7 +1098,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"cad_file",
"cad.generate_stl_cache",
"processing",
"Generate and cache STL derivatives next to the STEP source.",
"Compatibility node for legacy CAD flows. HartOMat graph execution uses direct OCC/GLB export instead, so this node intentionally performs no per-node-configurable cache generation.",
node_type="convertNode",
icon="refresh-cw",
execution_kind="bridge",
@@ -877,7 +1116,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"Emit a user-visible notification for workflow completion or failure.",
node_type="outputNode",
icon="bell",
defaults={"channel": "audit_log"},
defaults={"channel": "audit_log", "require_armed_render": False},
fields=[
_field(
"channel",
@@ -888,6 +1127,14 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
default="audit_log",
options=[("audit_log", "Audit Log")],
),
_field(
"require_armed_render",
"Require Armed Render",
"boolean",
description="Fail this node when no upstream graph render task is configured to hand off notifications.",
section="Notification",
default=False,
),
],
execution_kind="bridge",
input_contract={
File diff suppressed because it is too large Load Diff
@@ -5,7 +5,7 @@ import re
import shutil
import uuid
from dataclasses import dataclass, field
from datetime import datetime
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Callable, Literal
@@ -13,7 +13,11 @@ from sqlalchemy import select, update as sql_update
from sqlalchemy.orm import Session, joinedload
from app.config import settings as app_settings
from app.core.render_paths import resolve_result_path, result_path_to_storage_key
from app.core.render_paths import (
ensure_group_writable_dir,
resolve_result_path,
result_path_to_storage_key,
)
from app.domains.media.models import MediaAsset, MediaAssetType
from app.domains.orders.models import Order, OrderLine, OrderStatus
from app.domains.products.models import CadFile, Product
@@ -37,6 +41,199 @@ logger = logging.getLogger(__name__)
# Optional event-emission callback; None disables emission.
EmitFn = Callable[..., None] | None
# Outcome states reported by the order-line setup phase.
SetupStatus = Literal["ready", "skip", "failed", "missing"]
# Optional callback that enqueues a thumbnail job from an id string and a
# str->str parameter mapping; None disables queueing.
QueueThumbnailFn = Callable[[str, dict[str, str]], None] | None
# Workflow node params starting with this prefix carry per-template input
# overrides (see extract_template_input_overrides).
TEMPLATE_INPUT_PARAM_PREFIX = "template_input__"
# The fixed 8-byte signature every PNG file begins with.
_PNG_SIGNATURE = b"\x89PNG\r\n\x1a\n"
# Ancillary PNG chunk types carrying text/timestamps — presumably treated as
# volatile when comparing render outputs; usage not visible in this chunk.
_VOLATILE_PNG_CHUNK_TYPES = {b"tEXt", b"zTXt", b"iTXt", b"tIME"}
def _slugify_material_lookup_key(value: str) -> str:
return re.sub(r"[^a-z0-9]+", "_", value).strip("_")
def _build_authoritative_material_lookup(materials_source: list[dict[str, Any]]) -> dict[str, str]:
    """Index part names (plus normalized variants) to material names.

    For each source entry carrying both ``part_name`` and ``material``, the
    lowercased/stripped part name and its slug are registered; when trailing
    ``_af<n>`` / ``_af<n>_<m>`` CAD suffixes are present, the suffix-stripped
    form and its slug are registered too. ``setdefault`` means the first
    entry to claim a key wins.
    """
    lookup: dict[str, str] = {}
    for entry in materials_source:
        part_raw = entry.get("part_name")
        material_raw = entry.get("material")
        if not part_raw or not material_raw:
            continue
        part_key = str(part_raw).lower().strip()
        if not part_key:
            continue
        material_name = str(material_raw)
        lookup.setdefault(part_key, material_name)
        part_slug = _slugify_material_lookup_key(part_key)
        if part_slug:
            lookup.setdefault(part_slug, material_name)
        # Drop one or more trailing CAD assembly suffixes like "_af3_2".
        without_suffix = re.sub(r"(_af\d+(_\d+)?)+$", "", part_key, flags=re.IGNORECASE)
        if without_suffix != part_key:
            lookup.setdefault(without_suffix, material_name)
            suffix_slug = _slugify_material_lookup_key(without_suffix)
            if suffix_slug:
                lookup.setdefault(suffix_slug, material_name)
    return lookup
def _common_prefix_length(left: str, right: str) -> int:
limit = min(len(left), len(right))
idx = 0
while idx < limit and left[idx] == right[idx]:
idx += 1
return idx
def _lookup_material_by_prefix(query: str, material_lookup: dict[str, str]) -> str | None:
if not query or not material_lookup:
return None
contenders: list[tuple[int, str]] = []
for key, material_name in material_lookup.items():
if len(key) >= 5 and len(query) >= 5 and (query.startswith(key) or key.startswith(query)):
contenders.append((len(key), material_name))
if not contenders:
return None
contenders.sort(reverse=True)
top_length = contenders[0][0]
close_materials = {
material_name
for key_length, material_name in contenders
if key_length >= top_length - 2
}
return contenders[0][1] if len(close_materials) == 1 else None
def _lookup_material_by_common_prefix(query: str, material_lookup: dict[str, str]) -> str | None:
    """Resolve *query* by shared-leading-prefix similarity against the keys.

    A key qualifies when it shares at least 12 leading characters with the
    query and that shared prefix covers at least 68% of the longer string.
    The best-scoring key wins only when every key scoring within a small
    tolerance (coverage within 0.02, prefix within 2 chars) agrees on the
    same material; otherwise the match is ambiguous and ``None`` is returned.
    """
    if not query or not material_lookup:
        return None
    ranked: list[tuple[float, int, int, str]] = []
    for lookup_key, name in material_lookup.items():
        shared = _common_prefix_length(query, lookup_key)
        if shared < 12:
            continue
        coverage = shared / max(len(query), len(lookup_key))
        if coverage < 0.68:
            continue
        ranked.append((coverage, shared, len(lookup_key), name))
    if not ranked:
        return None
    ranked.sort(reverse=True)
    best_coverage, best_shared, _, best_name = ranked[0]
    contender_materials = {
        name
        for coverage, shared, _, name in ranked
        if coverage >= best_coverage - 0.02 and shared >= best_shared - 2
    }
    return best_name if len(contender_materials) == 1 else None
def _resolve_authoritative_material_name(
    raw_name: str | None,
    material_lookup: dict[str, str],
    *fallback_names: str | None,
) -> str | None:
    """Resolve a part name to an authoritative material name.

    Tries *raw_name* first, then each of *fallback_names*. For every
    candidate a list of normalized variants is built (lowercased/stripped,
    trailing CAD ``_af<n>`` suffixes removed, trailing ``_<digits>``
    instance counters removed, plus slugified forms of each), and the
    variants are checked in three passes of decreasing strictness: exact
    lookup, unambiguous prefix containment, then unambiguous common-prefix
    similarity. Returns the first material found, or ``None``.
    """
    candidates = [raw_name, *fallback_names]
    # Shared across candidates so later candidates skip variants an earlier
    # candidate already failed with.
    seen: set[str] = set()
    for candidate in candidates:
        if not candidate:
            continue
        normalized = str(candidate).lower().strip()
        variants = [normalized]
        # Drop one or more trailing "_af<n>" / "_af<n>_<m>" CAD suffixes.
        stripped = re.sub(r"(_af\d+(_\d+)?)+$", "", normalized, flags=re.IGNORECASE)
        if stripped != normalized:
            variants.append(stripped)
        # Drop a trailing "_<digits>" instance counter from the stripped form.
        no_instance = re.sub(r"_\d+$", "", stripped)
        if no_instance and no_instance not in variants:
            variants.append(no_instance)
        # Add slugified forms; iterate over a copy since we append in-loop.
        for variant in list(variants):
            slug_variant = _slugify_material_lookup_key(variant)
            if slug_variant and slug_variant not in variants:
                variants.append(slug_variant)
        # Order-preserving dedupe: relies on `seen.add` returning None so the
        # `or` arm both records the variant and keeps the predicate truthy-false.
        deduped_variants = [variant for variant in variants if variant and not (variant in seen or seen.add(variant))]
        # Pass 1: exact key match.
        for variant in deduped_variants:
            material_name = material_lookup.get(variant)
            if material_name:
                return material_name
        # Pass 2: unambiguous prefix-containment match.
        for variant in deduped_variants:
            material_name = _lookup_material_by_prefix(variant, material_lookup)
            if material_name:
                return material_name
        # Pass 3: unambiguous longest-common-prefix match.
        for variant in deduped_variants:
            material_name = _lookup_material_by_common_prefix(variant, material_lookup)
            if material_name:
                return material_name
    return None
def _utcnow_naive() -> datetime:
"""Return UTC as a naive datetime for legacy TIMESTAMP WITHOUT TIME ZONE columns."""
return datetime.now(timezone.utc).replace(tzinfo=None)
def extract_template_input_overrides(params: dict[str, Any] | None) -> dict[str, Any]:
    """Collect ``template_input__*`` node params into an overrides mapping.

    The prefix is stripped and the remainder whitespace-trimmed to form the
    override key; entries whose remainder is blank, and non-string keys, are
    ignored. An empty/None *params* yields an empty dict.
    """
    if not params:
        return {}
    prefix_length = len(TEMPLATE_INPUT_PARAM_PREFIX)
    overrides: dict[str, Any] = {}
    for param_key, param_value in params.items():
        if not isinstance(param_key, str):
            continue
        if not param_key.startswith(TEMPLATE_INPUT_PARAM_PREFIX):
            continue
        bare_key = param_key[prefix_length:].strip()
        if bare_key:
            overrides[bare_key] = param_value
    return overrides
def _normalize_template_input_schema(template: RenderTemplate | None) -> list[dict[str, Any]]:
    """Return sanitized copies of the template's workflow input-field specs.

    Non-list schemas yield an empty list; non-dict entries and entries whose
    ``key`` is missing or blank are dropped. Each surviving entry is shallow
    copied so callers cannot mutate the template's stored schema.
    """
    schema = getattr(template, "workflow_input_schema", None) if template is not None else None
    if not isinstance(schema, list):
        return []
    sanitized: list[dict[str, Any]] = []
    for entry in schema:
        if not isinstance(entry, dict):
            continue
        if str(entry.get("key") or "").strip():
            sanitized.append(dict(entry))
    return sanitized
def _resolve_template_input_values(
schema: list[dict[str, Any]],
overrides: dict[str, Any] | None,
) -> dict[str, Any]:
raw_overrides = overrides or {}
resolved: dict[str, Any] = {}
for field in schema:
key = str(field.get("key") or "").strip()
if not key:
continue
if key in raw_overrides:
resolved[key] = raw_overrides[key]
continue
if "default" in field:
resolved[key] = field.get("default")
return resolved
@dataclass(slots=True)
@@ -75,8 +272,14 @@ class TemplateResolutionResult:
material_map: dict[str, str] | None
use_materials: bool
override_material: str | None
target_collection: str
lighting_only: bool
shadow_catcher: bool
camera_orbit: bool
category_key: str | None
output_type_id: str | None
workflow_input_schema: list[dict[str, Any]] = field(default_factory=list)
template_inputs: dict[str, Any] = field(default_factory=dict)
@dataclass(slots=True)
@@ -159,6 +362,7 @@ class OrderLineRenderInvocation:
sensor_width_mm: float | None = None
usd_path: str | None = None
material_override: str | None = None
template_inputs: dict[str, Any] = field(default_factory=dict)
def task_defaults(self) -> dict[str, Any]:
payload: dict[str, Any] = {
@@ -196,9 +400,10 @@ class OrderLineRenderInvocation:
"sensor_width_mm": self.sensor_width_mm,
"usd_path": self.usd_path,
"material_override": self.material_override,
"template_inputs": self.template_inputs,
}
for key, value in optional_values.items():
if value not in (None, ""):
if value not in (None, "", {}, [], ()):
payload[key] = value
return payload
@@ -242,6 +447,7 @@ class OrderLineRenderInvocation:
"focal_length_mm": self.focal_length_mm,
"sensor_width_mm": self.sensor_width_mm,
"material_override": self.material_override,
"template_inputs": self.template_inputs,
}
def as_turntable_renderer_kwargs(
@@ -285,6 +491,7 @@ class OrderLineRenderInvocation:
"focal_length_mm": self.focal_length_mm,
"sensor_width_mm": self.sensor_width_mm,
"material_override": self.material_override,
"template_inputs": self.template_inputs,
}
def as_cinematic_renderer_kwargs(
@@ -324,6 +531,7 @@ class OrderLineRenderInvocation:
"focal_length_mm": self.focal_length_mm,
"sensor_width_mm": self.sensor_width_mm,
"material_override": self.material_override,
"template_inputs": self.template_inputs,
"log_callback": log_callback,
}
@@ -341,7 +549,61 @@ def _resolve_asset_path(storage_key: str | None) -> Path | None:
return resolve_result_path(storage_key)
def _usd_master_refresh_reason(cad_file: CadFile) -> str | None:
def _usd_master_file_refresh_reason(usd_render_path: Path | None) -> str | None:
if usd_render_path is None:
return "missing USD master file"
if not usd_render_path.exists():
return "missing USD master file"
try:
usd_bytes = usd_render_path.read_bytes()
except OSError:
logger.exception("render_order_line: failed to inspect usd_master %s", usd_render_path)
return "unreadable USD master file"
usd_bytes_lower = usd_bytes.lower()
if b"schaeffler:" in usd_bytes_lower:
return "legacy Schaeffler USD primvars"
if b"hartomat:" in usd_bytes_lower:
return None
# Binary USD (`PXR-USDC`) stores HartOMat customData in a form that is not
# reliably discoverable via a raw byte grep. For those files we rely on the
# cache fingerprint plus the upstream resolved material metadata checks.
if usd_bytes.startswith(b"PXR-USDC") or b"\x00" in usd_bytes[:256]:
return None
# Textual USD payloads without any HartOMat markers are legacy/stale in the
# current pipeline and should be refreshed before they are reused.
try:
usd_bytes.decode("utf-8")
except UnicodeDecodeError:
return None
return "missing HartOMat USD markers"
def _usd_master_cache_refresh_reason(usd_asset: MediaAsset | None) -> str | None:
    """Flag USD cache fingerprints that predate the current cache-key format.

    Returns ``None`` when there is no asset to check or the fingerprint is
    current; otherwise a short reason string for the refresh decision.
    """
    if usd_asset is None:
        return None
    config = usd_asset.render_config
    if not isinstance(config, dict):
        config = {}
    fingerprint = config.get("cache_key")
    if not (isinstance(fingerprint, str) and fingerprint.strip()):
        return "missing USD cache fingerprint"
    # New-format keys append the render-script fingerprint as a sixth
    # colon-delimited segment, i.e. at least five colons are present.
    if fingerprint.count(":") < 5:
        return "legacy USD cache fingerprint"
    return None
def _usd_master_refresh_reason(
cad_file: CadFile,
*,
usd_asset: MediaAsset | None = None,
usd_render_path: Path | None = None,
) -> str | None:
resolved = cad_file.resolved_material_assignments
if not isinstance(resolved, dict) or not resolved:
return "missing resolved material assignments"
@@ -350,7 +612,7 @@ def _usd_master_refresh_reason(cad_file: CadFile) -> str | None:
for meta in resolved.values():
if not isinstance(meta, dict):
continue
canonical = meta.get("canonical_material")
canonical = meta.get("canonical_material") or meta.get("material")
if isinstance(canonical, str) and canonical.strip():
canonical_materials.append(canonical.strip())
@@ -360,6 +622,14 @@ def _usd_master_refresh_reason(cad_file: CadFile) -> str | None:
if any(material.upper().startswith("SCHAEFFLER_") for material in canonical_materials):
return "legacy Schaeffler material metadata"
cache_reason = _usd_master_cache_refresh_reason(usd_asset)
if cache_reason is not None:
return cache_reason
file_reason = _usd_master_file_refresh_reason(usd_render_path)
if file_reason is not None:
return file_reason
return None
@@ -502,6 +772,27 @@ def _coerce_bool(value: Any) -> bool:
return bool(value)
def _resolve_tristate_mode(
value: Any,
*,
field_name: str,
fallback: bool | None = None,
) -> bool | None:
if value in (None, "", "inherit"):
return fallback
if isinstance(value, bool):
return value
if isinstance(value, str):
normalized = value.strip().lower()
if normalized in {"enabled", "true", "1", "yes", "on"}:
return True
if normalized in {"disabled", "false", "0", "no", "off"}:
return False
raise ValueError(
f"{field_name} must be one of: inherit, enabled, disabled"
)
def _resolve_render_output_extension(line: OrderLine) -> str:
output_type = line.output_type
output_extension = "jpg"
@@ -582,7 +873,7 @@ def build_order_line_render_invocation(
denoising_quality = str(render_settings.get("denoising_quality", ""))
denoising_use_gpu = str(render_settings.get("denoising_use_gpu", ""))
transparent_bg = bool(output_type and output_type.transparent_bg)
cycles_device = (output_type.cycles_device or "auto") if output_type is not None else "auto"
cycles_device = (output_type.cycles_device or "gpu") if output_type is not None else "gpu"
render_overrides = getattr(line, "render_overrides", None)
if isinstance(render_overrides, dict):
@@ -682,22 +973,14 @@ def build_order_line_render_invocation(
part_colors=dict(setup.part_colors or {}),
part_names_ordered=part_names_ordered,
template_path=template_context.template.blend_file_path if template_context and template_context.template else None,
target_collection=(
template_context.template.target_collection
if template_context and template_context.template and template_context.template.target_collection
else "Product"
),
target_collection=template_context.target_collection if template_context else "Product",
material_library_path=(
template_context.material_library if template_context and use_materials else None
),
material_map=material_map,
lighting_only=bool(template_context.template.lighting_only) if template_context and template_context.template else False,
shadow_catcher=(
bool(template_context.template.shadow_catcher_enabled)
if template_context and template_context.template
else False
),
camera_orbit=bool(template_context.template.camera_orbit) if template_context and template_context.template else True,
lighting_only=template_context.lighting_only if template_context else False,
shadow_catcher=template_context.shadow_catcher if template_context else False,
camera_orbit=template_context.camera_orbit if template_context else True,
rotation_x=position.rotation_x,
rotation_y=position.rotation_y,
rotation_z=position.rotation_z,
@@ -705,6 +988,7 @@ def build_order_line_render_invocation(
sensor_width_mm=position.sensor_width_mm,
usd_path=str(setup.usd_render_path) if setup.usd_render_path is not None else None,
material_override=material_override,
template_inputs=dict(template_context.template_inputs) if template_context is not None else {},
)
@@ -727,10 +1011,49 @@ def _canonical_public_output_path(line: OrderLine, output_path: str) -> str:
return str(upload_root / "renders" / str(line.id) / filename)
def _strip_volatile_png_metadata(output_path: Path) -> None:
    """Rewrite a PNG in place with volatile ancillary chunks removed.

    Chunk types listed in the module-level ``_VOLATILE_PNG_CHUNK_TYPES`` are
    dropped so repeated renders of identical content produce byte-identical
    files. Non-PNG paths, missing files, and malformed payloads are left
    untouched; the file is only rewritten when at least one chunk was removed.
    """
    if output_path.suffix.lower() != ".png" or not output_path.is_file():
        return
    raw_bytes = output_path.read_bytes()
    if not raw_bytes.startswith(_PNG_SIGNATURE):
        return
    cursor = len(_PNG_SIGNATURE)
    kept_chunks: list[bytes] = []
    changed = False
    # PNG layout: each chunk is 4-byte big-endian length, 4-byte type, payload,
    # 4-byte CRC — 12 bytes of framing plus the payload length.
    while cursor + 12 <= len(raw_bytes):
        chunk_length = int.from_bytes(raw_bytes[cursor : cursor + 4], "big")
        chunk_end = cursor + 12 + chunk_length
        if chunk_end > len(raw_bytes):
            # Truncated/corrupt chunk — abort without modifying the file.
            return
        chunk_type = raw_bytes[cursor + 4 : cursor + 8]
        chunk_bytes = raw_bytes[cursor:chunk_end]
        if chunk_type in _VOLATILE_PNG_CHUNK_TYPES:
            changed = True
        else:
            kept_chunks.append(chunk_bytes)
        cursor = chunk_end
        if chunk_type == b"IEND":
            # IEND terminates the stream; any trailing bytes are dropped on rewrite.
            break
    if not changed:
        return
    output_path.write_bytes(_PNG_SIGNATURE + b"".join(kept_chunks))
def _normalize_output_artifact(output_path: str) -> None:
    """Scrub volatile metadata from a freshly rendered artifact in place."""
    artifact = Path(output_path)
    _strip_volatile_png_metadata(artifact)
def _materialize_public_output(line: OrderLine, output_path: str) -> str:
    """Copy a render result into its canonical public location.

    Ensures the destination directory exists and is group-writable, copies the
    file (preserving metadata) unless it already lives at the canonical path,
    and returns the canonical path as a string.
    """
    destination = Path(_canonical_public_output_path(line, output_path))
    destination.parent.mkdir(parents=True, exist_ok=True)
    ensure_group_writable_dir(destination.parent)
    source = Path(output_path)
    if source != destination:
        shutil.copy2(source, destination)
    return str(destination)
@@ -765,6 +1088,7 @@ def persist_order_line_media_asset(
resolved_workflow_run_id = _resolve_existing_workflow_run_id(session, workflow_run_id)
if success:
_normalize_output_artifact(output_path)
storage_key = _normalize_storage_key(output_path)
output_file = Path(output_path)
existing_asset = session.execute(
@@ -906,13 +1230,14 @@ def persist_order_line_output(
) -> OutputSaveResult:
"""Persist the render result for an order line and publish the media asset if needed."""
status: Literal["completed", "failed"] = "completed" if success else "failed"
completed_at = render_completed_at or datetime.utcnow()
completed_at = render_completed_at or _utcnow_naive()
persisted_output_path = output_path
line.render_status = status
line.render_completed_at = completed_at
line.render_log = render_log
if success:
_normalize_output_artifact(output_path)
persisted_output_path = _materialize_public_output(line, output_path)
line.result_path = persisted_output_path if success else None
session.flush()
@@ -1084,7 +1409,7 @@ def prepare_order_line_render_context(
reason="missing_cad_file",
)
render_start = datetime.utcnow() if persist_state else None
render_start = _utcnow_naive() if persist_state else None
if persist_state:
session.execute(
sql_update(OrderLine)
@@ -1111,7 +1436,12 @@ def prepare_order_line_render_context(
.limit(1)
).scalar_one_or_none()
if usd_asset:
refresh_reason = _usd_master_refresh_reason(cad_file)
usd_candidate_path = _resolve_asset_path(usd_asset.storage_key)
refresh_reason = _usd_master_refresh_reason(
cad_file,
usd_asset=usd_asset,
usd_render_path=usd_candidate_path,
)
if refresh_reason is not None:
logger.warning(
"render_order_line: ignoring stale usd_master for cad %s (%s)",
@@ -1127,7 +1457,7 @@ def prepare_order_line_render_context(
if _queue_usd_master_refresh(str(cad_file.id)):
_emit(emit, order_line_id, "Queued USD master regeneration in background")
else:
usd_render_path = _resolve_asset_path(usd_asset.storage_key)
usd_render_path = usd_candidate_path
if usd_render_path:
logger.info(
"render_order_line: using usd_master %s for cad %s",
@@ -1203,6 +1533,12 @@ def resolve_order_line_template_context(
material_library_path_override: str | None = None,
require_template: bool = False,
disable_materials: bool = False,
target_collection_override: str | None = None,
material_replace_mode: str | None = None,
lighting_only_mode: str | None = None,
shadow_catcher_mode: str | None = None,
camera_orbit_mode: str | None = None,
template_input_overrides: dict[str, Any] | None = None,
) -> TemplateResolutionResult:
"""Resolve render template, material library, and material map for a prepared order line."""
if not setup.is_ready:
@@ -1242,6 +1578,7 @@ def resolve_order_line_template_context(
if isinstance(material_library_path_override, str) and material_library_path_override.strip()
else get_material_library_path_for_session(session)
)
material_replace_override = _resolve_tristate_mode(material_replace_mode, field_name="material_replace_mode")
material_resolution = resolve_order_line_material_map(
line,
cad_file,
@@ -1250,8 +1587,36 @@ def resolve_order_line_template_context(
template=template,
emit=emit,
disable_materials=disable_materials,
material_replace_enabled_override=material_replace_override,
)
resolved_target_collection = (
target_collection_override.strip()
if isinstance(target_collection_override, str) and target_collection_override.strip()
else (
template.target_collection
if template is not None and template.target_collection
else "Product"
)
)
resolved_lighting_only = _resolve_tristate_mode(
lighting_only_mode,
field_name="lighting_only_mode",
fallback=bool(template.lighting_only) if template is not None else False,
)
resolved_shadow_catcher = _resolve_tristate_mode(
shadow_catcher_mode,
field_name="shadow_catcher_mode",
fallback=bool(template.shadow_catcher_enabled) if template is not None else False,
)
resolved_camera_orbit = _resolve_tristate_mode(
camera_orbit_mode,
field_name="camera_orbit_mode",
fallback=bool(template.camera_orbit) if template is not None else True,
)
workflow_input_schema = _normalize_template_input_schema(template)
template_inputs = _resolve_template_input_values(workflow_input_schema, template_input_overrides)
if template:
_emit(
emit,
@@ -1267,6 +1632,8 @@ def resolve_order_line_template_context(
template.blend_file_path,
template.lighting_only,
)
if template_inputs:
logger.info("Render template inputs resolved for '%s': %s", template.name, sorted(template_inputs))
if not template:
_emit(emit, str(line.id), "No render template found — using factory settings (Mode A)")
logger.info(
@@ -1281,8 +1648,14 @@ def resolve_order_line_template_context(
material_map=material_resolution.material_map,
use_materials=material_resolution.use_materials,
override_material=material_resolution.override_material,
target_collection=resolved_target_collection,
lighting_only=resolved_lighting_only,
shadow_catcher=resolved_shadow_catcher,
camera_orbit=resolved_camera_orbit,
category_key=category_key,
output_type_id=output_type_id,
workflow_input_schema=workflow_input_schema,
template_inputs=template_inputs,
)
@@ -1296,6 +1669,7 @@ def resolve_order_line_material_map(
emit: EmitFn = None,
material_override: str | None = None,
disable_materials: bool = False,
material_replace_enabled_override: bool | None = None,
) -> MaterialResolutionResult:
"""Resolve the effective order-line material map with legacy precedence rules."""
if disable_materials:
@@ -1311,11 +1685,15 @@ def resolve_order_line_material_map(
raw_material_count = 0
raw_material_map = _build_effective_material_lookup(cad_file, materials_source)
use_materials = bool(material_library and raw_material_map)
if template and not template.material_replace_enabled:
if material_replace_enabled_override is not None:
use_materials = bool(material_replace_enabled_override and material_library and raw_material_map)
elif template and not template.material_replace_enabled:
use_materials = False
if use_materials:
raw_material_count = len(raw_material_map)
material_map = resolve_material_map(raw_material_map)
if cad_file:
material_map = _overlay_scene_manifest_material_map(cad_file, material_map)
line_override = getattr(line, "material_override", None)
output_override = line.output_type.material_override if line.output_type else None
@@ -1344,21 +1722,55 @@ def resolve_order_line_material_map(
)
def _overlay_scene_manifest_material_map(
    cad_file: CadFile,
    material_map: dict[str, str],
) -> dict[str, str]:
    """Overlay authoritative scene-manifest materials onto a resolved material map.

    Low-level lookups still retain legacy/product source assignments so older
    fallback paths keep working. The final order-line material map, however,
    must prefer the scene manifest's effective assignments wherever the USD/CAD
    pipeline has already established authoritative part identity.
    """
    if not material_map:
        return material_map
    overlaid = dict(material_map)
    manifest = build_scene_manifest(cad_file)
    for entry in manifest.get("parts", []):
        if not isinstance(entry, dict):
            continue
        material = entry.get("effective_material")
        if not (isinstance(material, str) and material.strip()):
            continue
        # Register the material under both naming schemes so USD (part_key)
        # and GLB/STEP (source_name) lookups resolve identically.
        for key_field in ("source_name", "part_key"):
            key = entry.get(key_field)
            if isinstance(key, str) and key.strip():
                overlaid[key] = material
    return overlaid
def _build_effective_material_lookup(
cad_file: CadFile | None,
materials_source: list[dict[str, Any]],
) -> dict[str, str]:
"""Build a renderer-compatible material lookup from all available layers.
Authoritative scene-manifest assignments win when present, but we emit both
source-name and part-key keys so USD and GLB/STEP fallback paths resolve the
same effective material map.
Product/Excel CAD assignments stay authoritative for overlapping source-name
keys so legacy renders, thumbnails, and viewer previews keep parity with the
pre-USD pipeline. Scene-manifest assignments still fill gaps and emit part-key
aliases so USD and GLB/STEP fallback paths resolve the same effective map.
"""
raw_material_map: dict[str, str] = {
str(material["part_name"]): str(material["material"])
for material in materials_source
if material.get("part_name") and material.get("material")
}
authoritative_lookup = _build_authoritative_material_lookup(materials_source)
if not cad_file:
return raw_material_map
@@ -1372,10 +1784,16 @@ def _build_effective_material_lookup(
continue
source_name = part.get("source_name")
part_key = part.get("part_key")
if source_name:
raw_material_map[str(source_name)] = str(effective_material)
authoritative_material = _resolve_authoritative_material_name(
str(source_name) if source_name else None,
authoritative_lookup,
str(part_key) if part_key else None,
)
merged_material = authoritative_material or str(effective_material)
if source_name and str(source_name) not in raw_material_map:
raw_material_map[str(source_name)] = merged_material
if part_key:
raw_material_map[str(part_key)] = str(effective_material)
raw_material_map.setdefault(str(part_key), merged_material)
return raw_material_map
+159 -14
View File
@@ -18,6 +18,7 @@ Example config::
"""
from collections import deque
from typing import Any, Literal
from uuid import UUID
from pydantic import BaseModel, Field, field_validator, model_validator
@@ -29,6 +30,14 @@ from app.domains.rendering.workflow_node_registry import (
)
_WORKFLOW_META_PARAM_KEYS = {"retry_policy", "failure_policy"}
_TEMPLATE_INPUT_PARAM_PREFIX = "template_input__"
_HEX_COLOR_LENGTHS = {7, 9}
_SAFE_FILENAME_SUFFIX_CHARS = set(
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789._-"
)
def _context_seed_artifacts(definition: WorkflowNodeDefinition) -> set[str]:
if definition.family == "order_line":
return {"order_line_record"}
@@ -37,10 +46,43 @@ def _context_seed_artifacts(definition: WorkflowNodeDefinition) -> set[str]:
return set()
def _infer_concrete_workflow_family(
    definitions: list[WorkflowNodeDefinition],
) -> Literal["cad_file", "order_line", "mixed"] | None:
    """Derive the workflow's concrete family from its node definitions.

    Returns ``None`` when no node belongs to a concrete family, ``"mixed"``
    when both concrete families appear, otherwise the single family name.
    """
    observed = {
        definition.family
        for definition in definitions
        if definition.family in ("cad_file", "order_line")
    }
    if not observed:
        return None
    return "mixed" if len(observed) > 1 else observed.pop()
def _coerce_node_label(node: "WorkflowNode") -> str:
return f"{node.id!r} ({node.step.value})"
def _require_node_definition(node: "WorkflowNode") -> WorkflowNodeDefinition:
    """Look up the registry definition for *node*, raising if it is unregistered."""
    definition = get_node_definition(node.step)
    if definition is not None:
        return definition
    raise ValueError(
        f"node {_coerce_node_label(node)} is not registered in workflow_node_registry"
    )
def _is_dynamic_template_input_param(node: "WorkflowNode", key: str) -> bool:
    """True for ``template_input__<name>`` params on RESOLVE_TEMPLATE nodes.

    The suffix after the prefix must be non-blank for the key to count.
    """
    if node.step != StepName.RESOLVE_TEMPLATE or not isinstance(key, str):
        return False
    if not key.startswith(_TEMPLATE_INPUT_PARAM_PREFIX):
        return False
    suffix = key[len(_TEMPLATE_INPUT_PARAM_PREFIX):]
    return suffix.strip() != ""
def _validate_param_value(
*,
node: "WorkflowNode",
@@ -72,6 +114,105 @@ def _validate_param_value(
if value not in valid_values:
allowed_values = ", ".join(repr(option) for option in sorted(valid_values, key=repr))
raise ValueError(f"{field_label} must be one of: {allowed_values}")
return
if field_definition.type == "text":
if not isinstance(value, str):
raise ValueError(f"{field_label} must be a string")
stripped_value = value.strip()
if stripped_value == "":
if field_definition.allow_blank:
return
raise ValueError(f"{field_label} may not be blank")
if field_definition.max_length is not None and len(value) > field_definition.max_length:
raise ValueError(
f"{field_label} must be at most {field_definition.max_length} characters"
)
if field_definition.text_format == "plain":
return
if field_definition.text_format == "uuid":
try:
UUID(stripped_value)
except ValueError as exc:
raise ValueError(f"{field_label} must be a valid UUID") from exc
return
if field_definition.text_format == "absolute_path":
if not stripped_value.startswith("/"):
raise ValueError(f"{field_label} must be an absolute path")
return
if field_definition.text_format == "absolute_blend_path":
if not stripped_value.startswith("/"):
raise ValueError(f"{field_label} must be an absolute path")
if not stripped_value.lower().endswith(".blend"):
raise ValueError(f"{field_label} must point to a .blend file")
return
if field_definition.text_format == "absolute_glb_path":
if not stripped_value.startswith("/"):
raise ValueError(f"{field_label} must be an absolute path")
if not stripped_value.lower().endswith(".glb"):
raise ValueError(f"{field_label} must point to a .glb file")
return
if field_definition.text_format == "float_string":
try:
float(stripped_value)
except ValueError as exc:
raise ValueError(f"{field_label} must be a valid numeric string") from exc
return
if field_definition.text_format == "hex_color":
if len(stripped_value) not in _HEX_COLOR_LENGTHS or not stripped_value.startswith("#"):
raise ValueError(f"{field_label} must be a hex color like #FFFFFF or #FFFFFFFF")
color_digits = stripped_value[1:]
if any(character not in "0123456789abcdefABCDEF" for character in color_digits):
raise ValueError(f"{field_label} must be a hex color like #FFFFFF or #FFFFFFFF")
return
if field_definition.text_format == "safe_filename_suffix":
if any(character not in _SAFE_FILENAME_SUFFIX_CHARS for character in stripped_value):
raise ValueError(
f"{field_label} may only contain letters, numbers, '.', '-' or '_'"
)
return
raise ValueError(
f"{field_label} uses unsupported text format {field_definition.text_format!r}"
)
def _validate_meta_param_value(*, node: "WorkflowNode", key: str, value: Any) -> None:
    """Validate a workflow meta param (``retry_policy``/``failure_policy``).

    Raises:
        ValueError: On unknown meta keys, unknown nested keys, or fields of
            the wrong type/range.
    """
    field_label = f"node {_coerce_node_label(node)} meta param {key!r}"
    if key == "retry_policy":
        if not isinstance(value, dict):
            raise ValueError(f"{field_label} must be an object")
        extra = sorted(raw for raw in value if raw != "max_attempts")
        if extra:
            joined = ", ".join(repr(raw) for raw in extra)
            raise ValueError(f"{field_label} uses unknown key(s): {joined}")
        attempts = value.get("max_attempts", 1)
        # bool is an int subclass — reject it explicitly.
        if isinstance(attempts, bool) or not isinstance(attempts, int):
            raise ValueError(f"{field_label} field 'max_attempts' must be an integer")
        if not 1 <= attempts <= 5:
            raise ValueError(f"{field_label} field 'max_attempts' must be between 1 and 5")
        return
    if key == "failure_policy":
        if not isinstance(value, dict):
            raise ValueError(f"{field_label} must be an object")
        permitted = {"halt_workflow", "fallback_to_legacy"}
        extra = sorted(raw for raw in value if raw not in permitted)
        if extra:
            joined = ", ".join(repr(raw) for raw in extra)
            raise ValueError(f"{field_label} uses unknown key(s): {joined}")
        for flag in permitted:
            if flag in value and not isinstance(value[flag], bool):
                raise ValueError(f"{field_label} field {flag!r} must be a boolean")
        return
    raise ValueError(f"{field_label} is not supported")
class WorkflowPosition(BaseModel):
@@ -149,18 +290,25 @@ class WorkflowConfig(BaseModel):
@model_validator(mode="after")
def node_params_match_registry(self) -> "WorkflowConfig":
for node in self.nodes:
definition = get_node_definition(node.step)
if definition is None:
continue
definition = _require_node_definition(node)
field_definitions = {field.key: field for field in definition.fields}
allowed_keys = {field.key for field in definition.fields}
unknown_keys = sorted(key for key in node.params if key not in allowed_keys)
allowed_keys = {field.key for field in definition.fields} | _WORKFLOW_META_PARAM_KEYS
unknown_keys = sorted(
key
for key in node.params
if key not in allowed_keys and not _is_dynamic_template_input_param(node, key)
)
if unknown_keys:
joined = ", ".join(repr(key) for key in unknown_keys)
raise ValueError(
f"node {node.id!r} ({node.step.value}) uses unknown param key(s): {joined}"
)
for key, value in node.params.items():
if _is_dynamic_template_input_param(node, key):
continue
if key in _WORKFLOW_META_PARAM_KEYS:
_validate_meta_param_value(node=node, key=key, value=value)
continue
field_definition = field_definitions.get(key)
if field_definition is None:
continue
@@ -173,20 +321,19 @@ class WorkflowConfig(BaseModel):
@model_validator(mode="after")
def ui_family_matches_node_families(self) -> "WorkflowConfig":
families = {
definition.family
for node in self.nodes
if (definition := get_node_definition(node.step)) is not None
}
definitions = [_require_node_definition(node) for node in self.nodes]
families = {definition.family for definition in definitions}
inferred_family = _infer_concrete_workflow_family(definitions)
if not families:
return self
inferred_family = "mixed" if len(families) > 1 else next(iter(families))
execution_mode = self.ui.execution_mode if self.ui is not None else "legacy"
if execution_mode in {"graph", "shadow"} and inferred_family == "mixed":
raise ValueError(
"workflow ui.execution_mode must stay single-family for graph/shadow execution"
)
if inferred_family is None:
return self
if self.ui is None or self.ui.family is None:
return self
if self.ui.family != inferred_family:
@@ -220,9 +367,7 @@ class WorkflowConfig(BaseModel):
node_id = queue.popleft()
processed += 1
node = node_by_id[node_id]
definition = get_node_definition(node.step)
if definition is None:
continue
definition = _require_node_definition(node)
node_inputs = available_artifacts[node_id] | _context_seed_artifacts(definition)
required = set(definition.input_contract.get("requires", []))
+4 -3
View File
@@ -6,6 +6,7 @@ from fastapi.staticfiles import StaticFiles
from pathlib import Path
from app.config import settings
from app.core.render_paths import ensure_group_writable_dir
from app.database import engine, Base
from app.core.websocket import manager as ws_manager
from app.core.middleware import TenantContextMiddleware
@@ -33,7 +34,7 @@ from app.api.routers.chat import router as chat_router
async def lifespan(app: FastAPI):
# Create upload directories
for subdir in ("step_files", "excel_files", "thumbnails", "renders", "blend-templates"):
Path(settings.upload_dir, subdir).mkdir(parents=True, exist_ok=True)
ensure_group_writable_dir(Path(settings.upload_dir, subdir))
# Start WebSocket Redis subscriber
await ws_manager.start_redis_subscriber()
yield
@@ -59,7 +60,7 @@ app.add_middleware(TenantContextMiddleware)
# Mount static files for thumbnails (dir created in lifespan; skip if not writable)
thumbnails_dir = Path(settings.upload_dir) / "thumbnails"
try:
thumbnails_dir.mkdir(parents=True, exist_ok=True)
ensure_group_writable_dir(thumbnails_dir)
app.mount("/thumbnails", StaticFiles(directory=str(thumbnails_dir)), name="thumbnails")
except (PermissionError, OSError):
pass # Running outside Docker without upload dir — thumbnails won't be served statically
@@ -67,7 +68,7 @@ except (PermissionError, OSError):
# Mount static files for renders
renders_dir = Path(settings.upload_dir) / "renders"
try:
renders_dir.mkdir(parents=True, exist_ok=True)
ensure_group_writable_dir(renders_dir)
app.mount("/renders", StaticFiles(directory=str(renders_dir)), name="renders")
except (PermissionError, OSError):
pass
+2
View File
@@ -2,6 +2,7 @@
from app.domains.rendering.models import (
OUTPUT_TYPE_ARTIFACT_KINDS,
OUTPUT_TYPE_WORKFLOW_FAMILIES,
OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES,
OutputType,
VALID_RENDER_BACKENDS,
)
@@ -9,5 +10,6 @@ __all__ = [
"OutputType",
"VALID_RENDER_BACKENDS",
"OUTPUT_TYPE_WORKFLOW_FAMILIES",
"OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES",
"OUTPUT_TYPE_ARTIFACT_KINDS",
]
+2 -6
View File
@@ -13,6 +13,7 @@ from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from app.config import settings
from app.core.render_paths import result_path_to_public_url
logger = logging.getLogger(__name__)
@@ -774,12 +775,7 @@ async def _tool_find_product_renders(
renders = []
for r in rows:
path = r["result_path"] or ""
# Convert internal path to servable URL
url = None
if "/renders/" in path:
url = path[path.index("/renders/"):]
elif "/thumbnails/" in path:
url = path[path.index("/thumbnails/"):]
url = result_path_to_public_url(path, require_exists=True)
# Effective material override (line overrides output type)
material = r["line_material_override"] or r["ot_material_override"] or None
+144 -18
View File
@@ -20,6 +20,9 @@ import re
# ── Part key generation ───────────────────────────────────────────────────────
_AF_RE = re.compile(r'_AF\d+$', re.IGNORECASE)
_AF_VARIANT_RE = re.compile(r"_AF\d+(_ASM)?_?$", re.IGNORECASE)
_LEGACY_MATERIAL_PREFIX = "SCHAEFFLER_"
_CURRENT_MATERIAL_PREFIX = "HARTOMAT_"
def generate_part_key(
@@ -53,6 +56,95 @@ def generate_part_key(
return key
def normalize_material_name(material_name: str | None) -> str | None:
    """Normalize persisted legacy material names to the current HartOMat prefix.

    Non-string and blank inputs yield ``None``; everything else is stripped,
    and a legacy ``SCHAEFFLER_`` prefix (case-insensitive) is rewritten to
    ``HARTOMAT_`` while preserving the remainder verbatim.
    """
    if not isinstance(material_name, str):
        return None
    cleaned = material_name.strip()
    if not cleaned:
        return None
    if cleaned.upper().startswith(_LEGACY_MATERIAL_PREFIX):
        suffix = cleaned[len(_LEGACY_MATERIAL_PREFIX):]
        return _CURRENT_MATERIAL_PREFIX + suffix
    return cleaned
def _normalize_semantic_source_name(raw_name: str) -> str:
    """Collapse exporter-only suffixes back to their semantic OCC source name."""
    name = (raw_name or "").strip()
    # Drop a trailing ".NNN" numeric dedup suffix, if present.
    name = re.sub(r"\.\d{3}$", "", name)
    # Repeatedly peel `_AF<n>[_ASM][_]` exporter suffixes until stable.
    while True:
        stripped = _AF_VARIANT_RE.sub("", name)
        if stripped == name:
            return name
        name = stripped
def _slugify_semantic_source_name(raw_name: str) -> str:
    """Slugify a semantic source name: snake_case, alphanumeric, max 50 chars."""
    semantic = _normalize_semantic_source_name(raw_name)
    # Break camelCase boundaries with underscores before lowercasing.
    spaced = re.sub(r"([a-z])([A-Z])", r"\1_\2", semantic)
    slug = re.sub(r"[^a-z0-9]+", "_", spaced.lower())
    return slug.strip("_")[:50]
def _derive_semantic_alias_key(part_key: str, source_name: str) -> str | None:
    """Return the semantic alias for deduplicated instance keys, if any.

    The alias applies only when *part_key* is the alias plus an instance
    suffix (``_2``, ``_3``, …) or an exporter ``_AF<n>[_ASM]`` suffix.
    """
    alias_key = _slugify_semantic_source_name(source_name)
    if not alias_key or alias_key == part_key:
        return None
    pattern = rf"{re.escape(alias_key)}(?:_[2-9]\d*|_af\d+(?:_asm)?)"
    if re.fullmatch(pattern, part_key, flags=re.IGNORECASE):
        return alias_key
    return None
def _alias_priority(part_key: str, source_name: str) -> tuple[int, int, int]:
match = re.fullmatch(r".+_(\d+)$", part_key)
suffix_number = int(match.group(1)) if match else 1_000_000
return (suffix_number, len(source_name or ""), len(part_key))
def _iter_lookup_keys(part_key: str, fallback_part_keys: tuple[str, ...] = ()) -> tuple[str, ...]:
ordered_keys: list[str] = []
for key in (part_key, *fallback_part_keys):
if key and key not in ordered_keys:
ordered_keys.append(key)
return tuple(ordered_keys)
def _build_part_entry(
    *,
    part_key: str,
    source_name: str,
    prim_path: str | None,
    manual: dict,
    resolved: dict,
    source: dict,
    fallback_part_keys: tuple[str, ...] = (),
) -> dict:
    """Assemble one scene-manifest part entry with its resolved material.

    Delegates material/provenance resolution to ``_resolve_material`` and
    marks the part unassigned when no material could be found.
    """
    material, provenance = _resolve_material(
        part_key,
        source_name,
        manual,
        resolved,
        source,
        fallback_part_keys=fallback_part_keys,
    )
    return {
        "part_key": part_key,
        "source_name": source_name,
        "prim_path": prim_path,
        "effective_material": material,
        "assignment_provenance": provenance,
        "is_unassigned": material is None,
    }
# ── Scene manifest building ───────────────────────────────────────────────────
def build_scene_manifest(cad_file, usd_asset=None) -> dict:
@@ -65,7 +157,8 @@ def build_scene_manifest(cad_file, usd_asset=None) -> dict:
Material assignment priority per part:
1. `manual_material_overrides[part_key]` provenance "manual"
2. `resolved_material_assignments[part_key]["material"]` provenance "auto"
2. `resolved_material_assignments[part_key]["canonical_material"]` (or legacy
`["material"]`) provenance "auto"
3. substring match in `source_material_assignments` against source_name provenance "source"
4. None, is_unassigned=True provenance "default"
"""
@@ -80,25 +173,51 @@ def build_scene_manifest(cad_file, usd_asset=None) -> dict:
if resolved:
# Build from resolved assignments (USD pipeline has run)
alias_candidates: dict[str, tuple[tuple[int, int, int], dict]] = {}
for part_key, meta in resolved.items():
source_name = meta.get("source_name", "") if isinstance(meta, dict) else ""
prim_path = meta.get("prim_path") if isinstance(meta, dict) else None
effective_material, provenance = _resolve_material(
part_key, source_name, manual, resolved, source
part_entry = _build_part_entry(
part_key=part_key,
source_name=source_name,
prim_path=prim_path,
manual=manual,
resolved=resolved,
source=source,
)
is_unassigned = effective_material is None
parts.append(part_entry)
if part_entry["is_unassigned"]:
unassigned_parts.append(part_key)
parts.append({
"part_key": part_key,
alias_key = _derive_semantic_alias_key(part_key, source_name)
if alias_key is None or alias_key in resolved:
continue
candidate = {
"part_key": alias_key,
"source_name": source_name,
"prim_path": prim_path,
"effective_material": effective_material,
"assignment_provenance": provenance,
"is_unassigned": is_unassigned,
})
if is_unassigned:
unassigned_parts.append(part_key)
"fallback_part_keys": (part_key,),
}
candidate_priority = _alias_priority(part_key, source_name)
current = alias_candidates.get(alias_key)
if current is None or candidate_priority < current[0]:
alias_candidates[alias_key] = (candidate_priority, candidate)
for alias_key, (_, candidate) in alias_candidates.items():
alias_entry = _build_part_entry(
part_key=candidate["part_key"],
source_name=candidate["source_name"],
prim_path=candidate["prim_path"],
manual=manual,
resolved=resolved,
source=source,
fallback_part_keys=candidate["fallback_part_keys"],
)
parts.append(alias_entry)
if alias_entry["is_unassigned"]:
unassigned_parts.append(alias_key)
elif cad_file.parsed_objects:
# Fall back to parsed_objects from STEP extraction
@@ -149,23 +268,30 @@ def _resolve_material(
manual: dict,
resolved: dict,
source: dict,
fallback_part_keys: tuple[str, ...] = (),
) -> tuple[str | None, str]:
"""Return (material_name, provenance) for one part using priority order."""
lookup_keys = _iter_lookup_keys(part_key, fallback_part_keys)
# 1. Manual override
if part_key in manual and manual[part_key]:
return str(manual[part_key]), "manual"
for lookup_key in lookup_keys:
if lookup_key in manual and manual[lookup_key]:
return normalize_material_name(str(manual[lookup_key])), "manual"
# 2. Auto-resolved from USD pipeline
meta = resolved.get(part_key)
if isinstance(meta, dict) and meta.get("material"):
return str(meta["material"]), "auto"
for lookup_key in lookup_keys:
meta = resolved.get(lookup_key)
if isinstance(meta, dict):
canonical = normalize_material_name(meta.get("canonical_material") or meta.get("material"))
if canonical:
return canonical, "auto"
# 3. Substring match in source_material_assignments against source_name
sn_lower = source_name.lower()
for src_key, src_mat in source.items():
if src_key.lower() in sn_lower or sn_lower in src_key.lower():
if src_mat:
return str(src_mat), "source"
return normalize_material_name(str(src_mat)), "source"
# 4. Unassigned
return None, "default"
+298 -55
View File
@@ -4,6 +4,7 @@ Used by the render-worker Celery container (which has BLENDER_BIN set and
cadquery installed). The backend and standard workers fall back to the Pillow
placeholder when this service is unavailable.
"""
import hashlib
import json
import logging
import os
@@ -12,16 +13,175 @@ import signal
import subprocess
from pathlib import Path
from app.core.render_paths import ensure_group_writable_dir
logger = logging.getLogger(__name__)
def _glb_from_step(step_path: Path, glb_path: Path, tessellation_engine: str = "occ") -> None:
def resolve_tessellation_settings(
profile: str = "render",
tessellation_engine: str | None = None,
) -> tuple[float, float, str]:
"""Resolve tessellation settings from system settings for a given profile."""
profile_key = "scene" if profile == "scene" else "render"
defaults = {
"scene": (0.1, 0.1),
"render": (0.03, 0.05),
}
default_linear, default_angular = defaults[profile_key]
try:
from app.services.step_processor import _get_all_settings
settings = _get_all_settings()
linear_deflection = float(
settings.get(f"{profile_key}_linear_deflection", str(default_linear))
)
angular_deflection = float(
settings.get(f"{profile_key}_angular_deflection", str(default_angular))
)
effective_engine = (
tessellation_engine
or settings.get("tessellation_engine", "occ")
or "occ"
)
return linear_deflection, angular_deflection, effective_engine
except Exception as exc:
logger.warning(
"Could not resolve %s tessellation settings: %s; using defaults",
profile_key,
exc,
)
return default_linear, default_angular, tessellation_engine or "occ"
def build_tessellated_glb_path(
step_path: Path,
profile: str,
tessellation_engine: str,
linear_deflection: float,
angular_deflection: float,
) -> Path:
"""Build a settings-sensitive GLB path to avoid stale mesh reuse."""
signature = hashlib.sha1(
f"{profile}:{tessellation_engine}:{linear_deflection:.6f}:{angular_deflection:.6f}".encode(
"utf-8"
)
).hexdigest()[:10]
return step_path.parent / f"{step_path.stem}_{profile}_{signature}.glb"
def _stringify_optional_arg(value: object) -> str:
if value in (None, ""):
return ""
return str(value)
def _resolve_render_samples(engine: str, samples: int | None) -> int:
if samples is not None:
return int(samples)
effective_engine = (engine or "cycles").lower()
setting_key = (
"blender_eevee_samples"
if effective_engine == "eevee"
else "blender_cycles_samples"
)
try:
from app.services.step_processor import _get_all_settings
settings = _get_all_settings()
return int(settings[setting_key])
except Exception as exc:
logger.warning(
"Could not resolve Blender samples from settings for engine=%s: %s; "
"using legacy fallback",
effective_engine,
exc,
)
return 64 if effective_engine == "eevee" else 256
def build_turntable_ffmpeg_cmd(
frames_dir: Path,
output_path: Path,
*,
fps: int = 30,
bg_color: str = "",
width: int = 1920,
height: int = 1080,
ffmpeg_bin: str | None = None,
) -> list[str]:
"""Build the canonical FFmpeg command for turntable MP4 composition.
Legacy and graph/shadow paths must share this logic so template-backed
turntable outputs do not drift due to encoding differences.
"""
ffmpeg = ffmpeg_bin or shutil.which("ffmpeg") or "ffmpeg"
if any(frames_dir.glob("frame_*.png")):
frame_pattern = str(frames_dir / "frame_%04d.png")
else:
frame_pattern = str(frames_dir / "%04d.png")
if bg_color:
hex_color = bg_color.lstrip("#") or "ffffff"
return [
ffmpeg,
"-y",
"-framerate",
str(fps),
"-i",
frame_pattern,
"-f",
"lavfi",
"-i",
f"color=c=0x{hex_color}:size={width}x{height}:rate={fps}",
"-filter_complex",
"[1:v][0:v]overlay=0:0:shortest=1",
"-vcodec",
"libx264",
"-pix_fmt",
"yuv420p",
"-crf",
"18",
"-movflags",
"+faststart",
str(output_path),
]
return [
ffmpeg,
"-y",
"-framerate",
str(fps),
"-i",
frame_pattern,
"-vcodec",
"libx264",
"-pix_fmt",
"yuv420p",
"-crf",
"18",
"-movflags",
"+faststart",
str(output_path),
]
def _glb_from_step(
step_path: Path,
glb_path: Path,
tessellation_engine: str = "occ",
tessellation_profile: str = "render",
) -> None:
"""Convert STEP → GLB via OCC or GMSH (export_step_to_gltf.py, no Blender needed)."""
import subprocess
import sys as _sys
linear_deflection = 0.3
angular_deflection = 0.5
linear_deflection, angular_deflection, effective_engine = resolve_tessellation_settings(
tessellation_profile,
tessellation_engine,
)
scripts_dir = Path(os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts"))
script_path = scripts_dir / "export_step_to_gltf.py"
@@ -32,7 +192,7 @@ def _glb_from_step(step_path: Path, glb_path: Path, tessellation_engine: str = "
"--output_path", str(glb_path),
"--linear_deflection", str(linear_deflection),
"--angular_deflection", str(angular_deflection),
"--tessellation_engine", tessellation_engine,
"--tessellation_engine", effective_engine,
]
result = subprocess.run(cmd, capture_output=True, text=True, timeout=120)
for line in result.stdout.splitlines():
@@ -44,7 +204,15 @@ def _glb_from_step(step_path: Path, glb_path: Path, tessellation_engine: str = "
f"export_step_to_gltf.py failed (exit {result.returncode}).\n"
f"STDERR: {result.stderr[-1000:]}"
)
logger.info("GLB converted: %s (%d KB)", glb_path.name, glb_path.stat().st_size // 1024)
logger.info(
"GLB converted: %s (%d KB) with %s tessellation linear=%s angular=%s engine=%s",
glb_path.name,
glb_path.stat().st_size // 1024,
tessellation_profile,
linear_deflection,
angular_deflection,
effective_engine,
)
def find_blender() -> str:
@@ -67,9 +235,9 @@ def render_still(
width: int = 512,
height: int = 512,
engine: str = "cycles",
samples: int = 256,
samples: int | None = None,
smooth_angle: int = 30,
cycles_device: str = "auto",
cycles_device: str = "gpu",
transparent_bg: bool = False,
part_colors: dict | None = None,
template_path: str | None = None,
@@ -92,9 +260,12 @@ def render_still(
log_callback: "Callable[[str], None] | None" = None,
usd_path: "Path | None" = None,
tessellation_engine: str = "occ",
tessellation_profile: str = "render",
focal_length_mm: float | None = None,
sensor_width_mm: float | None = None,
material_override: str | None = None,
template_inputs: dict | None = None,
**ignored_control_kwargs,
) -> dict:
"""Convert STEP → GLB (OCC or GMSH) → PNG (Blender subprocess).
@@ -120,8 +291,18 @@ def render_still(
t0 = time.monotonic()
if ignored_control_kwargs:
logger.debug(
"render_still ignoring unsupported control kwargs: %s",
sorted(ignored_control_kwargs.keys()),
)
if isinstance(usd_path, str) and usd_path.strip():
usd_path = Path(usd_path)
actual_samples = _resolve_render_samples(engine, samples)
# 1. GLB conversion (OCC) — skipped when usd_path is provided
glb_path = step_path.parent / f"{step_path.stem}_thumbnail.glb"
use_usd = bool(usd_path and usd_path.exists())
t_glb = time.monotonic()
@@ -129,15 +310,39 @@ def render_still(
logger.info("[render_blender] using USD path: %s", usd_path)
glb_size_bytes = 0
else:
linear_deflection, angular_deflection, effective_engine = resolve_tessellation_settings(
tessellation_profile,
tessellation_engine,
)
glb_path = build_tessellated_glb_path(
step_path,
tessellation_profile,
effective_engine,
linear_deflection,
angular_deflection,
)
if not glb_path.exists() or glb_path.stat().st_size == 0:
_glb_from_step(step_path, glb_path, tessellation_engine)
_glb_from_step(
step_path,
glb_path,
tessellation_engine=effective_engine,
tessellation_profile=tessellation_profile,
)
else:
logger.info("GLB local hit: %s (%d KB)", glb_path.name, glb_path.stat().st_size // 1024)
logger.info(
"GLB local hit: %s (%d KB) profile=%s linear=%s angular=%s engine=%s",
glb_path.name,
glb_path.stat().st_size // 1024,
tessellation_profile,
linear_deflection,
angular_deflection,
effective_engine,
)
glb_size_bytes = glb_path.stat().st_size if glb_path.exists() else 0
glb_duration_s = round(time.monotonic() - t_glb, 2)
# 2. Blender render
output_path.parent.mkdir(parents=True, exist_ok=True)
ensure_group_writable_dir(output_path.parent)
env = dict(os.environ)
if engine == "eevee":
@@ -149,6 +354,7 @@ def render_still(
})
else:
env["EGL_PLATFORM"] = "surfaceless"
env["BLENDER_DEFAULT_SAMPLES"] = str(actual_samples)
def _build_cmd(eng: str) -> list:
# Pass "" as glb_path when using USD — blender_render.py reads --usd-path instead
@@ -161,7 +367,7 @@ def render_still(
glb_arg,
str(output_path),
str(width), str(height),
eng, str(samples), str(smooth_angle),
eng, str(actual_samples), str(smooth_angle),
cycles_device,
"1" if transparent_bg else "0",
template_path or "",
@@ -172,9 +378,9 @@ def render_still(
"1" if lighting_only else "0",
"1" if shadow_catcher else "0",
str(rotation_x), str(rotation_y), str(rotation_z),
noise_threshold or "", denoiser or "",
denoising_input_passes or "", denoising_prefilter or "",
denoising_quality or "", denoising_use_gpu or "",
_stringify_optional_arg(noise_threshold), _stringify_optional_arg(denoiser),
_stringify_optional_arg(denoising_input_passes), _stringify_optional_arg(denoising_prefilter),
_stringify_optional_arg(denoising_quality), _stringify_optional_arg(denoising_use_gpu),
]
if use_usd:
cmd += ["--usd-path", str(usd_path)]
@@ -188,6 +394,8 @@ def render_still(
cmd += ["--sensor-width", str(sensor_width_mm)]
if material_override:
cmd += ["--material-override", material_override]
if template_inputs:
cmd += ["--template-inputs", json.dumps(template_inputs)]
return cmd
def _run(eng: str) -> tuple[int, list[str], list[str]]:
@@ -305,7 +513,7 @@ def render_turntable_to_file(
engine: str = "cycles",
samples: int = 128,
smooth_angle: int = 30,
cycles_device: str = "auto",
cycles_device: str = "gpu",
transparent_bg: bool = False,
bg_color: str = "",
turntable_axis: str = "world_z",
@@ -323,9 +531,11 @@ def render_turntable_to_file(
camera_orbit: bool = True,
usd_path: "Path | None" = None,
tessellation_engine: str = "occ",
tessellation_profile: str = "render",
focal_length_mm: float | None = None,
sensor_width_mm: float | None = None,
material_override: str | None = None,
template_inputs: dict | None = None,
) -> dict:
"""Render a turntable animation: STEP → STL → N frames (Blender) → mp4 (ffmpeg).
@@ -357,25 +567,48 @@ def render_turntable_to_file(
t0 = time.monotonic()
# 1. GLB conversion (OCC) — skipped when usd_path is provided
glb_path = step_path.parent / f"{step_path.stem}_thumbnail.glb"
use_usd = bool(usd_path and usd_path.exists())
t_glb = time.monotonic()
if use_usd:
logger.info("[render_blender] turntable using USD path: %s", usd_path)
else:
linear_deflection, angular_deflection, effective_engine = resolve_tessellation_settings(
tessellation_profile,
tessellation_engine,
)
glb_path = build_tessellated_glb_path(
step_path,
tessellation_profile,
effective_engine,
linear_deflection,
angular_deflection,
)
if not glb_path.exists() or glb_path.stat().st_size == 0:
_glb_from_step(step_path, glb_path, tessellation_engine)
_glb_from_step(
step_path,
glb_path,
tessellation_engine=effective_engine,
tessellation_profile=tessellation_profile,
)
else:
logger.info("GLB local hit: %s (%d KB)", glb_path.name, glb_path.stat().st_size // 1024)
logger.info(
"GLB local hit: %s (%d KB) profile=%s linear=%s angular=%s engine=%s",
glb_path.name,
glb_path.stat().st_size // 1024,
tessellation_profile,
linear_deflection,
angular_deflection,
effective_engine,
)
glb_duration_s = round(time.monotonic() - t_glb, 2)
# 2. Render frames with Blender
frames_dir = output_path.parent / f"_frames_{output_path.stem}"
if frames_dir.exists():
_shutil.rmtree(frames_dir, ignore_errors=True)
frames_dir.mkdir(parents=True, exist_ok=True)
output_path.parent.mkdir(parents=True, exist_ok=True)
ensure_group_writable_dir(frames_dir)
ensure_group_writable_dir(output_path.parent)
env = dict(os.environ)
env["EGL_PLATFORM"] = "surfaceless"
@@ -416,6 +649,8 @@ def render_turntable_to_file(
cmd += ["--sensor-width", str(sensor_width_mm)]
if material_override:
cmd += ["--material-override", material_override]
if template_inputs:
cmd += ["--template-inputs", json.dumps(template_inputs)]
log_lines: list[str] = []
@@ -458,34 +693,15 @@ def render_turntable_to_file(
# 3. Compose frames → mp4 with ffmpeg
t_ffmpeg = time.monotonic()
ffmpeg_cmd = [
ffmpeg_bin,
"-y",
"-framerate", str(fps),
"-i", str(frames_dir / "frame_%04d.png"),
"-vcodec", "libx264",
"-pix_fmt", "yuv420p",
"-crf", "18",
"-movflags", "+faststart",
str(output_path),
]
# If bg_color is set and transparent_bg is True, overlay frames on solid bg
if bg_color and transparent_bg:
hex_color = bg_color.lstrip("#")
r, g, b = int(hex_color[0:2], 16), int(hex_color[2:4], 16), int(hex_color[4:6], 16)
ffmpeg_cmd = [
ffmpeg_bin, "-y",
"-framerate", str(fps),
"-i", str(frames_dir / "frame_%04d.png"),
"-f", "lavfi", "-i", f"color=c=0x{hex_color}:size={width}x{height}:rate={fps}",
"-filter_complex", "[1:v][0:v]overlay=0:0:shortest=1",
"-vcodec", "libx264",
"-pix_fmt", "yuv420p",
"-crf", "18",
"-movflags", "+faststart",
str(output_path),
]
ffmpeg_cmd = build_turntable_ffmpeg_cmd(
frames_dir,
output_path,
fps=fps,
bg_color=bg_color if transparent_bg else "",
width=width,
height=height,
ffmpeg_bin=ffmpeg_bin,
)
ffmpeg_proc = subprocess.run(
ffmpeg_cmd, capture_output=True, text=True, timeout=300
@@ -530,7 +746,7 @@ def render_cinematic_to_file(
engine: str = "cycles",
samples: int = 128,
smooth_angle: int = 30,
cycles_device: str = "auto",
cycles_device: str = "gpu",
transparent_bg: bool = False,
part_colors: dict | None = None,
template_path: str | None = None,
@@ -545,9 +761,11 @@ def render_cinematic_to_file(
rotation_z: float = 0.0,
usd_path: "Path | None" = None,
tessellation_engine: str = "occ",
tessellation_profile: str = "render",
focal_length_mm: float | None = None,
sensor_width_mm: float | None = None,
material_override: str | None = None,
template_inputs: dict | None = None,
log_callback: "Callable[[str], None] | None" = None,
) -> dict:
"""Render a cinematic highlight animation: STEP -> GLB/USD -> 480 frames @ 24fps (Blender) -> mp4 (ffmpeg).
@@ -587,25 +805,48 @@ def render_cinematic_to_file(
t0 = time.monotonic()
# 1. GLB conversion (OCC) — skipped when usd_path is provided
glb_path = step_path.parent / f"{step_path.stem}_thumbnail.glb"
use_usd = bool(usd_path and usd_path.exists())
t_glb = time.monotonic()
if use_usd:
logger.info("[render_blender] cinematic using USD path: %s", usd_path)
else:
linear_deflection, angular_deflection, effective_engine = resolve_tessellation_settings(
tessellation_profile,
tessellation_engine,
)
glb_path = build_tessellated_glb_path(
step_path,
tessellation_profile,
effective_engine,
linear_deflection,
angular_deflection,
)
if not glb_path.exists() or glb_path.stat().st_size == 0:
_glb_from_step(step_path, glb_path, tessellation_engine)
_glb_from_step(
step_path,
glb_path,
tessellation_engine=effective_engine,
tessellation_profile=tessellation_profile,
)
else:
logger.info("GLB local hit: %s (%d KB)", glb_path.name, glb_path.stat().st_size // 1024)
logger.info(
"GLB local hit: %s (%d KB) profile=%s linear=%s angular=%s engine=%s",
glb_path.name,
glb_path.stat().st_size // 1024,
tessellation_profile,
linear_deflection,
angular_deflection,
effective_engine,
)
glb_duration_s = round(time.monotonic() - t_glb, 2)
# 2. Render frames with Blender
frames_dir = output_path.parent / f"_frames_{output_path.stem}"
if frames_dir.exists():
_shutil.rmtree(frames_dir, ignore_errors=True)
frames_dir.mkdir(parents=True, exist_ok=True)
output_path.parent.mkdir(parents=True, exist_ok=True)
ensure_group_writable_dir(frames_dir)
ensure_group_writable_dir(output_path.parent)
env = dict(os.environ)
env["EGL_PLATFORM"] = "surfaceless"
@@ -645,6 +886,8 @@ def render_cinematic_to_file(
cmd += ["--sensor-width", str(sensor_width_mm)]
if material_override:
cmd += ["--material-override", material_override]
if template_inputs:
cmd += ["--template-inputs", json.dumps(template_inputs)]
log_lines: list[str] = []
+160 -30
View File
@@ -10,7 +10,9 @@ import logging
import uuid
from dataclasses import dataclass, field
from pathlib import Path
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, Any
from app.core.render_paths import ensure_group_writable_dir
if TYPE_CHECKING:
from app.models.cad_file import CadFile
@@ -18,6 +20,10 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)
class MissingCadResourceError(FileNotFoundError):
"""Terminal CAD resource error that should not be retried by Celery tasks."""
def build_part_colors(
cad_parsed_objects: list[str],
cad_part_materials: list[dict],
@@ -1023,8 +1029,12 @@ def _get_all_settings() -> dict[str, str]:
"blender_eevee_samples": "64",
"thumbnail_format": "jpg",
"blender_smooth_angle": "30",
"cycles_device": "auto",
"cycles_device": "gpu",
"tessellation_engine": "occ",
"scene_linear_deflection": "0.1",
"scene_angular_deflection": "0.1",
"render_linear_deflection": "0.03",
"render_angular_deflection": "0.05",
}
try:
from app.config import settings as app_settings
@@ -1046,6 +1056,23 @@ def _generate_thumbnail(
cad_file_id: str,
upload_dir: str,
part_colors: dict[str, str] | None = None,
*,
renderer: str | None = None,
render_engine: str | None = None,
samples: int | None = None,
width: int | None = None,
height: int | None = None,
transparent_bg: bool | None = None,
target_collection: str = "Product",
material_library_path: str | None = None,
material_map: dict[str, str] | None = None,
part_names_ordered: list[str] | None = None,
lighting_only: bool = False,
shadow_catcher: bool = False,
usd_path: Path | None = None,
focal_length_mm: float | None = None,
sensor_width_mm: float | None = None,
material_override: str | None = None,
) -> tuple[Path | None, dict]:
"""Generate thumbnail using the configured renderer.
@@ -1054,12 +1081,20 @@ def _generate_thumbnail(
"""
import time
out_dir = Path(upload_dir) / "thumbnails"
out_dir.mkdir(parents=True, exist_ok=True)
ensure_group_writable_dir(out_dir)
settings = _get_all_settings()
renderer = settings["thumbnail_renderer"]
fmt = settings["thumbnail_format"] # "jpg" or "png"
requested_renderer = renderer or settings["thumbnail_renderer"]
active_renderer = requested_renderer
fmt = settings["thumbnail_format"] # "jpg" or "png"
ext = "jpg" if fmt == "jpg" else "png"
if requested_renderer == "threejs":
# The historical Three.js thumbnail renderer was removed from the backend.
# Keep the workflow node executable by falling back to the maintained Blender path
# while preserving the requested renderer in the render log for observability.
active_renderer = "blender"
fmt = "png"
ext = "png"
# Clean up any existing thumbnail for this cad_file_id (either extension)
for old_ext in ("png", "jpg"):
@@ -1073,28 +1108,39 @@ def _generate_thumbnail(
# Build the base render_log with the settings snapshot
render_log: dict = {
"renderer": renderer,
"renderer": requested_renderer,
"format": fmt,
"started_at": time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()),
}
if renderer == "blender":
engine = settings["blender_engine"]
if active_renderer == "blender":
engine = render_engine or settings["blender_engine"]
resolved_samples = int(samples) if samples is not None else int(settings[f"blender_{engine}_samples"])
resolved_width = int(width) if width is not None else 512
resolved_height = int(height) if height is not None else 512
resolved_transparent_bg = bool(transparent_bg) if transparent_bg is not None else False
render_log.update({
"engine": engine,
"samples": int(settings[f"blender_{engine}_samples"]),
"samples": resolved_samples,
"smooth_angle": int(settings["blender_smooth_angle"]),
"cycles_device": settings["cycles_device"],
"width": 512,
"height": 512,
"width": resolved_width,
"height": resolved_height,
"transparent_bg": resolved_transparent_bg,
})
logger.info(f"Thumbnail renderer={renderer}, format={fmt}")
if requested_renderer != active_renderer:
render_log["renderer_backend"] = active_renderer
render_log["renderer_fallback_reason"] = "threejs_renderer_removed_using_blender_compat"
logger.info(f"Thumbnail renderer={requested_renderer}, format={fmt}")
rendered_png: Path | None = None
service_data: dict = {}
if renderer == "blender":
engine = settings["blender_engine"]
samples = int(settings[f"blender_{engine}_samples"])
if active_renderer == "blender":
engine = render_engine or settings["blender_engine"]
resolved_samples = int(samples) if samples is not None else int(settings[f"blender_{engine}_samples"])
resolved_width = int(width) if width is not None else 512
resolved_height = int(height) if height is not None else 512
resolved_transparent_bg = bool(transparent_bg) if transparent_bg is not None else False
from app.services.render_blender import is_blender_available, render_still
if is_blender_available():
@@ -1102,11 +1148,25 @@ def _generate_thumbnail(
service_data = render_still(
step_path=step_path,
output_path=tmp_png,
width=resolved_width,
height=resolved_height,
engine=engine,
samples=samples,
samples=resolved_samples,
smooth_angle=int(settings["blender_smooth_angle"]),
cycles_device=settings["cycles_device"],
transparent_bg=resolved_transparent_bg,
target_collection=target_collection,
material_library_path=material_library_path,
material_map=material_map,
part_names_ordered=part_names_ordered,
lighting_only=lighting_only,
shadow_catcher=shadow_catcher,
tessellation_engine=settings["tessellation_engine"],
usd_path=usd_path,
focal_length_mm=focal_length_mm,
sensor_width_mm=sensor_width_mm,
material_override=material_override,
tessellation_profile="scene",
)
rendered_png = tmp_png if tmp_png.exists() else None
except Exception as exc:
@@ -1133,8 +1193,7 @@ def _generate_thumbnail(
def _finalise_image(src: Path, dst: Path) -> Path | None:
"""Move src image to dst. When dst has a .webp suffix, convert via Pillow
(quality=90, method=4) for 50-70 % smaller files. Otherwise output PNG."""
"""Move src image to dst, converting the PNG intermediate when needed."""
if dst.suffix.lower() == ".webp":
try:
from PIL import Image
@@ -1148,13 +1207,52 @@ def _finalise_image(src: Path, dst: Path) -> Path | None:
out = dst.with_suffix(".png")
src.rename(out)
return out
if dst.suffix.lower() in {".jpg", ".jpeg"}:
try:
from PIL import Image
img = Image.open(str(src))
if img.mode in {"RGBA", "LA"} or (img.mode == "P" and "transparency" in img.info):
background = Image.new("RGBA", img.size, (255, 255, 255, 255))
img = Image.alpha_composite(background, img.convert("RGBA")).convert("RGB")
else:
img = img.convert("RGB")
out = dst.with_suffix(".jpg")
img.save(str(out), "JPEG", quality=95, subsampling=0)
src.unlink(missing_ok=True)
return out
except Exception:
logger.warning("JPEG conversion failed — falling back to PNG")
out = dst.with_suffix(".png")
src.rename(out)
return out
out = dst.with_suffix(".png")
src.rename(out)
return out
def regenerate_cad_thumbnail(cad_file_id: str, part_colors: dict[str, str]) -> bool:
def regenerate_cad_thumbnail(
cad_file_id: str,
part_colors: dict[str, str],
*,
renderer: str | None = None,
render_engine: str | None = None,
samples: int | None = None,
width: int | None = None,
height: int | None = None,
transparent_bg: bool | None = None,
target_collection: str = "Product",
material_library_path: str | None = None,
material_map: dict[str, str] | None = None,
part_names_ordered: list[str] | None = None,
lighting_only: bool = False,
shadow_catcher: bool = False,
usd_path: Path | None = None,
focal_length_mm: float | None = None,
sensor_width_mm: float | None = None,
material_override: str | None = None,
) -> bool:
"""
Regenerate a thumbnail with per-part colours for an existing CAD file.
@@ -1170,13 +1268,18 @@ def regenerate_cad_thumbnail(cad_file_id: str, part_colors: dict[str, str]) -> b
with Session(db_engine) as session:
cad_file = session.get(CadFile, uuid.UUID(cad_file_id))
if not cad_file:
logger.error(f"CAD file not found: {cad_file_id}")
return False
message = f"CAD file not found: {cad_file_id}"
logger.warning(message)
raise MissingCadResourceError(message)
step_path = Path(cad_file.stored_path)
if not step_path.exists():
logger.error(f"STEP file not found: {step_path}")
return False
message = f"STEP file not found: {step_path}"
logger.warning(message)
cad_file.processing_status = ProcessingStatus.failed
cad_file.error_message = message[:2000]
session.commit()
raise MissingCadResourceError(message)
# Mark as processing so the activity page shows it as active
cad_file.processing_status = ProcessingStatus.processing
@@ -1184,7 +1287,26 @@ def regenerate_cad_thumbnail(cad_file_id: str, part_colors: dict[str, str]) -> b
try:
thumb_path, render_log = _generate_thumbnail(
step_path, cad_file_id, app_settings.upload_dir, part_colors=part_colors
step_path,
cad_file_id,
app_settings.upload_dir,
part_colors=part_colors,
renderer=renderer,
render_engine=render_engine,
samples=samples,
width=width,
height=height,
transparent_bg=transparent_bg,
target_collection=target_collection,
material_library_path=material_library_path,
material_map=material_map,
part_names_ordered=part_names_ordered,
lighting_only=lighting_only,
shadow_catcher=shadow_catcher,
usd_path=usd_path,
focal_length_mm=focal_length_mm,
sensor_width_mm=sensor_width_mm,
material_override=material_override,
)
if thumb_path:
cad_file.thumbnail_path = str(thumb_path)
@@ -1207,6 +1329,7 @@ def render_to_file(
part_colors: dict[str, str] | None = None,
width: int | None = None,
height: int | None = None,
smooth_angle: int | None = None,
transparent_bg: bool = False,
engine: str | None = None,
samples: int | None = None,
@@ -1234,6 +1357,7 @@ def render_to_file(
focal_length_mm: float | None = None,
sensor_width_mm: float | None = None,
material_override: str | None = None,
template_inputs: dict[str, Any] | None = None,
) -> tuple[bool, dict]:
"""Render a STEP file to a specific output path using current system settings.
@@ -1246,6 +1370,7 @@ def render_to_file(
part_colors: Optional {part_name: hex_color} map.
width: Optional render width (overrides system default).
height: Optional render height (overrides system default).
smooth_angle: Optional auto-smooth angle override in degrees.
transparent_bg: If True and renderer=blender+PNG, render with transparent background.
engine: Optional per-OT engine override ("cycles" | "eevee"), or None for system default.
samples: Optional per-OT samples override, or None for system default.
@@ -1262,7 +1387,7 @@ def render_to_file(
step = Path(step_path)
out = Path(output_path)
out.parent.mkdir(parents=True, exist_ok=True)
ensure_group_writable_dir(out.parent)
settings = _get_all_settings()
renderer = settings["thumbnail_renderer"]
@@ -1284,19 +1409,20 @@ def render_to_file(
if renderer == "blender":
actual_engine = engine or settings["blender_engine"]
actual_samples = samples or int(settings[f"blender_{actual_engine}_samples"])
actual_samples = int(samples) if samples is not None else int(settings[f"blender_{actual_engine}_samples"])
actual_cycles_device = cycles_device or settings["cycles_device"]
actual_smooth_angle = smooth_angle if smooth_angle is not None else int(settings["blender_smooth_angle"])
w = width or 512
h = height or 512
render_log.update({
"engine": actual_engine, "samples": actual_samples,
"smooth_angle": int(settings["blender_smooth_angle"]),
"smooth_angle": actual_smooth_angle,
"cycles_device": actual_cycles_device,
"width": w, "height": h,
})
extra = {
"engine": actual_engine, "samples": actual_samples,
"smooth_angle": int(settings["blender_smooth_angle"]),
"smooth_angle": actual_smooth_angle,
"cycles_device": actual_cycles_device,
"width": w, "height": h,
"transparent_bg": transparent_bg,
@@ -1314,6 +1440,9 @@ def render_to_file(
render_log["lighting_only"] = True
if shadow_catcher:
render_log["shadow_catcher"] = True
if template_inputs:
extra["template_inputs"] = template_inputs
render_log["template_inputs"] = template_inputs
if material_library_path and material_map:
extra["material_library_path"] = material_library_path
extra["material_map"] = material_map
@@ -1349,7 +1478,7 @@ def render_to_file(
output_path=tmp_png,
engine=actual_engine,
samples=actual_samples,
smooth_angle=int(settings["blender_smooth_angle"]),
smooth_angle=actual_smooth_angle,
cycles_device=actual_cycles_device,
width=w, height=h,
transparent_bg=transparent_bg,
@@ -1373,6 +1502,7 @@ def render_to_file(
focal_length_mm=focal_length_mm,
sensor_width_mm=sensor_width_mm,
material_override=material_override,
template_inputs=template_inputs,
)
rendered_png = tmp_png if tmp_png.exists() else None
except Exception as exc:
@@ -1400,7 +1530,7 @@ def render_to_file(
def _convert_to_gltf(step_path: Path, cad_file_id: str, upload_dir: str) -> Path | None:
"""Convert STEP to glTF for browser 3D viewer."""
out_dir = Path(upload_dir) / "gltf"
out_dir.mkdir(parents=True, exist_ok=True)
ensure_group_writable_dir(out_dir)
out_path = out_dir / f"{cad_file_id}.gltf"
try:
+17 -3
View File
@@ -15,6 +15,7 @@ import logging
from sqlalchemy import create_engine, select, and_, exists
from sqlalchemy.orm import Session
from app.domains.materials.library_paths import resolve_asset_library_blend_path
from app.models.render_template import RenderTemplate
from app.models.system_setting import SystemSetting
from app.domains.rendering.models import render_template_output_types
@@ -121,14 +122,27 @@ def get_material_library_path_for_session(session: Session) -> str | None:
row = session.execute(
select(AssetLibrary).where(AssetLibrary.is_active == True).limit(1) # noqa: E712
).scalar_one_or_none()
if row and row.blend_file_path:
return row.blend_file_path
if row:
resolved_path = resolve_asset_library_blend_path(
blend_file_path=row.blend_file_path,
asset_library_id=row.id,
)
if resolved_path:
if row.blend_file_path and resolved_path != row.blend_file_path:
logger.warning(
"Active asset library %s points to missing file %s; using %s instead",
row.id,
row.blend_file_path,
resolved_path,
)
return resolved_path
row = session.execute(
select(SystemSetting).where(SystemSetting.key == "material_library_path")
).scalar_one_or_none()
if row and row.value and row.value.strip():
return row.value.strip()
resolved_path = resolve_asset_library_blend_path(blend_file_path=row.value.strip())
return resolved_path or row.value.strip()
return None
+10 -1
View File
@@ -33,7 +33,16 @@ celery_app.conf.update(
"app.domains.rendering.tasks.*": {"queue": "asset_pipeline"},
"app.tasks.beat_tasks.*": {"queue": "step_processing"},
"app.tasks.ai_tasks.*": {"queue": "ai_validation"},
# Legacy task names (shim) — keep until old queued tasks drain
# Legacy task names (shim) — preserve the runtime queue split while
# old workflow configs and queued tasks still address app.tasks.step_tasks.*.
"app.tasks.step_tasks.render_step_thumbnail": {"queue": "asset_pipeline"},
"app.tasks.step_tasks.render_graph_thumbnail": {"queue": "asset_pipeline"},
"app.tasks.step_tasks.regenerate_thumbnail": {"queue": "asset_pipeline"},
"app.tasks.step_tasks.generate_gltf_geometry_task": {"queue": "asset_pipeline"},
"app.tasks.step_tasks.generate_usd_master_task": {"queue": "asset_pipeline"},
"app.tasks.step_tasks.reextract_rich_metadata_task": {"queue": "asset_pipeline"},
"app.tasks.step_tasks.reextract_cad_metadata": {"queue": "asset_pipeline"},
"app.tasks.step_tasks.render_order_line_task": {"queue": "asset_pipeline"},
"app.tasks.step_tasks.*": {"queue": "step_processing"},
},
beat_schedule={
+1
View File
@@ -11,6 +11,7 @@ from app.domains.pipeline.tasks.extract_metadata import ( # noqa: F401
reextract_rich_metadata_task,
)
from app.domains.pipeline.tasks.render_thumbnail import ( # noqa: F401
render_graph_thumbnail,
render_step_thumbnail,
regenerate_thumbnail,
)