chore: snapshot workflow migration progress

This commit is contained in:
2026-04-12 11:49:04 +02:00
parent 0cd02513d5
commit 3e810c74a3
163 changed files with 31774 additions and 2753 deletions
+14
View File
@@ -8,6 +8,18 @@ POSTGRES_PORT=5432
# Redis # Redis
REDIS_URL=redis://redis:6379/0 REDIS_URL=redis://redis:6379/0
# Prevent Python services from writing __pycache__ into bind-mounted source dirs.
PYTHONDONTWRITEBYTECODE=1
# Redirect any unavoidable bytecode writes away from bind mounts.
PYTHONPYCACHEPREFIX=/tmp/pycache
# Run Celery containers as your host user to avoid root-owned files on bind mounts.
# Typical Linux value: `id -u`
APP_UID=1000
# Docker defaults:
# - inside Compose, service discovery uses `postgres` / `redis`
# - host-side tools and tests are normalized to `localhost` automatically by backend/app/config.py
# JWT # JWT
JWT_SECRET_KEY=your-secret-key-here-change-in-production JWT_SECRET_KEY=your-secret-key-here-change-in-production
JWT_ALGORITHM=HS256 JWT_ALGORITHM=HS256
@@ -39,5 +51,7 @@ MINIO_BUCKET=uploads
# Blender >= 5.0.1 must be installed on the host at /opt/blender # Blender >= 5.0.1 must be installed on the host at /opt/blender
# The render-worker container mounts it read-only via volumes: - /opt/blender:/opt/blender:ro # The render-worker container mounts it read-only via volumes: - /opt/blender:/opt/blender:ro
BLENDER_VERSION=5.0.1 BLENDER_VERSION=5.0.1
# Set explicitly to `cpu` on hosts without a usable NVIDIA/Cycles device to suppress startup warnings.
CYCLES_DEVICE=gpu
# Set to host path if Blender is not at /opt/blender: # Set to host path if Blender is not at /opt/blender:
# BLENDER_BIN=/usr/local/blender/blender # BLENDER_BIN=/usr/local/blender/blender
+16 -14
View File
@@ -1,28 +1,32 @@
node_modules/
.env .env
.env.local .env.local
.DS_Store .DS_Store
*.log *.log
# core dump files (not directories named 'core') .gstack/
# Local scratch area managed by scripts/repo_hygiene.sh
tmp/
# Core dumps managed by scripts/repo_hygiene.sh
/core /core
/blender-renderer/core /blender-renderer/core
backend/core
# Python cache # Python cache / environments managed by scripts/repo_hygiene.sh
__pycache__/ __pycache__/
*.py[cod] *.py[cod]
*.pyo *.pyo
.venv/
# Node / Vite build output backend/.venv/
dist/ .pytest_cache/
node_modules/ .coverage
# Celery beat schedule
celerybeat-schedule celerybeat-schedule
celerybeat.pid celerybeat.pid
# Test cache # Frontend dependencies and build output
.pytest_cache/ node_modules/
.coverage dist/
frontend/dist/
# IDE # IDE
.vscode/ .vscode/
@@ -37,9 +41,7 @@ celerybeat.pid
*.step *.step
*.stl *.stl
*.xls *.xls
+.xslx
*.csv *.csv
*.xlsx *.xlsx
*.blend1 *.blend1
backend/core
@@ -0,0 +1,41 @@
"""Add workflow rollout mode to output types.
Revision ID: 067
Revises: 066
"""
from alembic import op
import sqlalchemy as sa
revision = "067"
down_revision = "066"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add output_types.workflow_rollout_mode and backfill it per workflow.

    The new column defaults to 'legacy_only'. Rows linked to a workflow
    definition are then set from the workflow config's ui.execution_mode
    flag ('graph' / 'shadow'); any other value falls back to 'legacy_only'.
    """
    op.add_column(
        "output_types",
        sa.Column(
            "workflow_rollout_mode",
            sa.String(length=20),
            nullable=False,
            # Server default keeps existing rows valid under NOT NULL.
            server_default="legacy_only",
        ),
    )
    # Only rows joined to a workflow_definition are touched by the UPDATE;
    # unlinked rows keep the server default set above.
    op.execute(
        """
        UPDATE output_types AS ot
        SET workflow_rollout_mode = CASE
            WHEN coalesce(wd.config->'ui'->>'execution_mode', 'legacy') = 'graph' THEN 'graph'
            WHEN coalesce(wd.config->'ui'->>'execution_mode', 'legacy') = 'shadow' THEN 'shadow'
            ELSE 'legacy_only'
        END
        FROM workflow_definitions AS wd
        WHERE ot.workflow_definition_id = wd.id
        """
    )
def downgrade() -> None:
    """Drop the rollout-mode column; backfilled values are discarded."""
    op.drop_column("output_types", "workflow_rollout_mode")
@@ -0,0 +1,55 @@
"""Clean up persisted legacy Schaeffler material metadata.
Revision ID: 068
Revises: 067
"""
from alembic import op
revision = "068"
down_revision = "067"
branch_labels = None
depends_on = None
_OLD_PREFIX = "SCHAEFFLER_"
_NEW_PREFIX = "HARTOMAT_"
def _replace_jsonb_prefix(table_name: str, column_name: str, old_prefix: str, new_prefix: str) -> None:
    """Replace every occurrence of old_prefix with new_prefix inside a JSONB column.

    The value is round-tripped through text (::text ... ::jsonb), so the
    substitution applies anywhere in the serialized document — keys and
    values alike. The WHERE clause limits the update to rows that actually
    contain the prefix.

    NOTE(review): table/column names and prefixes are f-string-interpolated
    into raw SQL. Safe for the module-level constants used by this revision;
    never call with untrusted input.
    """
    op.execute(
        f"""
        UPDATE {table_name}
        SET {column_name} = replace({column_name}::text, '{old_prefix}', '{new_prefix}')::jsonb
        WHERE {column_name} IS NOT NULL
        AND {column_name}::text LIKE '%{old_prefix}%'
        """
    )
def _replace_text_prefix(table_name: str, column_name: str, old_prefix: str, new_prefix: str) -> None:
    """Replace every occurrence of old_prefix with new_prefix inside a text column.

    Plain-text counterpart of _replace_jsonb_prefix: same substitution,
    restricted to rows whose value contains the prefix.

    NOTE(review): identifiers and prefixes are f-string-interpolated into raw
    SQL — acceptable for this revision's constants only.
    """
    op.execute(
        f"""
        UPDATE {table_name}
        SET {column_name} = replace({column_name}, '{old_prefix}', '{new_prefix}')
        WHERE {column_name} IS NOT NULL
        AND {column_name} LIKE '%{old_prefix}%'
        """
    )
def upgrade() -> None:
    """Rewrite persisted SCHAEFFLER_* material identifiers to HARTOMAT_*.

    Covers the three JSONB assignment columns on cad_files plus the plain
    text material_override columns on output_types and order_lines.
    """
    jsonb_columns = (
        "resolved_material_assignments",
        "manual_material_overrides",
        "source_material_assignments",
    )
    for column in jsonb_columns:
        _replace_jsonb_prefix("cad_files", column, _OLD_PREFIX, _NEW_PREFIX)
    for table in ("output_types", "order_lines"):
        _replace_text_prefix(table, "material_override", _OLD_PREFIX, _NEW_PREFIX)
def downgrade() -> None:
    """Reverse the prefix migration: HARTOMAT_* back to SCHAEFFLER_*.

    Mirrors upgrade() with the prefixes swapped; same tables and columns,
    same order of statements.
    """
    jsonb_columns = (
        "resolved_material_assignments",
        "manual_material_overrides",
        "source_material_assignments",
    )
    for column in jsonb_columns:
        _replace_jsonb_prefix("cad_files", column, _NEW_PREFIX, _OLD_PREFIX)
    for table in ("output_types", "order_lines"):
        _replace_text_prefix(table, "material_override", _NEW_PREFIX, _OLD_PREFIX)
@@ -0,0 +1,31 @@
"""Add workflow input schema to render templates.
Revision ID: 069
Revises: 068
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
revision = "069"
down_revision = "068"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add render_templates.workflow_input_schema as a non-null JSONB list.

    A temporary server default of '[]' seeds existing rows so the NOT NULL
    constraint holds; the default is dropped immediately afterwards so the
    application must always supply a value for new rows.
    """
    schema_column = sa.Column(
        "workflow_input_schema",
        postgresql.JSONB(astext_type=sa.Text()),
        nullable=False,
        server_default=sa.text("'[]'::jsonb"),
    )
    op.add_column("render_templates", schema_column)
    # Existing rows are now populated; remove the default going forward.
    op.alter_column("render_templates", "workflow_input_schema", server_default=None)
def downgrade() -> None:
    """Drop the workflow_input_schema column added by this revision."""
    op.drop_column("render_templates", "workflow_input_schema")
+86 -15
View File
@@ -3,10 +3,12 @@ import uuid
from datetime import datetime, timedelta from datetime import datetime, timedelta
from typing import Any, Optional from typing import Any, Optional
from fastapi import APIRouter, Depends, HTTPException, status from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, update as sql_update, func, case, distinct, and_, extract from sqlalchemy import select, update as sql_update, func, case, distinct, and_, extract
from pydantic import BaseModel from pydantic import BaseModel, ValidationError
from app.database import get_db from app.database import get_db
from app.core.render_paths import resolve_result_path, result_path_to_storage_key
from app.models.user import User from app.models.user import User
from app.models.system_setting import SystemSetting from app.models.system_setting import SystemSetting
from app.models.cad_file import CadFile, ProcessingStatus from app.models.cad_file import CadFile, ProcessingStatus
@@ -27,7 +29,7 @@ SETTINGS_DEFAULTS: dict[str, str] = {
"blender_eevee_samples": "64", "blender_eevee_samples": "64",
"thumbnail_format": "jpg", "thumbnail_format": "jpg",
"blender_smooth_angle": "30", "blender_smooth_angle": "30",
"cycles_device": "auto", "cycles_device": "gpu",
"render_backend": "celery", "render_backend": "celery",
"blender_max_concurrent_renders": "3", "blender_max_concurrent_renders": "3",
"product_thumbnail_priority": '["latest_render","cad_thumbnail"]', "product_thumbnail_priority": '["latest_render","cad_thumbnail"]',
@@ -63,7 +65,7 @@ class SettingsOut(BaseModel):
blender_eevee_samples: int = 64 blender_eevee_samples: int = 64
thumbnail_format: str = "jpg" thumbnail_format: str = "jpg"
blender_smooth_angle: int = 30 blender_smooth_angle: int = 30
cycles_device: str = "auto" cycles_device: str = "gpu"
render_backend: str = "celery" render_backend: str = "celery"
blender_max_concurrent_renders: int = 3 blender_max_concurrent_renders: int = 3
product_thumbnail_priority: str = '["latest_render","cad_thumbnail"]' product_thumbnail_priority: str = '["latest_render","cad_thumbnail"]'
@@ -225,9 +227,9 @@ def _settings_to_out(raw: dict[str, str]) -> SettingsOut:
smtp_password=raw.get("smtp_password", ""), smtp_password=raw.get("smtp_password", ""),
smtp_from_address=raw.get("smtp_from_address", ""), smtp_from_address=raw.get("smtp_from_address", ""),
scene_linear_deflection=float(raw.get("scene_linear_deflection", "0.1")), scene_linear_deflection=float(raw.get("scene_linear_deflection", "0.1")),
scene_angular_deflection=float(raw.get("scene_angular_deflection", "0.5")), scene_angular_deflection=float(raw.get("scene_angular_deflection", "0.1")),
render_linear_deflection=float(raw.get("render_linear_deflection", "0.03")), render_linear_deflection=float(raw.get("render_linear_deflection", "0.03")),
render_angular_deflection=float(raw.get("render_angular_deflection", "0.2")), render_angular_deflection=float(raw.get("render_angular_deflection", "0.05")),
gltf_scale_factor=float(raw.get("gltf_scale_factor", "0.001")), gltf_scale_factor=float(raw.get("gltf_scale_factor", "0.001")),
gltf_smooth_normals=raw.get("gltf_smooth_normals", "true") == "true", gltf_smooth_normals=raw.get("gltf_smooth_normals", "true") == "true",
viewer_max_distance=float(raw.get("viewer_max_distance", "50")), viewer_max_distance=float(raw.get("viewer_max_distance", "50")),
@@ -680,7 +682,10 @@ async def seed_workflows(
): ):
"""Create the standard workflow definitions if they do not already exist.""" """Create the standard workflow definitions if they do not already exist."""
from app.domains.rendering.models import WorkflowDefinition from app.domains.rendering.models import WorkflowDefinition
from app.domains.rendering.workflow_config_utils import build_preset_workflow_config from app.domains.rendering.workflow_config_utils import (
build_preset_workflow_config,
build_workflow_blueprint_config,
)
STANDARD_WORKFLOWS = [ STANDARD_WORKFLOWS = [
{ {
@@ -697,6 +702,13 @@ async def seed_workflows(
{"render_engine": "eevee", "samples": 64, "resolution": [1920, 1080]}, {"render_engine": "eevee", "samples": 64, "resolution": [1920, 1080]},
), ),
}, },
{
"name": "Still Image — Graph",
"config": build_preset_workflow_config(
"still_graph",
{"render_engine": "cycles", "samples": 256, "resolution": [1920, 1080]},
),
},
{ {
"name": "Turntable Animation", "name": "Turntable Animation",
"config": build_preset_workflow_config( "config": build_preset_workflow_config(
@@ -711,6 +723,18 @@ async def seed_workflows(
{"render_engine": "cycles", "samples": 128, "angles": [0, 45, 90]}, {"render_engine": "cycles", "samples": 128, "angles": [0, 45, 90]},
), ),
}, },
{
"name": "CAD Intake Blueprint",
"config": build_workflow_blueprint_config("cad_intake"),
},
{
"name": "Order Rendering Blueprint",
"config": build_workflow_blueprint_config("order_rendering"),
},
{
"name": "Still Graph Blueprint",
"config": build_workflow_blueprint_config("still_graph_reference"),
},
] ]
existing_result = await db.execute(select(WorkflowDefinition)) existing_result = await db.execute(select(WorkflowDefinition))
@@ -730,6 +754,57 @@ async def seed_workflows(
return {"created": created, "message": f"Created {created} workflow definition(s)"} return {"created": created, "message": f"Created {created} workflow definition(s)"}
@router.post("/settings/backfill-workflows", status_code=status.HTTP_200_OK)
async def backfill_workflows(
admin: User = Depends(require_global_admin),
db: AsyncSession = Depends(get_db),
):
"""Rewrite persisted legacy workflow configs into canonical DAG form."""
from app.domains.rendering.models import WorkflowDefinition
from app.domains.rendering.workflow_config_utils import (
canonicalize_workflow_config,
workflow_config_requires_canonicalization,
)
from app.domains.rendering.workflow_schema import WorkflowConfig
result = await db.execute(select(WorkflowDefinition).order_by(WorkflowDefinition.created_at))
workflows = result.scalars().all()
updated: list[dict[str, str]] = []
invalid: list[dict[str, str]] = []
for workflow in workflows:
if not workflow_config_requires_canonicalization(workflow.config):
continue
try:
normalized = canonicalize_workflow_config(workflow.config)
WorkflowConfig.model_validate(normalized)
except (ValidationError, ValueError) as exc:
invalid.append(
{
"id": str(workflow.id),
"name": workflow.name,
"error": str(exc),
}
)
continue
workflow.config = normalized
flag_modified(workflow, "config")
updated.append({"id": str(workflow.id), "name": workflow.name})
await db.commit()
return {
"scanned": len(workflows),
"updated": len(updated),
"invalid": invalid,
"workflows": updated,
"message": f"Canonicalized {len(updated)} workflow definition(s)",
}
@router.get("/settings/renderer-status") @router.get("/settings/renderer-status")
async def renderer_status( async def renderer_status(
admin: User = Depends(require_global_admin), admin: User = Depends(require_global_admin),
@@ -756,13 +831,10 @@ async def import_existing_media_assets(
created = 0 created = 0
skipped = 0 skipped = 0
from app.config import settings as _app_settings
def _normalize_key(path: str) -> str: def _normalize_key(path: str) -> str:
"""Strip UPLOAD_DIR prefix to store relative storage keys.""" """Normalize mixed legacy/canonical paths to a stable relative storage key."""
key = str(path) key = result_path_to_storage_key(path)
prefix = str(_app_settings.upload_dir).rstrip("/") + "/" return key or str(path)
return key[len(prefix):] if key.startswith(prefix) else key
# 1. CadFiles with thumbnail_path # 1. CadFiles with thumbnail_path
await db.execute(text("SET LOCAL app.current_tenant_id = 'bypass'")) await db.execute(text("SET LOCAL app.current_tenant_id = 'bypass'"))
@@ -843,7 +915,6 @@ async def purge_render_media(
""" """
import logging import logging
from pathlib import Path from pathlib import Path
from app.config import settings
from app.core.storage import get_storage from app.core.storage import get_storage
from app.domains.media.models import MediaAsset, MediaAssetType from app.domains.media.models import MediaAsset, MediaAssetType
@@ -865,8 +936,8 @@ async def purge_render_media(
# Delete backing file # Delete backing file
key = asset.storage_key key = asset.storage_key
try: try:
candidate = Path(key) if Path(key).is_absolute() else Path(settings.upload_dir) / key candidate = resolve_result_path(key)
if candidate.exists(): if candidate is not None and candidate.exists():
freed_bytes += candidate.stat().st_size freed_bytes += candidate.stat().st_size
candidate.unlink() candidate.unlink()
deleted_files += 1 deleted_files += 1
+37 -9
View File
@@ -13,6 +13,9 @@ from sqlalchemy import select
from sqlalchemy.orm import selectinload from sqlalchemy.orm import selectinload
from app.database import get_db from app.database import get_db
from app.core.render_paths import resolve_result_path
from app.config import settings
from app.domains.media.models import MediaAsset, MediaAssetType
from app.models.cad_file import CadFile, ProcessingStatus from app.models.cad_file import CadFile, ProcessingStatus
from app.models.order import Order from app.models.order import Order
from app.models.order_item import OrderItem from app.models.order_item import OrderItem
@@ -191,6 +194,38 @@ async def _get_cad_file(cad_id: uuid.UUID, db: AsyncSession) -> CadFile:
return cad return cad
async def _resolve_gltf_path(cad: CadFile, db: AsyncSession) -> Path | None:
    """Resolve the best available GLTF/GLB path for a CAD file.

    Prefer the legacy cad_files.gltf_path for compatibility, but fall back to
    the canonical media_assets.gltf_geometry record written by the newer
    export pipeline. Returns None when neither source yields an existing file.
    """
    # Legacy column wins when the file it points at still exists on disk.
    if cad.gltf_path:
        legacy_candidate = resolve_result_path(cad.gltf_path) or Path(cad.gltf_path)
        if legacy_candidate.exists():
            return legacy_candidate

    # Fall back to the newest non-archived gltf_geometry media asset.
    stmt = (
        select(MediaAsset)
        .where(
            MediaAsset.cad_file_id == cad.id,
            MediaAsset.asset_type == MediaAssetType.gltf_geometry,
            MediaAsset.is_archived == False,  # noqa: E712
        )
        .order_by(MediaAsset.created_at.desc())
    )
    asset = (await db.execute(stmt)).scalars().first()
    if asset is None or not asset.storage_key:
        return None

    candidate = resolve_result_path(asset.storage_key)
    if candidate is None:
        # Storage key not recognized as a canonical result path; treat it as
        # relative to the upload directory.
        candidate = Path(settings.upload_dir) / asset.storage_key.lstrip("/")
    return candidate if candidate.exists() else None
@router.get("/{id}/thumbnail") @router.get("/{id}/thumbnail")
async def get_thumbnail( async def get_thumbnail(
id: uuid.UUID, id: uuid.UUID,
@@ -228,20 +263,13 @@ async def get_model(
): ):
"""Serve the glTF file for a CAD file.""" """Serve the glTF file for a CAD file."""
cad = await _get_cad_file(id, db) cad = await _get_cad_file(id, db)
gltf_path = await _resolve_gltf_path(cad, db)
if not cad.gltf_path: if gltf_path is None:
raise HTTPException( raise HTTPException(
status_code=404, status_code=404,
detail="glTF model not yet generated for this CAD file", detail="glTF model not yet generated for this CAD file",
) )
gltf_path = Path(cad.gltf_path)
if not gltf_path.exists():
raise HTTPException(
status_code=404,
detail="glTF file missing from storage",
)
# glTF files may be either .gltf (JSON) or .glb (binary) # glTF files may be either .gltf (JSON) or .glb (binary)
suffix = gltf_path.suffix.lower() suffix = gltf_path.suffix.lower()
if suffix == ".glb": if suffix == ".glb":
+5 -19
View File
@@ -30,6 +30,7 @@ from app.schemas.order_line import OrderLineCreate, OrderLineOut
from app.schemas.product import ProductOut from app.schemas.product import ProductOut
from app.schemas.output_type import OutputTypeOut from app.schemas.output_type import OutputTypeOut
from app.services.order_service import generate_order_number from app.services.order_service import generate_order_number
from app.core.render_paths import resolve_result_path, result_path_to_public_url
from app.utils.auth import get_current_user, require_admin_or_pm, require_pm_or_above from app.utils.auth import get_current_user, require_admin_or_pm, require_pm_or_above
router = APIRouter(prefix="/orders", tags=["orders"]) router = APIRouter(prefix="/orders", tags=["orders"])
@@ -41,13 +42,7 @@ def _is_privileged(user: User) -> bool:
def _result_path_to_url(result_path: str) -> str | None: def _result_path_to_url(result_path: str) -> str | None:
"""Convert an internal result_path to a servable static URL.""" """Convert an internal result_path to a servable static URL."""
if "/renders/" in result_path: return result_path_to_public_url(result_path, require_exists=True)
idx = result_path.index("/renders/")
return result_path[idx:]
if "/thumbnails/" in result_path:
idx = result_path.index("/thumbnails/")
return result_path[idx:]
return None
def _build_line_out(line: OrderLine) -> OrderLineOut: def _build_line_out(line: OrderLine) -> OrderLineOut:
@@ -1544,15 +1539,6 @@ async def download_renders(
if not lines: if not lines:
raise HTTPException(404, detail="No completed renders found for this order") raise HTTPException(404, detail="No completed renders found for this order")
from app.config import settings as app_settings
def _resolve_path(p: str) -> str:
"""Translate container-relative paths to backend filesystem paths."""
# Flamenco worker mounts the uploads volume at /shared, backend at /app/uploads
if p.startswith("/shared/"):
return app_settings.upload_dir + p[len("/shared"):]
return p
buf = io.BytesIO() buf = io.BytesIO()
# Track names used to avoid duplicates # Track names used to avoid duplicates
name_counts: dict[str, int] = {} name_counts: dict[str, int] = {}
@@ -1561,8 +1547,8 @@ async def download_renders(
for line in lines: for line in lines:
if not line.result_path: if not line.result_path:
continue continue
fs_path = _resolve_path(line.result_path) resolved_path = resolve_result_path(line.result_path)
if not os.path.isfile(fs_path): if resolved_path is None or not resolved_path.is_file():
continue continue
# Build a meaningful filename # Build a meaningful filename
product_name = (line.product.name or line.product.pim_id or "product") if line.product else "product" product_name = (line.product.name or line.product.pim_id or "product") if line.product else "product"
@@ -1587,7 +1573,7 @@ async def download_renders(
name_counts[base_name] = 0 name_counts[base_name] = 0
archive_name = base_name archive_name = base_name
zf.write(fs_path, archive_name) zf.write(resolved_path, archive_name)
if not zf.infolist(): if not zf.infolist():
raise HTTPException(404, detail="No render files found on disk") raise HTTPException(404, detail="No render files found on disk")
+128 -11
View File
@@ -12,6 +12,7 @@ from app.models.order_line import OrderLine
from app.models.output_type import ( from app.models.output_type import (
OUTPUT_TYPE_ARTIFACT_KINDS, OUTPUT_TYPE_ARTIFACT_KINDS,
OUTPUT_TYPE_WORKFLOW_FAMILIES, OUTPUT_TYPE_WORKFLOW_FAMILIES,
OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES,
OutputType, OutputType,
VALID_RENDER_BACKENDS, VALID_RENDER_BACKENDS,
) )
@@ -21,12 +22,19 @@ from app.models.user import User
from app.domains.rendering.models import WorkflowDefinition from app.domains.rendering.models import WorkflowDefinition
from app.domains.rendering.output_type_contracts import ( from app.domains.rendering.output_type_contracts import (
apply_invocation_overrides_to_render_settings, apply_invocation_overrides_to_render_settings,
build_output_type_contract_catalog,
build_output_type_invocation_profile,
derive_supported_artifact_kinds_from_workflow_config,
infer_output_type_artifact_kind, infer_output_type_artifact_kind,
infer_workflow_family_from_config, infer_workflow_family_from_config,
InvalidInvocationOverridesError,
merge_output_type_invocation_overrides, merge_output_type_invocation_overrides,
normalize_invocation_overrides, normalize_invocation_overrides,
resolve_output_type_invocation_overrides,
validate_and_normalize_invocation_overrides,
validate_output_type_contract, validate_output_type_contract,
) )
from app.domains.rendering.schemas import OutputTypeContractCatalogOut, OutputTypeInvocationProfileOut
router = APIRouter(prefix="/output-types", tags=["output-types"]) router = APIRouter(prefix="/output-types", tags=["output-types"])
@@ -34,6 +42,34 @@ router = APIRouter(prefix="/output-types", tags=["output-types"])
def _ot_to_out(ot: OutputType) -> OutputTypeOut: def _ot_to_out(ot: OutputType) -> OutputTypeOut:
"""Convert an OutputType ORM instance to OutputTypeOut with pricing convenience fields.""" """Convert an OutputType ORM instance to OutputTypeOut with pricing convenience fields."""
out = OutputTypeOut.model_validate(ot) out = OutputTypeOut.model_validate(ot)
resolved_invocation_overrides = resolve_output_type_invocation_overrides(
ot.render_settings,
getattr(ot, "invocation_overrides", None),
artifact_kind=ot.artifact_kind,
is_animation=ot.is_animation,
)
out.invocation_overrides = resolved_invocation_overrides
out.render_settings = apply_invocation_overrides_to_render_settings(
ot.render_settings,
resolved_invocation_overrides,
)
out.invocation_profile = OutputTypeInvocationProfileOut.model_validate(
build_output_type_invocation_profile(
renderer=ot.renderer,
render_backend=ot.render_backend,
workflow_family=ot.workflow_family,
artifact_kind=ot.artifact_kind,
output_format=ot.output_format,
is_animation=ot.is_animation,
workflow_definition_id=ot.workflow_definition_id,
workflow_rollout_mode=getattr(ot, "workflow_rollout_mode", "legacy_only"),
transparent_bg=ot.transparent_bg,
cycles_device=ot.cycles_device,
material_override=ot.material_override,
render_settings=ot.render_settings,
invocation_overrides=getattr(ot, "invocation_overrides", None),
)
)
if ot.pricing_tier: if ot.pricing_tier:
out.pricing_tier_name = f"{ot.pricing_tier.category_key}/{ot.pricing_tier.quality_level}" out.pricing_tier_name = f"{ot.pricing_tier.category_key}/{ot.pricing_tier.quality_level}"
out.price_per_item = float(ot.pricing_tier.price_per_item) out.price_per_item = float(ot.pricing_tier.price_per_item)
@@ -62,6 +98,7 @@ async def _validate_output_type_workflow_link(
*, *,
workflow_definition_id: uuid.UUID | None, workflow_definition_id: uuid.UUID | None,
workflow_family: str, workflow_family: str,
artifact_kind: str,
) -> None: ) -> None:
if workflow_definition_id is None: if workflow_definition_id is None:
return return
@@ -86,6 +123,17 @@ async def _validate_output_type_workflow_link(
), ),
) )
supported_artifact_kinds = derive_supported_artifact_kinds_from_workflow_config(workflow_definition.config)
if artifact_kind not in supported_artifact_kinds:
supported = ", ".join(supported_artifact_kinds) if supported_artifact_kinds else "none"
raise HTTPException(
400,
detail=(
f"Workflow artifact mismatch: output type expects '{artifact_kind}', "
f"but workflow '{workflow_definition.name}' supports [{supported}]"
),
)
def _ensure_output_type_contract_is_valid( def _ensure_output_type_contract_is_valid(
*, *,
@@ -105,6 +153,23 @@ def _ensure_output_type_contract_is_valid(
raise HTTPException(400, detail=str(exc)) from exc raise HTTPException(400, detail=str(exc)) from exc
def _normalize_explicit_invocation_overrides(
    raw: dict | None,
    *,
    artifact_kind: str,
    is_animation: bool,
) -> dict:
    """Validate caller-supplied invocation overrides, mapping domain errors to HTTP 400.

    Unknown keys are rejected so typos in client payloads fail loudly instead
    of being silently dropped.
    """
    try:
        normalized = validate_and_normalize_invocation_overrides(
            raw,
            artifact_kind=artifact_kind,
            is_animation=is_animation,
            reject_unknown_keys=True,
        )
    except InvalidInvocationOverridesError as exc:
        raise HTTPException(400, detail=str(exc)) from exc
    return normalized
@router.get("", response_model=list[OutputTypeOut]) @router.get("", response_model=list[OutputTypeOut])
async def list_output_types( async def list_output_types(
include_inactive: bool = Query(False), include_inactive: bool = Query(False),
@@ -133,6 +198,13 @@ async def list_output_types(
return await _enrich_workflow_names(db, items) return await _enrich_workflow_names(db, items)
@router.get("/contract-catalog", response_model=OutputTypeContractCatalogOut)
async def get_output_type_contract_catalog(
user: User = Depends(get_current_user),
):
return OutputTypeContractCatalogOut.model_validate(build_output_type_contract_catalog())
@router.post("", response_model=OutputTypeOut, status_code=status.HTTP_201_CREATED) @router.post("", response_model=OutputTypeOut, status_code=status.HTTP_201_CREATED)
async def create_output_type( async def create_output_type(
body: OutputTypeCreate, body: OutputTypeCreate,
@@ -146,25 +218,39 @@ async def create_output_type(
400, 400,
detail=f"Invalid workflow_family. Choose: {', '.join(sorted(OUTPUT_TYPE_WORKFLOW_FAMILIES))}", detail=f"Invalid workflow_family. Choose: {', '.join(sorted(OUTPUT_TYPE_WORKFLOW_FAMILIES))}",
) )
if body.workflow_rollout_mode not in OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES:
raise HTTPException(
400,
detail=f"Invalid workflow_rollout_mode. Choose: {', '.join(sorted(OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES))}",
)
existing = await db.execute(select(OutputType).where(OutputType.name == body.name)) existing = await db.execute(select(OutputType).where(OutputType.name == body.name))
if existing.scalar_one_or_none(): if existing.scalar_one_or_none():
raise HTTPException(409, detail=f"Output type '{body.name}' already exists") raise HTTPException(409, detail=f"Output type '{body.name}' already exists")
data = body.model_dump() data = body.model_dump()
explicit_invocation = normalize_invocation_overrides(body.invocation_overrides)
if not explicit_invocation:
explicit_invocation = normalize_invocation_overrides(body.render_settings)
data["invocation_overrides"] = explicit_invocation
data["render_settings"] = apply_invocation_overrides_to_render_settings(
body.render_settings,
explicit_invocation,
)
data["artifact_kind"] = data.get("artifact_kind") or infer_output_type_artifact_kind( data["artifact_kind"] = data.get("artifact_kind") or infer_output_type_artifact_kind(
body.output_format, body.output_format,
body.is_animation, body.is_animation,
body.workflow_family, body.workflow_family,
) )
explicit_invocation = _normalize_explicit_invocation_overrides(
body.invocation_overrides,
artifact_kind=data["artifact_kind"],
is_animation=body.is_animation,
)
if not explicit_invocation:
explicit_invocation = normalize_invocation_overrides(body.render_settings)
data["invocation_overrides"] = resolve_output_type_invocation_overrides(
body.render_settings,
explicit_invocation,
artifact_kind=data["artifact_kind"],
is_animation=body.is_animation,
)
data["render_settings"] = apply_invocation_overrides_to_render_settings(
body.render_settings,
data["invocation_overrides"],
)
if data["artifact_kind"] not in OUTPUT_TYPE_ARTIFACT_KINDS: if data["artifact_kind"] not in OUTPUT_TYPE_ARTIFACT_KINDS:
raise HTTPException( raise HTTPException(
400, 400,
@@ -180,7 +266,10 @@ async def create_output_type(
db, db,
workflow_definition_id=body.workflow_definition_id, workflow_definition_id=body.workflow_definition_id,
workflow_family=body.workflow_family, workflow_family=body.workflow_family,
artifact_kind=data["artifact_kind"],
) )
if body.workflow_definition_id is None:
data["workflow_rollout_mode"] = "legacy_only"
ot = OutputType(**data) ot = OutputType(**data)
db.add(ot) db.add(ot)
@@ -214,6 +303,11 @@ async def update_output_type(
400, 400,
detail=f"Invalid workflow_family. Choose: {', '.join(sorted(OUTPUT_TYPE_WORKFLOW_FAMILIES))}", detail=f"Invalid workflow_family. Choose: {', '.join(sorted(OUTPUT_TYPE_WORKFLOW_FAMILIES))}",
) )
if "workflow_rollout_mode" in data and data["workflow_rollout_mode"] not in OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES:
raise HTTPException(
400,
detail=f"Invalid workflow_rollout_mode. Choose: {', '.join(sorted(OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES))}",
)
candidate_workflow_family = data.get("workflow_family", ot.workflow_family) candidate_workflow_family = data.get("workflow_family", ot.workflow_family)
candidate_workflow_definition_id = data.get("workflow_definition_id", ot.workflow_definition_id) candidate_workflow_definition_id = data.get("workflow_definition_id", ot.workflow_definition_id)
@@ -226,16 +320,25 @@ async def update_output_type(
if render_settings_supplied or invocation_supplied: if render_settings_supplied or invocation_supplied:
candidate_render_settings = data.get("render_settings", ot.render_settings) candidate_render_settings = data.get("render_settings", ot.render_settings)
if invocation_supplied: if invocation_supplied:
candidate_invocation_overrides = normalize_invocation_overrides(data.get("invocation_overrides")) candidate_invocation_overrides = _normalize_explicit_invocation_overrides(
data.get("invocation_overrides"),
artifact_kind=candidate_artifact_kind,
is_animation=candidate_is_animation,
)
else: else:
candidate_invocation_overrides = merge_output_type_invocation_overrides( candidate_invocation_overrides = merge_output_type_invocation_overrides(
candidate_render_settings, candidate_render_settings,
None, None,
) )
data["invocation_overrides"] = candidate_invocation_overrides data["invocation_overrides"] = resolve_output_type_invocation_overrides(
data["render_settings"] = apply_invocation_overrides_to_render_settings(
candidate_render_settings, candidate_render_settings,
candidate_invocation_overrides, candidate_invocation_overrides,
artifact_kind=candidate_artifact_kind,
is_animation=candidate_is_animation,
)
data["render_settings"] = apply_invocation_overrides_to_render_settings(
candidate_render_settings,
data["invocation_overrides"],
) )
should_recompute_artifact_kind = ( should_recompute_artifact_kind = (
@@ -263,12 +366,26 @@ async def update_output_type(
output_format=candidate_output_format, output_format=candidate_output_format,
is_animation=candidate_is_animation, is_animation=candidate_is_animation,
) )
if render_settings_supplied or invocation_supplied or should_recompute_artifact_kind:
data["invocation_overrides"] = resolve_output_type_invocation_overrides(
data.get("render_settings", ot.render_settings),
data.get("invocation_overrides", ot.invocation_overrides),
artifact_kind=candidate_artifact_kind,
is_animation=candidate_is_animation,
)
data["render_settings"] = apply_invocation_overrides_to_render_settings(
data.get("render_settings", ot.render_settings),
data["invocation_overrides"],
)
await _validate_output_type_workflow_link( await _validate_output_type_workflow_link(
db, db,
workflow_definition_id=candidate_workflow_definition_id, workflow_definition_id=candidate_workflow_definition_id,
workflow_family=candidate_workflow_family, workflow_family=candidate_workflow_family,
artifact_kind=candidate_artifact_kind,
) )
if candidate_workflow_definition_id is None:
data["workflow_rollout_mode"] = "legacy_only"
for field_name, value in data.items(): for field_name, value in data.items():
setattr(ot, field_name, value) setattr(ot, field_name, value)
+9 -18
View File
@@ -16,6 +16,11 @@ from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload, joinedload from sqlalchemy.orm import selectinload, joinedload
from app.config import settings from app.config import settings
from app.core.render_paths import (
resolve_result_path,
resolve_public_asset_url,
result_path_to_public_url,
)
from app.database import get_db from app.database import get_db
from app.models.cad_file import CadFile, ProcessingStatus from app.models.cad_file import CadFile, ProcessingStatus
from app.models.material import Material from app.models.material import Material
@@ -829,24 +834,12 @@ VIDEO_EXTENSIONS = {".mp4", ".webm", ".avi", ".mov"}
def _result_path_to_url(result_path: str) -> str | None: def _result_path_to_url(result_path: str) -> str | None:
"""Convert an internal result_path to a servable static URL.""" """Convert an internal result_path to a servable static URL."""
# Flamenco / shared renders: /shared/renders/X/file.jpg → /renders/X/file.jpg return result_path_to_public_url(result_path, require_exists=False)
if "/renders/" in result_path:
idx = result_path.index("/renders/")
return result_path[idx:]
# Celery renders stored as thumbnails: /app/uploads/thumbnails/X.png → /thumbnails/X.png
if "/thumbnails/" in result_path:
idx = result_path.index("/thumbnails/")
return result_path[idx:]
return None
def _resolve_disk_path(url: str) -> Path | None: def _resolve_disk_path(url: str) -> Path | None:
"""Given a servable URL like /renders/X/file.jpg, resolve to disk path.""" """Given a servable URL like /renders/X/file.jpg, resolve to disk path."""
if url.startswith("/renders/"): return resolve_public_asset_url(url)
return Path(settings.upload_dir) / "renders" / url[len("/renders/"):]
if url.startswith("/thumbnails/"):
return Path(settings.upload_dir) / "thumbnails" / url[len("/thumbnails/"):]
return None
@router.get("/{product_id}/renders") @router.get("/{product_id}/renders")
@@ -983,9 +976,8 @@ async def download_product_renders(
raise HTTPException(404, detail="No completed renders found for the selected lines") raise HTTPException(404, detail="No completed renders found for the selected lines")
def _resolve_path(p: str) -> str: def _resolve_path(p: str) -> str:
if p.startswith("/shared/"): resolved = resolve_result_path(p)
return settings.upload_dir + p[len("/shared"):] return str(resolved) if resolved is not None else p
return p
def _safe(s: str) -> str: def _safe(s: str) -> str:
return re.sub(r"[^\w\-.]", "_", s).strip("_") return re.sub(r"[^\w\-.]", "_", s).strip("_")
@@ -1147,4 +1139,3 @@ async def delete_render_position(
raise HTTPException(404, detail="Render position not found") raise HTTPException(404, detail="Render position not found")
await db.delete(pos) await db.delete(pos)
await db.commit() await db.commit()
+34 -1
View File
@@ -1,17 +1,20 @@
"""Render Templates API — CRUD + .blend file upload/download + material library.""" """Render Templates API — CRUD + .blend file upload/download + material library."""
import json
import uuid import uuid
import shutil import shutil
from datetime import datetime from datetime import datetime
from pathlib import Path from pathlib import Path
from typing import Any
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form, status from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form, status
from fastapi.responses import FileResponse from fastapi.responses import FileResponse
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, update as sql_update, delete as sql_delete from sqlalchemy import select, update as sql_update, delete as sql_delete
from pydantic import BaseModel from pydantic import BaseModel, TypeAdapter, ValidationError
from app.database import get_db from app.database import get_db
from app.config import settings as app_settings from app.config import settings as app_settings
from app.domains.rendering.workflow_node_registry import WorkflowNodeFieldDefinition
from app.models.user import User from app.models.user import User
from app.models.render_template import RenderTemplate from app.models.render_template import RenderTemplate
from app.models.output_type import OutputType from app.models.output_type import OutputType
@@ -46,6 +49,7 @@ class RenderTemplateOut(BaseModel):
lighting_only: bool lighting_only: bool
shadow_catcher_enabled: bool shadow_catcher_enabled: bool
camera_orbit: bool camera_orbit: bool
workflow_input_schema: list[WorkflowNodeFieldDefinition]
is_active: bool is_active: bool
created_at: str created_at: str
updated_at: str updated_at: str
@@ -62,6 +66,7 @@ class RenderTemplateUpdate(BaseModel):
lighting_only: bool | None = None lighting_only: bool | None = None
shadow_catcher_enabled: bool | None = None shadow_catcher_enabled: bool | None = None
camera_orbit: bool | None = None camera_orbit: bool | None = None
workflow_input_schema: list[WorkflowNodeFieldDefinition] | None = None
is_active: bool | None = None is_active: bool | None = None
@@ -72,6 +77,29 @@ class MaterialLibraryInfo(BaseModel):
path: str | None = None path: str | None = None
_workflow_input_schema_adapter = TypeAdapter(list[WorkflowNodeFieldDefinition])
def _normalize_workflow_input_schema(schema: Any) -> list[dict[str, Any]]:
if schema in (None, "", "null"):
return []
try:
validated = _workflow_input_schema_adapter.validate_python(schema)
except ValidationError as exc:
raise HTTPException(status_code=422, detail={"workflow_input_schema": exc.errors()}) from exc
return [field.model_dump(mode="json") for field in validated]
def _parse_form_workflow_input_schema(raw_schema: str | None) -> list[dict[str, Any]]:
if raw_schema in (None, "", "null"):
return []
try:
payload = json.loads(raw_schema)
except json.JSONDecodeError as exc:
raise HTTPException(status_code=422, detail="workflow_input_schema must be valid JSON") from exc
return _normalize_workflow_input_schema(payload)
def _to_out(t: RenderTemplate) -> dict: def _to_out(t: RenderTemplate) -> dict:
ot_name = None ot_name = None
if t.output_type: if t.output_type:
@@ -94,6 +122,7 @@ def _to_out(t: RenderTemplate) -> dict:
"lighting_only": t.lighting_only, "lighting_only": t.lighting_only,
"shadow_catcher_enabled": t.shadow_catcher_enabled, "shadow_catcher_enabled": t.shadow_catcher_enabled,
"camera_orbit": t.camera_orbit, "camera_orbit": t.camera_orbit,
"workflow_input_schema": t.workflow_input_schema or [],
"is_active": t.is_active, "is_active": t.is_active,
"created_at": t.created_at.isoformat() if t.created_at else "", "created_at": t.created_at.isoformat() if t.created_at else "",
"updated_at": t.updated_at.isoformat() if t.updated_at else "", "updated_at": t.updated_at.isoformat() if t.updated_at else "",
@@ -126,6 +155,7 @@ async def create_render_template(
lighting_only: bool = Form(False), lighting_only: bool = Form(False),
shadow_catcher_enabled: bool = Form(False), shadow_catcher_enabled: bool = Form(False),
camera_orbit: bool = Form(True), camera_orbit: bool = Form(True),
workflow_input_schema: str | None = Form(None),
user: User = Depends(require_admin_or_pm), user: User = Depends(require_admin_or_pm),
db: AsyncSession = Depends(get_db), db: AsyncSession = Depends(get_db),
): ):
@@ -182,6 +212,7 @@ async def create_render_template(
lighting_only=lighting_only, lighting_only=lighting_only,
shadow_catcher_enabled=shadow_catcher_enabled, shadow_catcher_enabled=shadow_catcher_enabled,
camera_orbit=camera_orbit, camera_orbit=camera_orbit,
workflow_input_schema=_parse_form_workflow_input_schema(workflow_input_schema),
) )
db.add(tmpl) db.add(tmpl)
await db.flush() await db.flush()
@@ -224,6 +255,8 @@ async def update_render_template(
# Normalise empty strings to None for nullable fields # Normalise empty strings to None for nullable fields
if "category_key" in updates and updates["category_key"] in ("", "null"): if "category_key" in updates and updates["category_key"] in ("", "null"):
updates["category_key"] = None updates["category_key"] = None
if "workflow_input_schema" in updates:
updates["workflow_input_schema"] = _normalize_workflow_input_schema(updates["workflow_input_schema"])
# Handle M2M output_type_ids # Handle M2M output_type_ids
new_ot_ids: list[str] | None = updates.pop("output_type_ids", None) new_ot_ids: list[str] | None = updates.pop("output_type_ids", None)
+15 -1
View File
@@ -519,6 +519,12 @@ async def trigger_gpu_probe(current_user: User = Depends(require_global_admin)):
return {"task_id": str(result.id), "queued": True} return {"task_id": str(result.id), "queued": True}
@router.post("/gpu-probe", status_code=http_status.HTTP_202_ACCEPTED)
async def trigger_gpu_probe_legacy_alias(current_user: User = Depends(require_global_admin)):
"""Backward-compatible alias used by the current admin frontend."""
return await trigger_gpu_probe(current_user)
@router.get("/probe/gpu/result") @router.get("/probe/gpu/result")
async def get_gpu_probe_result( async def get_gpu_probe_result(
current_user: User = Depends(require_global_admin), current_user: User = Depends(require_global_admin),
@@ -535,6 +541,15 @@ async def get_gpu_probe_result(
return json.loads(setting.value) return json.loads(setting.value)
@router.get("/gpu-probe")
async def get_gpu_probe_result_legacy_alias(
current_user: User = Depends(require_global_admin),
db: AsyncSession = Depends(get_db),
):
"""Backward-compatible alias used by the current admin frontend."""
return await get_gpu_probe_result(current_user, db)
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# Render health check # Render health check
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
@@ -733,4 +748,3 @@ async def update_worker_config(
enabled=cfg.enabled, enabled=cfg.enabled,
updated_at=cfg.updated_at.isoformat(), updated_at=cfg.updated_at.isoformat(),
) )
+1
View File
@@ -75,6 +75,7 @@ class Settings(BaseSettings):
# Redis / Celery # Redis / Celery
redis_url: str = "redis://localhost:6379/0" redis_url: str = "redis://localhost:6379/0"
workflow_shadow_render_queue: str = "asset_pipeline_light"
@model_validator(mode="after") @model_validator(mode="after")
def normalize_runtime_hosts(self) -> "Settings": def normalize_runtime_hosts(self) -> "Settings":
+1 -1
View File
@@ -39,7 +39,7 @@ class RenderConfig(BaseModel):
blender_eevee_samples: int = 64 blender_eevee_samples: int = 64
thumbnail_format: str = "jpg" thumbnail_format: str = "jpg"
blender_smooth_angle: int = 30 blender_smooth_angle: int = 30
cycles_device: str = "auto" cycles_device: str = "gpu"
render_backend: str = "celery" render_backend: str = "celery"
product_thumbnail_priority: list[str] = Field( product_thumbnail_priority: list[str] = Field(
default_factory=lambda: ["latest_render", "cad_thumbnail"] default_factory=lambda: ["latest_render", "cad_thumbnail"]
+194
View File
@@ -0,0 +1,194 @@
from __future__ import annotations
import os
from pathlib import Path
from app.config import settings
SHARED_DIR_MODE = 0o2775
def _managed_directory_chain(path: Path) -> list[Path]:
"""Return upload-root-relative directories that should share writable perms."""
resolved_path = path.resolve(strict=False)
upload_root = Path(settings.upload_dir).resolve(strict=False)
if resolved_path != upload_root and upload_root not in resolved_path.parents:
return [path]
chain: list[Path] = [upload_root]
current = upload_root
try:
relative_parts = resolved_path.relative_to(upload_root).parts
except ValueError:
return [path]
for part in relative_parts:
current = current / part
chain.append(current)
return chain
def _normalize_directory_mode(path: Path, *, mode: int = SHARED_DIR_MODE) -> None:
try:
current_mode = path.stat().st_mode & 0o7777
except OSError:
return
desired_mode = mode
if current_mode == desired_mode:
return
try:
os.chmod(path, desired_mode)
except OSError:
# Best-effort only: callers still get the path, but existing root-owned
# trees can be repaired when the process has sufficient permissions.
return
def ensure_group_writable_dir(path: str | Path, *, mode: int = SHARED_DIR_MODE) -> Path:
"""Create a directory and normalize upload-tree permissions for shared workers."""
dir_path = Path(path)
for candidate in _managed_directory_chain(dir_path):
candidate.mkdir(parents=True, exist_ok=True)
_normalize_directory_mode(candidate, mode=mode)
return dir_path
def resolve_public_asset_url(url: str | None) -> Path | None:
"""Resolve a public static asset URL like /renders/... to a local disk path."""
if not url:
return None
normalized = url.replace("\\", "/")
if normalized.startswith("/renders/"):
candidate = Path(settings.upload_dir) / "renders" / normalized[len("/renders/"):]
elif normalized.startswith("/thumbnails/"):
candidate = Path(settings.upload_dir) / "thumbnails" / normalized[len("/thumbnails/"):]
else:
return None
return candidate
def resolve_result_path(result_path: str | None) -> Path | None:
"""Resolve stored result_path variants to a local disk path.
Supports canonical /app/uploads/... paths, legacy /shared/... paths, public
URLs, and bare storage keys such as renders/<id>/file.png.
"""
if not result_path:
return None
normalized = result_path.replace("\\", "/")
for marker in ("/uploads/", "/shared/"):
if marker in normalized:
relative = normalized.split(marker, 1)[1].lstrip("/")
return Path(settings.upload_dir) / relative
public_candidate = resolve_public_asset_url(normalized)
if public_candidate is not None:
return public_candidate
stripped = normalized.lstrip("/")
if stripped.startswith(("renders/", "thumbnails/", "exports/", "usd/", "step_files/")):
return Path(settings.upload_dir) / stripped
if Path(normalized).is_absolute():
return Path(normalized)
return None
def result_path_to_storage_key(result_path: str | None) -> str | None:
"""Normalize stored paths to a canonical relative storage key when possible."""
if not result_path:
return None
normalized = result_path.replace("\\", "/")
disk_path = resolve_result_path(result_path)
if disk_path is not None:
try:
return disk_path.relative_to(Path(settings.upload_dir)).as_posix()
except ValueError:
pass
public_candidate = normalized.lstrip("/")
if public_candidate.startswith(("renders/", "thumbnails/", "exports/", "usd/", "step_files/")):
return public_candidate
return normalized
def result_path_to_public_url(
result_path: str | None,
*,
require_exists: bool = False,
) -> str | None:
"""Convert internal result paths to a servable public URL.
Returns only /renders/... or /thumbnails/... URLs. Non-public internal paths
like step_files/renders stay hidden from API/UI callers.
"""
if not result_path:
return None
disk_path = resolve_result_path(result_path)
if require_exists:
if disk_path is None or not disk_path.is_file():
return None
normalized = result_path.replace("\\", "/")
for marker in ("/renders/", "/thumbnails/"):
if marker in normalized:
idx = normalized.index(marker)
public_url = normalized[idx:]
candidate = resolve_public_asset_url(public_url)
if require_exists and (candidate is None or not candidate.is_file()):
return None
return public_url
if disk_path is None:
return None
try:
relative = disk_path.relative_to(Path(settings.upload_dir))
except ValueError:
return None
relative_str = relative.as_posix()
if relative_str.startswith(("renders/", "thumbnails/")):
if require_exists and not disk_path.is_file():
return None
return f"/{relative_str}"
return None
def build_order_line_step_render_path(
step_path: str | Path,
order_line_id: str,
filename: str,
*,
ensure_exists: bool = False,
) -> Path:
"""Build a unique per-order-line render-worker artifact path beside the STEP file."""
artifact_dir = Path(step_path).parent / "renders" / str(order_line_id)
if ensure_exists:
ensure_group_writable_dir(artifact_dir)
return artifact_dir / filename
def build_order_line_export_path(
order_line_id: str,
filename: str,
*,
ensure_exists: bool = False,
) -> Path:
"""Build a unique per-order-line export artifact path under the shared upload root."""
artifact_dir = Path(settings.upload_dir) / "exports" / str(order_line_id)
if ensure_exists:
ensure_group_writable_dir(artifact_dir)
return artifact_dir / filename
+15 -18
View File
@@ -1,13 +1,11 @@
from __future__ import annotations from __future__ import annotations
from typing import TYPE_CHECKING, AsyncGenerator, Optional from typing import AsyncGenerator, Optional
from starlette.requests import Request
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, async_sessionmaker from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, async_sessionmaker
from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm import DeclarativeBase
from sqlalchemy import text from sqlalchemy import text
from app.config import settings from app.config import settings
if TYPE_CHECKING:
from starlette.requests import Request
engine = create_async_engine( engine = create_async_engine(
settings.database_url, settings.database_url,
echo=False, echo=False,
@@ -27,22 +25,21 @@ class Base(DeclarativeBase):
pass pass
async def get_db(request: "Request | None" = None) -> AsyncGenerator[AsyncSession, None]: async def get_db(request: Request) -> AsyncGenerator[AsyncSession, None]:
async with AsyncSessionLocal() as session: async with AsyncSessionLocal() as session:
# Auto-apply RLS context if TenantContextMiddleware populated request.state # Auto-apply RLS context if TenantContextMiddleware populated request.state
if request is not None: tenant_id = getattr(request.state, "tenant_id", None)
tenant_id = getattr(request.state, "tenant_id", None) role = getattr(request.state, "role", None)
role = getattr(request.state, "role", None) if tenant_id:
if tenant_id: # global_admin and legacy admin bypass RLS to see all tenants
# global_admin and legacy admin bypass RLS to see all tenants _bypass_roles = {"global_admin", "admin"}
_bypass_roles = {"global_admin", "admin"} if role in _bypass_roles:
if role in _bypass_roles: await session.execute(text("SET LOCAL app.current_tenant_id = 'bypass'"))
await session.execute(text("SET LOCAL app.current_tenant_id = 'bypass'")) else:
else: await session.execute(
await session.execute( text("SET LOCAL app.current_tenant_id = :tid"),
text("SET LOCAL app.current_tenant_id = :tid"), {"tid": tenant_id},
{"tid": tenant_id}, )
)
try: try:
yield session yield session
finally: finally:
@@ -0,0 +1,51 @@
from __future__ import annotations
from pathlib import Path
from typing import Any
from app.config import settings
def asset_library_dir() -> Path:
return Path(settings.upload_dir) / "asset-libraries"
def list_asset_library_blends() -> list[Path]:
directory = asset_library_dir()
if not directory.is_dir():
return []
return sorted(
(path for path in directory.glob("*.blend") if path.is_file()),
key=lambda path: (path.stat().st_mtime, path.name),
reverse=True,
)
def resolve_asset_library_blend_path(
*,
blend_file_path: str | None = None,
asset_library_id: Any | None = None,
) -> str | None:
"""Resolve the best available .blend path for an asset library.
Resolution order:
1. explicit configured path, when it exists
2. canonical uploads/asset-libraries/<id>.blend path
3. newest available .blend under uploads/asset-libraries
"""
if blend_file_path:
configured = Path(blend_file_path)
if configured.is_file():
return str(configured)
if asset_library_id:
candidate = asset_library_dir() / f"{asset_library_id}.blend"
if candidate.is_file():
return str(candidate)
available = list_asset_library_blends()
if available:
return str(available[0])
return None
+15 -1
View File
@@ -8,6 +8,7 @@ import subprocess
import uuid import uuid
from pathlib import Path from pathlib import Path
from app.domains.materials.library_paths import resolve_asset_library_blend_path
from app.tasks.celery_app import celery_app from app.tasks.celery_app import celery_app
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -43,7 +44,20 @@ def refresh_asset_library_catalog(self, asset_library_id: str) -> None:
if not lib: if not lib:
logger.warning("AssetLibrary %s not found", asset_library_id) logger.warning("AssetLibrary %s not found", asset_library_id)
return return
blend_path = lib.blend_file_path resolved_path = resolve_asset_library_blend_path(
blend_file_path=lib.blend_file_path,
asset_library_id=lib.id,
)
if resolved_path and resolved_path != lib.blend_file_path:
logger.warning(
"AssetLibrary %s path repaired from %s to %s before catalog refresh",
asset_library_id,
lib.blend_file_path,
resolved_path,
)
lib.blend_file_path = resolved_path
db.commit()
blend_path = resolved_path or lib.blend_file_path
engine.dispose() engine.dispose()
if not blend_path or not Path(blend_path).exists(): if not blend_path or not Path(blend_path).exists():
+14 -41
View File
@@ -10,6 +10,7 @@ from sqlalchemy import select, func
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession
from app.database import get_db from app.database import get_db
from app.core.render_paths import resolve_result_path
from app.domains.auth.models import User from app.domains.auth.models import User
from app.domains.media.models import MediaAsset, MediaAssetType from app.domains.media.models import MediaAsset, MediaAssetType
from app.domains.media.schemas import MediaAssetOut, MediaAssetBrowseItem, MediaAssetBrowseResponse from app.domains.media.schemas import MediaAssetOut, MediaAssetBrowseItem, MediaAssetBrowseResponse
@@ -19,6 +20,10 @@ from app.utils.auth import get_current_user
router = APIRouter(prefix="/api/media", tags=["media"], redirect_slashes=False) router = APIRouter(prefix="/api/media", tags=["media"], redirect_slashes=False)
def _resolve_asset_candidate(key: str):
return resolve_result_path(key)
async def _resolve_thumbnails_bulk(db: AsyncSession, assets: list) -> None: async def _resolve_thumbnails_bulk(db: AsyncSession, assets: list) -> None:
"""Resolve thumbnail_url for assets using the same priority as product pages. """Resolve thumbnail_url for assets using the same priority as product pages.
@@ -275,15 +280,8 @@ async def thumbnail_asset(
raise HTTPException(404, "Not a previewable asset") raise HTTPException(404, "Not a previewable asset")
key = asset.storage_key key = asset.storage_key
from app.config import settings candidate = _resolve_asset_candidate(key)
candidate = Path(key) if Path(key).is_absolute() else Path(settings.upload_dir) / key if candidate is not None and candidate.exists():
if not candidate.exists() and "/shared/renders/" in key:
parts = key.split("/")
if len(parts) >= 2:
remapped = Path(settings.upload_dir) / "renders" / parts[-2] / parts[-1]
if remapped.exists():
candidate = remapped
if candidate.exists():
return FileResponse( return FileResponse(
str(candidate), media_type=mime, str(candidate), media_type=mime,
headers={"Cache-Control": "max-age=86400, public"}, headers={"Cache-Control": "max-age=86400, public"},
@@ -314,22 +312,8 @@ async def download_asset(
mime = asset.mime_type or "application/octet-stream" mime = asset.mime_type or "application/octet-stream"
# Local file path (absolute or relative to UPLOAD_DIR) # Local file path (absolute or relative to UPLOAD_DIR)
from app.config import settings candidate = _resolve_asset_candidate(key)
candidate = Path(key) if candidate is not None and candidate.exists():
if not candidate.is_absolute():
candidate = Path(settings.upload_dir) / key
# Legacy path remapping: /shared/renders/{uuid}/{file} → UPLOAD_DIR/renders/{uuid}/{file}
if not candidate.exists() and "/shared/renders/" in key:
import logging
parts = key.split("/")
if len(parts) >= 2:
remapped = Path(settings.upload_dir) / "renders" / parts[-2] / parts[-1]
if remapped.exists():
logging.getLogger(__name__).warning(
"Remapped legacy path %s%s", key, remapped
)
candidate = remapped
if candidate.exists():
ext = candidate.suffix.lstrip(".") ext = candidate.suffix.lstrip(".")
fname = f"{asset.asset_type.value}_{asset_id}.{ext or 'bin'}" fname = f"{asset.asset_type.value}_{asset_id}.{ext or 'bin'}"
return FileResponse( return FileResponse(
@@ -395,11 +379,8 @@ async def zip_download(
fname = base fname = base
try: try:
# Check absolute path first (local filesystem) # Check absolute path first (local filesystem)
candidate = Path(key) candidate = _resolve_asset_candidate(key)
if not candidate.is_absolute(): if candidate is not None and candidate.exists():
from app.config import settings
candidate = Path(settings.upload_dir) / key
if candidate.exists():
data = candidate.read_bytes() data = candidate.read_bytes()
else: else:
data = storage.download_bytes(key) data = storage.download_bytes(key)
@@ -440,7 +421,7 @@ async def batch_delete_assets(
): ):
"""Permanently delete multiple MediaAsset records.""" """Permanently delete multiple MediaAsset records."""
from app.utils.auth import require_global_admin from app.utils.auth import require_global_admin
require_global_admin(_user) await require_global_admin(_user)
deleted = 0 deleted = 0
for aid in asset_ids: for aid in asset_ids:
@@ -461,23 +442,15 @@ async def cleanup_orphaned_assets(
""" """
import logging import logging
from pathlib import Path from pathlib import Path
from app.config import settings
from app.core.storage import get_storage from app.core.storage import get_storage
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
storage = get_storage() storage = get_storage()
def _file_exists(key: str) -> bool: def _file_exists(key: str) -> bool:
candidate = Path(key) if Path(key).is_absolute() else Path(settings.upload_dir) / key candidate = _resolve_asset_candidate(key)
if candidate.exists(): if candidate is not None and candidate.exists():
return True return True
# Legacy path remapping
if "/shared/renders/" in key:
parts = key.split("/")
if len(parts) >= 2:
remapped = Path(settings.upload_dir) / "renders" / parts[-2] / parts[-1]
if remapped.exists():
return True
# Check MinIO # Check MinIO
try: try:
storage.download_bytes(key) storage.download_bytes(key)
+9 -4
View File
@@ -5,7 +5,7 @@ to create notification rows in the audit_log table.
""" """
import logging import logging
import uuid import uuid
from datetime import datetime from datetime import datetime, timezone
from sqlalchemy import create_engine, select from sqlalchemy import create_engine, select
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
@@ -23,6 +23,11 @@ CHANNEL_ALERT = "alert" # admin-only infrastructure issues
_engine = None _engine = None
def _utcnow_naive() -> datetime:
"""Return UTC as a naive datetime for legacy TIMESTAMP WITHOUT TIME ZONE columns."""
return datetime.now(timezone.utc).replace(tzinfo=None)
def _get_engine(): def _get_engine():
global _engine global _engine
if _engine is None: if _engine is None:
@@ -53,7 +58,7 @@ async def emit_notification(
details=details, details=details,
notification=True, notification=True,
channel=channel, channel=channel,
timestamp=datetime.utcnow(), timestamp=_utcnow_naive(),
) )
db.add(entry) db.add(entry)
await db.commit() await db.commit()
@@ -85,7 +90,7 @@ def emit_notification_sync(
details=details, details=details,
notification=True, notification=True,
channel=channel, channel=channel,
timestamp=datetime.utcnow(), timestamp=_utcnow_naive(),
) )
session.add(entry) session.add(entry)
session.commit() session.commit()
@@ -149,7 +154,7 @@ def emit_batch_render_notification_sync(order_id: str) -> None:
}, },
notification=True, notification=True,
channel=CHANNEL_NOTIFICATION, channel=CHANNEL_NOTIFICATION,
timestamp=datetime.utcnow(), timestamp=_utcnow_naive(),
) )
session.add(entry) session.add(entry)
session.commit() session.commit()
+8 -3
View File
@@ -1,5 +1,5 @@
"""Order service — order number generation and business logic.""" """Order service — order number generation and business logic."""
from datetime import datetime from datetime import datetime, timezone
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, func, create_engine, update as sql_update from sqlalchemy import select, func, create_engine, update as sql_update
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
@@ -9,9 +9,14 @@ import logging
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def _utcnow_naive() -> datetime:
"""Return UTC as a naive datetime for legacy TIMESTAMP WITHOUT TIME ZONE columns."""
return datetime.now(timezone.utc).replace(tzinfo=None)
async def generate_order_number(db: AsyncSession) -> str: async def generate_order_number(db: AsyncSession) -> str:
"""Generate next sequential order number: SA-2026-XXXXX.""" """Generate next sequential order number: SA-2026-XXXXX."""
year = datetime.utcnow().year year = datetime.now(timezone.utc).year
prefix = f"SA-{year}-" prefix = f"SA-{year}-"
# Use MAX to find the highest existing sequence number this year. # Use MAX to find the highest existing sequence number this year.
@@ -68,7 +73,7 @@ def check_order_completion(order_id: str) -> bool:
return False return False
# Auto-advance to completed # Auto-advance to completed
now = datetime.utcnow() now = _utcnow_naive()
session.execute( session.execute(
sql_update(Order) sql_update(Order)
.where(Order.id == order_id) .where(Order.id == order_id)
@@ -13,8 +13,25 @@ from app.core.pipeline_logger import PipelineLogger
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def _usd_cache_hit_refresh_reason(cad_file, usd_asset, usd_render_path) -> str | None:
"""Reuse the runtime freshness checks before accepting a USD cache hit."""
from app.domains.rendering.workflow_runtime_services import _usd_master_refresh_reason
return _usd_master_refresh_reason(
cad_file,
usd_asset=usd_asset,
usd_render_path=usd_render_path,
)
@celery_app.task(bind=True, name="app.tasks.step_tasks.generate_gltf_geometry_task", queue="asset_pipeline", max_retries=1) @celery_app.task(bind=True, name="app.tasks.step_tasks.generate_gltf_geometry_task", queue="asset_pipeline", max_retries=1)
def generate_gltf_geometry_task(self, cad_file_id: str): def generate_gltf_geometry_task(
self,
cad_file_id: str,
workflow_run_id: str | None = None,
workflow_node_id: str | None = None,
**_: object,
):
"""Export a geometry GLB directly from STEP via OCC (no STL intermediary). """Export a geometry GLB directly from STEP via OCC (no STL intermediary).
Pipeline: Pipeline:
@@ -94,10 +111,10 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
_current_hash = _compute_step_hash(str(step_path_str)) _current_hash = _compute_step_hash(str(step_path_str))
_cache_hit_asset_id = None _cache_hit_asset_id = None
# Composite cache key includes deflection settings so changing them invalidates cache # Composite cache key includes deflection settings so changing them invalidates cache.
# v3: removed BRepBuilderAPI_Transform, writer handles mm→m from STEP unit metadata # v5: occurrence-aware part-key stamping for repeated leaf meshes changed.
effective_cache_key = ( effective_cache_key = (
f"v3:{_current_hash}:{linear_deflection}:{angular_deflection}:{tessellation_engine}" f"v5:{_current_hash}:{linear_deflection}:{angular_deflection}:{tessellation_engine}"
if _current_hash else None if _current_hash else None
) )
@@ -112,6 +129,9 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
if stored_key == effective_cache_key: if stored_key == effective_cache_key:
_asset_disk_path = _Path(app_settings.upload_dir) / existing_geo.storage_key _asset_disk_path = _Path(app_settings.upload_dir) / existing_geo.storage_key
if _asset_disk_path.exists(): if _asset_disk_path.exists():
if cad_file.gltf_path != str(_asset_disk_path):
cad_file.gltf_path = str(_asset_disk_path)
session.commit()
logger.info("[CACHE] cache key match — skipping geometry GLB tessellation for %s", cad_file_id) logger.info("[CACHE] cache key match — skipping geometry GLB tessellation for %s", cad_file_id)
pl.step_done("export_glb_geometry", result={"cached": True, "asset_id": str(existing_geo.id)}) pl.step_done("export_glb_geometry", result={"cached": True, "asset_id": str(existing_geo.id)})
_cache_hit_asset_id = str(existing_geo.id) _cache_hit_asset_id = str(existing_geo.id)
@@ -133,6 +153,20 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
generate_usd_master_task.delay(cad_file_id) generate_usd_master_task.delay(cad_file_id)
except Exception: except Exception:
logger.debug("Could not queue generate_usd_master_task from cache-hit path (non-fatal)") logger.debug("Could not queue generate_usd_master_task from cache-hit path (non-fatal)")
try:
from app.domains.rendering.tasks import _update_workflow_run_status
_update_workflow_run_status(
cad_file_id,
"completed",
workflow_run_id=workflow_run_id,
workflow_node_id=workflow_node_id,
)
except Exception:
logger.exception(
"Failed to update workflow state for cached GLB export %s",
cad_file_id,
)
return {"cached": True, "asset_id": _cache_hit_asset_id} return {"cached": True, "asset_id": _cache_hit_asset_id}
step = _Path(step_path_str) step = _Path(step_path_str)
@@ -219,6 +253,9 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
existing.render_config = {"cache_key": effective_cache_key} existing.render_config = {"cache_key": effective_cache_key}
if product_id: if product_id:
existing.product_id = _uuid.UUID(product_id) existing.product_id = _uuid.UUID(product_id)
cad_file = _sess.get(CadFile, _uuid.UUID(cad_file_id))
if cad_file is not None:
cad_file.gltf_path = str(output_path)
_sess.commit() _sess.commit()
asset_id = str(existing.id) asset_id = str(existing.id)
else: else:
@@ -232,12 +269,26 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
render_config={"cache_key": effective_cache_key}, render_config={"cache_key": effective_cache_key},
) )
_sess.add(asset) _sess.add(asset)
cad_file = _sess.get(CadFile, _uuid.UUID(cad_file_id))
if cad_file is not None:
cad_file.gltf_path = str(output_path)
_sess.commit() _sess.commit()
asset_id = str(asset.id) asset_id = str(asset.id)
_eng2.dispose() _eng2.dispose()
pl.step_done("export_glb_geometry", result={"glb_path": str(output_path), "asset_id": asset_id}) pl.step_done("export_glb_geometry", result={"glb_path": str(output_path), "asset_id": asset_id})
logger.info("generate_gltf_geometry_task: MediaAsset %s created for cad %s", asset_id, cad_file_id) logger.info("generate_gltf_geometry_task: MediaAsset %s created for cad %s", asset_id, cad_file_id)
try:
from app.domains.rendering.tasks import _update_workflow_run_status
_update_workflow_run_status(
cad_file_id,
"completed",
workflow_run_id=workflow_run_id,
workflow_node_id=workflow_node_id,
)
except Exception:
logger.exception("Failed to update workflow state for GLB export %s", cad_file_id)
# Auto-chain USD master export so the canonical scene is always up to date # Auto-chain USD master export so the canonical scene is always up to date
try: try:
@@ -346,6 +397,33 @@ def generate_usd_master_task(self, cad_file_id: str) -> dict:
angular_deflection = float(sys_settings.get("render_angular_deflection", "0.05")) angular_deflection = float(sys_settings.get("render_angular_deflection", "0.05"))
sharp_threshold = float(sys_settings.get("sharp_edge_threshold", "20.0")) sharp_threshold = float(sys_settings.get("sharp_edge_threshold", "20.0"))
scripts_dir = _Path(_os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts"))
script_path = scripts_dir / "export_step_to_usd.py"
materials_helper_path = scripts_dir / "_blender_materials.py"
if not script_path.exists():
err = f"export_step_to_usd.py not found at {script_path}"
pl.step_error("usd_master", err, None)
raise RuntimeError(err)
# Cache must include the active render-script revision. Otherwise
# material resolution fixes never invalidate previously generated USD masters.
script_fingerprint = "unknown"
try:
import hashlib as _hashlib_script
_script_hash = _hashlib_script.sha256()
for candidate in (script_path, materials_helper_path):
if not candidate.exists():
continue
_script_hash.update(candidate.read_bytes())
script_fingerprint = _script_hash.hexdigest()[:12]
except Exception as exc:
logger.warning(
"[USD_MASTER] failed to fingerprint render scripts, falling back to legacy cache key: %s",
exc,
)
# Hash-based cache check: skip tessellation if file and settings haven't changed # Hash-based cache check: skip tessellation if file and settings haven't changed
from app.domains.products.cache_service import compute_step_hash as _compute_step_hash_usd from app.domains.products.cache_service import compute_step_hash as _compute_step_hash_usd
_current_hash_usd = _compute_step_hash_usd(str(step_path)) _current_hash_usd = _compute_step_hash_usd(str(step_path))
@@ -357,7 +435,7 @@ def generate_usd_master_task(self, cad_file_id: str) -> dict:
_json.dumps(material_map, sort_keys=True).encode() _json.dumps(material_map, sort_keys=True).encode()
).hexdigest()[:12] if material_map else "none" ).hexdigest()[:12] if material_map else "none"
effective_cache_key = ( effective_cache_key = (
f"{_current_hash_usd}:{linear_deflection}:{angular_deflection}:{sharp_threshold}:{_mat_hash}" f"{_current_hash_usd}:{linear_deflection}:{angular_deflection}:{sharp_threshold}:{_mat_hash}:{script_fingerprint}"
if _current_hash_usd else None if _current_hash_usd else None
) )
@@ -372,9 +450,21 @@ def generate_usd_master_task(self, cad_file_id: str) -> dict:
if stored_key == effective_cache_key: if stored_key == effective_cache_key:
_usd_disk_path = _Path(app_settings.upload_dir) / existing_usd.storage_key _usd_disk_path = _Path(app_settings.upload_dir) / existing_usd.storage_key
if _usd_disk_path.exists(): if _usd_disk_path.exists():
logger.info("[CACHE] cache key match — skipping USD master tessellation for %s", cad_file_id) refresh_reason = _usd_cache_hit_refresh_reason(
pl.step_done("usd_master", result={"cached": True, "asset_id": str(existing_usd.id)}) cad_file,
_cache_hit_asset_id = str(existing_usd.id) existing_usd,
_usd_disk_path,
)
if refresh_reason is None:
logger.info("[CACHE] cache key match — skipping USD master tessellation for %s", cad_file_id)
pl.step_done("usd_master", result={"cached": True, "asset_id": str(existing_usd.id)})
_cache_hit_asset_id = str(existing_usd.id)
else:
logger.info(
"[CACHE] USD cache key matched for %s but asset is stale (%s) — rebuilding",
cad_file_id,
refresh_reason,
)
else: else:
logger.info("[CACHE] cache key match but USD asset missing on disk — re-running tessellation for %s", cad_file_id) logger.info("[CACHE] cache key match but USD asset missing on disk — re-running tessellation for %s", cad_file_id)
else: else:
@@ -396,13 +486,6 @@ def generate_usd_master_task(self, cad_file_id: str) -> dict:
raise RuntimeError(err) raise RuntimeError(err)
output_path = step_path.parent / f"{step_path.stem}_master.usd" output_path = step_path.parent / f"{step_path.stem}_master.usd"
scripts_dir = _Path(_os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts"))
script_path = scripts_dir / "export_step_to_usd.py"
if not script_path.exists():
err = f"export_step_to_usd.py not found at {script_path}"
pl.step_error("usd_master", err, None)
raise RuntimeError(err)
cmd = [ cmd = [
_sys.executable, str(script_path), _sys.executable, str(script_path),
@@ -31,7 +31,13 @@ def _bbox_from_step_cadquery(step_path: str) -> dict | None:
@celery_app.task(bind=True, name="app.tasks.step_tasks.process_step_file", queue="step_processing") @celery_app.task(bind=True, name="app.tasks.step_tasks.process_step_file", queue="step_processing")
def process_step_file(self, cad_file_id: str): def process_step_file(
self,
cad_file_id: str,
workflow_run_id: str | None = None,
workflow_node_id: str | None = None,
**_: object,
):
"""Process a STEP file: extract objects, generate thumbnail, convert to glTF. """Process a STEP file: extract objects, generate thumbnail, convert to glTF.
After processing completes, auto-populate cad_part_materials from Excel After processing completes, auto-populate cad_part_materials from Excel
@@ -122,10 +128,24 @@ def process_step_file(self, cad_file_id: str):
r.delete(lock_key) # always release on completion or unhandled error r.delete(lock_key) # always release on completion or unhandled error
pl.step_done("process_step_file") pl.step_done("process_step_file")
try:
from app.domains.rendering.tasks import _update_workflow_run_status
# Queue thumbnail rendering on the dedicated single-concurrency worker _update_workflow_run_status(
from app.domains.pipeline.tasks.render_thumbnail import render_step_thumbnail cad_file_id,
render_step_thumbnail.delay(cad_file_id) "completed",
workflow_run_id=workflow_run_id,
workflow_node_id=workflow_node_id,
)
except Exception:
logger.exception("Failed to update workflow state for process_step_file %s", cad_file_id)
# Legacy flow still auto-queues thumbnail generation here.
# Graph-mode workflows dispatch explicit thumbnail save/render nodes instead.
if workflow_run_id is None:
from app.domains.pipeline.tasks.render_thumbnail import render_step_thumbnail
render_step_thumbnail.delay(cad_file_id)
def _auto_populate_materials_for_cad(cad_file_id: str, tenant_id: str | None = None) -> None: def _auto_populate_materials_for_cad(cad_file_id: str, tenant_id: str | None = None) -> None:
@@ -8,6 +8,7 @@ import logging
from datetime import datetime from datetime import datetime
from app.tasks.celery_app import celery_app from app.tasks.celery_app import celery_app
from app.core.render_paths import ensure_group_writable_dir
from app.core.task_logs import log_task_event from app.core.task_logs import log_task_event
from app.core.pipeline_logger import PipelineLogger from app.core.pipeline_logger import PipelineLogger
@@ -149,7 +150,7 @@ def render_order_line_task(self, order_line_id: str):
product_name = render_invocation.product_name product_name = render_invocation.product_name
ot_name = render_invocation.output_type_name ot_name = render_invocation.output_type_name
output_path = render_invocation.output_path output_path = render_invocation.output_path
_Path(output_path).parent.mkdir(parents=True, exist_ok=True) ensure_group_writable_dir(_Path(output_path).parent)
render_width = render_invocation.width render_width = render_invocation.width
render_height = render_invocation.height render_height = render_invocation.height
render_engine = render_invocation.engine render_engine = render_invocation.engine
@@ -19,6 +19,247 @@ logger = logging.getLogger(__name__)
_THUMBNAIL_SAMPLE_CAP = 64 _THUMBNAIL_SAMPLE_CAP = 64
def _resolve_thumbnail_render_context(session, cad) -> dict[str, object]:
"""Reuse workflow material/USD resolution for CAD thumbnails when possible."""
context: dict[str, object] = {}
if not cad:
return context
parsed_objects = cad.parsed_objects if isinstance(cad.parsed_objects, dict) else {}
raw_part_names = parsed_objects.get("objects") if isinstance(parsed_objects, dict) else None
if isinstance(raw_part_names, list):
part_names_ordered = [
str(part_name).strip()
for part_name in raw_part_names
if isinstance(part_name, str) and part_name.strip()
]
if part_names_ordered:
context["part_names_ordered"] = part_names_ordered
try:
from sqlalchemy import select
from app.core.render_paths import resolve_result_path
from app.domains.media.models import MediaAsset, MediaAssetType
from app.domains.products.models import Product
from app.domains.rendering.workflow_runtime_services import (
_build_effective_material_lookup,
_usd_master_refresh_reason,
)
from app.services.material_service import resolve_material_map
from app.services.template_service import get_material_library_path_for_session
product = session.execute(
select(Product)
.where(Product.cad_file_id == cad.id)
.order_by(Product.is_active.desc(), Product.updated_at.desc(), Product.created_at.desc())
.limit(1)
).scalar_one_or_none()
material_library_path = get_material_library_path_for_session(session)
materials_source = product.cad_part_materials or [] if product else []
raw_material_map = _build_effective_material_lookup(cad, materials_source)
if material_library_path and raw_material_map:
material_map = resolve_material_map(raw_material_map)
if material_map:
context["material_library_path"] = material_library_path
context["material_map"] = material_map
usd_asset = session.execute(
select(MediaAsset)
.where(
MediaAsset.cad_file_id == cad.id,
MediaAsset.asset_type == MediaAssetType.usd_master,
)
.order_by(MediaAsset.created_at.desc())
.limit(1)
).scalar_one_or_none()
if usd_asset:
usd_path = resolve_result_path(usd_asset.storage_key)
refresh_reason = _usd_master_refresh_reason(
cad,
usd_asset=usd_asset,
usd_render_path=usd_path,
)
if refresh_reason is None and usd_path and usd_path.exists():
context["usd_path"] = usd_path
except Exception:
logger.exception("Failed to resolve thumbnail render context for cad %s", getattr(cad, "id", None))
return context
def _render_thumbnail_core(
*,
cad_file_id: str,
workflow_run_id: str | None = None,
workflow_node_id: str | None = None,
renderer: str | None = None,
render_engine: str | None = None,
samples: int | None = None,
width: int | None = None,
height: int | None = None,
transparent_bg: bool | None = None,
include_postprocess: bool,
queue_legacy_glb_follow_up: bool,
) -> None:
"""Render a CAD thumbnail with optional legacy post-processing."""
pl = PipelineLogger(task_id=None)
pl.step_start("render_step_thumbnail", {"cad_file_id": cad_file_id})
logger.info("Rendering thumbnail for CAD file: %s", cad_file_id)
from app.core.tenant_context import resolve_tenant_id_for_cad
tenant_id = resolve_tenant_id_for_cad(cad_file_id)
try:
from app.models.cad_file import CadFile
from app.domains.products.cache_service import compute_step_hash
with _pipeline_session(tenant_id) as session:
cad = session.get(CadFile, cad_file_id)
if cad and cad.stored_path and not cad.step_file_hash:
cad.step_file_hash = compute_step_hash(cad.stored_path)
session.commit()
logger.info("Saved step_file_hash for %s: %s", cad_file_id, cad.step_file_hash[:12])
except Exception:
logger.warning("step_file_hash computation failed for %s (non-fatal)", cad_file_id)
render_context: dict[str, object] = {}
try:
from app.models.cad_file import CadFile
with _pipeline_session(tenant_id) as session:
cad = session.get(CadFile, cad_file_id)
render_context = _resolve_thumbnail_render_context(session, cad)
except Exception:
logger.warning("thumbnail render context resolution failed for %s; using fallback render path", cad_file_id)
try:
from app.services.step_processor import regenerate_cad_thumbnail
pl.info("render_step_thumbnail", "Calling regenerate_cad_thumbnail")
with _capped_thumbnail_samples():
success = regenerate_cad_thumbnail(
cad_file_id,
part_colors={},
renderer=renderer,
render_engine=render_engine,
samples=samples,
width=width,
height=height,
transparent_bg=transparent_bg,
**render_context,
)
if not success:
raise RuntimeError("regenerate_cad_thumbnail returned False")
except Exception as exc:
pl.step_error("render_step_thumbnail", f"Thumbnail render failed: {exc}", exc)
logger.error("Thumbnail render failed for %s: %s", cad_file_id, exc)
raise
resolved_tenant_id: str | None = None
if include_postprocess:
try:
from app.models.cad_file import CadFile
from app.domains.rendering.workflow_runtime_services import resolve_cad_bbox
with _pipeline_session(tenant_id) as session:
cad = session.get(CadFile, cad_file_id)
if not cad:
logger.warning("CadFile %s not found in post-render phase", cad_file_id)
else:
step_path = cad.stored_path
attrs = cad.mesh_attributes or {}
if step_path and not attrs.get("dimensions_mm"):
step_file = Path(step_path)
glb_path = step_file.parent / f"{step_file.stem}_thumbnail.glb"
bbox_data = resolve_cad_bbox(step_path, glb_path=str(glb_path)).bbox_data
if bbox_data:
cad.mesh_attributes = {**attrs, **bbox_data}
attrs = cad.mesh_attributes
dims = bbox_data["dimensions_mm"]
logger.info(
"bbox for %s: %s×%s×%s mm",
cad_file_id,
dims["x"],
dims["y"],
dims["z"],
)
if step_path and "sharp_edge_pairs" not in attrs:
try:
from app.services.step_processor import extract_mesh_edge_data
edge_data = extract_mesh_edge_data(step_path)
if edge_data:
cad.mesh_attributes = {**attrs, **edge_data}
n_pairs = len(edge_data.get("sharp_edge_pairs", []))
logger.info(
"Sharp edge data extracted for %s: %s sharp edges",
cad_file_id,
n_pairs,
)
except Exception:
logger.exception(
"Sharp edge extraction failed for %s (non-fatal)",
cad_file_id,
)
session.commit()
resolved_tenant_id = str(cad.tenant_id) if cad.tenant_id else None
except Exception:
logger.exception("Post-render processing failed for %s (non-fatal)", cad_file_id)
try:
from app.domains.pipeline.tasks.extract_metadata import _auto_populate_materials_for_cad
_auto_populate_materials_for_cad(cad_file_id, tenant_id=tenant_id)
except Exception:
logger.exception(
"Auto material population failed for cad_file %s (non-fatal)",
cad_file_id,
)
try:
if resolved_tenant_id:
from app.core.websocket import publish_event_sync
publish_event_sync(
resolved_tenant_id,
{
"type": "cad_processing_complete",
"cad_file_id": cad_file_id,
"status": "completed",
},
)
except Exception:
logger.debug("WebSocket publish for CAD complete skipped (non-fatal)")
if queue_legacy_glb_follow_up:
try:
from app.domains.pipeline.tasks.export_glb import generate_gltf_geometry_task
generate_gltf_geometry_task.delay(cad_file_id)
pl.info("render_step_thumbnail", f"Queued generate_gltf_geometry_task for {cad_file_id}")
except Exception:
logger.debug("Could not queue generate_gltf_geometry_task (non-fatal)")
pl.step_done("render_step_thumbnail")
try:
from app.domains.rendering.tasks import _update_workflow_run_status
_update_workflow_run_status(
cad_file_id,
"completed",
workflow_run_id=workflow_run_id,
workflow_node_id=workflow_node_id,
)
except Exception:
logger.exception("Failed to update workflow state for thumbnail render %s", cad_file_id)
@contextmanager @contextmanager
def _capped_thumbnail_samples(): def _capped_thumbnail_samples():
"""Temporarily cap render samples for thumbnail renders. """Temporarily cap render samples for thumbnail renders.
@@ -73,123 +314,88 @@ def _pipeline_session(tenant_id: str | None = None):
@celery_app.task(bind=True, name="app.tasks.step_tasks.render_step_thumbnail", queue="asset_pipeline") @celery_app.task(bind=True, name="app.tasks.step_tasks.render_step_thumbnail", queue="asset_pipeline")
def render_step_thumbnail(self, cad_file_id: str): def render_step_thumbnail(
self,
cad_file_id: str,
workflow_run_id: str | None = None,
workflow_node_id: str | None = None,
renderer: str | None = None,
render_engine: str | None = None,
samples: int | None = None,
width: int | None = None,
height: int | None = None,
transparent_bg: bool | None = None,
**_: object,
):
"""Render the thumbnail for a freshly-processed STEP file. """Render the thumbnail for a freshly-processed STEP file.
Runs on the dedicated asset_pipeline queue (concurrency=1) so the Runs on the dedicated asset_pipeline queue (concurrency=1) so the
blender-renderer service is never overwhelmed by concurrent requests. blender-renderer service is never overwhelmed by concurrent requests.
On success, also auto-populates materials and marks the CadFile as completed. On success, also auto-populates materials and marks the CadFile as completed.
""" """
pl = PipelineLogger(task_id=self.request.id)
pl.step_start("render_step_thumbnail", {"cad_file_id": cad_file_id})
logger.info(f"Rendering thumbnail for CAD file: {cad_file_id}")
from app.core.tenant_context import resolve_tenant_id_for_cad
_tenant_id = resolve_tenant_id_for_cad(cad_file_id)
# ── Pre-render: compute hash ──────────────────────────────────────────
try: try:
from app.models.cad_file import CadFile _render_thumbnail_core(
from app.domains.products.cache_service import compute_step_hash cad_file_id=cad_file_id,
workflow_run_id=workflow_run_id,
with _pipeline_session(_tenant_id) as session: workflow_node_id=workflow_node_id,
cad = session.get(CadFile, cad_file_id) renderer=renderer,
if cad and cad.stored_path and not cad.step_file_hash: render_engine=render_engine,
cad.step_file_hash = compute_step_hash(cad.stored_path) samples=samples,
session.commit() width=width,
logger.info(f"Saved step_file_hash for {cad_file_id}: {cad.step_file_hash[:12]}") height=height,
except Exception: transparent_bg=transparent_bg,
logger.warning(f"step_file_hash computation failed for {cad_file_id} (non-fatal)") include_postprocess=True,
queue_legacy_glb_follow_up=workflow_run_id is None,
# ── Render thumbnail (with capped samples for 512x512) ────────────── )
try:
from app.services.step_processor import regenerate_cad_thumbnail
pl.info("render_step_thumbnail", "Calling regenerate_cad_thumbnail")
with _capped_thumbnail_samples():
success = regenerate_cad_thumbnail(cad_file_id, part_colors={})
if not success:
raise RuntimeError("regenerate_cad_thumbnail returned False")
except Exception as exc: except Exception as exc:
pl.step_error("render_step_thumbnail", f"Thumbnail render failed: {exc}", exc)
logger.error(f"Thumbnail render failed for {cad_file_id}: {exc}")
raise self.retry(exc=exc, countdown=30, max_retries=2) raise self.retry(exc=exc, countdown=30, max_retries=2)
# ── Post-render: bbox + sharp edges + materials (single session) ──────
@celery_app.task(bind=True, name="app.tasks.step_tasks.render_graph_thumbnail", queue="asset_pipeline")
def render_graph_thumbnail(
self,
cad_file_id: str,
workflow_run_id: str | None = None,
workflow_node_id: str | None = None,
renderer: str | None = None,
render_engine: str | None = None,
samples: int | None = None,
width: int | None = None,
height: int | None = None,
transparent_bg: bool | None = None,
**_: object,
):
"""Render a CAD thumbnail for graph workflows without legacy follow-up side effects."""
try: try:
from app.models.cad_file import CadFile _render_thumbnail_core(
from app.domains.rendering.workflow_runtime_services import resolve_cad_bbox cad_file_id=cad_file_id,
workflow_run_id=workflow_run_id,
with _pipeline_session(_tenant_id) as session: workflow_node_id=workflow_node_id,
cad = session.get(CadFile, cad_file_id) renderer=renderer,
if not cad: render_engine=render_engine,
logger.warning(f"CadFile {cad_file_id} not found in post-render phase") samples=samples,
else: width=width,
step_path = cad.stored_path height=height,
attrs = cad.mesh_attributes or {} transparent_bg=transparent_bg,
include_postprocess=False,
# Bounding box extraction queue_legacy_glb_follow_up=False,
if step_path and not attrs.get("dimensions_mm"): )
_step = Path(step_path) except Exception as exc:
_glb = _step.parent / f"{_step.stem}_thumbnail.glb" raise self.retry(exc=exc, countdown=30, max_retries=2)
bbox_data = resolve_cad_bbox(step_path, glb_path=str(_glb)).bbox_data
if bbox_data:
cad.mesh_attributes = {**attrs, **bbox_data}
attrs = cad.mesh_attributes
dims = bbox_data["dimensions_mm"]
logger.info(f"bbox for {cad_file_id}: {dims['x']}×{dims['y']}×{dims['z']} mm")
# Sharp edge extraction (PCurve-based, runs on render-worker with OCP)
if step_path and "sharp_edge_pairs" not in attrs:
try:
from app.services.step_processor import extract_mesh_edge_data
edge_data = extract_mesh_edge_data(step_path)
if edge_data:
cad.mesh_attributes = {**attrs, **edge_data}
n_pairs = len(edge_data.get("sharp_edge_pairs", []))
logger.info(f"Sharp edge data extracted for {cad_file_id}: {n_pairs} sharp edges")
except Exception:
logger.exception(f"Sharp edge extraction failed for {cad_file_id} (non-fatal)")
session.commit()
# WebSocket broadcast
_tid = str(cad.tenant_id) if cad.tenant_id else None
except Exception:
logger.exception(f"Post-render processing failed for {cad_file_id} (non-fatal)")
_tid = None
# Auto-populate materials
try:
from app.domains.pipeline.tasks.extract_metadata import _auto_populate_materials_for_cad
_auto_populate_materials_for_cad(cad_file_id, tenant_id=_tenant_id)
except Exception:
logger.exception(f"Auto material population failed for cad_file {cad_file_id} (non-fatal)")
# Broadcast WebSocket event
try:
if _tid:
from app.core.websocket import publish_event_sync
publish_event_sync(_tid, {
"type": "cad_processing_complete",
"cad_file_id": cad_file_id,
"status": "completed",
})
except Exception:
logger.debug("WebSocket publish for CAD complete skipped (non-fatal)")
# Auto-generate geometry GLB
try:
from app.domains.pipeline.tasks.export_glb import generate_gltf_geometry_task
generate_gltf_geometry_task.delay(cad_file_id)
pl.info("render_step_thumbnail", f"Queued generate_gltf_geometry_task for {cad_file_id}")
except Exception:
logger.debug("Could not queue generate_gltf_geometry_task (non-fatal)")
pl.step_done("render_step_thumbnail")
@celery_app.task(bind=True, name="app.tasks.step_tasks.regenerate_thumbnail", queue="asset_pipeline") @celery_app.task(bind=True, name="app.tasks.step_tasks.regenerate_thumbnail", queue="asset_pipeline")
def regenerate_thumbnail(self, cad_file_id: str, part_colors: dict): def regenerate_thumbnail(
self,
cad_file_id: str,
part_colors: dict,
renderer: str | None = None,
render_engine: str | None = None,
samples: int | None = None,
width: int | None = None,
height: int | None = None,
transparent_bg: bool | None = None,
):
"""Regenerate thumbnail with per-part colours.""" """Regenerate thumbnail with per-part colours."""
pl = PipelineLogger(task_id=self.request.id) pl = PipelineLogger(task_id=self.request.id)
pl.step_start("regenerate_thumbnail", {"cad_file_id": cad_file_id}) pl.step_start("regenerate_thumbnail", {"cad_file_id": cad_file_id})
@@ -200,11 +406,40 @@ def regenerate_thumbnail(self, cad_file_id: str, part_colors: dict):
_tenant_id = resolve_tenant_id_for_cad(cad_file_id) _tenant_id = resolve_tenant_id_for_cad(cad_file_id)
try: try:
from app.services.step_processor import regenerate_cad_thumbnail from app.services.step_processor import MissingCadResourceError, regenerate_cad_thumbnail
render_context: dict[str, object] = {}
try:
from app.models.cad_file import CadFile
with _pipeline_session(_tenant_id) as session:
cad = session.get(CadFile, cad_file_id)
render_context = _resolve_thumbnail_render_context(session, cad)
except Exception:
logger.warning(
"thumbnail render context resolution failed for %s during regeneration; using fallback render path",
cad_file_id,
)
with _capped_thumbnail_samples(): with _capped_thumbnail_samples():
success = regenerate_cad_thumbnail(cad_file_id, part_colors) success = regenerate_cad_thumbnail(
cad_file_id,
part_colors,
renderer=renderer,
render_engine=render_engine,
samples=samples,
width=width,
height=height,
transparent_bg=transparent_bg,
**render_context,
)
if not success: if not success:
raise RuntimeError("regenerate_cad_thumbnail returned False") raise RuntimeError("regenerate_cad_thumbnail returned False")
except MissingCadResourceError as exc:
pl.warning("regenerate_thumbnail", f"Skipping stale thumbnail regeneration: {exc}")
logger.warning("Skipping thumbnail regeneration for %s: %s", cad_file_id, exc)
pl.step_done("regenerate_thumbnail")
return
except Exception as exc: except Exception as exc:
pl.step_error("regenerate_thumbnail", f"Thumbnail regeneration failed: {exc}", exc) pl.step_error("regenerate_thumbnail", f"Thumbnail regeneration failed: {exc}", exc)
logger.error(f"Thumbnail regeneration failed for {cad_file_id}: {exc}") logger.error(f"Thumbnail regeneration failed for {cad_file_id}: {exc}")
+158 -10
View File
@@ -16,6 +16,8 @@ import logging
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
_WORKFLOW_ROLLOUT_MODES = {"legacy_only", "shadow", "graph"}
def _build_rollout_signal( def _build_rollout_signal(
*, *,
@@ -39,6 +41,13 @@ def _build_rollout_signal(
} }
def _normalize_workflow_rollout_mode(value: str | None) -> str:
normalized = (value or "legacy_only").strip().lower()
if normalized in _WORKFLOW_ROLLOUT_MODES:
return normalized
return "legacy_only"
def dispatch_render_with_workflow(order_line_id: str) -> dict: def dispatch_render_with_workflow(order_line_id: str) -> dict:
"""Dispatch a render for the given order line. """Dispatch a render for the given order line.
@@ -54,12 +63,19 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
from app.config import settings from app.config import settings
from app.domains.orders.models import OrderLine from app.domains.orders.models import OrderLine
from app.domains.rendering.models import OutputType, WorkflowDefinition from app.domains.rendering.models import OutputType, WorkflowDefinition
from app.domains.rendering.output_type_contracts import (
derive_supported_artifact_kinds_from_workflow_config,
)
from app.domains.rendering.workflow_config_utils import ( from app.domains.rendering.workflow_config_utils import (
canonicalize_workflow_config, canonicalize_workflow_config,
extract_runtime_workflow, extract_runtime_workflow,
get_workflow_execution_mode, get_workflow_execution_mode,
) )
from app.domains.rendering.workflow_executor import prepare_workflow_context from app.domains.rendering.workflow_executor import (
WorkflowTaskSubmissionError,
prepare_workflow_context,
submit_prepared_workflow_tasks,
)
from app.domains.rendering.workflow_graph_runtime import ( from app.domains.rendering.workflow_graph_runtime import (
execute_graph_workflow, execute_graph_workflow,
find_unsupported_graph_nodes, find_unsupported_graph_nodes,
@@ -150,7 +166,41 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
) )
return legacy_result return legacy_result
execution_mode = get_workflow_execution_mode(canonical_config, default="legacy") supported_artifact_kinds = derive_supported_artifact_kinds_from_workflow_config(canonical_config)
output_type_artifact_kind = getattr(output_type, "artifact_kind", None)
if output_type_artifact_kind and output_type_artifact_kind not in supported_artifact_kinds:
supported = ", ".join(supported_artifact_kinds) if supported_artifact_kinds else "none"
logger.warning(
"order_line %s: workflow_definition_id %s is incompatible with output_type %s artifact_kind %s; "
"falling back to legacy dispatch",
order_line_id,
wf_def.id,
output_type.id,
output_type_artifact_kind,
)
legacy_result = _legacy_dispatch(order_line_id)
legacy_result.update(
_build_rollout_signal(
gate_status="workflow_contract_mismatch",
ready=False,
reasons=[
"Linked workflow does not produce the artifact kind required by the output type; legacy dispatch remains authoritative.",
f"Expected artifact kind: {output_type_artifact_kind}. Supported by workflow: [{supported}].",
],
workflow_def_id=wf_def.id,
output_type_id=output_type.id,
)
)
return legacy_result
configured_execution_mode = get_workflow_execution_mode(canonical_config, default="legacy")
workflow_rollout_mode = _normalize_workflow_rollout_mode(
getattr(output_type, "workflow_rollout_mode", None)
)
legacy_runtime_gate_status = "workflow_legacy_runtime"
legacy_runtime_reasons = [
"Workflow definition is active, but execution still uses the legacy runtime path."
]
def _prepare_graph_context(target_mode: str): def _prepare_graph_context(target_mode: str):
workflow_context = prepare_workflow_context( workflow_context = prepare_workflow_context(
@@ -175,7 +225,38 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
session.commit() session.commit()
return run return run
if execution_mode == "graph": if configured_execution_mode in {"graph", "shadow"} and workflow_rollout_mode == "legacy_only":
logger.info(
"order_line %s: workflow_definition_id %s is graph-capable but output_type %s is pinned to legacy_only rollout",
order_line_id,
wf_def.id,
output_type.id,
)
legacy_result = _legacy_dispatch(order_line_id)
legacy_result["workflow_rollout_mode"] = workflow_rollout_mode
legacy_result["configured_execution_mode"] = configured_execution_mode
legacy_result.update(
_build_rollout_signal(
gate_status="rollout_legacy_only",
ready=False,
reasons=[
"Output type rollout mode is pinned to legacy_only; legacy dispatch remains authoritative.",
f"Linked workflow stays attached in configured execution mode '{configured_execution_mode}' until rollout is promoted.",
],
workflow_def_id=wf_def.id,
output_type_id=output_type.id,
)
)
return legacy_result
if workflow_rollout_mode in {"graph", "shadow"} and configured_execution_mode not in {"graph", "shadow"}:
legacy_runtime_gate_status = "rollout_requires_graph_workflow"
legacy_runtime_reasons = [
f"Output type rollout mode '{workflow_rollout_mode}' requires a workflow configured for graph or shadow execution.",
f"Linked workflow is still configured for '{configured_execution_mode}', so legacy runtime remains authoritative.",
]
if configured_execution_mode in {"graph", "shadow"} and workflow_rollout_mode == "graph":
try: try:
workflow_context = _prepare_graph_context("graph") workflow_context = _prepare_graph_context("graph")
except Exception as exc: except Exception as exc:
@@ -225,13 +306,44 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
return legacy_result return legacy_result
try: try:
dispatch_result = execute_graph_workflow(session, workflow_context) dispatch_result = execute_graph_workflow(
session,
workflow_context,
dispatch_tasks=False,
)
session.commit() session.commit()
submit_prepared_workflow_tasks(dispatch_result)
except Exception as exc: except Exception as exc:
session.rollback() session.rollback()
session.add(run) session.add(run)
mark_workflow_run_failed(run, str(exc)) mark_workflow_run_failed(run, str(exc))
session.commit() session.commit()
if isinstance(exc, WorkflowTaskSubmissionError) and exc.submitted_task_ids:
logger.exception(
"order_line %s: graph workflow submission partially failed after %d task(s); "
"not falling back to legacy to avoid duplicate renders",
order_line_id,
len(exc.submitted_task_ids),
)
return {
"backend": "workflow_graph",
"execution_mode": "graph",
"workflow_run_id": str(run.id),
"workflow_rollout_mode": workflow_rollout_mode,
"configured_execution_mode": configured_execution_mode,
"submission_status": "partial_failure",
"submitted_task_ids": exc.submitted_task_ids,
**_build_rollout_signal(
gate_status="graph_submission_failed",
ready=False,
reasons=[
"Graph workflow task submission failed after some tasks were already queued.",
f"Submission error: {exc}.",
],
workflow_def_id=wf_def.id,
output_type_id=output_type.id,
),
}
logger.exception( logger.exception(
"order_line %s: graph workflow execution via definition %s failed, falling back to legacy dispatch", "order_line %s: graph workflow execution via definition %s failed, falling back to legacy dispatch",
order_line_id, order_line_id,
@@ -257,6 +369,8 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
"workflow_run_id": str(run.id), "workflow_run_id": str(run.id),
"celery_task_id": dispatch_result.task_ids[0] if dispatch_result.task_ids else None, "celery_task_id": dispatch_result.task_ids[0] if dispatch_result.task_ids else None,
"task_ids": dispatch_result.task_ids, "task_ids": dispatch_result.task_ids,
"workflow_rollout_mode": workflow_rollout_mode,
"configured_execution_mode": configured_execution_mode,
} }
result.update( result.update(
_build_rollout_signal( _build_rollout_signal(
@@ -267,10 +381,10 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
workflow_def_id=wf_def.id, workflow_def_id=wf_def.id,
output_type_id=output_type.id, output_type_id=output_type.id,
) )
) )
return result return result
if execution_mode == "shadow": if configured_execution_mode in {"graph", "shadow"} and workflow_rollout_mode == "shadow":
legacy_result = _legacy_dispatch(order_line_id) legacy_result = _legacy_dispatch(order_line_id)
try: try:
@@ -330,13 +444,43 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
return legacy_result return legacy_result
try: try:
dispatch_result = execute_graph_workflow(session, workflow_context) dispatch_result = execute_graph_workflow(
session,
workflow_context,
dispatch_tasks=False,
)
session.commit() session.commit()
submit_prepared_workflow_tasks(dispatch_result)
except Exception as exc: except Exception as exc:
session.rollback() session.rollback()
session.add(run) session.add(run)
mark_workflow_run_failed(run, str(exc)) mark_workflow_run_failed(run, str(exc))
session.commit() session.commit()
if isinstance(exc, WorkflowTaskSubmissionError) and exc.submitted_task_ids:
logger.exception(
"order_line %s: shadow workflow submission partially failed after %d task(s); "
"legacy dispatch remains authoritative",
order_line_id,
len(exc.submitted_task_ids),
)
legacy_result["execution_mode"] = "shadow"
legacy_result["shadow_status"] = "partial_failure"
legacy_result["shadow_error"] = str(exc)
legacy_result["shadow_workflow_run_id"] = str(run.id)
legacy_result["shadow_submitted_task_ids"] = exc.submitted_task_ids
legacy_result.update(
_build_rollout_signal(
gate_status="shadow_submission_failed",
ready=False,
reasons=[
"Shadow workflow task submission failed after some tasks were already queued.",
f"Submission error: {exc}.",
],
workflow_def_id=wf_def.id,
output_type_id=output_type.id,
)
)
return legacy_result
logger.exception( logger.exception(
"order_line %s: shadow workflow execution via definition %s failed; legacy dispatch remains authoritative", "order_line %s: shadow workflow execution via definition %s failed; legacy dispatch remains authoritative",
order_line_id, order_line_id,
@@ -364,6 +508,8 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
legacy_result["shadow_status"] = "dispatched" legacy_result["shadow_status"] = "dispatched"
legacy_result["shadow_workflow_run_id"] = str(run.id) legacy_result["shadow_workflow_run_id"] = str(run.id)
legacy_result["shadow_task_ids"] = dispatch_result.task_ids legacy_result["shadow_task_ids"] = dispatch_result.task_ids
legacy_result["workflow_rollout_mode"] = workflow_rollout_mode
legacy_result["configured_execution_mode"] = configured_execution_mode
legacy_result.update( legacy_result.update(
_build_rollout_signal( _build_rollout_signal(
gate_status="pending_shadow_verdict", gate_status="pending_shadow_verdict",
@@ -375,7 +521,7 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
workflow_def_id=wf_def.id, workflow_def_id=wf_def.id,
output_type_id=output_type.id, output_type_id=output_type.id,
) )
) )
return legacy_result return legacy_result
workflow_type, params = extract_runtime_workflow(canonical_config) workflow_type, params = extract_runtime_workflow(canonical_config)
@@ -519,12 +665,14 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
"execution_mode": "legacy", "execution_mode": "legacy",
"workflow_run_id": str(run.id), "workflow_run_id": str(run.id),
"celery_task_id": celery_task_id, "celery_task_id": celery_task_id,
"workflow_rollout_mode": workflow_rollout_mode,
"configured_execution_mode": configured_execution_mode,
} }
result.update( result.update(
_build_rollout_signal( _build_rollout_signal(
gate_status="workflow_legacy_runtime", gate_status=legacy_runtime_gate_status,
ready=False, ready=False,
reasons=["Workflow definition is active, but execution still uses the legacy runtime path."], reasons=legacy_runtime_reasons,
workflow_def_id=wf_def.id, workflow_def_id=wf_def.id,
output_type_id=output_type.id, output_type_id=output_type.id,
) )
+30 -2
View File
@@ -1,5 +1,6 @@
import uuid import uuid
from datetime import datetime from datetime import datetime
from typing import Any
from sqlalchemy import String, DateTime, Boolean, Text, Integer, Float, ForeignKey, Table, Column from sqlalchemy import String, DateTime, Boolean, Text, Integer, Float, ForeignKey, Table, Column
from sqlalchemy.orm import Mapped, mapped_column, relationship from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.dialects.postgresql import UUID, JSONB from sqlalchemy.dialects.postgresql import UUID, JSONB
@@ -15,6 +16,17 @@ render_template_output_types = Table(
) )
VALID_RENDER_BACKENDS = {"celery"} VALID_RENDER_BACKENDS = {"celery"}
OUTPUT_TYPE_WORKFLOW_FAMILIES = {"cad_file", "order_line"}
OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES = {"legacy_only", "shadow", "graph"}
OUTPUT_TYPE_ARTIFACT_KINDS = {
"still_image",
"turntable_video",
"model_export",
"thumbnail_image",
"blend_asset",
"package",
"custom",
}
class OutputType(Base): class OutputType(Base):
@@ -23,14 +35,21 @@ class OutputType(Base):
id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
name: Mapped[str] = mapped_column(String(200), unique=True, nullable=False) name: Mapped[str] = mapped_column(String(200), unique=True, nullable=False)
description: Mapped[str | None] = mapped_column(Text, nullable=True) description: Mapped[str | None] = mapped_column(Text, nullable=True)
renderer: Mapped[str] = mapped_column(String(50), nullable=False, default="threejs") renderer: Mapped[str] = mapped_column(String(50), nullable=False, default="blender")
render_settings: Mapped[dict] = mapped_column(JSONB, nullable=False, default=dict) render_settings: Mapped[dict] = mapped_column(JSONB, nullable=False, default=dict)
output_format: Mapped[str] = mapped_column(String(20), nullable=False, default="png") output_format: Mapped[str] = mapped_column(String(20), nullable=False, default="png")
sort_order: Mapped[int] = mapped_column(Integer, nullable=False, default=0) sort_order: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
compatible_categories: Mapped[list] = mapped_column(JSONB, default=list, server_default="[]") compatible_categories: Mapped[list] = mapped_column(JSONB, default=list, server_default="[]")
render_backend: Mapped[str] = mapped_column(String(20), nullable=False, default="auto", server_default="auto") render_backend: Mapped[str] = mapped_column(String(20), nullable=False, default="celery", server_default="auto")
is_animation: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false") is_animation: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false")
transparent_bg: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false") transparent_bg: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false")
workflow_family: Mapped[str] = mapped_column(
String(20), nullable=False, default="order_line", server_default="order_line"
)
artifact_kind: Mapped[str] = mapped_column(
String(50), nullable=False, default="still_image", server_default="still_image"
)
invocation_overrides: Mapped[dict] = mapped_column(JSONB, nullable=False, default=dict, server_default="{}")
cycles_device: Mapped[str | None] = mapped_column(String(10), nullable=True, default=None) cycles_device: Mapped[str | None] = mapped_column(String(10), nullable=True, default=None)
pricing_tier_id: Mapped[int | None] = mapped_column( pricing_tier_id: Mapped[int | None] = mapped_column(
Integer, ForeignKey("pricing_tiers.id", ondelete="SET NULL"), nullable=True, index=True Integer, ForeignKey("pricing_tiers.id", ondelete="SET NULL"), nullable=True, index=True
@@ -49,6 +68,9 @@ class OutputType(Base):
workflow_definition_id: Mapped[uuid.UUID | None] = mapped_column( workflow_definition_id: Mapped[uuid.UUID | None] = mapped_column(
UUID(as_uuid=True), ForeignKey("workflow_definitions.id", ondelete="SET NULL"), nullable=True UUID(as_uuid=True), ForeignKey("workflow_definitions.id", ondelete="SET NULL"), nullable=True
) )
workflow_rollout_mode: Mapped[str] = mapped_column(
String(20), nullable=False, default="legacy_only", server_default="legacy_only"
)
order_lines: Mapped[list["OrderLine"]] = relationship("OrderLine", back_populates="output_type") order_lines: Mapped[list["OrderLine"]] = relationship("OrderLine", back_populates="output_type")
pricing_tier: Mapped["PricingTier | None"] = relationship("PricingTier", back_populates="output_types") pricing_tier: Mapped["PricingTier | None"] = relationship("PricingTier", back_populates="output_types")
@@ -70,6 +92,12 @@ class RenderTemplate(Base):
lighting_only: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false") lighting_only: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false")
shadow_catcher_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false") shadow_catcher_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false")
camera_orbit: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, server_default="true") camera_orbit: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, server_default="true")
workflow_input_schema: Mapped[list[dict[str, Any]]] = mapped_column(
JSONB,
nullable=False,
default=list,
server_default="[]",
)
is_active: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, server_default="true") is_active: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, server_default="true")
tenant_id: Mapped[uuid.UUID | None] = mapped_column( tenant_id: Mapped[uuid.UUID | None] = mapped_column(
UUID(as_uuid=True), ForeignKey("tenants.id"), nullable=True, index=True UUID(as_uuid=True), ForeignKey("tenants.id"), nullable=True, index=True
@@ -3,6 +3,12 @@ from __future__ import annotations
from collections.abc import Mapping from collections.abc import Mapping
from typing import Any, Literal from typing import Any, Literal
from app.core.process_steps import StepName
from app.domains.rendering.models import (
OUTPUT_TYPE_ARTIFACT_KINDS,
OUTPUT_TYPE_WORKFLOW_FAMILIES,
OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES,
)
from app.domains.rendering.workflow_config_utils import canonicalize_workflow_config from app.domains.rendering.workflow_config_utils import canonicalize_workflow_config
from app.domains.rendering.workflow_node_registry import get_node_definition from app.domains.rendering.workflow_node_registry import get_node_definition
@@ -22,6 +28,11 @@ OutputTypeArtifactKind = Literal[
_MODEL_EXPORT_FORMATS = {"gltf", "glb", "stl", "obj", "usd", "usdz"} _MODEL_EXPORT_FORMATS = {"gltf", "glb", "stl", "obj", "usd", "usdz"}
_VIDEO_FORMATS = {"mp4", "webm", "mov"} _VIDEO_FORMATS = {"mp4", "webm", "mov"}
_IMAGE_FORMATS = {"png", "jpg", "jpeg", "webp"} _IMAGE_FORMATS = {"png", "jpg", "jpeg", "webp"}
# Native Blender scene files; only the order-line family may emit these.
_BLEND_FORMATS = {"blend"}
# Output formats permitted per workflow family; used by
# list_allowed_output_formats_for_family and the contract catalog.
_OUTPUT_FORMATS_BY_FAMILY: dict[OutputTypeWorkflowFamily, set[str]] = {
    "cad_file": {*_IMAGE_FORMATS, *_MODEL_EXPORT_FORMATS},
    "order_line": {*_IMAGE_FORMATS, *_VIDEO_FORMATS, *_BLEND_FORMATS},
}
_ARTIFACT_KINDS_BY_FAMILY: dict[OutputTypeWorkflowFamily, set[OutputTypeArtifactKind]] = { _ARTIFACT_KINDS_BY_FAMILY: dict[OutputTypeWorkflowFamily, set[OutputTypeArtifactKind]] = {
"cad_file": {"thumbnail_image", "model_export", "package", "custom"}, "cad_file": {"thumbnail_image", "model_export", "package", "custom"},
"order_line": {"still_image", "turntable_video", "blend_asset", "package", "custom"}, "order_line": {"still_image", "turntable_video", "blend_asset", "package", "custom"},
@@ -42,6 +53,83 @@ INVOCATION_OVERRIDE_KEYS = (
"denoising_quality", "denoising_quality",
"denoising_use_gpu", "denoising_use_gpu",
) )
# Render-parameter override keys valid for any static (single-frame) render.
_STATIC_RENDER_OVERRIDE_KEYS = (
    "width",
    "height",
    "engine",
    "samples",
    "bg_color",
    "noise_threshold",
    "denoiser",
    "denoising_input_passes",
    "denoising_prefilter",
    "denoising_quality",
    "denoising_use_gpu",
)
# Additional override keys that only apply to animated (turntable) output.
_ANIMATION_OVERRIDE_KEYS = (
    "frame_count",
    "fps",
    "turntable_axis",
)
# Valid values for the 'turntable_axis' override.
# NOTE(review): name reads "TURNABLE" — presumably a typo for "TURNTABLE";
# left as-is since other references outside this view may depend on it.
_TURNABLE_AXES = {"world_x", "world_y", "world_z"}
# Display-order tuples give the contract catalog a stable, UI-friendly
# ordering (catalog entries are filtered against the canonical sets below).
_WORKFLOW_FAMILY_DISPLAY_ORDER: tuple[OutputTypeWorkflowFamily, ...] = ("order_line", "cad_file")
_WORKFLOW_ROLLOUT_DISPLAY_ORDER: tuple[str, ...] = ("legacy_only", "shadow", "graph")
_ARTIFACT_KIND_DISPLAY_ORDER: tuple[OutputTypeArtifactKind, ...] = (
    "still_image",
    "turntable_video",
    "model_export",
    "thumbnail_image",
    "blend_asset",
    "package",
    "custom",
)
_OUTPUT_FORMAT_DISPLAY_ORDER: tuple[str, ...] = (
    "png",
    "jpg",
    "jpeg",
    "webp",
    "mp4",
    "webm",
    "mov",
    "gltf",
    "glb",
    "stl",
    "obj",
    "usd",
    "usdz",
    "blend",
)
# Default output format suggested per artifact kind (surfaced in the catalog).
_DEFAULT_OUTPUT_FORMAT_BY_ARTIFACT_KIND: dict[OutputTypeArtifactKind, str] = {
    "still_image": "png",
    "turntable_video": "mp4",
    "model_export": "gltf",
    "thumbnail_image": "png",
    "blend_asset": "blend",
    "package": "png",
    "custom": "png",
}
# Parameter-ownership metadata: which settings live on the OutputType profile...
_OUTPUT_TYPE_PROFILE_KEYS: tuple[str, ...] = (
    "transparent_bg",
    "cycles_device",
    "material_override",
)
# ...which are supplied by the render template at runtime...
_TEMPLATE_RUNTIME_KEYS: tuple[str, ...] = (
    "target_collection",
    "lighting_only",
    "shadow_catcher",
    "camera_orbit",
    "template_inputs",
)
# ...and which workflow steps own per-node render parameters (catalog reports
# each step's node-definition field keys).
_WORKFLOW_NODE_PARAMETER_OWNERSHIP_STEPS: tuple[StepName, ...] = (
    StepName.RESOLVE_TEMPLATE,
    StepName.BLENDER_STILL,
    StepName.BLENDER_TURNTABLE,
    StepName.EXPORT_BLEND,
)
class InvalidInvocationOverridesError(ValueError):
    """Raised when invocation-override input fails validation or normalization."""
    pass
def list_allowed_artifact_kinds_for_family( def list_allowed_artifact_kinds_for_family(
@@ -55,6 +143,79 @@ def list_allowed_artifact_kinds_for_family(
return tuple(sorted(allowed)) return tuple(sorted(allowed))
def list_allowed_output_formats_for_family(workflow_family: str) -> tuple[str, ...]:
    """Return the sorted output formats permitted for a workflow family.

    Blank or unrecognized family names fall back to the "order_line" family.
    """
    family_key = (workflow_family or "order_line").strip().lower()
    # Only "cad_file" has its own format set; every other value maps to "order_line".
    selected_family = "cad_file" if family_key == "cad_file" else "order_line"
    return tuple(sorted(_OUTPUT_FORMATS_BY_FAMILY[selected_family]))
def build_output_type_contract_catalog() -> dict[str, Any]:
    """Build the static contract catalog describing valid output-type settings.

    Returns a dict with allowed workflow families, rollout modes, and artifact
    kinds (in display order), per-family artifact-kind and output-format
    restrictions, per-kind invocation-override keys and default formats, and
    parameter-ownership metadata (profile keys, template runtime keys, and
    workflow-node field keys per owning step).
    """
    families = [f for f in _WORKFLOW_FAMILY_DISPLAY_ORDER if f in OUTPUT_TYPE_WORKFLOW_FAMILIES]
    rollout_modes = [m for m in _WORKFLOW_ROLLOUT_DISPLAY_ORDER if m in OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES]
    kinds = [k for k in _ARTIFACT_KIND_DISPLAY_ORDER if k in OUTPUT_TYPE_ARTIFACT_KINDS]

    # Per-family restrictions, filtered through the display orders so the UI
    # receives stable ordering.
    kinds_by_family: dict[str, list[str]] = {}
    formats_by_family: dict[str, list[str]] = {}
    for family in families:
        family_kinds = list_allowed_artifact_kinds_for_family(family)
        kinds_by_family[family] = [k for k in kinds if k in family_kinds]
        family_formats = list_allowed_output_formats_for_family(family)
        formats_by_family[family] = [
            fmt for fmt in _OUTPUT_FORMAT_DISPLAY_ORDER if fmt in family_formats
        ]

    override_keys_by_kind: dict[str, list[str]] = {}
    default_format_by_kind: dict[str, str] = {}
    for kind in kinds:
        # Turntable video is the only inherently animated kind here.
        override_keys_by_kind[kind] = list(
            list_allowed_invocation_override_keys_for_artifact_kind(
                kind,
                is_animation=kind == "turntable_video",
            )
        )
        default_format_by_kind[kind] = _DEFAULT_OUTPUT_FORMAT_BY_ARTIFACT_KIND[kind]

    # Steps without a registered node definition are silently omitted.
    node_keys_by_step: dict[str, list[str]] = {}
    for step in _WORKFLOW_NODE_PARAMETER_OWNERSHIP_STEPS:
        definition = get_node_definition(step.value)
        if definition is not None:
            node_keys_by_step[step.value] = [field.key for field in definition.fields]

    return {
        "workflow_families": families,
        "workflow_rollout_modes": rollout_modes,
        "artifact_kinds": kinds,
        "allowed_artifact_kinds_by_family": kinds_by_family,
        "allowed_output_formats_by_family": formats_by_family,
        "allowed_invocation_override_keys_by_artifact_kind": override_keys_by_kind,
        "default_output_format_by_artifact_kind": default_format_by_kind,
        "parameter_ownership": {
            "output_type_profile_keys": list(_OUTPUT_TYPE_PROFILE_KEYS),
            "template_runtime_keys": list(_TEMPLATE_RUNTIME_KEYS),
            "workflow_node_keys_by_step": node_keys_by_step,
        },
    }
def infer_output_type_artifact_kind( def infer_output_type_artifact_kind(
output_format: str | None, output_format: str | None,
is_animation: bool, is_animation: bool,
@@ -65,6 +226,8 @@ def infer_output_type_artifact_kind(
if is_animation or normalized_format in _VIDEO_FORMATS: if is_animation or normalized_format in _VIDEO_FORMATS:
return "turntable_video" return "turntable_video"
if normalized_format in _BLEND_FORMATS:
return "blend_asset"
if normalized_format in _MODEL_EXPORT_FORMATS: if normalized_format in _MODEL_EXPORT_FORMATS:
return "model_export" return "model_export"
if normalized_family == "cad_file" and normalized_format in _IMAGE_FORMATS: if normalized_family == "cad_file" and normalized_format in _IMAGE_FORMATS:
@@ -91,6 +254,14 @@ def validate_output_type_contract(
f"'{workflow_family}'. Allowed: {allowed}" f"'{workflow_family}'. Allowed: {allowed}"
) )
allowed_output_formats = list_allowed_output_formats_for_family(normalized_family)
if normalized_format and normalized_format not in allowed_output_formats:
allowed = ", ".join(allowed_output_formats)
raise ValueError(
f"Output format '{output_format}' is not allowed for workflow_family "
f"'{workflow_family}'. Allowed: {allowed}"
)
if normalized_family == "cad_file" and is_animation: if normalized_family == "cad_file" and is_animation:
raise ValueError("CAD-file workflows do not support animated output types") raise ValueError("CAD-file workflows do not support animated output types")
@@ -114,6 +285,20 @@ def validate_output_type_contract(
f"({', '.join(sorted(_MODEL_EXPORT_FORMATS))})" f"({', '.join(sorted(_MODEL_EXPORT_FORMATS))})"
) )
if normalized_artifact == "blend_asset":
if is_animation:
raise ValueError("Artifact kind 'blend_asset' does not support is_animation=true")
if normalized_format and normalized_format not in _BLEND_FORMATS:
raise ValueError(
"Artifact kind 'blend_asset' requires a blend output_format "
f"({', '.join(sorted(_BLEND_FORMATS))})"
)
if normalized_format in _BLEND_FORMATS and normalized_artifact != "blend_asset":
raise ValueError(
f"Output format '{output_format}' requires artifact kind 'blend_asset'"
)
def infer_workflow_family_from_config(config: dict) -> ResolvedWorkflowFamily | None: def infer_workflow_family_from_config(config: dict) -> ResolvedWorkflowFamily | None:
normalized = canonicalize_workflow_config(config) normalized = canonicalize_workflow_config(config)
@@ -121,6 +306,7 @@ def infer_workflow_family_from_config(config: dict) -> ResolvedWorkflowFamily |
definition.family definition.family
for node in normalized.get("nodes", []) for node in normalized.get("nodes", [])
if (definition := get_node_definition(node.get("step"))) is not None if (definition := get_node_definition(node.get("step"))) is not None
if definition.family in {"cad_file", "order_line"}
} }
if not families: if not families:
return None return None
@@ -129,14 +315,329 @@ def infer_workflow_family_from_config(config: dict) -> ResolvedWorkflowFamily |
return next(iter(families)) return next(iter(families))
def derive_workflow_terminal_node_ids(config: dict[str, Any]) -> tuple[str, ...]:
    """Return sorted ids of terminal (sink) nodes — nodes with no outgoing edge.

    The config is canonicalized first; nodes/edges with missing ids are ignored.
    """
    normalized = canonicalize_workflow_config(config)
    nodes = normalized.get("nodes", [])
    if not nodes:
        return ()
    known_ids: set[str] = set()
    for node in nodes:
        raw_id = node.get("id")
        if raw_id not in (None, ""):
            known_ids.add(str(raw_id))
    # Any node appearing as an edge source has an outgoing edge → not terminal.
    has_outgoing: set[str] = set()
    for edge in normalized.get("edges", []):
        source = edge.get("from")
        if source not in (None, ""):
            has_outgoing.add(str(source))
    return tuple(sorted(known_ids - has_outgoing))
def derive_supported_artifact_kinds_from_workflow_config(
    config: dict[str, Any],
) -> tuple[OutputTypeArtifactKind, ...]:
    """Derive which artifact kinds a workflow config can produce.

    Canonicalizes the config, then inspects each terminal (sink) node: render
    and export steps map directly to a kind, while a generic output_save sink
    is classified by whichever render step feeds it upstream.

    Returns:
        Sorted tuple of artifact-kind names; empty when the config is invalid,
        empty, or nothing can be derived.
    """
    try:
        normalized = canonicalize_workflow_config(config)
    except Exception:
        # An un-canonicalizable config advertises no capabilities instead of raising.
        return ()
    nodes = normalized.get("nodes", [])
    if not nodes:
        return ()
    nodes_by_id = {
        str(node.get("id")): node
        for node in nodes
        if node.get("id") not in (None, "")
    }
    # Reverse adjacency: target node id -> set of upstream (source) node ids.
    incoming_by_target: dict[str, set[str]] = {}
    for edge in normalized.get("edges", []):
        source = edge.get("from")
        target = edge.get("to")
        if source in (None, "") or target in (None, ""):
            continue
        incoming_by_target.setdefault(str(target), set()).add(str(source))

    cache: dict[str, set[str]] = {}

    def _collect_upstream_steps(node_id: str) -> set[str]:
        """All step names at or upstream of node_id, memoized per root node."""
        cached = cache.get(node_id)
        if cached is not None:
            return set(cached)
        # Iterative DFS with a visited set: unlike the naive recursive walk,
        # this cannot overflow the stack or loop forever when a malformed
        # config's edges contain a cycle.
        steps: set[str] = set()
        visited: set[str] = set()
        stack = [node_id]
        while stack:
            current = stack.pop()
            if current in visited:
                continue
            visited.add(current)
            node = nodes_by_id.get(current)
            if node is not None and node.get("step"):
                steps.add(str(node["step"]))
            stack.extend(incoming_by_target.get(current, ()))
        cache[node_id] = set(steps)
        return steps

    def _derive_node_artifact_kinds(node_id: str) -> set[OutputTypeArtifactKind]:
        """Artifact kinds produced by a single terminal node."""
        node = nodes_by_id.get(node_id)
        if node is None:
            return set()
        step = str(node.get("step") or "")
        if step in {StepName.BLENDER_STILL.value}:
            return {"still_image"}
        if step in {StepName.BLENDER_TURNTABLE.value}:
            return {"turntable_video"}
        if step in {StepName.EXPORT_BLEND.value}:
            return {"blend_asset"}
        if step in {
            StepName.OCC_GLB_EXPORT.value,
            StepName.STL_CACHE_GENERATE.value,
        }:
            return {"model_export"}
        if step == StepName.THUMBNAIL_SAVE.value:
            return {"thumbnail_image"}
        if step != StepName.OUTPUT_SAVE.value:
            return set()
        # Generic save sink: classify by whichever render step feeds it.
        upstream_steps = _collect_upstream_steps(node_id)
        has_still = StepName.BLENDER_STILL.value in upstream_steps
        has_turntable = StepName.BLENDER_TURNTABLE.value in upstream_steps
        if has_still and has_turntable:
            # Ambiguous feed (both a still and a turntable upstream): claim nothing.
            return set()
        if has_turntable:
            return {"turntable_video"}
        if has_still:
            return {"still_image"}
        return set()

    supported: set[OutputTypeArtifactKind] = set()
    for terminal_id in derive_workflow_terminal_node_ids(normalized):
        supported.update(_derive_node_artifact_kinds(terminal_id))
    return tuple(sorted(supported))
def workflow_supports_artifact_kind(
    config: dict[str, Any],
    artifact_kind: str,
) -> bool:
    """True when the workflow config can produce ``artifact_kind``.

    Blank/None artifact kinds are never supported; comparison is
    case-insensitive after stripping whitespace.
    """
    wanted = (artifact_kind or "").strip().lower()
    if not wanted:
        return False
    supported = derive_supported_artifact_kinds_from_workflow_config(config)
    return wanted in supported
def list_allowed_invocation_override_keys_for_artifact_kind(
    artifact_kind: str,
    *,
    is_animation: bool = False,
) -> tuple[str, ...]:
    """Return the invocation-override keys valid for an artifact kind.

    Exports accept none, open-ended kinds (package/custom) accept everything,
    turntable video adds animation keys to the static set, and unknown kinds
    fall back on ``is_animation`` to decide whether animation keys apply.
    """
    kind = (artifact_kind or "").strip().lower()
    if kind in {"model_export", "blend_asset"}:
        # Exports carry no render parameters at all.
        return ()
    if kind in {"package", "custom"}:
        # Open-ended kinds accept the full override vocabulary.
        return INVOCATION_OVERRIDE_KEYS
    if kind == "turntable_video":
        return _STATIC_RENDER_OVERRIDE_KEYS + _ANIMATION_OVERRIDE_KEYS
    if kind in {"still_image", "thumbnail_image"}:
        return _STATIC_RENDER_OVERRIDE_KEYS
    # Unknown kind: let the animation flag decide.
    if is_animation:
        return _STATIC_RENDER_OVERRIDE_KEYS + _ANIMATION_OVERRIDE_KEYS
    return _STATIC_RENDER_OVERRIDE_KEYS
def _normalize_positive_int_override(key: str, value: Any) -> int:
if isinstance(value, bool):
raise InvalidInvocationOverridesError(f"Invocation override '{key}' must be a positive integer")
try:
normalized = int(str(value).strip()) if isinstance(value, str) else int(value)
except (TypeError, ValueError) as exc:
raise InvalidInvocationOverridesError(
f"Invocation override '{key}' must be a positive integer"
) from exc
if normalized <= 0:
raise InvalidInvocationOverridesError(f"Invocation override '{key}' must be greater than zero")
return normalized
def _normalize_string_override(key: str, value: Any) -> str:
if not isinstance(value, str):
raise InvalidInvocationOverridesError(f"Invocation override '{key}' must be a string")
normalized = value.strip()
if not normalized:
raise InvalidInvocationOverridesError(f"Invocation override '{key}' must not be blank")
return normalized
def _normalize_noise_threshold_override(value: Any) -> str:
if isinstance(value, bool):
raise InvalidInvocationOverridesError("Invocation override 'noise_threshold' must be a string or number")
if isinstance(value, (int, float)):
return str(value)
if isinstance(value, str) and value.strip():
return value.strip()
raise InvalidInvocationOverridesError("Invocation override 'noise_threshold' must be a string or number")
def _normalize_gpu_toggle_override(value: Any) -> str:
if isinstance(value, bool):
return "1" if value else "0"
if isinstance(value, int) and value in {0, 1}:
return str(value)
if isinstance(value, str):
normalized = value.strip().lower()
if normalized in {"1", "true", "enabled", "yes"}:
return "1"
if normalized in {"0", "false", "disabled", "no"}:
return "0"
raise InvalidInvocationOverridesError(
"Invocation override 'denoising_use_gpu' must be one of: 1, 0, true, false"
)
def _normalize_invocation_override_value(key: str, value: Any) -> int | str:
    """Dispatch an override value to the validator matching its key."""
    if key == "noise_threshold":
        return _normalize_noise_threshold_override(value)
    if key == "denoising_use_gpu":
        return _normalize_gpu_toggle_override(value)
    if key in {"width", "height", "samples", "frame_count", "fps"}:
        return _normalize_positive_int_override(key, value)
    if key == "turntable_axis":
        axis = _normalize_string_override(key, value).lower()
        if axis in _TURNABLE_AXES:
            return axis
        raise InvalidInvocationOverridesError(
            "Invocation override 'turntable_axis' must be one of: world_x, world_y, world_z"
        )
    # Every remaining key is a plain non-blank string.
    return _normalize_string_override(key, value)
def validate_and_normalize_invocation_overrides(
    raw: Mapping[str, Any] | None,
    *,
    artifact_kind: str | None = None,
    is_animation: bool = False,
    reject_unknown_keys: bool = False,
) -> dict[str, Any]:
    """Validate and normalize a raw invocation-overrides mapping.

    None/blank values are dropped; unknown keys are dropped unless
    ``reject_unknown_keys`` is set, in which case they raise. When
    ``artifact_kind`` is given, keys not allowed for that kind also raise.

    Raises:
        InvalidInvocationOverridesError: non-mapping input, invalid values,
            rejected unknown keys, or keys disallowed for the artifact kind.
    """
    if raw is None:
        return {}
    if not isinstance(raw, Mapping):
        raise InvalidInvocationOverridesError("invocation_overrides must be an object")

    cleaned: dict[str, Any] = {}
    rejected: list[str] = []
    for raw_key, raw_value in raw.items():
        key = str(raw_key)
        if key not in INVOCATION_OVERRIDE_KEYS:
            if reject_unknown_keys:
                rejected.append(key)
            continue
        if raw_value in (None, ""):
            continue
        cleaned[key] = _normalize_invocation_override_value(key, raw_value)

    if rejected:
        supported = ", ".join(INVOCATION_OVERRIDE_KEYS)
        raise InvalidInvocationOverridesError(
            f"Unsupported invocation override keys: {', '.join(sorted(rejected))}. Supported: {supported}"
        )

    if artifact_kind is not None:
        allowed = set(
            list_allowed_invocation_override_keys_for_artifact_kind(
                artifact_kind,
                is_animation=is_animation,
            )
        )
        disallowed = sorted(key for key in cleaned if key not in allowed)
        if disallowed:
            raise InvalidInvocationOverridesError(
                f"Invocation overrides not allowed for artifact kind '{artifact_kind}': {', '.join(disallowed)}"
            )
    return cleaned
def resolve_output_type_invocation_overrides(
    render_settings: Mapping[str, Any] | None,
    invocation_overrides: Mapping[str, Any] | None,
    *,
    artifact_kind: str,
    is_animation: bool = False,
) -> dict[str, Any]:
    """Merge render settings with explicit overrides, keeping only the keys
    permitted for ``artifact_kind`` (given its animation flag)."""
    allowed = set(
        list_allowed_invocation_override_keys_for_artifact_kind(
            artifact_kind,
            is_animation=is_animation,
        )
    )
    merged = merge_output_type_invocation_overrides(render_settings, invocation_overrides)
    return {key: merged[key] for key in merged if key in allowed}
def build_output_type_invocation_profile(
    *,
    renderer: str,
    render_backend: str,
    workflow_family: str,
    artifact_kind: str,
    output_format: str | None,
    is_animation: bool,
    workflow_definition_id: Any = None,
    workflow_rollout_mode: str = "legacy_only",
    transparent_bg: bool = False,
    cycles_device: str | None = None,
    material_override: str | None = None,
    render_settings: Mapping[str, Any] | None = None,
    invocation_overrides: Mapping[str, Any] | None = None,
) -> dict[str, Any]:
    """Assemble the invocation profile dict for an output type.

    Falls back to inferring the artifact kind from output_format/is_animation
    when ``artifact_kind`` is falsy, then resolves the merged, kind-filtered
    invocation overrides and the list of keys allowed for that kind.
    """
    # An empty artifact_kind means "infer it from format + animation flag".
    effective_kind = artifact_kind or infer_output_type_artifact_kind(
        output_format,
        is_animation,
        workflow_family,
    )
    overrides = resolve_output_type_invocation_overrides(
        render_settings,
        invocation_overrides,
        artifact_kind=effective_kind,
        is_animation=is_animation,
    )
    allowed_keys = list(
        list_allowed_invocation_override_keys_for_artifact_kind(
            effective_kind,
            is_animation=is_animation,
        )
    )
    return {
        "renderer": renderer,
        "render_backend": render_backend,
        "workflow_family": workflow_family,
        "artifact_kind": effective_kind,
        "output_format": (output_format or "").strip().lower(),
        "is_animation": bool(is_animation),
        "workflow_definition_id": workflow_definition_id,
        "workflow_rollout_mode": workflow_rollout_mode,
        "transparent_bg": bool(transparent_bg),
        "cycles_device": cycles_device,
        "material_override": material_override,
        "allowed_override_keys": allowed_keys,
        "invocation_overrides": overrides,
    }
def normalize_invocation_overrides(raw: Mapping[str, Any] | None) -> dict[str, Any]: def normalize_invocation_overrides(raw: Mapping[str, Any] | None) -> dict[str, Any]:
if not isinstance(raw, Mapping): if not isinstance(raw, Mapping):
return {} return {}
normalized: dict[str, Any] = {} normalized: dict[str, Any] = {}
for key in INVOCATION_OVERRIDE_KEYS: for key in INVOCATION_OVERRIDE_KEYS:
value = raw.get(key) value = raw.get(key)
if value not in (None, ""): if value in (None, ""):
normalized[key] = value continue
try:
normalized[key] = _normalize_invocation_override_value(key, value)
except InvalidInvocationOverridesError:
continue
return normalized return normalized
+143 -3
View File
@@ -1,22 +1,27 @@
import uuid import uuid
from datetime import datetime from datetime import datetime
from pydantic import BaseModel from pydantic import BaseModel, Field
class OutputTypeCreate(BaseModel): class OutputTypeCreate(BaseModel):
name: str name: str
description: str | None = None description: str | None = None
renderer: str = "threejs" renderer: str = "blender"
render_settings: dict = {} render_settings: dict = {}
output_format: str = "png" output_format: str = "png"
sort_order: int = 0 sort_order: int = 0
is_active: bool = True is_active: bool = True
compatible_categories: list[str] = [] compatible_categories: list[str] = []
render_backend: str = "auto" render_backend: str = "celery"
is_animation: bool = False is_animation: bool = False
transparent_bg: bool = False transparent_bg: bool = False
pricing_tier_id: int | None = None pricing_tier_id: int | None = None
cycles_device: str | None = None cycles_device: str | None = None
workflow_family: str = "order_line"
artifact_kind: str | None = None
invocation_overrides: dict = {}
workflow_definition_id: uuid.UUID | None = None
workflow_rollout_mode: str = "legacy_only"
material_override: str | None = None material_override: str | None = None
@@ -32,12 +37,43 @@ class OutputTypePatch(BaseModel):
render_backend: str | None = None render_backend: str | None = None
is_animation: bool | None = None is_animation: bool | None = None
transparent_bg: bool | None = None transparent_bg: bool | None = None
workflow_family: str | None = None
artifact_kind: str | None = None
invocation_overrides: dict | None = None
pricing_tier_id: int | None = None pricing_tier_id: int | None = None
cycles_device: str | None = None cycles_device: str | None = None
workflow_definition_id: uuid.UUID | None = None workflow_definition_id: uuid.UUID | None = None
workflow_rollout_mode: str | None = None
material_override: str | None = None material_override: str | None = None
class OutputTypeInvocationProfileOut(BaseModel):
renderer: str
render_backend: str
workflow_family: str
artifact_kind: str
output_format: str
is_animation: bool
workflow_definition_id: uuid.UUID | None = None
workflow_rollout_mode: str = "legacy_only"
transparent_bg: bool
cycles_device: str | None = None
material_override: str | None = None
allowed_override_keys: list[str] = Field(default_factory=list)
invocation_overrides: dict = Field(default_factory=dict)
class OutputTypeContractCatalogOut(BaseModel):
workflow_families: list[str] = Field(default_factory=list)
workflow_rollout_modes: list[str] = Field(default_factory=list)
artifact_kinds: list[str] = Field(default_factory=list)
allowed_artifact_kinds_by_family: dict[str, list[str]] = Field(default_factory=dict)
allowed_output_formats_by_family: dict[str, list[str]] = Field(default_factory=dict)
allowed_invocation_override_keys_by_artifact_kind: dict[str, list[str]] = Field(default_factory=dict)
default_output_format_by_artifact_kind: dict[str, str] = Field(default_factory=dict)
parameter_ownership: dict[str, dict | list[str]] = Field(default_factory=dict)
class OutputTypeOut(BaseModel): class OutputTypeOut(BaseModel):
id: uuid.UUID id: uuid.UUID
name: str name: str
@@ -50,13 +86,18 @@ class OutputTypeOut(BaseModel):
render_backend: str render_backend: str
is_animation: bool is_animation: bool
transparent_bg: bool transparent_bg: bool
workflow_family: str
artifact_kind: str
invocation_overrides: dict
cycles_device: str | None = None cycles_device: str | None = None
pricing_tier_id: int | None = None pricing_tier_id: int | None = None
pricing_tier_name: str | None = None pricing_tier_name: str | None = None
price_per_item: float | None = None price_per_item: float | None = None
workflow_definition_id: uuid.UUID | None = None workflow_definition_id: uuid.UUID | None = None
workflow_rollout_mode: str
workflow_name: str | None = None workflow_name: str | None = None
material_override: str | None = None material_override: str | None = None
invocation_profile: OutputTypeInvocationProfileOut | None = None
is_active: bool is_active: bool
created_at: datetime created_at: datetime
updated_at: datetime updated_at: datetime
@@ -159,11 +200,28 @@ class WorkflowDefinitionOut(BaseModel):
name: str name: str
output_type_id: uuid.UUID | None output_type_id: uuid.UUID | None
config: dict config: dict
family: str | None = None
supported_artifact_kinds: list[str] = Field(default_factory=list)
rollout_summary: "WorkflowRolloutSummaryOut" = Field(
default_factory=lambda: WorkflowRolloutSummaryOut()
)
is_active: bool is_active: bool
created_at: datetime created_at: datetime
model_config = {"from_attributes": True} model_config = {"from_attributes": True}
class WorkflowDraftPreflightRequest(BaseModel):
context_id: str
config: dict
workflow_id: uuid.UUID | None = None
class WorkflowDraftDispatchRequest(BaseModel):
context_id: str
config: dict
workflow_id: uuid.UUID | None = None
class WorkflowNodeResultOut(BaseModel): class WorkflowNodeResultOut(BaseModel):
id: uuid.UUID id: uuid.UUID
node_name: str node_name: str
@@ -190,6 +248,38 @@ class WorkflowRunOut(BaseModel):
model_config = {"from_attributes": True} model_config = {"from_attributes": True}
class WorkflowRolloutLatestRunOut(BaseModel):
workflow_run_id: uuid.UUID
execution_mode: str
status: str
created_at: datetime
completed_at: datetime | None = None
class WorkflowRolloutLinkedOutputTypeOut(BaseModel):
id: uuid.UUID
name: str
is_active: bool
artifact_kind: str
workflow_rollout_mode: str
class WorkflowRolloutSummaryOut(BaseModel):
linked_output_type_count: int = 0
active_output_type_count: int = 0
linked_output_type_names: list[str] = Field(default_factory=list)
linked_output_types: list[WorkflowRolloutLinkedOutputTypeOut] = Field(default_factory=list)
rollout_modes: list[str] = Field(default_factory=list)
has_blocking_contracts: bool = False
blocking_reasons: list[str] = Field(default_factory=list)
latest_run: WorkflowRolloutLatestRunOut | None = None
latest_shadow_run: WorkflowRolloutLatestRunOut | None = None
latest_rollout_gate_verdict: str | None = None
latest_rollout_ready: bool | None = None
latest_rollout_status: str | None = None
latest_rollout_reasons: list[str] = Field(default_factory=list)
class WorkflowComparisonArtifactOut(BaseModel): class WorkflowComparisonArtifactOut(BaseModel):
path: str | None path: str | None
storage_key: str | None storage_key: str | None
@@ -208,8 +298,58 @@ class WorkflowRunComparisonOut(BaseModel):
execution_mode: str execution_mode: str
status: str status: str
summary: str summary: str
rollout_gate_verdict: str
workflow_rollout_ready: bool
workflow_rollout_status: str
rollout_reasons: list[str] = []
rollout_thresholds: dict[str, float] = Field(default_factory=dict)
authoritative_output: WorkflowComparisonArtifactOut authoritative_output: WorkflowComparisonArtifactOut
observer_output: WorkflowComparisonArtifactOut observer_output: WorkflowComparisonArtifactOut
exact_match: bool | None exact_match: bool | None
dimensions_match: bool | None dimensions_match: bool | None
mean_pixel_delta: float | None mean_pixel_delta: float | None
class WorkflowPreflightIssueOut(BaseModel):
severity: str
code: str
message: str
node_id: str | None = None
step: str | None = None
class WorkflowPreflightNodeOut(BaseModel):
node_id: str
step: str
label: str | None = None
execution_kind: str
supported: bool
status: str
issues: list[WorkflowPreflightIssueOut] = []
class WorkflowPreflightOut(BaseModel):
workflow_id: uuid.UUID | None = None
context_id: str
context_kind: str | None = None
expected_context_kind: str
execution_mode: str
graph_dispatch_allowed: bool
summary: str
resolved_order_line_id: uuid.UUID | None = None
resolved_cad_file_id: uuid.UUID | None = None
unsupported_node_ids: list[str] = []
issues: list[WorkflowPreflightIssueOut] = []
nodes: list[WorkflowPreflightNodeOut] = []
class WorkflowOrderLineContextOptionOut(BaseModel):
value: uuid.UUID
label: str
meta: str
class WorkflowOrderLineContextGroupOut(BaseModel):
order_id: uuid.UUID
order_label: str
options: list[WorkflowOrderLineContextOptionOut] = []
File diff suppressed because it is too large Load Diff
@@ -0,0 +1,146 @@
from __future__ import annotations
import json
import re
from collections import defaultdict
from typing import Any, Iterable, Mapping
# Custom-property names checked (in order) for a combined marker payload —
# either a JSON object {"key": ..., "value": ...} or a "key=value" string.
_MARKER_PROP_NAMES = (
    "hartomat_template_input",
    "hartomat.template_input",
    "template_input",
    "schaeffler_template_input",
)
# Custom-property names holding only the marker *key* half.
_MARKER_KEY_PROP_NAMES = (
    "hartomat_template_input_key",
    "hartomat.template_input_key",
    "template_input_key",
    "schaeffler_template_input_key",
)
# Custom-property names holding only the marker *value* half.
_MARKER_VALUE_PROP_NAMES = (
    "hartomat_template_input_value",
    "hartomat.template_input_value",
    "template_input_value",
    "schaeffler_template_input_value",
)
# Fallback patterns recovering (key, value) from an object *name* when no
# property marker is present, e.g. "template_input__color__red",
# "template-input:color=red", "ti::color::red".
_NAME_PATTERNS = (
    re.compile(r"template_input__(?P<key>[^_]+)__(?P<value>[^_]+)", re.IGNORECASE),
    re.compile(r"template-input:(?P<key>[^=]+)=(?P<value>.+)", re.IGNORECASE),
    re.compile(r"ti::(?P<key>[^:]+)::(?P<value>.+)", re.IGNORECASE),
)
def _normalize_marker_token(value: Any) -> str | None:
if value is None:
return None
if isinstance(value, bool):
return "true" if value else "false"
text = str(value).strip()
return text or None
def _parse_marker_text(text: str) -> tuple[str, str] | None:
    """Parse a combined marker payload into a (key, value) pair.

    Accepts either a JSON object with "key"/"value" members or a plain
    "key=value" string. A malformed JSON payload falls through to the
    "key=value" form. Returns None when neither form yields both halves.
    """
    stripped = text.strip()
    if not stripped:
        return None
    if stripped.startswith("{"):
        try:
            decoded = json.loads(stripped)
        except Exception:
            decoded = None
        if isinstance(decoded, dict):
            marker_key = _normalize_marker_token(decoded.get("key"))
            marker_value = _normalize_marker_token(decoded.get("value"))
            if marker_key and marker_value:
                return marker_key, marker_value
    if "=" not in stripped:
        return None
    left, right = stripped.split("=", 1)
    marker_key = _normalize_marker_token(left)
    marker_value = _normalize_marker_token(right)
    if marker_key and marker_value:
        return marker_key, marker_value
    return None
def extract_template_input_marker(
    *,
    name: str | None = None,
    props: Mapping[str, Any] | None = None,
) -> tuple[str, str] | None:
    """Locate a template-input (key, value) marker on an object.

    Resolution order:
      1. a combined payload property (JSON or "key=value"),
      2. split key/value property pairs (first non-empty of each wins),
      3. patterns embedded in the object name.

    Returns None when no source yields both a key and a value.
    """
    properties = props or {}

    # 1. Combined payload stored under a single custom property.
    for prop in _MARKER_PROP_NAMES:
        token = _normalize_marker_token(properties.get(prop))
        if not token:
            continue
        parsed = _parse_marker_text(token)
        if parsed is not None:
            return parsed

    # 2. Key and value stored under separate custom properties.
    marker_key: str | None = None
    for prop in _MARKER_KEY_PROP_NAMES:
        marker_key = _normalize_marker_token(properties.get(prop))
        if marker_key:
            break
    marker_value: str | None = None
    for prop in _MARKER_VALUE_PROP_NAMES:
        marker_value = _normalize_marker_token(properties.get(prop))
        if marker_value:
            break
    if marker_key and marker_value:
        return marker_key, marker_value

    # 3. Marker encoded in the object name itself.
    object_name = (name or "").strip()
    if object_name:
        for pattern in _NAME_PATTERNS:
            found = pattern.search(object_name)
            if found is None:
                continue
            name_key = _normalize_marker_token(found.group("key"))
            name_value = _normalize_marker_token(found.group("value"))
            if name_key and name_value:
                return name_key, name_value
    return None
def suggest_workflow_input_schema(
    markers: Iterable[tuple[str, str]],
) -> list[dict[str, Any]]:
    """Derive a UI input-schema suggestion from collected (key, value) markers.

    Values are grouped per key; keys whose observed values are exactly the
    pair {"true", "false"} become boolean fields, everything else becomes a
    select with one option per distinct value. Keys and options are emitted
    in sorted order. Tokens that normalize to empty are dropped.
    """
    grouped: dict[str, set[str]] = defaultdict(set)
    for raw_key, raw_value in markers:
        key = _normalize_marker_token(raw_key)
        value = _normalize_marker_token(raw_value)
        if key and value:
            grouped[key].add(value)

    fields: list[dict[str, Any]] = []
    for key in sorted(grouped):
        options = sorted(grouped[key])
        if not options:
            continue
        label = key.replace("_", " ").strip().title()
        if len(options) == 2 and set(options) == {"false", "true"}:
            # NOTE(review): sorted() puts "false" first, so this default is
            # always False in the pair case — confirm that is intended.
            fields.append(
                {
                    "key": key,
                    "label": label,
                    "type": "boolean",
                    "section": "Template Inputs",
                    "default": options[0] == "true",
                }
            )
        else:
            fields.append(
                {
                    "key": key,
                    "label": label,
                    "type": "select",
                    "section": "Template Inputs",
                    "default": options[0],
                    "options": [
                        {"value": option, "label": option.replace("_", " ").title()}
                        for option in options
                    ],
                }
            )
    return fields
@@ -18,6 +18,7 @@ def dispatch_workflow(
params = params or {} params = params or {}
builders = { builders = {
"still": _build_still, "still": _build_still,
"still_graph": _build_still,
"turntable": _build_turntable, "turntable": _build_turntable,
"multi_angle": _build_multi_angle, "multi_angle": _build_multi_angle,
"still_with_exports": _build_still_with_exports, "still_with_exports": _build_still_with_exports,
@@ -17,7 +17,7 @@ from app.domains.orders.models import OrderLine
from app.domains.rendering.models import WorkflowRun from app.domains.rendering.models import WorkflowRun
from app.domains.rendering.schemas import WorkflowComparisonArtifactOut, WorkflowRunComparisonOut from app.domains.rendering.schemas import WorkflowComparisonArtifactOut, WorkflowRunComparisonOut
ROLLOUT_PASS_MAX_MEAN_PIXEL_DELTA = 0.0 ROLLOUT_PASS_MAX_MEAN_PIXEL_DELTA = 1e-6
ROLLOUT_WARN_MAX_MEAN_PIXEL_DELTA = 0.02 ROLLOUT_WARN_MAX_MEAN_PIXEL_DELTA = 0.02
@@ -217,6 +217,7 @@ def _find_shadow_file(order_line: OrderLine, workflow_run: WorkflowRun) -> str |
upload_root = Path(settings.upload_dir) upload_root = Path(settings.upload_dir)
candidate_roots.append(upload_root / "renders" / str(order_line.id)) candidate_roots.append(upload_root / "renders" / str(order_line.id))
candidate_roots.append(upload_root / "step_files" / "renders" / str(order_line.id))
candidate_roots.append(upload_root / "step_files" / "renders") candidate_roots.append(upload_root / "step_files" / "renders")
seen_roots: set[Path] = set() seen_roots: set[Path] = set()
@@ -258,6 +259,13 @@ async def build_workflow_run_comparison(
authoritative_output = _build_artifact(authoritative_path) authoritative_output = _build_artifact(authoritative_path)
observer_output = _build_artifact(observer_path) observer_output = _build_artifact(observer_path)
rollout_gate = evaluate_rollout_gate(
authoritative_output=authoritative_output,
observer_output=observer_output,
exact_match=None,
dimensions_match=None,
mean_pixel_delta=None,
)
if not authoritative_output.exists: if not authoritative_output.exists:
status = "missing_authoritative" status = "missing_authoritative"
@@ -283,9 +291,9 @@ async def build_workflow_run_comparison(
if exact_match: if exact_match:
status = "matched" status = "matched"
summary = "Observer output matches the authoritative legacy output byte-for-byte." summary = "Observer output matches the authoritative legacy output byte-for-byte."
elif mean_pixel_delta == 0.0 and dimensions_match: elif mean_pixel_delta is not None and mean_pixel_delta <= ROLLOUT_PASS_MAX_MEAN_PIXEL_DELTA and dimensions_match:
status = "matched" status = "matched"
summary = "Observer output matches the authoritative legacy output visually, but file metadata differs." summary = "Observer output matches the authoritative legacy output within the visual pass threshold."
else: else:
status = "different" status = "different"
if dimensions_match is False: if dimensions_match is False:
@@ -294,6 +302,13 @@ async def build_workflow_run_comparison(
summary = "Observer output differs from the authoritative output." summary = "Observer output differs from the authoritative output."
else: else:
summary = "Observer output differs from the authoritative output and could not be pixel-compared." summary = "Observer output differs from the authoritative output and could not be pixel-compared."
rollout_gate = evaluate_rollout_gate(
authoritative_output=authoritative_output,
observer_output=observer_output,
exact_match=exact_match,
dimensions_match=dimensions_match,
mean_pixel_delta=mean_pixel_delta,
)
return WorkflowRunComparisonOut( return WorkflowRunComparisonOut(
workflow_run_id=workflow_run.id, workflow_run_id=workflow_run.id,
@@ -302,6 +317,14 @@ async def build_workflow_run_comparison(
execution_mode=workflow_run.execution_mode, execution_mode=workflow_run.execution_mode,
status=status, status=status,
summary=summary, summary=summary,
rollout_gate_verdict=str(rollout_gate["verdict"]),
workflow_rollout_ready=bool(rollout_gate["workflow_rollout_ready"]),
workflow_rollout_status=str(rollout_gate["workflow_rollout_status"]),
rollout_reasons=[str(reason) for reason in rollout_gate["reasons"]],
rollout_thresholds={
str(key): float(value)
for key, value in dict(rollout_gate["thresholds"]).items()
},
authoritative_output=authoritative_output.to_schema(), authoritative_output=authoritative_output.to_schema(),
observer_output=observer_output.to_schema(), observer_output=observer_output.to_schema(),
exact_match=exact_match, exact_match=exact_match,
@@ -21,6 +21,10 @@ _PRESET_TYPES = {
_EXECUTION_MODES = {"legacy", "graph", "shadow"} _EXECUTION_MODES = {"legacy", "graph", "shadow"}
_WORKFLOW_BLUEPRINTS = {"cad_intake", "order_rendering", "still_graph_reference"} _WORKFLOW_BLUEPRINTS = {"cad_intake", "order_rendering", "still_graph_reference"}
_WORKFLOW_STARTERS = {"cad_file", "order_line"} _WORKFLOW_STARTERS = {"cad_file", "order_line"}
_WORKFLOW_STARTER_BLUEPRINTS = {
"starter_cad_intake": "cad_file",
"starter_order_rendering": "order_line",
}
_NODE_TYPE_TO_STEP: dict[str, str] = { _NODE_TYPE_TO_STEP: dict[str, str] = {
"inputNode": StepName.RESOLVE_STEP_PATH.value, "inputNode": StepName.RESOLVE_STEP_PATH.value,
@@ -72,7 +76,7 @@ def _extract_render_params_from_nodes(nodes: list[dict[str, Any]], step: StepNam
def _build_order_line_still_graph_nodes(render_params: dict[str, Any]) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]: def _build_order_line_still_graph_nodes(render_params: dict[str, Any]) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]:
graph_render_params = deepcopy(render_params) graph_render_params = deepcopy(render_params)
graph_render_params.setdefault("use_custom_render_settings", True) graph_render_params.setdefault("use_custom_render_settings", False)
nodes = [ nodes = [
_make_node("setup", StepName.ORDER_LINE_SETUP, 0, 160, label="Order Line Setup"), _make_node("setup", StepName.ORDER_LINE_SETUP, 0, 160, label="Order Line Setup"),
@@ -222,6 +226,7 @@ def build_preset_workflow_config(
"ui": { "ui": {
"preset": preset_type, "preset": preset_type,
"execution_mode": "graph" if preset_type == "still_graph" else "legacy", "execution_mode": "graph" if preset_type == "still_graph" else "legacy",
"family": "order_line",
}, },
} }
@@ -235,6 +240,7 @@ def build_workflow_blueprint_config(blueprint: str) -> dict[str, Any]:
_make_node("resolve_step", StepName.RESOLVE_STEP_PATH, 0, 180, label="Resolve STEP Path"), _make_node("resolve_step", StepName.RESOLVE_STEP_PATH, 0, 180, label="Resolve STEP Path"),
_make_node("extract_objects", StepName.OCC_OBJECT_EXTRACT, 220, 180, label="Extract STEP Objects"), _make_node("extract_objects", StepName.OCC_OBJECT_EXTRACT, 220, 180, label="Extract STEP Objects"),
_make_node("export_glb", StepName.OCC_GLB_EXPORT, 440, 180, label="Export GLB"), _make_node("export_glb", StepName.OCC_GLB_EXPORT, 440, 180, label="Export GLB"),
_make_node("bbox", StepName.GLB_BBOX, 660, 120, label="Compute Bounding Box"),
_make_node("stl_cache", StepName.STL_CACHE_GENERATE, 660, 300, label="Generate STL Cache"), _make_node("stl_cache", StepName.STL_CACHE_GENERATE, 660, 300, label="Generate STL Cache"),
_make_node( _make_node(
"blender_thumb", "blender_thumb",
@@ -260,9 +266,11 @@ def build_workflow_blueprint_config(blueprint: str) -> dict[str, Any]:
edges = [ edges = [
{"from": "resolve_step", "to": "extract_objects"}, {"from": "resolve_step", "to": "extract_objects"},
{"from": "extract_objects", "to": "export_glb"}, {"from": "extract_objects", "to": "export_glb"},
{"from": "export_glb", "to": "bbox"},
{"from": "export_glb", "to": "stl_cache"}, {"from": "export_glb", "to": "stl_cache"},
{"from": "export_glb", "to": "blender_thumb"}, {"from": "export_glb", "to": "blender_thumb"},
{"from": "export_glb", "to": "threejs_thumb"}, {"from": "export_glb", "to": "threejs_thumb"},
{"from": "bbox", "to": "threejs_thumb"},
{"from": "blender_thumb", "to": "save_blender_thumb"}, {"from": "blender_thumb", "to": "save_blender_thumb"},
{"from": "threejs_thumb", "to": "save_threejs_thumb"}, {"from": "threejs_thumb", "to": "save_threejs_thumb"},
] ]
@@ -329,6 +337,7 @@ def build_workflow_blueprint_config(blueprint: str) -> dict[str, Any]:
"ui": { "ui": {
"preset": "custom", "preset": "custom",
"execution_mode": "graph" if blueprint == "still_graph_reference" else "legacy", "execution_mode": "graph" if blueprint == "still_graph_reference" else "legacy",
"family": "cad_file" if blueprint == "cad_intake" else "order_line",
"blueprint": blueprint, "blueprint": blueprint,
}, },
} }
@@ -356,6 +365,7 @@ def build_starter_workflow_config(family: str = "order_line") -> dict[str, Any]:
"ui": { "ui": {
"preset": "custom", "preset": "custom",
"execution_mode": "legacy", "execution_mode": "legacy",
"family": family,
"blueprint": blueprint, "blueprint": blueprint,
}, },
} }
@@ -385,6 +395,7 @@ def _build_legacy_custom_render_fallback_config(params: dict[str, Any] | None =
"ui": { "ui": {
"preset": "custom", "preset": "custom",
"execution_mode": "legacy", "execution_mode": "legacy",
"family": "order_line",
"blueprint": "starter_order_rendering", "blueprint": "starter_order_rendering",
}, },
} }
@@ -480,9 +491,16 @@ def canonicalize_workflow_config(raw: dict[str, Any]) -> dict[str, Any]:
canonical["ui"].update(merged_ui) canonical["ui"].update(merged_ui)
return canonical return canonical
if blueprint == "still_graph_reference": if blueprint in _WORKFLOW_BLUEPRINTS:
merged_ui = dict(normalized["ui"]) merged_ui = dict(normalized["ui"])
canonical = build_workflow_blueprint_config("still_graph_reference") canonical = build_workflow_blueprint_config(blueprint)
merged_ui.setdefault("execution_mode", canonical["ui"]["execution_mode"])
canonical["ui"].update(merged_ui)
return canonical
if blueprint in _WORKFLOW_STARTER_BLUEPRINTS:
merged_ui = dict(normalized["ui"])
canonical = build_starter_workflow_config(_WORKFLOW_STARTER_BLUEPRINTS[blueprint])
merged_ui.setdefault("execution_mode", canonical["ui"]["execution_mode"]) merged_ui.setdefault("execution_mode", canonical["ui"]["execution_mode"])
canonical["ui"].update(merged_ui) canonical["ui"].update(merged_ui)
return canonical return canonical
@@ -25,7 +25,7 @@ from collections import deque
from dataclasses import dataclass, field from dataclasses import dataclass, field
from typing import Literal from typing import Literal
from app.domains.rendering.workflow_schema import WorkflowConfig, WorkflowNode from app.domains.rendering.workflow_schema import WorkflowConfig, WorkflowEdge, WorkflowNode
from app.core.process_steps import StepName from app.core.process_steps import StepName
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -40,6 +40,17 @@ class WorkflowContext:
execution_mode: WorkflowExecutionMode execution_mode: WorkflowExecutionMode
workflow_run_id: uuid.UUID | None = None workflow_run_id: uuid.UUID | None = None
ordered_nodes: list[WorkflowNode] = field(default_factory=list) ordered_nodes: list[WorkflowNode] = field(default_factory=list)
edges: list[WorkflowEdge] = field(default_factory=list)
@dataclass(slots=True)
class WorkflowTaskDispatchSpec:
node_id: str
task_name: str
args: list[str]
kwargs: dict
task_id: str
queue: str | None = None
@dataclass(slots=True) @dataclass(slots=True)
@@ -48,6 +59,38 @@ class WorkflowDispatchResult:
task_ids: list[str] task_ids: list[str]
node_task_ids: dict[str, str] node_task_ids: dict[str, str]
skipped_node_ids: list[str] skipped_node_ids: list[str]
task_specs: list[WorkflowTaskDispatchSpec] = field(default_factory=list)
class WorkflowTaskSubmissionError(RuntimeError):
def __init__(self, message: str, *, submitted_task_ids: list[str] | None = None) -> None:
super().__init__(message)
self.submitted_task_ids = list(submitted_task_ids or [])
def submit_prepared_workflow_tasks(dispatch_result: WorkflowDispatchResult) -> None:
"""Submit pre-built Celery tasks after DB state has been committed."""
from app.tasks.celery_app import celery_app
submitted_task_ids: list[str] = []
for spec in dispatch_result.task_specs:
task_options: dict[str, str] = {"task_id": spec.task_id}
if spec.queue:
task_options["queue"] = spec.queue
try:
celery_app.send_task(
spec.task_name,
args=spec.args,
kwargs=spec.kwargs,
**task_options,
)
except Exception as exc:
raise WorkflowTaskSubmissionError(
f"Failed to submit workflow task for node '{spec.node_id}': {exc}",
submitted_task_ids=submitted_task_ids,
) from exc
submitted_task_ids.append(spec.task_id)
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
@@ -65,7 +108,7 @@ STEP_TASK_MAP: dict[StepName, str] = {
StepName.STL_CACHE_GENERATE: "app.tasks.step_tasks.process_step_file", StepName.STL_CACHE_GENERATE: "app.tasks.step_tasks.process_step_file",
# ── Thumbnail generation ───────────────────────────────────────────── # ── Thumbnail generation ─────────────────────────────────────────────
StepName.BLENDER_RENDER: "app.tasks.step_tasks.render_step_thumbnail", StepName.BLENDER_RENDER: "app.tasks.step_tasks.render_step_thumbnail",
StepName.THUMBNAIL_SAVE: "app.tasks.step_tasks.render_step_thumbnail", StepName.THUMBNAIL_SAVE: "app.tasks.step_tasks.render_graph_thumbnail",
# ── Order line stills & turntables ────────────────────────────────── # ── Order line stills & turntables ──────────────────────────────────
StepName.BLENDER_STILL: "app.domains.rendering.tasks.render_order_line_still_task", StepName.BLENDER_STILL: "app.domains.rendering.tasks.render_order_line_still_task",
StepName.BLENDER_TURNTABLE: "app.domains.rendering.tasks.render_turntable_task", StepName.BLENDER_TURNTABLE: "app.domains.rendering.tasks.render_turntable_task",
@@ -98,6 +141,7 @@ def prepare_workflow_context(
execution_mode=execution_mode, execution_mode=execution_mode,
workflow_run_id=workflow_run_id, workflow_run_id=workflow_run_id,
ordered_nodes=ordered_nodes, ordered_nodes=ordered_nodes,
edges=list(config.edges),
) )
@@ -12,12 +12,19 @@ from sqlalchemy import select
from sqlalchemy.orm import Session, selectinload from sqlalchemy.orm import Session, selectinload
from app.config import settings from app.config import settings
from app.core.render_paths import build_order_line_export_path, build_order_line_step_render_path
from app.core.process_steps import StepName from app.core.process_steps import StepName
from app.domains.products.models import CadFile from app.domains.products.models import CadFile
from app.domains.rendering.models import WorkflowNodeResult, WorkflowRun from app.domains.rendering.models import WorkflowNodeResult, WorkflowRun
from app.domains.rendering.workflow_executor import STEP_TASK_MAP, WorkflowContext, WorkflowDispatchResult from app.domains.rendering.workflow_executor import (
STEP_TASK_MAP,
WorkflowContext,
WorkflowDispatchResult,
WorkflowTaskDispatchSpec,
)
from app.domains.rendering.workflow_node_registry import get_node_definition from app.domains.rendering.workflow_node_registry import get_node_definition
from app.domains.rendering.workflow_runtime_services import ( from app.domains.rendering.workflow_runtime_services import (
_resolve_render_output_extension,
AutoPopulateMaterialsResult, AutoPopulateMaterialsResult,
BBoxResolutionResult, BBoxResolutionResult,
MaterialResolutionResult, MaterialResolutionResult,
@@ -25,6 +32,7 @@ from app.domains.rendering.workflow_runtime_services import (
TemplateResolutionResult, TemplateResolutionResult,
auto_populate_materials_for_cad, auto_populate_materials_for_cad,
build_order_line_render_invocation, build_order_line_render_invocation,
extract_template_input_overrides,
prepare_order_line_render_context, prepare_order_line_render_context,
resolve_cad_bbox, resolve_cad_bbox,
resolve_order_line_material_map, resolve_order_line_material_map,
@@ -89,11 +97,13 @@ _STILL_TASK_KEYS = {
"material_override", "material_override",
"render_engine", "render_engine",
"resolution", "resolution",
"template_inputs",
} }
_TURNTABLE_TASK_KEYS = { _TURNTABLE_TASK_KEYS = {
"output_name", "output_name",
"engine", "engine",
"render_engine",
"samples", "samples",
"smooth_angle", "smooth_angle",
"cycles_device", "cycles_device",
@@ -119,6 +129,8 @@ _TURNTABLE_TASK_KEYS = {
"focal_length_mm", "focal_length_mm",
"sensor_width_mm", "sensor_width_mm",
"material_override", "material_override",
"template_inputs",
"duration_s",
} }
_THUMBNAIL_TASK_KEYS = { _THUMBNAIL_TASK_KEYS = {
@@ -144,13 +156,62 @@ _AUTHORITATIVE_RENDER_SETTING_KEYS = {
"denoising_prefilter", "denoising_prefilter",
"denoising_quality", "denoising_quality",
"denoising_use_gpu", "denoising_use_gpu",
"camera_orbit",
"focal_length_mm", "focal_length_mm",
"sensor_width_mm", "sensor_width_mm",
"bg_color", "bg_color",
} }
def _inspect_active_worker_queues(timeout: float = 1.0) -> set[str]:
from app.tasks.celery_app import celery_app
try:
inspect_result = celery_app.control.inspect(timeout=timeout)
active_queues = inspect_result.active_queues() or {}
except Exception as exc:
logger.info("[WORKFLOW] Could not inspect active Celery queues: %s", exc)
return set()
queue_names: set[str] = set()
for queues in active_queues.values():
for queue in queues or []:
if not isinstance(queue, dict):
continue
name = queue.get("name")
if isinstance(name, str) and name.strip():
queue_names.add(name.strip())
return queue_names
def _resolve_shadow_render_queue(
*,
workflow_context: WorkflowContext,
node,
active_queue_names: set[str],
) -> str | None:
if workflow_context.execution_mode != "shadow":
return None
if node.step not in {
StepName.BLENDER_STILL,
StepName.BLENDER_TURNTABLE,
StepName.EXPORT_BLEND,
}:
return None
preferred_queue = (settings.workflow_shadow_render_queue or "").strip()
if not preferred_queue or preferred_queue == "asset_pipeline":
return None
if preferred_queue in active_queue_names:
return preferred_queue
logger.info(
"[WORKFLOW] Preferred shadow render queue %s unavailable for node %s; using default routing",
preferred_queue,
node.id,
)
return None
def _filter_graph_render_overrides(step: StepName, params: dict[str, Any]) -> dict[str, Any]: def _filter_graph_render_overrides(step: StepName, params: dict[str, Any]) -> dict[str, Any]:
normalized = dict(params) normalized = dict(params)
use_custom_render_settings = bool(normalized.pop("use_custom_render_settings", False)) use_custom_render_settings = bool(normalized.pop("use_custom_render_settings", False))
@@ -186,6 +247,8 @@ def find_unsupported_graph_nodes(workflow_context: WorkflowContext) -> list[str]
def execute_graph_workflow( def execute_graph_workflow(
session: Session, session: Session,
workflow_context: WorkflowContext, workflow_context: WorkflowContext,
*,
dispatch_tasks: bool = True,
) -> WorkflowDispatchResult: ) -> WorkflowDispatchResult:
if workflow_context.workflow_run_id is None: if workflow_context.workflow_run_id is None:
raise ValueError("workflow_context.workflow_run_id is required for graph execution") raise ValueError("workflow_context.workflow_run_id is required for graph execution")
@@ -201,6 +264,12 @@ def execute_graph_workflow(
task_ids: list[str] = [] task_ids: list[str] = []
node_task_ids: dict[str, str] = {} node_task_ids: dict[str, str] = {}
skipped_node_ids: list[str] = [] skipped_node_ids: list[str] = []
task_specs: list[WorkflowTaskDispatchSpec] = []
active_queue_names = (
_inspect_active_worker_queues()
if workflow_context.execution_mode == "shadow"
else set()
)
for node in workflow_context.ordered_nodes: for node in workflow_context.ordered_nodes:
node_result = node_results.get(node.id) node_result = node_results.get(node.id)
@@ -326,8 +395,6 @@ def execute_graph_workflow(
skipped_node_ids.append(node.id) skipped_node_ids.append(node.id)
continue continue
from app.tasks.celery_app import celery_app
task_kwargs = _build_task_kwargs( task_kwargs = _build_task_kwargs(
session=session, session=session,
workflow_context=workflow_context, workflow_context=workflow_context,
@@ -335,12 +402,42 @@ def execute_graph_workflow(
node=node, node=node,
) )
result = celery_app.send_task( target_queue = _resolve_shadow_render_queue(
task_name, workflow_context=workflow_context,
args=[workflow_context.context_id], node=node,
kwargs=task_kwargs, active_queue_names=active_queue_names,
) )
metadata["task_id"] = result.id if dispatch_tasks:
from app.tasks.celery_app import celery_app
if target_queue:
result = celery_app.send_task(
task_name,
args=[workflow_context.context_id],
kwargs=task_kwargs,
queue=target_queue,
)
else:
result = celery_app.send_task(
task_name,
args=[workflow_context.context_id],
kwargs=task_kwargs,
)
task_id = result.id
else:
task_id = str(uuid.uuid4())
task_specs.append(
WorkflowTaskDispatchSpec(
node_id=node.id,
task_name=task_name,
args=[workflow_context.context_id],
kwargs=dict(task_kwargs),
task_id=task_id,
queue=target_queue,
)
)
metadata["task_id"] = task_id
metadata["task_queue"] = target_queue or "asset_pipeline"
if definition is not None: if definition is not None:
metadata["execution_kind"] = definition.execution_kind metadata["execution_kind"] = definition.execution_kind
metadata["attempt_count"] = 1 metadata["attempt_count"] = 1
@@ -360,15 +457,15 @@ def execute_graph_workflow(
node_result.duration_s = None node_result.duration_s = None
state.node_outputs[node.id] = dict(metadata) state.node_outputs[node.id] = dict(metadata)
session.flush() session.flush()
task_ids.append(result.id) task_ids.append(task_id)
node_task_ids[node.id] = result.id node_task_ids[node.id] = task_id
logger.info( logger.info(
"[WORKFLOW] Dispatched node %r (step=%s, mode=%s, run=%s) -> Celery task %s", "[WORKFLOW] Dispatched node %r (step=%s, mode=%s, run=%s) -> Celery task %s",
node.id, node.id,
node.step, node.step,
workflow_context.execution_mode, workflow_context.execution_mode,
workflow_context.workflow_run_id, workflow_context.workflow_run_id,
result.id, task_id,
) )
continue continue
@@ -397,6 +494,7 @@ def execute_graph_workflow(
task_ids=task_ids, task_ids=task_ids,
node_task_ids=node_task_ids, node_task_ids=node_task_ids,
skipped_node_ids=skipped_node_ids, skipped_node_ids=skipped_node_ids,
task_specs=task_specs,
) )
@@ -466,8 +564,15 @@ def _serialize_template_result(result: TemplateResolutionResult) -> dict[str, An
"material_map_count": len(result.material_map or {}), "material_map_count": len(result.material_map or {}),
"use_materials": result.use_materials, "use_materials": result.use_materials,
"override_material": result.override_material, "override_material": result.override_material,
"target_collection": result.target_collection,
"lighting_only": result.lighting_only,
"shadow_catcher": result.shadow_catcher,
"camera_orbit": result.camera_orbit,
"category_key": result.category_key, "category_key": result.category_key,
"output_type_id": result.output_type_id, "output_type_id": result.output_type_id,
"workflow_input_schema": result.workflow_input_schema,
"template_inputs": result.template_inputs,
"template_input_count": len(result.template_inputs or {}),
} }
@@ -597,13 +702,17 @@ def _predict_task_output_metadata(
order_line_id = str(state.setup.order_line.id) order_line_id = str(state.setup.order_line.id)
if node.step == StepName.BLENDER_STILL: if node.step == StepName.BLENDER_STILL:
output_dir = step_path.parent / "renders" output_extension = _resolve_render_output_extension(state.setup.order_line)
output_filename = f"line_{order_line_id}.png" if output_extension not in {"png", "jpg", "webp"}:
output_extension = "png"
output_filename = f"line_{order_line_id}.{output_extension}"
if output_name_suffix: if output_name_suffix:
output_filename = f"line_{order_line_id}_{output_name_suffix}.png" output_filename = f"line_{order_line_id}_{output_name_suffix}.{output_extension}"
return { return {
"artifact_role": "render_output", "artifact_role": "render_output",
"predicted_output_path": str(output_dir / output_filename), "predicted_output_path": str(
build_order_line_step_render_path(step_path, order_line_id, output_filename)
),
"predicted_asset_type": "still", "predicted_asset_type": "still",
"publish_asset_enabled": bool(task_kwargs.get("publish_asset_enabled", True)), "publish_asset_enabled": bool(task_kwargs.get("publish_asset_enabled", True)),
"graph_authoritative_output_enabled": bool( "graph_authoritative_output_enabled": bool(
@@ -618,9 +727,10 @@ def _predict_task_output_metadata(
output_filename = f"{step_path.stem}_production.blend" output_filename = f"{step_path.stem}_production.blend"
if output_name_suffix: if output_name_suffix:
output_filename = f"{step_path.stem}_production_{output_name_suffix}.blend" output_filename = f"{step_path.stem}_production_{output_name_suffix}.blend"
predicted_output_path = str(build_order_line_export_path(order_line_id, output_filename))
return { return {
"artifact_role": "blend_export", "artifact_role": "blend_export",
"predicted_output_path": str(step_path.parent / output_filename), "predicted_output_path": predicted_output_path,
"predicted_asset_type": "blend_production", "predicted_asset_type": "blend_production",
"publish_asset_enabled": bool(task_kwargs.get("publish_asset_enabled", True)), "publish_asset_enabled": bool(task_kwargs.get("publish_asset_enabled", True)),
"graph_authoritative_output_enabled": bool( "graph_authoritative_output_enabled": bool(
@@ -641,7 +751,9 @@ def _predict_task_output_metadata(
if isinstance(output_dir, str) and output_dir.strip(): if isinstance(output_dir, str) and output_dir.strip():
predicted_output_path = str(Path(output_dir) / f"{output_name}.mp4") predicted_output_path = str(Path(output_dir) / f"{output_name}.mp4")
else: else:
predicted_output_path = str(step_path.parent / "renders" / f"{output_name}.mp4") predicted_output_path = str(
build_order_line_step_render_path(step_path, order_line_id, f"{output_name}.mp4")
)
return { return {
"artifact_role": "turntable_output", "artifact_role": "turntable_output",
"predicted_output_path": predicted_output_path, "predicted_output_path": predicted_output_path,
@@ -733,6 +845,30 @@ def _resolve_thumbnail_request(
return None return None
def _normalize_turntable_task_kwargs(task_kwargs: dict[str, Any]) -> dict[str, Any]:
normalized = dict(task_kwargs)
raw_duration = normalized.get("duration_s")
if raw_duration in (None, ""):
return normalized
try:
duration_s = float(raw_duration)
except (TypeError, ValueError):
return normalized
try:
fps = int(float(normalized.get("fps", 0)))
except (TypeError, ValueError):
return normalized
if duration_s <= 0 or fps <= 0:
return normalized
normalized["duration_s"] = duration_s
normalized["frame_count"] = max(1, int(round(duration_s * fps)))
return normalized
def _build_task_kwargs( def _build_task_kwargs(
*, *,
session: Session, session: Session,
@@ -751,6 +887,7 @@ def _build_task_kwargs(
template_context=state.template, template_context=state.template,
position_context=resolve_render_position_context(session, state.setup.order_line), position_context=resolve_render_position_context(session, state.setup.order_line),
material_context=state.materials, material_context=state.materials,
artifact_kind_override=_artifact_kind_override_for_step(node.step),
) )
render_defaults = render_invocation.task_defaults() render_defaults = render_invocation.task_defaults()
@@ -774,6 +911,15 @@ def _build_task_kwargs(
}.items() }.items()
if key in _TURNTABLE_TASK_KEYS if key in _TURNTABLE_TASK_KEYS
} }
task_kwargs = _normalize_turntable_task_kwargs(task_kwargs)
if state.setup is not None and state.setup.is_ready and state.setup.cad_file is not None:
task_kwargs["output_dir"] = str(
build_order_line_step_render_path(
state.setup.cad_file.stored_path,
str(state.setup.order_line.id),
"turntable.mp4",
).parent
)
elif node.step == StepName.THUMBNAIL_SAVE: elif node.step == StepName.THUMBNAIL_SAVE:
thumbnail_request = _resolve_thumbnail_request(workflow_context, state, node.id) or {} thumbnail_request = _resolve_thumbnail_request(workflow_context, state, node.id) or {}
task_kwargs = { task_kwargs = {
@@ -787,7 +933,7 @@ def _build_task_kwargs(
task_kwargs["workflow_run_id"] = str(workflow_context.workflow_run_id) task_kwargs["workflow_run_id"] = str(workflow_context.workflow_run_id)
task_kwargs["workflow_node_id"] = node.id task_kwargs["workflow_node_id"] = node.id
if workflow_context.execution_mode == "graph" and node.step in { if workflow_context.execution_mode in {"graph", "shadow"} and node.step in {
StepName.BLENDER_STILL, StepName.BLENDER_STILL,
StepName.EXPORT_BLEND, StepName.EXPORT_BLEND,
StepName.BLENDER_TURNTABLE, StepName.BLENDER_TURNTABLE,
@@ -798,19 +944,23 @@ def _build_task_kwargs(
step=StepName.OUTPUT_SAVE, step=StepName.OUTPUT_SAVE,
direction="downstream", direction="downstream",
) )
connected_notify_node_ids = _connected_node_ids_by_step(
workflow_context,
node_id=node.id,
step=StepName.NOTIFY,
direction="downstream",
)
if connected_output_node_ids: if connected_output_node_ids:
task_kwargs["publish_asset_enabled"] = False task_kwargs["publish_asset_enabled"] = False
task_kwargs["graph_authoritative_output_enabled"] = True
task_kwargs["graph_output_node_ids"] = connected_output_node_ids task_kwargs["graph_output_node_ids"] = connected_output_node_ids
if connected_notify_node_ids: if workflow_context.execution_mode == "graph":
task_kwargs["emit_legacy_notifications"] = True task_kwargs["graph_authoritative_output_enabled"] = True
task_kwargs["graph_notify_node_ids"] = connected_notify_node_ids else:
task_kwargs["observer_output_enabled"] = True
if workflow_context.execution_mode == "graph":
connected_notify_node_ids = _connected_node_ids_by_step(
workflow_context,
node_id=node.id,
step=StepName.NOTIFY,
direction="downstream",
)
if connected_notify_node_ids:
task_kwargs["emit_legacy_notifications"] = True
task_kwargs["graph_notify_node_ids"] = connected_notify_node_ids
if workflow_context.execution_mode == "shadow": if workflow_context.execution_mode == "shadow":
task_kwargs["publish_asset_enabled"] = False task_kwargs["publish_asset_enabled"] = False
task_kwargs["emit_events"] = False task_kwargs["emit_events"] = False
@@ -819,6 +969,16 @@ def _build_task_kwargs(
return task_kwargs return task_kwargs
def _artifact_kind_override_for_step(step: StepName) -> str | None:
if step == StepName.BLENDER_TURNTABLE:
return "turntable_video"
if step == StepName.BLENDER_STILL:
return "still_image"
if step == StepName.EXPORT_BLEND:
return "blend_asset"
return None
def _execute_order_line_setup( def _execute_order_line_setup(
*, *,
session: Session, session: Session,
@@ -857,12 +1017,25 @@ def _execute_resolve_template(
node_params: dict[str, Any], node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]: ) -> tuple[dict[str, Any], str, str | None]:
del node del node
del workflow_context, node_params del workflow_context
if state.setup is None or not state.setup.is_ready: if state.setup is None or not state.setup.is_ready:
if state.setup is not None and state.setup.status == "skip": if state.setup is not None and state.setup.status == "skip":
return _serialize_setup_result(state.setup), "skipped", state.setup.reason return _serialize_setup_result(state.setup), "skipped", state.setup.reason
raise WorkflowGraphRuntimeError("resolve_template requires a ready order_line_setup result") raise WorkflowGraphRuntimeError("resolve_template requires a ready order_line_setup result")
result = resolve_order_line_template_context(session, state.setup) result = resolve_order_line_template_context(
session,
state.setup,
template_id_override=node_params.get("template_id_override"),
material_library_path_override=node_params.get("material_library_path"),
require_template=bool(node_params.get("require_template", False)),
disable_materials=bool(node_params.get("disable_materials", False)),
target_collection_override=node_params.get("target_collection"),
material_replace_mode=node_params.get("material_replace_mode"),
lighting_only_mode=node_params.get("lighting_only_mode"),
shadow_catcher_mode=node_params.get("shadow_catcher_mode"),
camera_orbit_mode=node_params.get("camera_orbit_mode"),
template_input_overrides=extract_template_input_overrides(node_params),
)
state.template = result state.template = result
return _serialize_template_result(result), "completed", None return _serialize_template_result(result), "completed", None
@@ -876,7 +1049,7 @@ def _execute_material_map_resolve(
node_params: dict[str, Any], node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]: ) -> tuple[dict[str, Any], str, str | None]:
del node del node
del session, workflow_context, node_params del session, workflow_context
if state.setup is None or not state.setup.is_ready: if state.setup is None or not state.setup.is_ready:
if state.setup is not None and state.setup.status == "skip": if state.setup is not None and state.setup.status == "skip":
return _serialize_setup_result(state.setup), "skipped", state.setup.reason return _serialize_setup_result(state.setup), "skipped", state.setup.reason
@@ -895,6 +1068,8 @@ def _execute_material_map_resolve(
state.setup.materials_source, state.setup.materials_source,
material_library=material_library, material_library=material_library,
template=template, template=template,
material_override=node_params.get("material_override"),
disable_materials=bool(node_params.get("disable_materials", False)),
) )
state.materials = result state.materials = result
return _serialize_material_result(result), "completed", None return _serialize_material_result(result), "completed", None
@@ -909,26 +1084,45 @@ def _execute_auto_populate_materials(
node_params: dict[str, Any], node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]: ) -> tuple[dict[str, Any], str, str | None]:
del node del node
del node_params
if state.setup is None or state.setup.cad_file is None: if state.setup is None or state.setup.cad_file is None:
if state.setup is not None and state.setup.status == "skip": if state.setup is not None and state.setup.status == "skip":
return _serialize_setup_result(state.setup), "skipped", state.setup.reason return _serialize_setup_result(state.setup), "skipped", state.setup.reason
raise WorkflowGraphRuntimeError("auto_populate_materials requires a resolved cad_file") raise WorkflowGraphRuntimeError("auto_populate_materials requires a resolved cad_file")
shadow_mode = workflow_context.execution_mode == "shadow" shadow_mode = workflow_context.execution_mode == "shadow"
persist_updates = bool(node_params.get("persist_updates", not shadow_mode))
if shadow_mode:
persist_updates = False
refresh_material_source = bool(node_params.get("refresh_material_source", True))
include_populated_products = bool(node_params.get("include_populated_products", False))
if shadow_mode: if shadow_mode:
result = auto_populate_materials_for_cad( result = auto_populate_materials_for_cad(
session, session,
str(state.setup.cad_file.id), str(state.setup.cad_file.id),
persist_updates=False, persist_updates=False,
include_populated_products=include_populated_products,
) )
else: else:
result = auto_populate_materials_for_cad(session, str(state.setup.cad_file.id)) result = auto_populate_materials_for_cad(
session,
str(state.setup.cad_file.id),
persist_updates=persist_updates,
include_populated_products=include_populated_products,
)
state.auto_populate = result state.auto_populate = result
if not shadow_mode and state.setup.order_line is not None and state.setup.order_line.product is not None: if (
persist_updates
and refresh_material_source
and not shadow_mode
and state.setup.order_line is not None
and state.setup.order_line.product is not None
):
session.refresh(state.setup.order_line.product) session.refresh(state.setup.order_line.product)
state.setup.materials_source = state.setup.order_line.product.cad_part_materials or [] state.setup.materials_source = state.setup.order_line.product.cad_part_materials or []
payload = _serialize_auto_populate_result(result) payload = _serialize_auto_populate_result(result)
payload["shadow_mode"] = shadow_mode payload["shadow_mode"] = shadow_mode
payload["persist_updates"] = persist_updates
payload["refresh_material_source"] = refresh_material_source
payload["include_populated_products"] = include_populated_products
return payload, "completed", None return payload, "completed", None
@@ -949,17 +1143,31 @@ def _execute_glb_bbox(
step_path = state.setup.cad_file.stored_path step_path = state.setup.cad_file.stored_path
glb_path = node_params.get("glb_path") glb_path = node_params.get("glb_path")
if glb_path is None and state.setup.glb_reuse_path is not None: source_preference = str(node_params.get("source_preference") or "auto")
if glb_path is None and source_preference != "step_only" and state.setup.glb_reuse_path is not None:
glb_path = str(state.setup.glb_reuse_path) glb_path = str(state.setup.glb_reuse_path)
elif glb_path is None: elif glb_path is None and source_preference != "step_only":
step_file = Path(step_path) step_file = Path(step_path)
fallback_glb = step_file.parent / f"{step_file.stem}_thumbnail.glb" fallback_glb = step_file.parent / f"{step_file.stem}_thumbnail.glb"
if fallback_glb.exists(): if fallback_glb.exists():
glb_path = str(fallback_glb) glb_path = str(fallback_glb)
if source_preference == "glb_only" and not glb_path:
payload = {
"bbox_data": None,
"has_bbox": False,
"source_kind": "none",
"step_path": step_path,
"glb_path": None,
"source_preference": source_preference,
}
return payload, "failed", "glb_only requested but no GLB artifact is available"
result = resolve_cad_bbox(step_path, glb_path=glb_path) result = resolve_cad_bbox(step_path, glb_path=glb_path)
state.bbox = result state.bbox = result
return _serialize_bbox_result(result), "completed", None payload = _serialize_bbox_result(result)
payload["source_preference"] = source_preference
return payload, "completed", None
def _execute_resolve_step_path( def _execute_resolve_step_path(
@@ -1069,7 +1277,7 @@ def _execute_output_save(
node, node,
node_params: dict[str, Any], node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]: ) -> tuple[dict[str, Any], str, str | None]:
del session, node_params del session
if state.setup is None or state.setup.order_line is None: if state.setup is None or state.setup.order_line is None:
raise WorkflowGraphRuntimeError("output_save requires an order_line_setup result") raise WorkflowGraphRuntimeError("output_save requires an order_line_setup result")
@@ -1085,19 +1293,42 @@ def _execute_output_save(
"shadow_mode": workflow_context.execution_mode == "shadow", "shadow_mode": workflow_context.execution_mode == "shadow",
} }
upstream_artifacts = _connected_upstream_artifacts(workflow_context, state, node.id) upstream_artifacts = _connected_upstream_artifacts(workflow_context, state, node.id)
expected_artifact_role = str(node_params.get("expected_artifact_role") or "").strip() or None
require_upstream_artifact = bool(node_params.get("require_upstream_artifact", False))
if expected_artifact_role is not None:
upstream_artifacts = [
artifact for artifact in upstream_artifacts if artifact.get("artifact_role") == expected_artifact_role
]
if workflow_context.execution_mode == "shadow": if workflow_context.execution_mode == "shadow":
payload["publication_mode"] = "shadow_observer_only" payload["publication_mode"] = "shadow_observer_only"
elif any(artifact["publish_asset_enabled"] for artifact in upstream_artifacts): elif any(artifact["publish_asset_enabled"] for artifact in upstream_artifacts):
payload["publication_mode"] = "deferred_to_render_task" payload["publication_mode"] = "deferred_to_render_task"
else: else:
payload["publication_mode"] = "awaiting_graph_authoritative_save" payload["publication_mode"] = "awaiting_graph_authoritative_save"
payload["expected_artifact_role"] = expected_artifact_role
payload["require_upstream_artifact"] = require_upstream_artifact
if upstream_artifacts: if upstream_artifacts:
payload["artifact_count"] = len(upstream_artifacts) payload["artifact_count"] = len(upstream_artifacts)
payload["upstream_artifacts"] = upstream_artifacts payload["upstream_artifacts"] = upstream_artifacts
elif require_upstream_artifact:
payload["artifact_count"] = 0
return payload, "failed", "No upstream render artifact is connected to this output node"
if state.template is not None and state.template.template is not None: if state.template is not None and state.template.template is not None:
payload["template_name"] = state.template.template.name payload["template_name"] = state.template.template.name
if state.materials is not None: if state.materials is not None:
payload["material_map_count"] = len(state.materials.material_map or {}) payload["material_map_count"] = len(state.materials.material_map or {})
deferred_handoff_node_ids = [
str(artifact.get("node_id"))
for artifact in upstream_artifacts
if artifact.get("task_id")
]
if deferred_handoff_node_ids:
payload["handoff_state"] = "armed"
payload["handoff_node_ids"] = deferred_handoff_node_ids
payload["handoff_node_count"] = len(deferred_handoff_node_ids)
return payload, "pending", None
return payload, "completed", None return payload, "completed", None
@@ -1109,7 +1340,7 @@ def _execute_notify(
node, node,
node_params: dict[str, Any], node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]: ) -> tuple[dict[str, Any], str, str | None]:
del session, node_params del session
if state.setup is None or state.setup.order_line is None: if state.setup is None or state.setup.order_line is None:
raise WorkflowGraphRuntimeError("notify requires an order_line_setup result") raise WorkflowGraphRuntimeError("notify requires an order_line_setup result")
@@ -1121,8 +1352,10 @@ def _execute_notify(
payload: dict[str, Any] = { payload: dict[str, Any] = {
"order_line_id": str(state.setup.order_line.id), "order_line_id": str(state.setup.order_line.id),
"shadow_mode": workflow_context.execution_mode == "shadow", "shadow_mode": workflow_context.execution_mode == "shadow",
"channel": "audit_log", "channel": str(node_params.get("channel") or "audit_log"),
} }
require_armed_render = bool(node_params.get("require_armed_render", False))
payload["require_armed_render"] = require_armed_render
if workflow_context.execution_mode == "shadow": if workflow_context.execution_mode == "shadow":
payload["notification_mode"] = "shadow_suppressed" payload["notification_mode"] = "shadow_suppressed"
@@ -1136,12 +1369,15 @@ def _execute_notify(
] ]
if not armed_node_ids: if not armed_node_ids:
payload["notification_mode"] = "not_armed" payload["notification_mode"] = "not_armed"
if require_armed_render:
return payload, "failed", "No graph render task is configured for notification handoff"
return payload, "skipped", "No graph render task is configured for notification handoff" return payload, "skipped", "No graph render task is configured for notification handoff"
payload["notification_mode"] = "deferred_to_render_task" payload["notification_mode"] = "deferred_to_render_task"
payload["armed_node_ids"] = armed_node_ids payload["armed_node_ids"] = armed_node_ids
payload["armed_node_count"] = len(armed_node_ids) payload["armed_node_count"] = len(armed_node_ids)
return payload, "completed", None payload["handoff_state"] = "armed"
return payload, "pending", None
_BRIDGE_EXECUTORS = { _BRIDGE_EXECUTORS = {
@@ -10,7 +10,17 @@ from app.core.process_steps import StepName
StepCategory = Literal["input", "processing", "rendering", "output"] StepCategory = Literal["input", "processing", "rendering", "output"]
FieldType = Literal["number", "select", "boolean", "text"] FieldType = Literal["number", "select", "boolean", "text"]
ExecutionKind = Literal["native", "bridge"] ExecutionKind = Literal["native", "bridge"]
WorkflowNodeFamily = Literal["cad_file", "order_line"] WorkflowNodeFamily = Literal["cad_file", "order_line", "shared"]
TextFormat = Literal[
"plain",
"uuid",
"absolute_path",
"absolute_blend_path",
"absolute_glb_path",
"float_string",
"hex_color",
"safe_filename_suffix",
]
class WorkflowNodeFieldOption(BaseModel): class WorkflowNodeFieldOption(BaseModel):
@@ -30,6 +40,9 @@ class WorkflowNodeFieldDefinition(BaseModel):
step: float | None = None step: float | None = None
unit: str | None = None unit: str | None = None
options: list[WorkflowNodeFieldOption] = [] options: list[WorkflowNodeFieldOption] = []
allow_blank: bool = True
max_length: int | None = None
text_format: TextFormat = "plain"
class WorkflowNodeDefinition(BaseModel): class WorkflowNodeDefinition(BaseModel):
@@ -65,6 +78,9 @@ def _field(
step: float | None = None, step: float | None = None,
unit: str | None = None, unit: str | None = None,
options: list[tuple[str | int | float | bool, str]] | None = None, options: list[tuple[str | int | float | bool, str]] | None = None,
allow_blank: bool = True,
max_length: int | None = None,
text_format: TextFormat = "plain",
) -> WorkflowNodeFieldDefinition: ) -> WorkflowNodeFieldDefinition:
return WorkflowNodeFieldDefinition( return WorkflowNodeFieldDefinition(
key=key, key=key,
@@ -81,6 +97,9 @@ def _field(
WorkflowNodeFieldOption(value=value, label=option_label) WorkflowNodeFieldOption(value=value, label=option_label)
for value, option_label in (options or []) for value, option_label in (options or [])
], ],
allow_blank=allow_blank,
max_length=max_length,
text_format=text_format,
) )
@@ -169,7 +188,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"cad_file", "cad_file",
"cad.export_glb", "cad.export_glb",
"processing", "processing",
"Convert STEP geometry into GLB for previews and downstream rendering.", "Convert STEP geometry into GLB for previews and downstream rendering. Uses the system tessellation profile; this node does not expose per-node overrides yet.",
node_type="processNode", node_type="processNode",
icon="refresh-cw", icon="refresh-cw",
execution_kind="bridge", execution_kind="bridge",
@@ -181,10 +200,10 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
_definition( _definition(
StepName.GLB_BBOX, StepName.GLB_BBOX,
"Compute Bounding Box", "Compute Bounding Box",
"order_line", "shared",
"geometry.compute_bbox", "geometry.compute_bbox",
"processing", "processing",
"Compute the model bounding box from the exported GLB for framing decisions.", "Compute the model bounding box from a prepared GLB artifact for framing decisions in either CAD-intake or order-line workflows.",
node_type="processNode", node_type="processNode",
icon="layers", icon="layers",
execution_kind="bridge", execution_kind="bridge",
@@ -196,10 +215,24 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
description="Optional absolute path to a specific GLB file. Leave empty to reuse the prepared preview/export artifact automatically.", description="Optional absolute path to a specific GLB file. Leave empty to reuse the prepared preview/export artifact automatically.",
section="Inputs", section="Inputs",
default="", default="",
text_format="absolute_glb_path",
),
_field(
"source_preference",
"Source Preference",
"select",
description="Prefer a prepared GLB, force STEP fallback, or fail when no GLB artifact is available.",
section="Inputs",
default="auto",
options=[
("auto", "Auto"),
("step_only", "STEP Only"),
("glb_only", "GLB Only"),
],
), ),
], ],
input_contract={"context": "order_line", "requires": ["glb_preview"]}, input_contract={"requires": ["glb_preview"]},
output_contract={"context": "order_line", "provides": ["bbox"]}, output_contract={"provides": ["bbox"]},
artifact_roles_consumed=["glb_preview"], artifact_roles_consumed=["glb_preview"],
artifact_roles_produced=["bbox"], artifact_roles_produced=["bbox"],
), ),
@@ -213,6 +246,25 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
node_type="processNode", node_type="processNode",
icon="layers", icon="layers",
execution_kind="bridge", execution_kind="bridge",
defaults={"disable_materials": False, "material_override": ""},
fields=[
_field(
"disable_materials",
"Disable Materials",
"boolean",
description="Bypass template and alias-based material mapping for this node.",
section="Materials",
default=False,
),
_field(
"material_override",
"Material Override",
"text",
description="Optional material name forced onto every detected part before rendering.",
section="Materials",
default="",
),
],
input_contract={"context": "order_line", "requires": ["order_line_context", "cad_materials"]}, input_contract={"context": "order_line", "requires": ["order_line_context", "cad_materials"]},
output_contract={"context": "order_line", "provides": ["material_assignments"]}, output_contract={"context": "order_line", "provides": ["material_assignments"]},
artifact_roles_consumed=["order_line_context", "cad_materials"], artifact_roles_consumed=["order_line_context", "cad_materials"],
@@ -228,6 +280,37 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
node_type="processNode", node_type="processNode",
icon="layers", icon="layers",
execution_kind="bridge", execution_kind="bridge",
defaults={
"persist_updates": True,
"refresh_material_source": True,
"include_populated_products": False,
},
fields=[
_field(
"persist_updates",
"Persist Updates",
"boolean",
description="Write discovered part-material mappings back to product records in graph mode.",
section="Behavior",
default=True,
),
_field(
"refresh_material_source",
"Refresh Material Source",
"boolean",
description="Reload product material mappings into the workflow context after persistence.",
section="Behavior",
default=True,
),
_field(
"include_populated_products",
"Rewrite Populated Products",
"boolean",
description="Also rebuild material mappings for products that already have non-empty assignments.",
section="Behavior",
default=False,
),
],
input_contract={"context": "order_line", "requires": ["cad_materials"]}, input_contract={"context": "order_line", "requires": ["cad_materials"]},
output_contract={"context": "order_line", "provides": ["material_catalog_updates"]}, output_contract={"context": "order_line", "provides": ["material_catalog_updates"]},
artifact_roles_consumed=["cad_materials"], artifact_roles_consumed=["cad_materials"],
@@ -306,7 +389,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"cad_file", "cad_file",
"media.save_thumbnail", "media.save_thumbnail",
"output", "output",
"Persist the generated thumbnail back onto the CAD file record.", "Persist the generated thumbnail back onto the CAD file record. Rendering settings are supplied by the connected upstream thumbnail request node.",
node_type="outputNode", node_type="outputNode",
icon="download", icon="download",
execution_kind="bridge", execution_kind="bridge",
@@ -360,6 +443,113 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
node_type="processNode", node_type="processNode",
icon="layers", icon="layers",
execution_kind="bridge", execution_kind="bridge",
defaults={
"template_id_override": "",
"material_library_path": "",
"require_template": False,
"disable_materials": False,
"target_collection": "",
"material_replace_mode": "inherit",
"lighting_only_mode": "inherit",
"shadow_catcher_mode": "inherit",
"camera_orbit_mode": "inherit",
},
fields=[
_field(
"template_id_override",
"Template ID Override",
"text",
description="Optional render-template UUID to force for this workflow node instead of category/output-type resolution.",
section="Template",
default="",
text_format="uuid",
),
_field(
"require_template",
"Require Template",
"boolean",
description="Fail this node when no active render template can be resolved.",
section="Template",
default=False,
),
_field(
"material_library_path",
"Material Library Path",
"text",
description="Optional absolute .blend path used instead of the active asset library.",
section="Materials",
default="",
text_format="absolute_blend_path",
),
_field(
"disable_materials",
"Disable Materials",
"boolean",
description="Resolve the template but skip material-map generation for downstream nodes.",
section="Materials",
default=False,
),
_field(
"target_collection",
"Target Collection Override",
"text",
description="Optional collection name override applied after template resolution. Leave blank to inherit from the template.",
section="Template Overrides",
default="",
),
_field(
"material_replace_mode",
"Material Replace",
"select",
description="Override whether template material replacement is active for downstream nodes.",
section="Template Overrides",
default="inherit",
options=[
("inherit", "Inherit Template"),
("enabled", "Force Enabled"),
("disabled", "Force Disabled"),
],
),
_field(
"lighting_only_mode",
"Lighting Only",
"select",
description="Override the template lighting-only flag for downstream nodes.",
section="Template Overrides",
default="inherit",
options=[
("inherit", "Inherit Template"),
("enabled", "Force Enabled"),
("disabled", "Force Disabled"),
],
),
_field(
"shadow_catcher_mode",
"Shadow Catcher",
"select",
description="Override the template shadow-catcher flag for downstream nodes.",
section="Template Overrides",
default="inherit",
options=[
("inherit", "Inherit Template"),
("enabled", "Force Enabled"),
("disabled", "Force Disabled"),
],
),
_field(
"camera_orbit_mode",
"Camera Orbit",
"select",
description="Override whether turntable renders orbit the camera or rotate the object.",
section="Template Overrides",
default="inherit",
options=[
("inherit", "Inherit Template"),
("enabled", "Force Camera Orbit"),
("disabled", "Force Object Rotation"),
],
),
],
input_contract={"context": "order_line", "requires": ["order_line_context"]}, input_contract={"context": "order_line", "requires": ["order_line_context"]},
output_contract={ output_contract={
"context": "order_line", "context": "order_line",
@@ -372,6 +562,8 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"use_materials", "use_materials",
"override_material", "override_material",
"category_key", "category_key",
"workflow_input_schema",
"template_inputs",
], ],
}, },
artifact_roles_consumed=["order_line_context"], artifact_roles_consumed=["order_line_context"],
@@ -384,6 +576,8 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"use_materials", "use_materials",
"override_material", "override_material",
"category_key", "category_key",
"workflow_input_schema",
"template_inputs",
], ],
), ),
_definition( _definition(
@@ -420,7 +614,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"select", "select",
description="Force CPU, GPU, or automatic device selection.", description="Force CPU, GPU, or automatic device selection.",
section="Render", section="Render",
default="auto", default="gpu",
options=_CYCLES_DEVICE_OPTIONS, options=_CYCLES_DEVICE_OPTIONS,
), ),
_field( _field(
@@ -451,6 +645,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
description="Optional Cycles adaptive sampling threshold, for example 0.01.", description="Optional Cycles adaptive sampling threshold, for example 0.01.",
section="Denoising", section="Denoising",
default="", default="",
text_format="float_string",
), ),
_field( _field(
"denoiser", "denoiser",
@@ -606,7 +801,11 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
defaults={ defaults={
"use_custom_render_settings": False, "use_custom_render_settings": False,
"fps": 24, "fps": 24,
"frame_count": 120,
"duration_s": 5, "duration_s": 5,
"turntable_degrees": 360,
"turntable_axis": "world_z",
"camera_orbit": True,
"rotation_z": 0, "rotation_z": 0,
}, },
fields=[ fields=[
@@ -664,8 +863,20 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
description="Optional hex color used during FFmpeg compositing, for example #FFFFFF.", description="Optional hex color used during FFmpeg compositing, for example #FFFFFF.",
section="Output", section="Output",
default="", default="",
text_format="hex_color",
), ),
_field("fps", "FPS", "number", section="Animation", default=24, min=1, max=120, step=1), _field("fps", "FPS", "number", section="Animation", default=24, min=1, max=120, step=1),
_field(
"frame_count",
"Frame Count",
"number",
description="Explicit total frame count for the rendered turntable clip.",
section="Animation",
default=120,
min=1,
max=7200,
step=1,
),
_field( _field(
"duration_s", "duration_s",
"Duration", "Duration",
@@ -818,6 +1029,32 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
node_type="outputNode", node_type="outputNode",
icon="download", icon="download",
execution_kind="bridge", execution_kind="bridge",
defaults={"expected_artifact_role": "", "require_upstream_artifact": False},
fields=[
_field(
"expected_artifact_role",
"Expected Artifact Role",
"select",
description="Restrict this node to a specific upstream render artifact type.",
section="Output",
default="",
options=[
("", "Any Connected Artifact"),
("render_output", "Still Output"),
("turntable_output", "Turntable Output"),
("blend_export", "Blend Export"),
("thumbnail_output", "Thumbnail Output"),
],
),
_field(
"require_upstream_artifact",
"Require Upstream Artifact",
"boolean",
description="Fail the node when no matching upstream artifact is connected.",
section="Output",
default=False,
),
],
input_contract={ input_contract={
"context": "order_line", "context": "order_line",
"requires": ["order_line_context"], "requires": ["order_line_context"],
@@ -833,7 +1070,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"order_line", "order_line",
"media.export_blend", "media.export_blend",
"output", "output",
"Persist the generated .blend file as a downloadable media asset.", "Persist the generated .blend file as a downloadable media asset. Only the optional filename suffix is workflow-configurable today.",
node_type="outputNode", node_type="outputNode",
icon="download", icon="download",
defaults={"output_name_suffix": ""}, defaults={"output_name_suffix": ""},
@@ -845,6 +1082,8 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
description="Optional suffix appended to the generated `.blend` filename.", description="Optional suffix appended to the generated `.blend` filename.",
section="Output", section="Output",
default="", default="",
text_format="safe_filename_suffix",
max_length=64,
), ),
], ],
execution_kind="bridge", execution_kind="bridge",
@@ -859,7 +1098,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"cad_file", "cad_file",
"cad.generate_stl_cache", "cad.generate_stl_cache",
"processing", "processing",
"Generate and cache STL derivatives next to the STEP source.", "Compatibility node for legacy CAD flows. HartOMat graph execution uses direct OCC/GLB export instead, so this node intentionally performs no per-node-configurable cache generation.",
node_type="convertNode", node_type="convertNode",
icon="refresh-cw", icon="refresh-cw",
execution_kind="bridge", execution_kind="bridge",
@@ -877,7 +1116,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"Emit a user-visible notification for workflow completion or failure.", "Emit a user-visible notification for workflow completion or failure.",
node_type="outputNode", node_type="outputNode",
icon="bell", icon="bell",
defaults={"channel": "audit_log"}, defaults={"channel": "audit_log", "require_armed_render": False},
fields=[ fields=[
_field( _field(
"channel", "channel",
@@ -888,6 +1127,14 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
default="audit_log", default="audit_log",
options=[("audit_log", "Audit Log")], options=[("audit_log", "Audit Log")],
), ),
_field(
"require_armed_render",
"Require Armed Render",
"boolean",
description="Fail this node when no upstream graph render task is configured to hand off notifications.",
section="Notification",
default=False,
),
], ],
execution_kind="bridge", execution_kind="bridge",
input_contract={ input_contract={
File diff suppressed because it is too large Load Diff
@@ -5,7 +5,7 @@ import re
import shutil import shutil
import uuid import uuid
from dataclasses import dataclass, field from dataclasses import dataclass, field
from datetime import datetime from datetime import datetime, timezone
from pathlib import Path from pathlib import Path
from typing import Any, Callable, Literal from typing import Any, Callable, Literal
@@ -13,7 +13,11 @@ from sqlalchemy import select, update as sql_update
from sqlalchemy.orm import Session, joinedload from sqlalchemy.orm import Session, joinedload
from app.config import settings as app_settings from app.config import settings as app_settings
from app.core.render_paths import resolve_result_path, result_path_to_storage_key from app.core.render_paths import (
ensure_group_writable_dir,
resolve_result_path,
result_path_to_storage_key,
)
from app.domains.media.models import MediaAsset, MediaAssetType from app.domains.media.models import MediaAsset, MediaAssetType
from app.domains.orders.models import Order, OrderLine, OrderStatus from app.domains.orders.models import Order, OrderLine, OrderStatus
from app.domains.products.models import CadFile, Product from app.domains.products.models import CadFile, Product
@@ -37,6 +41,199 @@ logger = logging.getLogger(__name__)
EmitFn = Callable[..., None] | None EmitFn = Callable[..., None] | None
SetupStatus = Literal["ready", "skip", "failed", "missing"] SetupStatus = Literal["ready", "skip", "failed", "missing"]
QueueThumbnailFn = Callable[[str, dict[str, str]], None] | None QueueThumbnailFn = Callable[[str, dict[str, str]], None] | None
TEMPLATE_INPUT_PARAM_PREFIX = "template_input__"
_PNG_SIGNATURE = b"\x89PNG\r\n\x1a\n"
_VOLATILE_PNG_CHUNK_TYPES = {b"tEXt", b"zTXt", b"iTXt", b"tIME"}
def _slugify_material_lookup_key(value: str) -> str:
return re.sub(r"[^a-z0-9]+", "_", value).strip("_")
def _build_authoritative_material_lookup(materials_source: list[dict[str, Any]]) -> dict[str, str]:
lookup: dict[str, str] = {}
for material in materials_source:
raw_part_name = material.get("part_name")
raw_material_name = material.get("material")
if not raw_part_name or not raw_material_name:
continue
part_name = str(raw_part_name).lower().strip()
material_name = str(raw_material_name)
if not part_name:
continue
lookup.setdefault(part_name, material_name)
slug_key = _slugify_material_lookup_key(part_name)
if slug_key:
lookup.setdefault(slug_key, material_name)
stripped = re.sub(r"(_af\d+(_\d+)?)+$", "", part_name, flags=re.IGNORECASE)
if stripped != part_name:
lookup.setdefault(stripped, material_name)
slug_stripped = _slugify_material_lookup_key(stripped)
if slug_stripped:
lookup.setdefault(slug_stripped, material_name)
return lookup
def _common_prefix_length(left: str, right: str) -> int:
limit = min(len(left), len(right))
idx = 0
while idx < limit and left[idx] == right[idx]:
idx += 1
return idx
def _lookup_material_by_prefix(query: str, material_lookup: dict[str, str]) -> str | None:
if not query or not material_lookup:
return None
contenders: list[tuple[int, str]] = []
for key, material_name in material_lookup.items():
if len(key) >= 5 and len(query) >= 5 and (query.startswith(key) or key.startswith(query)):
contenders.append((len(key), material_name))
if not contenders:
return None
contenders.sort(reverse=True)
top_length = contenders[0][0]
close_materials = {
material_name
for key_length, material_name in contenders
if key_length >= top_length - 2
}
return contenders[0][1] if len(close_materials) == 1 else None
def _lookup_material_by_common_prefix(query: str, material_lookup: dict[str, str]) -> str | None:
if not query or not material_lookup:
return None
scored: list[tuple[float, int, int, str]] = []
for key, material_name in material_lookup.items():
prefix_length = _common_prefix_length(query, key)
if prefix_length < 12:
continue
ratio = prefix_length / max(len(query), len(key))
if ratio < 0.68:
continue
scored.append((ratio, prefix_length, len(key), material_name))
if not scored:
return None
scored.sort(reverse=True)
top_ratio, top_prefix_length, _, top_material_name = scored[0]
close_materials = {
material_name
for ratio, prefix_length, _, material_name in scored
if ratio >= top_ratio - 0.02 and prefix_length >= top_prefix_length - 2
}
return top_material_name if len(close_materials) == 1 else None
def _resolve_authoritative_material_name(
raw_name: str | None,
material_lookup: dict[str, str],
*fallback_names: str | None,
) -> str | None:
candidates = [raw_name, *fallback_names]
seen: set[str] = set()
for candidate in candidates:
if not candidate:
continue
normalized = str(candidate).lower().strip()
variants = [normalized]
stripped = re.sub(r"(_af\d+(_\d+)?)+$", "", normalized, flags=re.IGNORECASE)
if stripped != normalized:
variants.append(stripped)
no_instance = re.sub(r"_\d+$", "", stripped)
if no_instance and no_instance not in variants:
variants.append(no_instance)
for variant in list(variants):
slug_variant = _slugify_material_lookup_key(variant)
if slug_variant and slug_variant not in variants:
variants.append(slug_variant)
deduped_variants = [variant for variant in variants if variant and not (variant in seen or seen.add(variant))]
for variant in deduped_variants:
material_name = material_lookup.get(variant)
if material_name:
return material_name
for variant in deduped_variants:
material_name = _lookup_material_by_prefix(variant, material_lookup)
if material_name:
return material_name
for variant in deduped_variants:
material_name = _lookup_material_by_common_prefix(variant, material_lookup)
if material_name:
return material_name
return None
def _utcnow_naive() -> datetime:
"""Return UTC as a naive datetime for legacy TIMESTAMP WITHOUT TIME ZONE columns."""
return datetime.now(timezone.utc).replace(tzinfo=None)
def extract_template_input_overrides(params: dict[str, Any] | None) -> dict[str, Any]:
if not params:
return {}
overrides: dict[str, Any] = {}
for key, value in params.items():
if not isinstance(key, str) or not key.startswith(TEMPLATE_INPUT_PARAM_PREFIX):
continue
input_key = key[len(TEMPLATE_INPUT_PARAM_PREFIX):].strip()
if input_key:
overrides[input_key] = value
return overrides
def _normalize_template_input_schema(template: RenderTemplate | None) -> list[dict[str, Any]]:
raw_schema = getattr(template, "workflow_input_schema", None) if template is not None else None
if not isinstance(raw_schema, list):
return []
normalized: list[dict[str, Any]] = []
for raw_field in raw_schema:
if not isinstance(raw_field, dict):
continue
key = str(raw_field.get("key") or "").strip()
if not key:
continue
normalized.append(dict(raw_field))
return normalized
def _resolve_template_input_values(
schema: list[dict[str, Any]],
overrides: dict[str, Any] | None,
) -> dict[str, Any]:
raw_overrides = overrides or {}
resolved: dict[str, Any] = {}
for field in schema:
key = str(field.get("key") or "").strip()
if not key:
continue
if key in raw_overrides:
resolved[key] = raw_overrides[key]
continue
if "default" in field:
resolved[key] = field.get("default")
return resolved
@dataclass(slots=True) @dataclass(slots=True)
@@ -75,8 +272,14 @@ class TemplateResolutionResult:
material_map: dict[str, str] | None material_map: dict[str, str] | None
use_materials: bool use_materials: bool
override_material: str | None override_material: str | None
target_collection: str
lighting_only: bool
shadow_catcher: bool
camera_orbit: bool
category_key: str | None category_key: str | None
output_type_id: str | None output_type_id: str | None
workflow_input_schema: list[dict[str, Any]] = field(default_factory=list)
template_inputs: dict[str, Any] = field(default_factory=dict)
@dataclass(slots=True) @dataclass(slots=True)
@@ -159,6 +362,7 @@ class OrderLineRenderInvocation:
sensor_width_mm: float | None = None sensor_width_mm: float | None = None
usd_path: str | None = None usd_path: str | None = None
material_override: str | None = None material_override: str | None = None
template_inputs: dict[str, Any] = field(default_factory=dict)
def task_defaults(self) -> dict[str, Any]: def task_defaults(self) -> dict[str, Any]:
payload: dict[str, Any] = { payload: dict[str, Any] = {
@@ -196,9 +400,10 @@ class OrderLineRenderInvocation:
"sensor_width_mm": self.sensor_width_mm, "sensor_width_mm": self.sensor_width_mm,
"usd_path": self.usd_path, "usd_path": self.usd_path,
"material_override": self.material_override, "material_override": self.material_override,
"template_inputs": self.template_inputs,
} }
for key, value in optional_values.items(): for key, value in optional_values.items():
if value not in (None, ""): if value not in (None, "", {}, [], ()):
payload[key] = value payload[key] = value
return payload return payload
@@ -242,6 +447,7 @@ class OrderLineRenderInvocation:
"focal_length_mm": self.focal_length_mm, "focal_length_mm": self.focal_length_mm,
"sensor_width_mm": self.sensor_width_mm, "sensor_width_mm": self.sensor_width_mm,
"material_override": self.material_override, "material_override": self.material_override,
"template_inputs": self.template_inputs,
} }
def as_turntable_renderer_kwargs( def as_turntable_renderer_kwargs(
@@ -285,6 +491,7 @@ class OrderLineRenderInvocation:
"focal_length_mm": self.focal_length_mm, "focal_length_mm": self.focal_length_mm,
"sensor_width_mm": self.sensor_width_mm, "sensor_width_mm": self.sensor_width_mm,
"material_override": self.material_override, "material_override": self.material_override,
"template_inputs": self.template_inputs,
} }
def as_cinematic_renderer_kwargs( def as_cinematic_renderer_kwargs(
@@ -324,6 +531,7 @@ class OrderLineRenderInvocation:
"focal_length_mm": self.focal_length_mm, "focal_length_mm": self.focal_length_mm,
"sensor_width_mm": self.sensor_width_mm, "sensor_width_mm": self.sensor_width_mm,
"material_override": self.material_override, "material_override": self.material_override,
"template_inputs": self.template_inputs,
"log_callback": log_callback, "log_callback": log_callback,
} }
@@ -341,7 +549,61 @@ def _resolve_asset_path(storage_key: str | None) -> Path | None:
return resolve_result_path(storage_key) return resolve_result_path(storage_key)
def _usd_master_refresh_reason(cad_file: CadFile) -> str | None: def _usd_master_file_refresh_reason(usd_render_path: Path | None) -> str | None:
if usd_render_path is None:
return "missing USD master file"
if not usd_render_path.exists():
return "missing USD master file"
try:
usd_bytes = usd_render_path.read_bytes()
except OSError:
logger.exception("render_order_line: failed to inspect usd_master %s", usd_render_path)
return "unreadable USD master file"
usd_bytes_lower = usd_bytes.lower()
if b"schaeffler:" in usd_bytes_lower:
return "legacy Schaeffler USD primvars"
if b"hartomat:" in usd_bytes_lower:
return None
# Binary USD (`PXR-USDC`) stores HartOMat customData in a form that is not
# reliably discoverable via a raw byte grep. For those files we rely on the
# cache fingerprint plus the upstream resolved material metadata checks.
if usd_bytes.startswith(b"PXR-USDC") or b"\x00" in usd_bytes[:256]:
return None
# Textual USD payloads without any HartOMat markers are legacy/stale in the
# current pipeline and should be refreshed before they are reused.
try:
usd_bytes.decode("utf-8")
except UnicodeDecodeError:
return None
return "missing HartOMat USD markers"
def _usd_master_cache_refresh_reason(usd_asset: MediaAsset | None) -> str | None:
if usd_asset is None:
return None
render_config = usd_asset.render_config if isinstance(usd_asset.render_config, dict) else {}
cache_key = render_config.get("cache_key")
if not isinstance(cache_key, str) or not cache_key.strip():
return "missing USD cache fingerprint"
# New-format keys append the render-script fingerprint as a sixth colon-delimited segment.
if len(cache_key.split(":")) < 6:
return "legacy USD cache fingerprint"
return None
def _usd_master_refresh_reason(
cad_file: CadFile,
*,
usd_asset: MediaAsset | None = None,
usd_render_path: Path | None = None,
) -> str | None:
resolved = cad_file.resolved_material_assignments resolved = cad_file.resolved_material_assignments
if not isinstance(resolved, dict) or not resolved: if not isinstance(resolved, dict) or not resolved:
return "missing resolved material assignments" return "missing resolved material assignments"
@@ -350,7 +612,7 @@ def _usd_master_refresh_reason(cad_file: CadFile) -> str | None:
for meta in resolved.values(): for meta in resolved.values():
if not isinstance(meta, dict): if not isinstance(meta, dict):
continue continue
canonical = meta.get("canonical_material") canonical = meta.get("canonical_material") or meta.get("material")
if isinstance(canonical, str) and canonical.strip(): if isinstance(canonical, str) and canonical.strip():
canonical_materials.append(canonical.strip()) canonical_materials.append(canonical.strip())
@@ -360,6 +622,14 @@ def _usd_master_refresh_reason(cad_file: CadFile) -> str | None:
if any(material.upper().startswith("SCHAEFFLER_") for material in canonical_materials): if any(material.upper().startswith("SCHAEFFLER_") for material in canonical_materials):
return "legacy Schaeffler material metadata" return "legacy Schaeffler material metadata"
cache_reason = _usd_master_cache_refresh_reason(usd_asset)
if cache_reason is not None:
return cache_reason
file_reason = _usd_master_file_refresh_reason(usd_render_path)
if file_reason is not None:
return file_reason
return None return None
@@ -502,6 +772,27 @@ def _coerce_bool(value: Any) -> bool:
return bool(value) return bool(value)
def _resolve_tristate_mode(
value: Any,
*,
field_name: str,
fallback: bool | None = None,
) -> bool | None:
if value in (None, "", "inherit"):
return fallback
if isinstance(value, bool):
return value
if isinstance(value, str):
normalized = value.strip().lower()
if normalized in {"enabled", "true", "1", "yes", "on"}:
return True
if normalized in {"disabled", "false", "0", "no", "off"}:
return False
raise ValueError(
f"{field_name} must be one of: inherit, enabled, disabled"
)
def _resolve_render_output_extension(line: OrderLine) -> str: def _resolve_render_output_extension(line: OrderLine) -> str:
output_type = line.output_type output_type = line.output_type
output_extension = "jpg" output_extension = "jpg"
@@ -582,7 +873,7 @@ def build_order_line_render_invocation(
denoising_quality = str(render_settings.get("denoising_quality", "")) denoising_quality = str(render_settings.get("denoising_quality", ""))
denoising_use_gpu = str(render_settings.get("denoising_use_gpu", "")) denoising_use_gpu = str(render_settings.get("denoising_use_gpu", ""))
transparent_bg = bool(output_type and output_type.transparent_bg) transparent_bg = bool(output_type and output_type.transparent_bg)
cycles_device = (output_type.cycles_device or "auto") if output_type is not None else "auto" cycles_device = (output_type.cycles_device or "gpu") if output_type is not None else "gpu"
render_overrides = getattr(line, "render_overrides", None) render_overrides = getattr(line, "render_overrides", None)
if isinstance(render_overrides, dict): if isinstance(render_overrides, dict):
@@ -682,22 +973,14 @@ def build_order_line_render_invocation(
part_colors=dict(setup.part_colors or {}), part_colors=dict(setup.part_colors or {}),
part_names_ordered=part_names_ordered, part_names_ordered=part_names_ordered,
template_path=template_context.template.blend_file_path if template_context and template_context.template else None, template_path=template_context.template.blend_file_path if template_context and template_context.template else None,
target_collection=( target_collection=template_context.target_collection if template_context else "Product",
template_context.template.target_collection
if template_context and template_context.template and template_context.template.target_collection
else "Product"
),
material_library_path=( material_library_path=(
template_context.material_library if template_context and use_materials else None template_context.material_library if template_context and use_materials else None
), ),
material_map=material_map, material_map=material_map,
lighting_only=bool(template_context.template.lighting_only) if template_context and template_context.template else False, lighting_only=template_context.lighting_only if template_context else False,
shadow_catcher=( shadow_catcher=template_context.shadow_catcher if template_context else False,
bool(template_context.template.shadow_catcher_enabled) camera_orbit=template_context.camera_orbit if template_context else True,
if template_context and template_context.template
else False
),
camera_orbit=bool(template_context.template.camera_orbit) if template_context and template_context.template else True,
rotation_x=position.rotation_x, rotation_x=position.rotation_x,
rotation_y=position.rotation_y, rotation_y=position.rotation_y,
rotation_z=position.rotation_z, rotation_z=position.rotation_z,
@@ -705,6 +988,7 @@ def build_order_line_render_invocation(
sensor_width_mm=position.sensor_width_mm, sensor_width_mm=position.sensor_width_mm,
usd_path=str(setup.usd_render_path) if setup.usd_render_path is not None else None, usd_path=str(setup.usd_render_path) if setup.usd_render_path is not None else None,
material_override=material_override, material_override=material_override,
template_inputs=dict(template_context.template_inputs) if template_context is not None else {},
) )
@@ -727,10 +1011,49 @@ def _canonical_public_output_path(line: OrderLine, output_path: str) -> str:
return str(upload_root / "renders" / str(line.id) / filename) return str(upload_root / "renders" / str(line.id) / filename)
def _strip_volatile_png_metadata(output_path: Path) -> None:
if output_path.suffix.lower() != ".png" or not output_path.is_file():
return
raw_bytes = output_path.read_bytes()
if not raw_bytes.startswith(_PNG_SIGNATURE):
return
cursor = len(_PNG_SIGNATURE)
kept_chunks: list[bytes] = []
changed = False
while cursor + 12 <= len(raw_bytes):
chunk_length = int.from_bytes(raw_bytes[cursor : cursor + 4], "big")
chunk_end = cursor + 12 + chunk_length
if chunk_end > len(raw_bytes):
return
chunk_type = raw_bytes[cursor + 4 : cursor + 8]
chunk_bytes = raw_bytes[cursor:chunk_end]
if chunk_type in _VOLATILE_PNG_CHUNK_TYPES:
changed = True
else:
kept_chunks.append(chunk_bytes)
cursor = chunk_end
if chunk_type == b"IEND":
break
if not changed:
return
output_path.write_bytes(_PNG_SIGNATURE + b"".join(kept_chunks))
def _normalize_output_artifact(output_path: str) -> None:
_strip_volatile_png_metadata(Path(output_path))
def _materialize_public_output(line: OrderLine, output_path: str) -> str: def _materialize_public_output(line: OrderLine, output_path: str) -> str:
canonical_path = Path(_canonical_public_output_path(line, output_path)) canonical_path = Path(_canonical_public_output_path(line, output_path))
source_path = Path(output_path) source_path = Path(output_path)
canonical_path.parent.mkdir(parents=True, exist_ok=True) ensure_group_writable_dir(canonical_path.parent)
if source_path != canonical_path: if source_path != canonical_path:
shutil.copy2(source_path, canonical_path) shutil.copy2(source_path, canonical_path)
return str(canonical_path) return str(canonical_path)
@@ -765,6 +1088,7 @@ def persist_order_line_media_asset(
resolved_workflow_run_id = _resolve_existing_workflow_run_id(session, workflow_run_id) resolved_workflow_run_id = _resolve_existing_workflow_run_id(session, workflow_run_id)
if success: if success:
_normalize_output_artifact(output_path)
storage_key = _normalize_storage_key(output_path) storage_key = _normalize_storage_key(output_path)
output_file = Path(output_path) output_file = Path(output_path)
existing_asset = session.execute( existing_asset = session.execute(
@@ -906,13 +1230,14 @@ def persist_order_line_output(
) -> OutputSaveResult: ) -> OutputSaveResult:
"""Persist the render result for an order line and publish the media asset if needed.""" """Persist the render result for an order line and publish the media asset if needed."""
status: Literal["completed", "failed"] = "completed" if success else "failed" status: Literal["completed", "failed"] = "completed" if success else "failed"
completed_at = render_completed_at or datetime.utcnow() completed_at = render_completed_at or _utcnow_naive()
persisted_output_path = output_path persisted_output_path = output_path
line.render_status = status line.render_status = status
line.render_completed_at = completed_at line.render_completed_at = completed_at
line.render_log = render_log line.render_log = render_log
if success: if success:
_normalize_output_artifact(output_path)
persisted_output_path = _materialize_public_output(line, output_path) persisted_output_path = _materialize_public_output(line, output_path)
line.result_path = persisted_output_path if success else None line.result_path = persisted_output_path if success else None
session.flush() session.flush()
@@ -1084,7 +1409,7 @@ def prepare_order_line_render_context(
reason="missing_cad_file", reason="missing_cad_file",
) )
render_start = datetime.utcnow() if persist_state else None render_start = _utcnow_naive() if persist_state else None
if persist_state: if persist_state:
session.execute( session.execute(
sql_update(OrderLine) sql_update(OrderLine)
@@ -1111,7 +1436,12 @@ def prepare_order_line_render_context(
.limit(1) .limit(1)
).scalar_one_or_none() ).scalar_one_or_none()
if usd_asset: if usd_asset:
refresh_reason = _usd_master_refresh_reason(cad_file) usd_candidate_path = _resolve_asset_path(usd_asset.storage_key)
refresh_reason = _usd_master_refresh_reason(
cad_file,
usd_asset=usd_asset,
usd_render_path=usd_candidate_path,
)
if refresh_reason is not None: if refresh_reason is not None:
logger.warning( logger.warning(
"render_order_line: ignoring stale usd_master for cad %s (%s)", "render_order_line: ignoring stale usd_master for cad %s (%s)",
@@ -1127,7 +1457,7 @@ def prepare_order_line_render_context(
if _queue_usd_master_refresh(str(cad_file.id)): if _queue_usd_master_refresh(str(cad_file.id)):
_emit(emit, order_line_id, "Queued USD master regeneration in background") _emit(emit, order_line_id, "Queued USD master regeneration in background")
else: else:
usd_render_path = _resolve_asset_path(usd_asset.storage_key) usd_render_path = usd_candidate_path
if usd_render_path: if usd_render_path:
logger.info( logger.info(
"render_order_line: using usd_master %s for cad %s", "render_order_line: using usd_master %s for cad %s",
@@ -1203,6 +1533,12 @@ def resolve_order_line_template_context(
material_library_path_override: str | None = None, material_library_path_override: str | None = None,
require_template: bool = False, require_template: bool = False,
disable_materials: bool = False, disable_materials: bool = False,
target_collection_override: str | None = None,
material_replace_mode: str | None = None,
lighting_only_mode: str | None = None,
shadow_catcher_mode: str | None = None,
camera_orbit_mode: str | None = None,
template_input_overrides: dict[str, Any] | None = None,
) -> TemplateResolutionResult: ) -> TemplateResolutionResult:
"""Resolve render template, material library, and material map for a prepared order line.""" """Resolve render template, material library, and material map for a prepared order line."""
if not setup.is_ready: if not setup.is_ready:
@@ -1242,6 +1578,7 @@ def resolve_order_line_template_context(
if isinstance(material_library_path_override, str) and material_library_path_override.strip() if isinstance(material_library_path_override, str) and material_library_path_override.strip()
else get_material_library_path_for_session(session) else get_material_library_path_for_session(session)
) )
material_replace_override = _resolve_tristate_mode(material_replace_mode, field_name="material_replace_mode")
material_resolution = resolve_order_line_material_map( material_resolution = resolve_order_line_material_map(
line, line,
cad_file, cad_file,
@@ -1250,8 +1587,36 @@ def resolve_order_line_template_context(
template=template, template=template,
emit=emit, emit=emit,
disable_materials=disable_materials, disable_materials=disable_materials,
material_replace_enabled_override=material_replace_override,
) )
resolved_target_collection = (
target_collection_override.strip()
if isinstance(target_collection_override, str) and target_collection_override.strip()
else (
template.target_collection
if template is not None and template.target_collection
else "Product"
)
)
resolved_lighting_only = _resolve_tristate_mode(
lighting_only_mode,
field_name="lighting_only_mode",
fallback=bool(template.lighting_only) if template is not None else False,
)
resolved_shadow_catcher = _resolve_tristate_mode(
shadow_catcher_mode,
field_name="shadow_catcher_mode",
fallback=bool(template.shadow_catcher_enabled) if template is not None else False,
)
resolved_camera_orbit = _resolve_tristate_mode(
camera_orbit_mode,
field_name="camera_orbit_mode",
fallback=bool(template.camera_orbit) if template is not None else True,
)
workflow_input_schema = _normalize_template_input_schema(template)
template_inputs = _resolve_template_input_values(workflow_input_schema, template_input_overrides)
if template: if template:
_emit( _emit(
emit, emit,
@@ -1267,6 +1632,8 @@ def resolve_order_line_template_context(
template.blend_file_path, template.blend_file_path,
template.lighting_only, template.lighting_only,
) )
if template_inputs:
logger.info("Render template inputs resolved for '%s': %s", template.name, sorted(template_inputs))
if not template: if not template:
_emit(emit, str(line.id), "No render template found — using factory settings (Mode A)") _emit(emit, str(line.id), "No render template found — using factory settings (Mode A)")
logger.info( logger.info(
@@ -1281,8 +1648,14 @@ def resolve_order_line_template_context(
material_map=material_resolution.material_map, material_map=material_resolution.material_map,
use_materials=material_resolution.use_materials, use_materials=material_resolution.use_materials,
override_material=material_resolution.override_material, override_material=material_resolution.override_material,
target_collection=resolved_target_collection,
lighting_only=resolved_lighting_only,
shadow_catcher=resolved_shadow_catcher,
camera_orbit=resolved_camera_orbit,
category_key=category_key, category_key=category_key,
output_type_id=output_type_id, output_type_id=output_type_id,
workflow_input_schema=workflow_input_schema,
template_inputs=template_inputs,
) )
@@ -1296,6 +1669,7 @@ def resolve_order_line_material_map(
emit: EmitFn = None, emit: EmitFn = None,
material_override: str | None = None, material_override: str | None = None,
disable_materials: bool = False, disable_materials: bool = False,
material_replace_enabled_override: bool | None = None,
) -> MaterialResolutionResult: ) -> MaterialResolutionResult:
"""Resolve the effective order-line material map with legacy precedence rules.""" """Resolve the effective order-line material map with legacy precedence rules."""
if disable_materials: if disable_materials:
@@ -1311,11 +1685,15 @@ def resolve_order_line_material_map(
raw_material_count = 0 raw_material_count = 0
raw_material_map = _build_effective_material_lookup(cad_file, materials_source) raw_material_map = _build_effective_material_lookup(cad_file, materials_source)
use_materials = bool(material_library and raw_material_map) use_materials = bool(material_library and raw_material_map)
if template and not template.material_replace_enabled: if material_replace_enabled_override is not None:
use_materials = bool(material_replace_enabled_override and material_library and raw_material_map)
elif template and not template.material_replace_enabled:
use_materials = False use_materials = False
if use_materials: if use_materials:
raw_material_count = len(raw_material_map) raw_material_count = len(raw_material_map)
material_map = resolve_material_map(raw_material_map) material_map = resolve_material_map(raw_material_map)
if cad_file:
material_map = _overlay_scene_manifest_material_map(cad_file, material_map)
line_override = getattr(line, "material_override", None) line_override = getattr(line, "material_override", None)
output_override = line.output_type.material_override if line.output_type else None output_override = line.output_type.material_override if line.output_type else None
@@ -1344,21 +1722,55 @@ def resolve_order_line_material_map(
) )
def _overlay_scene_manifest_material_map(
    cad_file: CadFile,
    material_map: dict[str, str],
) -> dict[str, str]:
    """Return ``material_map`` with scene-manifest assignments layered on top.

    Low-level lookups still retain legacy/product source assignments so older
    fallback paths keep working. The final order-line material map, however,
    must prefer the scene manifest's effective assignments wherever the USD/CAD
    pipeline has already established authoritative part identity.
    """
    if not material_map:
        return material_map
    overlaid = dict(material_map)
    for entry in build_scene_manifest(cad_file).get("parts", []):
        if not isinstance(entry, dict):
            continue
        material = entry.get("effective_material")
        # Only non-blank string materials are authoritative enough to overlay.
        if not (isinstance(material, str) and material.strip()):
            continue
        for key_field in ("source_name", "part_key"):
            key = entry.get(key_field)
            if isinstance(key, str) and key.strip():
                overlaid[key] = material
    return overlaid
def _build_effective_material_lookup( def _build_effective_material_lookup(
cad_file: CadFile | None, cad_file: CadFile | None,
materials_source: list[dict[str, Any]], materials_source: list[dict[str, Any]],
) -> dict[str, str]: ) -> dict[str, str]:
"""Build a renderer-compatible material lookup from all available layers. """Build a renderer-compatible material lookup from all available layers.
Authoritative scene-manifest assignments win when present, but we emit both Product/Excel CAD assignments stay authoritative for overlapping source-name
source-name and part-key keys so USD and GLB/STEP fallback paths resolve the keys so legacy renders, thumbnails, and viewer previews keep parity with the
same effective material map. pre-USD pipeline. Scene-manifest assignments still fill gaps and emit part-key
aliases so USD and GLB/STEP fallback paths resolve the same effective map.
""" """
raw_material_map: dict[str, str] = { raw_material_map: dict[str, str] = {
str(material["part_name"]): str(material["material"]) str(material["part_name"]): str(material["material"])
for material in materials_source for material in materials_source
if material.get("part_name") and material.get("material") if material.get("part_name") and material.get("material")
} }
authoritative_lookup = _build_authoritative_material_lookup(materials_source)
if not cad_file: if not cad_file:
return raw_material_map return raw_material_map
@@ -1372,10 +1784,16 @@ def _build_effective_material_lookup(
continue continue
source_name = part.get("source_name") source_name = part.get("source_name")
part_key = part.get("part_key") part_key = part.get("part_key")
if source_name: authoritative_material = _resolve_authoritative_material_name(
raw_material_map[str(source_name)] = str(effective_material) str(source_name) if source_name else None,
authoritative_lookup,
str(part_key) if part_key else None,
)
merged_material = authoritative_material or str(effective_material)
if source_name and str(source_name) not in raw_material_map:
raw_material_map[str(source_name)] = merged_material
if part_key: if part_key:
raw_material_map[str(part_key)] = str(effective_material) raw_material_map.setdefault(str(part_key), merged_material)
return raw_material_map return raw_material_map
+159 -14
View File
@@ -18,6 +18,7 @@ Example config::
""" """
from collections import deque from collections import deque
from typing import Any, Literal from typing import Any, Literal
from uuid import UUID
from pydantic import BaseModel, Field, field_validator, model_validator from pydantic import BaseModel, Field, field_validator, model_validator
@@ -29,6 +30,14 @@ from app.domains.rendering.workflow_node_registry import (
) )
_WORKFLOW_META_PARAM_KEYS = {"retry_policy", "failure_policy"}
_TEMPLATE_INPUT_PARAM_PREFIX = "template_input__"
_HEX_COLOR_LENGTHS = {7, 9}
_SAFE_FILENAME_SUFFIX_CHARS = set(
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789._-"
)
def _context_seed_artifacts(definition: WorkflowNodeDefinition) -> set[str]: def _context_seed_artifacts(definition: WorkflowNodeDefinition) -> set[str]:
if definition.family == "order_line": if definition.family == "order_line":
return {"order_line_record"} return {"order_line_record"}
@@ -37,10 +46,43 @@ def _context_seed_artifacts(definition: WorkflowNodeDefinition) -> set[str]:
return set() return set()
def _infer_concrete_workflow_family(
    definitions: list[WorkflowNodeDefinition],
) -> Literal["cad_file", "order_line", "mixed"] | None:
    """Derive the workflow family implied by the nodes' registry definitions.

    Returns None when no node belongs to a concrete family, "mixed" when both
    concrete families appear, otherwise the single concrete family present.
    """
    families: set[str] = set()
    for definition in definitions:
        if definition.family in ("cad_file", "order_line"):
            families.add(definition.family)
    if len(families) == 1:
        return families.pop()
    if families:
        return "mixed"
    return None
def _coerce_node_label(node: "WorkflowNode") -> str: def _coerce_node_label(node: "WorkflowNode") -> str:
return f"{node.id!r} ({node.step.value})" return f"{node.id!r} ({node.step.value})"
def _require_node_definition(node: "WorkflowNode") -> WorkflowNodeDefinition:
    """Look up the registry definition for ``node``, failing loudly if absent."""
    resolved = get_node_definition(node.step)
    if resolved is not None:
        return resolved
    raise ValueError(
        f"node {_coerce_node_label(node)} is not registered in workflow_node_registry"
    )
def _is_dynamic_template_input_param(node: "WorkflowNode", key: str) -> bool:
    """True when ``key`` is a ``template_input__<name>`` param on a RESOLVE_TEMPLATE node."""
    if node.step != StepName.RESOLVE_TEMPLATE:
        return False
    if not isinstance(key, str) or not key.startswith(_TEMPLATE_INPUT_PARAM_PREFIX):
        return False
    # A bare prefix with no (non-whitespace) name is not a valid dynamic param.
    suffix = key[len(_TEMPLATE_INPUT_PARAM_PREFIX):]
    return suffix.strip() != ""
def _validate_param_value( def _validate_param_value(
*, *,
node: "WorkflowNode", node: "WorkflowNode",
@@ -72,6 +114,105 @@ def _validate_param_value(
if value not in valid_values: if value not in valid_values:
allowed_values = ", ".join(repr(option) for option in sorted(valid_values, key=repr)) allowed_values = ", ".join(repr(option) for option in sorted(valid_values, key=repr))
raise ValueError(f"{field_label} must be one of: {allowed_values}") raise ValueError(f"{field_label} must be one of: {allowed_values}")
return
if field_definition.type == "text":
if not isinstance(value, str):
raise ValueError(f"{field_label} must be a string")
stripped_value = value.strip()
if stripped_value == "":
if field_definition.allow_blank:
return
raise ValueError(f"{field_label} may not be blank")
if field_definition.max_length is not None and len(value) > field_definition.max_length:
raise ValueError(
f"{field_label} must be at most {field_definition.max_length} characters"
)
if field_definition.text_format == "plain":
return
if field_definition.text_format == "uuid":
try:
UUID(stripped_value)
except ValueError as exc:
raise ValueError(f"{field_label} must be a valid UUID") from exc
return
if field_definition.text_format == "absolute_path":
if not stripped_value.startswith("/"):
raise ValueError(f"{field_label} must be an absolute path")
return
if field_definition.text_format == "absolute_blend_path":
if not stripped_value.startswith("/"):
raise ValueError(f"{field_label} must be an absolute path")
if not stripped_value.lower().endswith(".blend"):
raise ValueError(f"{field_label} must point to a .blend file")
return
if field_definition.text_format == "absolute_glb_path":
if not stripped_value.startswith("/"):
raise ValueError(f"{field_label} must be an absolute path")
if not stripped_value.lower().endswith(".glb"):
raise ValueError(f"{field_label} must point to a .glb file")
return
if field_definition.text_format == "float_string":
try:
float(stripped_value)
except ValueError as exc:
raise ValueError(f"{field_label} must be a valid numeric string") from exc
return
if field_definition.text_format == "hex_color":
if len(stripped_value) not in _HEX_COLOR_LENGTHS or not stripped_value.startswith("#"):
raise ValueError(f"{field_label} must be a hex color like #FFFFFF or #FFFFFFFF")
color_digits = stripped_value[1:]
if any(character not in "0123456789abcdefABCDEF" for character in color_digits):
raise ValueError(f"{field_label} must be a hex color like #FFFFFF or #FFFFFFFF")
return
if field_definition.text_format == "safe_filename_suffix":
if any(character not in _SAFE_FILENAME_SUFFIX_CHARS for character in stripped_value):
raise ValueError(
f"{field_label} may only contain letters, numbers, '.', '-' or '_'"
)
return
raise ValueError(
f"{field_label} uses unsupported text format {field_definition.text_format!r}"
)
def _validate_meta_param_value(*, node: "WorkflowNode", key: str, value: Any) -> None:
    """Validate the structure of a node-level meta param (retry/failure policy).

    Raises ValueError with a node-scoped message on any structural problem.
    """
    field_label = f"node {_coerce_node_label(node)} meta param {key!r}"

    def _reject_unknown_keys(allowed: set[str]) -> None:
        # Both policies are strict objects: surface unexpected keys explicitly.
        extras = sorted(candidate for candidate in value if candidate not in allowed)
        if extras:
            joined = ", ".join(repr(candidate) for candidate in extras)
            raise ValueError(f"{field_label} uses unknown key(s): {joined}")

    if key == "retry_policy":
        if not isinstance(value, dict):
            raise ValueError(f"{field_label} must be an object")
        _reject_unknown_keys({"max_attempts"})
        attempts = value.get("max_attempts", 1)
        # bool is an int subclass, so reject it before the int check passes it.
        if isinstance(attempts, bool) or not isinstance(attempts, int):
            raise ValueError(f"{field_label} field 'max_attempts' must be an integer")
        if not 1 <= attempts <= 5:
            raise ValueError(f"{field_label} field 'max_attempts' must be between 1 and 5")
        return
    if key == "failure_policy":
        if not isinstance(value, dict):
            raise ValueError(f"{field_label} must be an object")
        allowed = {"halt_workflow", "fallback_to_legacy"}
        _reject_unknown_keys(allowed)
        for flag in allowed:
            if flag in value and not isinstance(value[flag], bool):
                raise ValueError(f"{field_label} field {flag!r} must be a boolean")
        return
    raise ValueError(f"{field_label} is not supported")
class WorkflowPosition(BaseModel): class WorkflowPosition(BaseModel):
@@ -149,18 +290,25 @@ class WorkflowConfig(BaseModel):
@model_validator(mode="after") @model_validator(mode="after")
def node_params_match_registry(self) -> "WorkflowConfig": def node_params_match_registry(self) -> "WorkflowConfig":
for node in self.nodes: for node in self.nodes:
definition = get_node_definition(node.step) definition = _require_node_definition(node)
if definition is None:
continue
field_definitions = {field.key: field for field in definition.fields} field_definitions = {field.key: field for field in definition.fields}
allowed_keys = {field.key for field in definition.fields} allowed_keys = {field.key for field in definition.fields} | _WORKFLOW_META_PARAM_KEYS
unknown_keys = sorted(key for key in node.params if key not in allowed_keys) unknown_keys = sorted(
key
for key in node.params
if key not in allowed_keys and not _is_dynamic_template_input_param(node, key)
)
if unknown_keys: if unknown_keys:
joined = ", ".join(repr(key) for key in unknown_keys) joined = ", ".join(repr(key) for key in unknown_keys)
raise ValueError( raise ValueError(
f"node {node.id!r} ({node.step.value}) uses unknown param key(s): {joined}" f"node {node.id!r} ({node.step.value}) uses unknown param key(s): {joined}"
) )
for key, value in node.params.items(): for key, value in node.params.items():
if _is_dynamic_template_input_param(node, key):
continue
if key in _WORKFLOW_META_PARAM_KEYS:
_validate_meta_param_value(node=node, key=key, value=value)
continue
field_definition = field_definitions.get(key) field_definition = field_definitions.get(key)
if field_definition is None: if field_definition is None:
continue continue
@@ -173,20 +321,19 @@ class WorkflowConfig(BaseModel):
@model_validator(mode="after") @model_validator(mode="after")
def ui_family_matches_node_families(self) -> "WorkflowConfig": def ui_family_matches_node_families(self) -> "WorkflowConfig":
families = { definitions = [_require_node_definition(node) for node in self.nodes]
definition.family families = {definition.family for definition in definitions}
for node in self.nodes inferred_family = _infer_concrete_workflow_family(definitions)
if (definition := get_node_definition(node.step)) is not None
}
if not families: if not families:
return self return self
inferred_family = "mixed" if len(families) > 1 else next(iter(families))
execution_mode = self.ui.execution_mode if self.ui is not None else "legacy" execution_mode = self.ui.execution_mode if self.ui is not None else "legacy"
if execution_mode in {"graph", "shadow"} and inferred_family == "mixed": if execution_mode in {"graph", "shadow"} and inferred_family == "mixed":
raise ValueError( raise ValueError(
"workflow ui.execution_mode must stay single-family for graph/shadow execution" "workflow ui.execution_mode must stay single-family for graph/shadow execution"
) )
if inferred_family is None:
return self
if self.ui is None or self.ui.family is None: if self.ui is None or self.ui.family is None:
return self return self
if self.ui.family != inferred_family: if self.ui.family != inferred_family:
@@ -220,9 +367,7 @@ class WorkflowConfig(BaseModel):
node_id = queue.popleft() node_id = queue.popleft()
processed += 1 processed += 1
node = node_by_id[node_id] node = node_by_id[node_id]
definition = get_node_definition(node.step) definition = _require_node_definition(node)
if definition is None:
continue
node_inputs = available_artifacts[node_id] | _context_seed_artifacts(definition) node_inputs = available_artifacts[node_id] | _context_seed_artifacts(definition)
required = set(definition.input_contract.get("requires", [])) required = set(definition.input_contract.get("requires", []))
+4 -3
View File
@@ -6,6 +6,7 @@ from fastapi.staticfiles import StaticFiles
from pathlib import Path from pathlib import Path
from app.config import settings from app.config import settings
from app.core.render_paths import ensure_group_writable_dir
from app.database import engine, Base from app.database import engine, Base
from app.core.websocket import manager as ws_manager from app.core.websocket import manager as ws_manager
from app.core.middleware import TenantContextMiddleware from app.core.middleware import TenantContextMiddleware
@@ -33,7 +34,7 @@ from app.api.routers.chat import router as chat_router
async def lifespan(app: FastAPI): async def lifespan(app: FastAPI):
# Create upload directories # Create upload directories
for subdir in ("step_files", "excel_files", "thumbnails", "renders", "blend-templates"): for subdir in ("step_files", "excel_files", "thumbnails", "renders", "blend-templates"):
Path(settings.upload_dir, subdir).mkdir(parents=True, exist_ok=True) ensure_group_writable_dir(Path(settings.upload_dir, subdir))
# Start WebSocket Redis subscriber # Start WebSocket Redis subscriber
await ws_manager.start_redis_subscriber() await ws_manager.start_redis_subscriber()
yield yield
@@ -59,7 +60,7 @@ app.add_middleware(TenantContextMiddleware)
# Mount static files for thumbnails (dir created in lifespan; skip if not writable) # Mount static files for thumbnails (dir created in lifespan; skip if not writable)
thumbnails_dir = Path(settings.upload_dir) / "thumbnails" thumbnails_dir = Path(settings.upload_dir) / "thumbnails"
try: try:
thumbnails_dir.mkdir(parents=True, exist_ok=True) ensure_group_writable_dir(thumbnails_dir)
app.mount("/thumbnails", StaticFiles(directory=str(thumbnails_dir)), name="thumbnails") app.mount("/thumbnails", StaticFiles(directory=str(thumbnails_dir)), name="thumbnails")
except (PermissionError, OSError): except (PermissionError, OSError):
pass # Running outside Docker without upload dir — thumbnails won't be served statically pass # Running outside Docker without upload dir — thumbnails won't be served statically
@@ -67,7 +68,7 @@ except (PermissionError, OSError):
# Mount static files for renders # Mount static files for renders
renders_dir = Path(settings.upload_dir) / "renders" renders_dir = Path(settings.upload_dir) / "renders"
try: try:
renders_dir.mkdir(parents=True, exist_ok=True) ensure_group_writable_dir(renders_dir)
app.mount("/renders", StaticFiles(directory=str(renders_dir)), name="renders") app.mount("/renders", StaticFiles(directory=str(renders_dir)), name="renders")
except (PermissionError, OSError): except (PermissionError, OSError):
pass pass
+2
View File
@@ -2,6 +2,7 @@
from app.domains.rendering.models import ( from app.domains.rendering.models import (
OUTPUT_TYPE_ARTIFACT_KINDS, OUTPUT_TYPE_ARTIFACT_KINDS,
OUTPUT_TYPE_WORKFLOW_FAMILIES, OUTPUT_TYPE_WORKFLOW_FAMILIES,
OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES,
OutputType, OutputType,
VALID_RENDER_BACKENDS, VALID_RENDER_BACKENDS,
) )
@@ -9,5 +10,6 @@ __all__ = [
"OutputType", "OutputType",
"VALID_RENDER_BACKENDS", "VALID_RENDER_BACKENDS",
"OUTPUT_TYPE_WORKFLOW_FAMILIES", "OUTPUT_TYPE_WORKFLOW_FAMILIES",
"OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES",
"OUTPUT_TYPE_ARTIFACT_KINDS", "OUTPUT_TYPE_ARTIFACT_KINDS",
] ]
+2 -6
View File
@@ -13,6 +13,7 @@ from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession
from app.config import settings from app.config import settings
from app.core.render_paths import result_path_to_public_url
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -774,12 +775,7 @@ async def _tool_find_product_renders(
renders = [] renders = []
for r in rows: for r in rows:
path = r["result_path"] or "" path = r["result_path"] or ""
# Convert internal path to servable URL url = result_path_to_public_url(path, require_exists=True)
url = None
if "/renders/" in path:
url = path[path.index("/renders/"):]
elif "/thumbnails/" in path:
url = path[path.index("/thumbnails/"):]
# Effective material override (line overrides output type) # Effective material override (line overrides output type)
material = r["line_material_override"] or r["ot_material_override"] or None material = r["line_material_override"] or r["ot_material_override"] or None
+144 -18
View File
@@ -20,6 +20,9 @@ import re
# ── Part key generation ─────────────────────────────────────────────────────── # ── Part key generation ───────────────────────────────────────────────────────
_AF_RE = re.compile(r'_AF\d+$', re.IGNORECASE) _AF_RE = re.compile(r'_AF\d+$', re.IGNORECASE)
# Exporter-generated variant suffixes ("_AF12", "_AF3_ASM", optional trailing "_"),
# stripped when collapsing instance names back to their semantic source name.
_AF_VARIANT_RE = re.compile(r"_AF\d+(_ASM)?_?$", re.IGNORECASE)
# Persisted material names may still carry the legacy prefix; normalize_material_name
# rewrites it (case-insensitively) to the current prefix.
_LEGACY_MATERIAL_PREFIX = "SCHAEFFLER_"
_CURRENT_MATERIAL_PREFIX = "HARTOMAT_"
def generate_part_key( def generate_part_key(
@@ -53,6 +56,95 @@ def generate_part_key(
return key return key
def normalize_material_name(material_name: str | None) -> str | None:
    """Map persisted legacy-prefixed material names onto the current prefix.

    Non-strings and blank/whitespace-only names normalize to None; any other
    name is returned trimmed, with a case-insensitive legacy prefix swapped
    for the current one (the remainder keeps its original casing).
    """
    if not isinstance(material_name, str):
        return None
    trimmed = material_name.strip()
    if not trimmed:
        return None
    # _LEGACY_MATERIAL_PREFIX is uppercase, so uppercasing the candidate gives
    # a case-insensitive prefix match.
    if not trimmed.upper().startswith(_LEGACY_MATERIAL_PREFIX):
        return trimmed
    remainder = trimmed[len(_LEGACY_MATERIAL_PREFIX):]
    return _CURRENT_MATERIAL_PREFIX + remainder
def _normalize_semantic_source_name(raw_name: str) -> str:
    """Strip exporter-only decorations so the semantic OCC source name remains.

    Removes a trailing ".NNN" numeric duplicate suffix, then repeatedly strips
    "_AF…" variant suffixes until the name stops changing.
    """
    candidate = re.sub(r"\.\d{3}$", "", (raw_name or "").strip())
    while True:
        stripped = _AF_VARIANT_RE.sub("", candidate)
        if stripped == candidate:
            return candidate
        candidate = stripped
def _slugify_semantic_source_name(raw_name: str) -> str:
    """Turn a semantic source name into a lowercase, underscore-delimited slug (max 50 chars)."""
    semantic = _normalize_semantic_source_name(raw_name)
    # Split camelCase boundaries before lowercasing so words stay separated.
    with_boundaries = re.sub(r"([a-z])([A-Z])", r"\1_\2", semantic)
    slug = re.sub(r"[^a-z0-9]+", "_", with_boundaries.lower())
    return slug.strip("_")[:50]
def _derive_semantic_alias_key(part_key: str, source_name: str) -> str | None:
    """Return the semantic alias behind a deduplicated instance key, if any.

    Only yields an alias when ``part_key`` equals the alias plus a dedup
    suffix ("_2".."_N…") or an "_afN"/"_afN_asm" variant suffix; otherwise
    (including when the key already IS the alias) returns None.
    """
    alias = _slugify_semantic_source_name(source_name)
    if not alias or alias == part_key:
        return None
    suffix_pattern = rf"{re.escape(alias)}(?:_[2-9]\d*|_af\d+(?:_asm)?)"
    match = re.fullmatch(suffix_pattern, part_key, flags=re.IGNORECASE)
    return alias if match is not None else None
def _alias_priority(part_key: str, source_name: str) -> tuple[int, int, int]:
match = re.fullmatch(r".+_(\d+)$", part_key)
suffix_number = int(match.group(1)) if match else 1_000_000
return (suffix_number, len(source_name or ""), len(part_key))
def _iter_lookup_keys(part_key: str, fallback_part_keys: tuple[str, ...] = ()) -> tuple[str, ...]:
ordered_keys: list[str] = []
for key in (part_key, *fallback_part_keys):
if key and key not in ordered_keys:
ordered_keys.append(key)
return tuple(ordered_keys)
def _build_part_entry(
    *,
    part_key: str,
    source_name: str,
    prim_path: str | None,
    manual: dict,
    resolved: dict,
    source: dict,
    fallback_part_keys: tuple[str, ...] = (),
) -> dict:
    """Assemble one scene-manifest part entry with its resolved material."""
    material, provenance = _resolve_material(
        part_key,
        source_name,
        manual,
        resolved,
        source,
        fallback_part_keys=fallback_part_keys,
    )
    entry = {
        "part_key": part_key,
        "source_name": source_name,
        "prim_path": prim_path,
        "effective_material": material,
        "assignment_provenance": provenance,
    }
    # Parts with no resolvable material are flagged so callers can surface them.
    entry["is_unassigned"] = material is None
    return entry
# ── Scene manifest building ─────────────────────────────────────────────────── # ── Scene manifest building ───────────────────────────────────────────────────
def build_scene_manifest(cad_file, usd_asset=None) -> dict: def build_scene_manifest(cad_file, usd_asset=None) -> dict:
@@ -65,7 +157,8 @@ def build_scene_manifest(cad_file, usd_asset=None) -> dict:
Material assignment priority per part: Material assignment priority per part:
1. `manual_material_overrides[part_key]` provenance "manual" 1. `manual_material_overrides[part_key]` provenance "manual"
2. `resolved_material_assignments[part_key]["material"]` provenance "auto" 2. `resolved_material_assignments[part_key]["canonical_material"]` (or legacy
`["material"]`) provenance "auto"
3. substring match in `source_material_assignments` against source_name provenance "source" 3. substring match in `source_material_assignments` against source_name provenance "source"
4. None, is_unassigned=True provenance "default" 4. None, is_unassigned=True provenance "default"
""" """
@@ -80,25 +173,51 @@ def build_scene_manifest(cad_file, usd_asset=None) -> dict:
if resolved: if resolved:
# Build from resolved assignments (USD pipeline has run) # Build from resolved assignments (USD pipeline has run)
alias_candidates: dict[str, tuple[tuple[int, int, int], dict]] = {}
for part_key, meta in resolved.items(): for part_key, meta in resolved.items():
source_name = meta.get("source_name", "") if isinstance(meta, dict) else "" source_name = meta.get("source_name", "") if isinstance(meta, dict) else ""
prim_path = meta.get("prim_path") if isinstance(meta, dict) else None prim_path = meta.get("prim_path") if isinstance(meta, dict) else None
effective_material, provenance = _resolve_material( part_entry = _build_part_entry(
part_key, source_name, manual, resolved, source part_key=part_key,
source_name=source_name,
prim_path=prim_path,
manual=manual,
resolved=resolved,
source=source,
) )
is_unassigned = effective_material is None parts.append(part_entry)
if part_entry["is_unassigned"]:
unassigned_parts.append(part_key)
parts.append({ alias_key = _derive_semantic_alias_key(part_key, source_name)
"part_key": part_key, if alias_key is None or alias_key in resolved:
continue
candidate = {
"part_key": alias_key,
"source_name": source_name, "source_name": source_name,
"prim_path": prim_path, "prim_path": prim_path,
"effective_material": effective_material, "fallback_part_keys": (part_key,),
"assignment_provenance": provenance, }
"is_unassigned": is_unassigned, candidate_priority = _alias_priority(part_key, source_name)
}) current = alias_candidates.get(alias_key)
if is_unassigned: if current is None or candidate_priority < current[0]:
unassigned_parts.append(part_key) alias_candidates[alias_key] = (candidate_priority, candidate)
for alias_key, (_, candidate) in alias_candidates.items():
alias_entry = _build_part_entry(
part_key=candidate["part_key"],
source_name=candidate["source_name"],
prim_path=candidate["prim_path"],
manual=manual,
resolved=resolved,
source=source,
fallback_part_keys=candidate["fallback_part_keys"],
)
parts.append(alias_entry)
if alias_entry["is_unassigned"]:
unassigned_parts.append(alias_key)
elif cad_file.parsed_objects: elif cad_file.parsed_objects:
# Fall back to parsed_objects from STEP extraction # Fall back to parsed_objects from STEP extraction
@@ -149,23 +268,30 @@ def _resolve_material(
manual: dict, manual: dict,
resolved: dict, resolved: dict,
source: dict, source: dict,
fallback_part_keys: tuple[str, ...] = (),
) -> tuple[str | None, str]: ) -> tuple[str | None, str]:
"""Return (material_name, provenance) for one part using priority order.""" """Return (material_name, provenance) for one part using priority order."""
lookup_keys = _iter_lookup_keys(part_key, fallback_part_keys)
# 1. Manual override # 1. Manual override
if part_key in manual and manual[part_key]: for lookup_key in lookup_keys:
return str(manual[part_key]), "manual" if lookup_key in manual and manual[lookup_key]:
return normalize_material_name(str(manual[lookup_key])), "manual"
# 2. Auto-resolved from USD pipeline # 2. Auto-resolved from USD pipeline
meta = resolved.get(part_key) for lookup_key in lookup_keys:
if isinstance(meta, dict) and meta.get("material"): meta = resolved.get(lookup_key)
return str(meta["material"]), "auto" if isinstance(meta, dict):
canonical = normalize_material_name(meta.get("canonical_material") or meta.get("material"))
if canonical:
return canonical, "auto"
# 3. Substring match in source_material_assignments against source_name # 3. Substring match in source_material_assignments against source_name
sn_lower = source_name.lower() sn_lower = source_name.lower()
for src_key, src_mat in source.items(): for src_key, src_mat in source.items():
if src_key.lower() in sn_lower or sn_lower in src_key.lower(): if src_key.lower() in sn_lower or sn_lower in src_key.lower():
if src_mat: if src_mat:
return str(src_mat), "source" return normalize_material_name(str(src_mat)), "source"
# 4. Unassigned # 4. Unassigned
return None, "default" return None, "default"
+298 -55
View File
@@ -4,6 +4,7 @@ Used by the render-worker Celery container (which has BLENDER_BIN set and
cadquery installed). The backend and standard workers fall back to the Pillow cadquery installed). The backend and standard workers fall back to the Pillow
placeholder when this service is unavailable. placeholder when this service is unavailable.
""" """
import hashlib
import json import json
import logging import logging
import os import os
@@ -12,16 +13,175 @@ import signal
import subprocess import subprocess
from pathlib import Path from pathlib import Path
from app.core.render_paths import ensure_group_writable_dir
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def _glb_from_step(step_path: Path, glb_path: Path, tessellation_engine: str = "occ") -> None: def resolve_tessellation_settings(
profile: str = "render",
tessellation_engine: str | None = None,
) -> tuple[float, float, str]:
"""Resolve tessellation settings from system settings for a given profile."""
profile_key = "scene" if profile == "scene" else "render"
defaults = {
"scene": (0.1, 0.1),
"render": (0.03, 0.05),
}
default_linear, default_angular = defaults[profile_key]
try:
from app.services.step_processor import _get_all_settings
settings = _get_all_settings()
linear_deflection = float(
settings.get(f"{profile_key}_linear_deflection", str(default_linear))
)
angular_deflection = float(
settings.get(f"{profile_key}_angular_deflection", str(default_angular))
)
effective_engine = (
tessellation_engine
or settings.get("tessellation_engine", "occ")
or "occ"
)
return linear_deflection, angular_deflection, effective_engine
except Exception as exc:
logger.warning(
"Could not resolve %s tessellation settings: %s; using defaults",
profile_key,
exc,
)
return default_linear, default_angular, tessellation_engine or "occ"
def build_tessellated_glb_path(
step_path: Path,
profile: str,
tessellation_engine: str,
linear_deflection: float,
angular_deflection: float,
) -> Path:
"""Build a settings-sensitive GLB path to avoid stale mesh reuse."""
signature = hashlib.sha1(
f"{profile}:{tessellation_engine}:{linear_deflection:.6f}:{angular_deflection:.6f}".encode(
"utf-8"
)
).hexdigest()[:10]
return step_path.parent / f"{step_path.stem}_{profile}_{signature}.glb"
def _stringify_optional_arg(value: object) -> str:
if value in (None, ""):
return ""
return str(value)
def _resolve_render_samples(engine: str, samples: int | None) -> int:
if samples is not None:
return int(samples)
effective_engine = (engine or "cycles").lower()
setting_key = (
"blender_eevee_samples"
if effective_engine == "eevee"
else "blender_cycles_samples"
)
try:
from app.services.step_processor import _get_all_settings
settings = _get_all_settings()
return int(settings[setting_key])
except Exception as exc:
logger.warning(
"Could not resolve Blender samples from settings for engine=%s: %s; "
"using legacy fallback",
effective_engine,
exc,
)
return 64 if effective_engine == "eevee" else 256
def build_turntable_ffmpeg_cmd(
frames_dir: Path,
output_path: Path,
*,
fps: int = 30,
bg_color: str = "",
width: int = 1920,
height: int = 1080,
ffmpeg_bin: str | None = None,
) -> list[str]:
"""Build the canonical FFmpeg command for turntable MP4 composition.
Legacy and graph/shadow paths must share this logic so template-backed
turntable outputs do not drift due to encoding differences.
"""
ffmpeg = ffmpeg_bin or shutil.which("ffmpeg") or "ffmpeg"
if any(frames_dir.glob("frame_*.png")):
frame_pattern = str(frames_dir / "frame_%04d.png")
else:
frame_pattern = str(frames_dir / "%04d.png")
if bg_color:
hex_color = bg_color.lstrip("#") or "ffffff"
return [
ffmpeg,
"-y",
"-framerate",
str(fps),
"-i",
frame_pattern,
"-f",
"lavfi",
"-i",
f"color=c=0x{hex_color}:size={width}x{height}:rate={fps}",
"-filter_complex",
"[1:v][0:v]overlay=0:0:shortest=1",
"-vcodec",
"libx264",
"-pix_fmt",
"yuv420p",
"-crf",
"18",
"-movflags",
"+faststart",
str(output_path),
]
return [
ffmpeg,
"-y",
"-framerate",
str(fps),
"-i",
frame_pattern,
"-vcodec",
"libx264",
"-pix_fmt",
"yuv420p",
"-crf",
"18",
"-movflags",
"+faststart",
str(output_path),
]
def _glb_from_step(
step_path: Path,
glb_path: Path,
tessellation_engine: str = "occ",
tessellation_profile: str = "render",
) -> None:
"""Convert STEP → GLB via OCC or GMSH (export_step_to_gltf.py, no Blender needed).""" """Convert STEP → GLB via OCC or GMSH (export_step_to_gltf.py, no Blender needed)."""
import subprocess import subprocess
import sys as _sys import sys as _sys
linear_deflection = 0.3 linear_deflection, angular_deflection, effective_engine = resolve_tessellation_settings(
angular_deflection = 0.5 tessellation_profile,
tessellation_engine,
)
scripts_dir = Path(os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts")) scripts_dir = Path(os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts"))
script_path = scripts_dir / "export_step_to_gltf.py" script_path = scripts_dir / "export_step_to_gltf.py"
@@ -32,7 +192,7 @@ def _glb_from_step(step_path: Path, glb_path: Path, tessellation_engine: str = "
"--output_path", str(glb_path), "--output_path", str(glb_path),
"--linear_deflection", str(linear_deflection), "--linear_deflection", str(linear_deflection),
"--angular_deflection", str(angular_deflection), "--angular_deflection", str(angular_deflection),
"--tessellation_engine", tessellation_engine, "--tessellation_engine", effective_engine,
] ]
result = subprocess.run(cmd, capture_output=True, text=True, timeout=120) result = subprocess.run(cmd, capture_output=True, text=True, timeout=120)
for line in result.stdout.splitlines(): for line in result.stdout.splitlines():
@@ -44,7 +204,15 @@ def _glb_from_step(step_path: Path, glb_path: Path, tessellation_engine: str = "
f"export_step_to_gltf.py failed (exit {result.returncode}).\n" f"export_step_to_gltf.py failed (exit {result.returncode}).\n"
f"STDERR: {result.stderr[-1000:]}" f"STDERR: {result.stderr[-1000:]}"
) )
logger.info("GLB converted: %s (%d KB)", glb_path.name, glb_path.stat().st_size // 1024) logger.info(
"GLB converted: %s (%d KB) with %s tessellation linear=%s angular=%s engine=%s",
glb_path.name,
glb_path.stat().st_size // 1024,
tessellation_profile,
linear_deflection,
angular_deflection,
effective_engine,
)
def find_blender() -> str: def find_blender() -> str:
@@ -67,9 +235,9 @@ def render_still(
width: int = 512, width: int = 512,
height: int = 512, height: int = 512,
engine: str = "cycles", engine: str = "cycles",
samples: int = 256, samples: int | None = None,
smooth_angle: int = 30, smooth_angle: int = 30,
cycles_device: str = "auto", cycles_device: str = "gpu",
transparent_bg: bool = False, transparent_bg: bool = False,
part_colors: dict | None = None, part_colors: dict | None = None,
template_path: str | None = None, template_path: str | None = None,
@@ -92,9 +260,12 @@ def render_still(
log_callback: "Callable[[str], None] | None" = None, log_callback: "Callable[[str], None] | None" = None,
usd_path: "Path | None" = None, usd_path: "Path | None" = None,
tessellation_engine: str = "occ", tessellation_engine: str = "occ",
tessellation_profile: str = "render",
focal_length_mm: float | None = None, focal_length_mm: float | None = None,
sensor_width_mm: float | None = None, sensor_width_mm: float | None = None,
material_override: str | None = None, material_override: str | None = None,
template_inputs: dict | None = None,
**ignored_control_kwargs,
) -> dict: ) -> dict:
"""Convert STEP → GLB (OCC or GMSH) → PNG (Blender subprocess). """Convert STEP → GLB (OCC or GMSH) → PNG (Blender subprocess).
@@ -120,8 +291,18 @@ def render_still(
t0 = time.monotonic() t0 = time.monotonic()
if ignored_control_kwargs:
logger.debug(
"render_still ignoring unsupported control kwargs: %s",
sorted(ignored_control_kwargs.keys()),
)
if isinstance(usd_path, str) and usd_path.strip():
usd_path = Path(usd_path)
actual_samples = _resolve_render_samples(engine, samples)
# 1. GLB conversion (OCC) — skipped when usd_path is provided # 1. GLB conversion (OCC) — skipped when usd_path is provided
glb_path = step_path.parent / f"{step_path.stem}_thumbnail.glb"
use_usd = bool(usd_path and usd_path.exists()) use_usd = bool(usd_path and usd_path.exists())
t_glb = time.monotonic() t_glb = time.monotonic()
@@ -129,15 +310,39 @@ def render_still(
logger.info("[render_blender] using USD path: %s", usd_path) logger.info("[render_blender] using USD path: %s", usd_path)
glb_size_bytes = 0 glb_size_bytes = 0
else: else:
linear_deflection, angular_deflection, effective_engine = resolve_tessellation_settings(
tessellation_profile,
tessellation_engine,
)
glb_path = build_tessellated_glb_path(
step_path,
tessellation_profile,
effective_engine,
linear_deflection,
angular_deflection,
)
if not glb_path.exists() or glb_path.stat().st_size == 0: if not glb_path.exists() or glb_path.stat().st_size == 0:
_glb_from_step(step_path, glb_path, tessellation_engine) _glb_from_step(
step_path,
glb_path,
tessellation_engine=effective_engine,
tessellation_profile=tessellation_profile,
)
else: else:
logger.info("GLB local hit: %s (%d KB)", glb_path.name, glb_path.stat().st_size // 1024) logger.info(
"GLB local hit: %s (%d KB) profile=%s linear=%s angular=%s engine=%s",
glb_path.name,
glb_path.stat().st_size // 1024,
tessellation_profile,
linear_deflection,
angular_deflection,
effective_engine,
)
glb_size_bytes = glb_path.stat().st_size if glb_path.exists() else 0 glb_size_bytes = glb_path.stat().st_size if glb_path.exists() else 0
glb_duration_s = round(time.monotonic() - t_glb, 2) glb_duration_s = round(time.monotonic() - t_glb, 2)
# 2. Blender render # 2. Blender render
output_path.parent.mkdir(parents=True, exist_ok=True) ensure_group_writable_dir(output_path.parent)
env = dict(os.environ) env = dict(os.environ)
if engine == "eevee": if engine == "eevee":
@@ -149,6 +354,7 @@ def render_still(
}) })
else: else:
env["EGL_PLATFORM"] = "surfaceless" env["EGL_PLATFORM"] = "surfaceless"
env["BLENDER_DEFAULT_SAMPLES"] = str(actual_samples)
def _build_cmd(eng: str) -> list: def _build_cmd(eng: str) -> list:
# Pass "" as glb_path when using USD — blender_render.py reads --usd-path instead # Pass "" as glb_path when using USD — blender_render.py reads --usd-path instead
@@ -161,7 +367,7 @@ def render_still(
glb_arg, glb_arg,
str(output_path), str(output_path),
str(width), str(height), str(width), str(height),
eng, str(samples), str(smooth_angle), eng, str(actual_samples), str(smooth_angle),
cycles_device, cycles_device,
"1" if transparent_bg else "0", "1" if transparent_bg else "0",
template_path or "", template_path or "",
@@ -172,9 +378,9 @@ def render_still(
"1" if lighting_only else "0", "1" if lighting_only else "0",
"1" if shadow_catcher else "0", "1" if shadow_catcher else "0",
str(rotation_x), str(rotation_y), str(rotation_z), str(rotation_x), str(rotation_y), str(rotation_z),
noise_threshold or "", denoiser or "", _stringify_optional_arg(noise_threshold), _stringify_optional_arg(denoiser),
denoising_input_passes or "", denoising_prefilter or "", _stringify_optional_arg(denoising_input_passes), _stringify_optional_arg(denoising_prefilter),
denoising_quality or "", denoising_use_gpu or "", _stringify_optional_arg(denoising_quality), _stringify_optional_arg(denoising_use_gpu),
] ]
if use_usd: if use_usd:
cmd += ["--usd-path", str(usd_path)] cmd += ["--usd-path", str(usd_path)]
@@ -188,6 +394,8 @@ def render_still(
cmd += ["--sensor-width", str(sensor_width_mm)] cmd += ["--sensor-width", str(sensor_width_mm)]
if material_override: if material_override:
cmd += ["--material-override", material_override] cmd += ["--material-override", material_override]
if template_inputs:
cmd += ["--template-inputs", json.dumps(template_inputs)]
return cmd return cmd
def _run(eng: str) -> tuple[int, list[str], list[str]]: def _run(eng: str) -> tuple[int, list[str], list[str]]:
@@ -305,7 +513,7 @@ def render_turntable_to_file(
engine: str = "cycles", engine: str = "cycles",
samples: int = 128, samples: int = 128,
smooth_angle: int = 30, smooth_angle: int = 30,
cycles_device: str = "auto", cycles_device: str = "gpu",
transparent_bg: bool = False, transparent_bg: bool = False,
bg_color: str = "", bg_color: str = "",
turntable_axis: str = "world_z", turntable_axis: str = "world_z",
@@ -323,9 +531,11 @@ def render_turntable_to_file(
camera_orbit: bool = True, camera_orbit: bool = True,
usd_path: "Path | None" = None, usd_path: "Path | None" = None,
tessellation_engine: str = "occ", tessellation_engine: str = "occ",
tessellation_profile: str = "render",
focal_length_mm: float | None = None, focal_length_mm: float | None = None,
sensor_width_mm: float | None = None, sensor_width_mm: float | None = None,
material_override: str | None = None, material_override: str | None = None,
template_inputs: dict | None = None,
) -> dict: ) -> dict:
"""Render a turntable animation: STEP → STL → N frames (Blender) → mp4 (ffmpeg). """Render a turntable animation: STEP → STL → N frames (Blender) → mp4 (ffmpeg).
@@ -357,25 +567,48 @@ def render_turntable_to_file(
t0 = time.monotonic() t0 = time.monotonic()
# 1. GLB conversion (OCC) — skipped when usd_path is provided # 1. GLB conversion (OCC) — skipped when usd_path is provided
glb_path = step_path.parent / f"{step_path.stem}_thumbnail.glb"
use_usd = bool(usd_path and usd_path.exists()) use_usd = bool(usd_path and usd_path.exists())
t_glb = time.monotonic() t_glb = time.monotonic()
if use_usd: if use_usd:
logger.info("[render_blender] turntable using USD path: %s", usd_path) logger.info("[render_blender] turntable using USD path: %s", usd_path)
else: else:
linear_deflection, angular_deflection, effective_engine = resolve_tessellation_settings(
tessellation_profile,
tessellation_engine,
)
glb_path = build_tessellated_glb_path(
step_path,
tessellation_profile,
effective_engine,
linear_deflection,
angular_deflection,
)
if not glb_path.exists() or glb_path.stat().st_size == 0: if not glb_path.exists() or glb_path.stat().st_size == 0:
_glb_from_step(step_path, glb_path, tessellation_engine) _glb_from_step(
step_path,
glb_path,
tessellation_engine=effective_engine,
tessellation_profile=tessellation_profile,
)
else: else:
logger.info("GLB local hit: %s (%d KB)", glb_path.name, glb_path.stat().st_size // 1024) logger.info(
"GLB local hit: %s (%d KB) profile=%s linear=%s angular=%s engine=%s",
glb_path.name,
glb_path.stat().st_size // 1024,
tessellation_profile,
linear_deflection,
angular_deflection,
effective_engine,
)
glb_duration_s = round(time.monotonic() - t_glb, 2) glb_duration_s = round(time.monotonic() - t_glb, 2)
# 2. Render frames with Blender # 2. Render frames with Blender
frames_dir = output_path.parent / f"_frames_{output_path.stem}" frames_dir = output_path.parent / f"_frames_{output_path.stem}"
if frames_dir.exists(): if frames_dir.exists():
_shutil.rmtree(frames_dir, ignore_errors=True) _shutil.rmtree(frames_dir, ignore_errors=True)
frames_dir.mkdir(parents=True, exist_ok=True) ensure_group_writable_dir(frames_dir)
output_path.parent.mkdir(parents=True, exist_ok=True) ensure_group_writable_dir(output_path.parent)
env = dict(os.environ) env = dict(os.environ)
env["EGL_PLATFORM"] = "surfaceless" env["EGL_PLATFORM"] = "surfaceless"
@@ -416,6 +649,8 @@ def render_turntable_to_file(
cmd += ["--sensor-width", str(sensor_width_mm)] cmd += ["--sensor-width", str(sensor_width_mm)]
if material_override: if material_override:
cmd += ["--material-override", material_override] cmd += ["--material-override", material_override]
if template_inputs:
cmd += ["--template-inputs", json.dumps(template_inputs)]
log_lines: list[str] = [] log_lines: list[str] = []
@@ -458,34 +693,15 @@ def render_turntable_to_file(
# 3. Compose frames → mp4 with ffmpeg # 3. Compose frames → mp4 with ffmpeg
t_ffmpeg = time.monotonic() t_ffmpeg = time.monotonic()
ffmpeg_cmd = [ ffmpeg_cmd = build_turntable_ffmpeg_cmd(
ffmpeg_bin, frames_dir,
"-y", output_path,
"-framerate", str(fps), fps=fps,
"-i", str(frames_dir / "frame_%04d.png"), bg_color=bg_color if transparent_bg else "",
"-vcodec", "libx264", width=width,
"-pix_fmt", "yuv420p", height=height,
"-crf", "18", ffmpeg_bin=ffmpeg_bin,
"-movflags", "+faststart", )
str(output_path),
]
# If bg_color is set and transparent_bg is True, overlay frames on solid bg
if bg_color and transparent_bg:
hex_color = bg_color.lstrip("#")
r, g, b = int(hex_color[0:2], 16), int(hex_color[2:4], 16), int(hex_color[4:6], 16)
ffmpeg_cmd = [
ffmpeg_bin, "-y",
"-framerate", str(fps),
"-i", str(frames_dir / "frame_%04d.png"),
"-f", "lavfi", "-i", f"color=c=0x{hex_color}:size={width}x{height}:rate={fps}",
"-filter_complex", "[1:v][0:v]overlay=0:0:shortest=1",
"-vcodec", "libx264",
"-pix_fmt", "yuv420p",
"-crf", "18",
"-movflags", "+faststart",
str(output_path),
]
ffmpeg_proc = subprocess.run( ffmpeg_proc = subprocess.run(
ffmpeg_cmd, capture_output=True, text=True, timeout=300 ffmpeg_cmd, capture_output=True, text=True, timeout=300
@@ -530,7 +746,7 @@ def render_cinematic_to_file(
engine: str = "cycles", engine: str = "cycles",
samples: int = 128, samples: int = 128,
smooth_angle: int = 30, smooth_angle: int = 30,
cycles_device: str = "auto", cycles_device: str = "gpu",
transparent_bg: bool = False, transparent_bg: bool = False,
part_colors: dict | None = None, part_colors: dict | None = None,
template_path: str | None = None, template_path: str | None = None,
@@ -545,9 +761,11 @@ def render_cinematic_to_file(
rotation_z: float = 0.0, rotation_z: float = 0.0,
usd_path: "Path | None" = None, usd_path: "Path | None" = None,
tessellation_engine: str = "occ", tessellation_engine: str = "occ",
tessellation_profile: str = "render",
focal_length_mm: float | None = None, focal_length_mm: float | None = None,
sensor_width_mm: float | None = None, sensor_width_mm: float | None = None,
material_override: str | None = None, material_override: str | None = None,
template_inputs: dict | None = None,
log_callback: "Callable[[str], None] | None" = None, log_callback: "Callable[[str], None] | None" = None,
) -> dict: ) -> dict:
"""Render a cinematic highlight animation: STEP -> GLB/USD -> 480 frames @ 24fps (Blender) -> mp4 (ffmpeg). """Render a cinematic highlight animation: STEP -> GLB/USD -> 480 frames @ 24fps (Blender) -> mp4 (ffmpeg).
@@ -587,25 +805,48 @@ def render_cinematic_to_file(
t0 = time.monotonic() t0 = time.monotonic()
# 1. GLB conversion (OCC) — skipped when usd_path is provided # 1. GLB conversion (OCC) — skipped when usd_path is provided
glb_path = step_path.parent / f"{step_path.stem}_thumbnail.glb"
use_usd = bool(usd_path and usd_path.exists()) use_usd = bool(usd_path and usd_path.exists())
t_glb = time.monotonic() t_glb = time.monotonic()
if use_usd: if use_usd:
logger.info("[render_blender] cinematic using USD path: %s", usd_path) logger.info("[render_blender] cinematic using USD path: %s", usd_path)
else: else:
linear_deflection, angular_deflection, effective_engine = resolve_tessellation_settings(
tessellation_profile,
tessellation_engine,
)
glb_path = build_tessellated_glb_path(
step_path,
tessellation_profile,
effective_engine,
linear_deflection,
angular_deflection,
)
if not glb_path.exists() or glb_path.stat().st_size == 0: if not glb_path.exists() or glb_path.stat().st_size == 0:
_glb_from_step(step_path, glb_path, tessellation_engine) _glb_from_step(
step_path,
glb_path,
tessellation_engine=effective_engine,
tessellation_profile=tessellation_profile,
)
else: else:
logger.info("GLB local hit: %s (%d KB)", glb_path.name, glb_path.stat().st_size // 1024) logger.info(
"GLB local hit: %s (%d KB) profile=%s linear=%s angular=%s engine=%s",
glb_path.name,
glb_path.stat().st_size // 1024,
tessellation_profile,
linear_deflection,
angular_deflection,
effective_engine,
)
glb_duration_s = round(time.monotonic() - t_glb, 2) glb_duration_s = round(time.monotonic() - t_glb, 2)
# 2. Render frames with Blender # 2. Render frames with Blender
frames_dir = output_path.parent / f"_frames_{output_path.stem}" frames_dir = output_path.parent / f"_frames_{output_path.stem}"
if frames_dir.exists(): if frames_dir.exists():
_shutil.rmtree(frames_dir, ignore_errors=True) _shutil.rmtree(frames_dir, ignore_errors=True)
frames_dir.mkdir(parents=True, exist_ok=True) ensure_group_writable_dir(frames_dir)
output_path.parent.mkdir(parents=True, exist_ok=True) ensure_group_writable_dir(output_path.parent)
env = dict(os.environ) env = dict(os.environ)
env["EGL_PLATFORM"] = "surfaceless" env["EGL_PLATFORM"] = "surfaceless"
@@ -645,6 +886,8 @@ def render_cinematic_to_file(
cmd += ["--sensor-width", str(sensor_width_mm)] cmd += ["--sensor-width", str(sensor_width_mm)]
if material_override: if material_override:
cmd += ["--material-override", material_override] cmd += ["--material-override", material_override]
if template_inputs:
cmd += ["--template-inputs", json.dumps(template_inputs)]
log_lines: list[str] = [] log_lines: list[str] = []
+160 -30
View File
@@ -10,7 +10,9 @@ import logging
import uuid import uuid
from dataclasses import dataclass, field from dataclasses import dataclass, field
from pathlib import Path from pathlib import Path
from typing import TYPE_CHECKING from typing import TYPE_CHECKING, Any
from app.core.render_paths import ensure_group_writable_dir
if TYPE_CHECKING: if TYPE_CHECKING:
from app.models.cad_file import CadFile from app.models.cad_file import CadFile
@@ -18,6 +20,10 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class MissingCadResourceError(FileNotFoundError):
"""Terminal CAD resource error that should not be retried by Celery tasks."""
def build_part_colors( def build_part_colors(
cad_parsed_objects: list[str], cad_parsed_objects: list[str],
cad_part_materials: list[dict], cad_part_materials: list[dict],
@@ -1023,8 +1029,12 @@ def _get_all_settings() -> dict[str, str]:
"blender_eevee_samples": "64", "blender_eevee_samples": "64",
"thumbnail_format": "jpg", "thumbnail_format": "jpg",
"blender_smooth_angle": "30", "blender_smooth_angle": "30",
"cycles_device": "auto", "cycles_device": "gpu",
"tessellation_engine": "occ", "tessellation_engine": "occ",
"scene_linear_deflection": "0.1",
"scene_angular_deflection": "0.1",
"render_linear_deflection": "0.03",
"render_angular_deflection": "0.05",
} }
try: try:
from app.config import settings as app_settings from app.config import settings as app_settings
@@ -1046,6 +1056,23 @@ def _generate_thumbnail(
cad_file_id: str, cad_file_id: str,
upload_dir: str, upload_dir: str,
part_colors: dict[str, str] | None = None, part_colors: dict[str, str] | None = None,
*,
renderer: str | None = None,
render_engine: str | None = None,
samples: int | None = None,
width: int | None = None,
height: int | None = None,
transparent_bg: bool | None = None,
target_collection: str = "Product",
material_library_path: str | None = None,
material_map: dict[str, str] | None = None,
part_names_ordered: list[str] | None = None,
lighting_only: bool = False,
shadow_catcher: bool = False,
usd_path: Path | None = None,
focal_length_mm: float | None = None,
sensor_width_mm: float | None = None,
material_override: str | None = None,
) -> tuple[Path | None, dict]: ) -> tuple[Path | None, dict]:
"""Generate thumbnail using the configured renderer. """Generate thumbnail using the configured renderer.
@@ -1054,12 +1081,20 @@ def _generate_thumbnail(
""" """
import time import time
out_dir = Path(upload_dir) / "thumbnails" out_dir = Path(upload_dir) / "thumbnails"
out_dir.mkdir(parents=True, exist_ok=True) ensure_group_writable_dir(out_dir)
settings = _get_all_settings() settings = _get_all_settings()
renderer = settings["thumbnail_renderer"] requested_renderer = renderer or settings["thumbnail_renderer"]
fmt = settings["thumbnail_format"] # "jpg" or "png" active_renderer = requested_renderer
fmt = settings["thumbnail_format"] # "jpg" or "png"
ext = "jpg" if fmt == "jpg" else "png" ext = "jpg" if fmt == "jpg" else "png"
if requested_renderer == "threejs":
# The historical Three.js thumbnail renderer was removed from the backend.
# Keep the workflow node executable by falling back to the maintained Blender path
# while preserving the requested renderer in the render log for observability.
active_renderer = "blender"
fmt = "png"
ext = "png"
# Clean up any existing thumbnail for this cad_file_id (either extension) # Clean up any existing thumbnail for this cad_file_id (either extension)
for old_ext in ("png", "jpg"): for old_ext in ("png", "jpg"):
@@ -1073,28 +1108,39 @@ def _generate_thumbnail(
# Build the base render_log with the settings snapshot # Build the base render_log with the settings snapshot
render_log: dict = { render_log: dict = {
"renderer": renderer, "renderer": requested_renderer,
"format": fmt, "format": fmt,
"started_at": time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()), "started_at": time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()),
} }
if renderer == "blender": if active_renderer == "blender":
engine = settings["blender_engine"] engine = render_engine or settings["blender_engine"]
resolved_samples = int(samples) if samples is not None else int(settings[f"blender_{engine}_samples"])
resolved_width = int(width) if width is not None else 512
resolved_height = int(height) if height is not None else 512
resolved_transparent_bg = bool(transparent_bg) if transparent_bg is not None else False
render_log.update({ render_log.update({
"engine": engine, "engine": engine,
"samples": int(settings[f"blender_{engine}_samples"]), "samples": resolved_samples,
"smooth_angle": int(settings["blender_smooth_angle"]), "smooth_angle": int(settings["blender_smooth_angle"]),
"cycles_device": settings["cycles_device"], "cycles_device": settings["cycles_device"],
"width": 512, "width": resolved_width,
"height": 512, "height": resolved_height,
"transparent_bg": resolved_transparent_bg,
}) })
logger.info(f"Thumbnail renderer={renderer}, format={fmt}") if requested_renderer != active_renderer:
render_log["renderer_backend"] = active_renderer
render_log["renderer_fallback_reason"] = "threejs_renderer_removed_using_blender_compat"
logger.info(f"Thumbnail renderer={requested_renderer}, format={fmt}")
rendered_png: Path | None = None rendered_png: Path | None = None
service_data: dict = {} service_data: dict = {}
if renderer == "blender": if active_renderer == "blender":
engine = settings["blender_engine"] engine = render_engine or settings["blender_engine"]
samples = int(settings[f"blender_{engine}_samples"]) resolved_samples = int(samples) if samples is not None else int(settings[f"blender_{engine}_samples"])
resolved_width = int(width) if width is not None else 512
resolved_height = int(height) if height is not None else 512
resolved_transparent_bg = bool(transparent_bg) if transparent_bg is not None else False
from app.services.render_blender import is_blender_available, render_still from app.services.render_blender import is_blender_available, render_still
if is_blender_available(): if is_blender_available():
@@ -1102,11 +1148,25 @@ def _generate_thumbnail(
service_data = render_still( service_data = render_still(
step_path=step_path, step_path=step_path,
output_path=tmp_png, output_path=tmp_png,
width=resolved_width,
height=resolved_height,
engine=engine, engine=engine,
samples=samples, samples=resolved_samples,
smooth_angle=int(settings["blender_smooth_angle"]), smooth_angle=int(settings["blender_smooth_angle"]),
cycles_device=settings["cycles_device"], cycles_device=settings["cycles_device"],
transparent_bg=resolved_transparent_bg,
target_collection=target_collection,
material_library_path=material_library_path,
material_map=material_map,
part_names_ordered=part_names_ordered,
lighting_only=lighting_only,
shadow_catcher=shadow_catcher,
tessellation_engine=settings["tessellation_engine"], tessellation_engine=settings["tessellation_engine"],
usd_path=usd_path,
focal_length_mm=focal_length_mm,
sensor_width_mm=sensor_width_mm,
material_override=material_override,
tessellation_profile="scene",
) )
rendered_png = tmp_png if tmp_png.exists() else None rendered_png = tmp_png if tmp_png.exists() else None
except Exception as exc: except Exception as exc:
@@ -1133,8 +1193,7 @@ def _generate_thumbnail(
def _finalise_image(src: Path, dst: Path) -> Path | None: def _finalise_image(src: Path, dst: Path) -> Path | None:
"""Move src image to dst. When dst has a .webp suffix, convert via Pillow """Move src image to dst, converting the PNG intermediate when needed."""
(quality=90, method=4) for 50-70 % smaller files. Otherwise output PNG."""
if dst.suffix.lower() == ".webp": if dst.suffix.lower() == ".webp":
try: try:
from PIL import Image from PIL import Image
@@ -1148,13 +1207,52 @@ def _finalise_image(src: Path, dst: Path) -> Path | None:
out = dst.with_suffix(".png") out = dst.with_suffix(".png")
src.rename(out) src.rename(out)
return out return out
if dst.suffix.lower() in {".jpg", ".jpeg"}:
try:
from PIL import Image
img = Image.open(str(src))
if img.mode in {"RGBA", "LA"} or (img.mode == "P" and "transparency" in img.info):
background = Image.new("RGBA", img.size, (255, 255, 255, 255))
img = Image.alpha_composite(background, img.convert("RGBA")).convert("RGB")
else:
img = img.convert("RGB")
out = dst.with_suffix(".jpg")
img.save(str(out), "JPEG", quality=95, subsampling=0)
src.unlink(missing_ok=True)
return out
except Exception:
logger.warning("JPEG conversion failed — falling back to PNG")
out = dst.with_suffix(".png")
src.rename(out)
return out
out = dst.with_suffix(".png") out = dst.with_suffix(".png")
src.rename(out) src.rename(out)
return out return out
def regenerate_cad_thumbnail(cad_file_id: str, part_colors: dict[str, str]) -> bool: def regenerate_cad_thumbnail(
cad_file_id: str,
part_colors: dict[str, str],
*,
renderer: str | None = None,
render_engine: str | None = None,
samples: int | None = None,
width: int | None = None,
height: int | None = None,
transparent_bg: bool | None = None,
target_collection: str = "Product",
material_library_path: str | None = None,
material_map: dict[str, str] | None = None,
part_names_ordered: list[str] | None = None,
lighting_only: bool = False,
shadow_catcher: bool = False,
usd_path: Path | None = None,
focal_length_mm: float | None = None,
sensor_width_mm: float | None = None,
material_override: str | None = None,
) -> bool:
""" """
Regenerate a thumbnail with per-part colours for an existing CAD file. Regenerate a thumbnail with per-part colours for an existing CAD file.
@@ -1170,13 +1268,18 @@ def regenerate_cad_thumbnail(cad_file_id: str, part_colors: dict[str, str]) -> b
with Session(db_engine) as session: with Session(db_engine) as session:
cad_file = session.get(CadFile, uuid.UUID(cad_file_id)) cad_file = session.get(CadFile, uuid.UUID(cad_file_id))
if not cad_file: if not cad_file:
logger.error(f"CAD file not found: {cad_file_id}") message = f"CAD file not found: {cad_file_id}"
return False logger.warning(message)
raise MissingCadResourceError(message)
step_path = Path(cad_file.stored_path) step_path = Path(cad_file.stored_path)
if not step_path.exists(): if not step_path.exists():
logger.error(f"STEP file not found: {step_path}") message = f"STEP file not found: {step_path}"
return False logger.warning(message)
cad_file.processing_status = ProcessingStatus.failed
cad_file.error_message = message[:2000]
session.commit()
raise MissingCadResourceError(message)
# Mark as processing so the activity page shows it as active # Mark as processing so the activity page shows it as active
cad_file.processing_status = ProcessingStatus.processing cad_file.processing_status = ProcessingStatus.processing
@@ -1184,7 +1287,26 @@ def regenerate_cad_thumbnail(cad_file_id: str, part_colors: dict[str, str]) -> b
try: try:
thumb_path, render_log = _generate_thumbnail( thumb_path, render_log = _generate_thumbnail(
step_path, cad_file_id, app_settings.upload_dir, part_colors=part_colors step_path,
cad_file_id,
app_settings.upload_dir,
part_colors=part_colors,
renderer=renderer,
render_engine=render_engine,
samples=samples,
width=width,
height=height,
transparent_bg=transparent_bg,
target_collection=target_collection,
material_library_path=material_library_path,
material_map=material_map,
part_names_ordered=part_names_ordered,
lighting_only=lighting_only,
shadow_catcher=shadow_catcher,
usd_path=usd_path,
focal_length_mm=focal_length_mm,
sensor_width_mm=sensor_width_mm,
material_override=material_override,
) )
if thumb_path: if thumb_path:
cad_file.thumbnail_path = str(thumb_path) cad_file.thumbnail_path = str(thumb_path)
@@ -1207,6 +1329,7 @@ def render_to_file(
part_colors: dict[str, str] | None = None, part_colors: dict[str, str] | None = None,
width: int | None = None, width: int | None = None,
height: int | None = None, height: int | None = None,
smooth_angle: int | None = None,
transparent_bg: bool = False, transparent_bg: bool = False,
engine: str | None = None, engine: str | None = None,
samples: int | None = None, samples: int | None = None,
@@ -1234,6 +1357,7 @@ def render_to_file(
focal_length_mm: float | None = None, focal_length_mm: float | None = None,
sensor_width_mm: float | None = None, sensor_width_mm: float | None = None,
material_override: str | None = None, material_override: str | None = None,
template_inputs: dict[str, Any] | None = None,
) -> tuple[bool, dict]: ) -> tuple[bool, dict]:
"""Render a STEP file to a specific output path using current system settings. """Render a STEP file to a specific output path using current system settings.
@@ -1246,6 +1370,7 @@ def render_to_file(
part_colors: Optional {part_name: hex_color} map. part_colors: Optional {part_name: hex_color} map.
width: Optional render width (overrides system default). width: Optional render width (overrides system default).
height: Optional render height (overrides system default). height: Optional render height (overrides system default).
smooth_angle: Optional auto-smooth angle override in degrees.
transparent_bg: If True and renderer=blender+PNG, render with transparent background. transparent_bg: If True and renderer=blender+PNG, render with transparent background.
engine: Optional per-OT engine override ("cycles" | "eevee"), or None for system default. engine: Optional per-OT engine override ("cycles" | "eevee"), or None for system default.
samples: Optional per-OT samples override, or None for system default. samples: Optional per-OT samples override, or None for system default.
@@ -1262,7 +1387,7 @@ def render_to_file(
step = Path(step_path) step = Path(step_path)
out = Path(output_path) out = Path(output_path)
out.parent.mkdir(parents=True, exist_ok=True) ensure_group_writable_dir(out.parent)
settings = _get_all_settings() settings = _get_all_settings()
renderer = settings["thumbnail_renderer"] renderer = settings["thumbnail_renderer"]
@@ -1284,19 +1409,20 @@ def render_to_file(
if renderer == "blender": if renderer == "blender":
actual_engine = engine or settings["blender_engine"] actual_engine = engine or settings["blender_engine"]
actual_samples = samples or int(settings[f"blender_{actual_engine}_samples"]) actual_samples = int(samples) if samples is not None else int(settings[f"blender_{actual_engine}_samples"])
actual_cycles_device = cycles_device or settings["cycles_device"] actual_cycles_device = cycles_device or settings["cycles_device"]
actual_smooth_angle = smooth_angle if smooth_angle is not None else int(settings["blender_smooth_angle"])
w = width or 512 w = width or 512
h = height or 512 h = height or 512
render_log.update({ render_log.update({
"engine": actual_engine, "samples": actual_samples, "engine": actual_engine, "samples": actual_samples,
"smooth_angle": int(settings["blender_smooth_angle"]), "smooth_angle": actual_smooth_angle,
"cycles_device": actual_cycles_device, "cycles_device": actual_cycles_device,
"width": w, "height": h, "width": w, "height": h,
}) })
extra = { extra = {
"engine": actual_engine, "samples": actual_samples, "engine": actual_engine, "samples": actual_samples,
"smooth_angle": int(settings["blender_smooth_angle"]), "smooth_angle": actual_smooth_angle,
"cycles_device": actual_cycles_device, "cycles_device": actual_cycles_device,
"width": w, "height": h, "width": w, "height": h,
"transparent_bg": transparent_bg, "transparent_bg": transparent_bg,
@@ -1314,6 +1440,9 @@ def render_to_file(
render_log["lighting_only"] = True render_log["lighting_only"] = True
if shadow_catcher: if shadow_catcher:
render_log["shadow_catcher"] = True render_log["shadow_catcher"] = True
if template_inputs:
extra["template_inputs"] = template_inputs
render_log["template_inputs"] = template_inputs
if material_library_path and material_map: if material_library_path and material_map:
extra["material_library_path"] = material_library_path extra["material_library_path"] = material_library_path
extra["material_map"] = material_map extra["material_map"] = material_map
@@ -1349,7 +1478,7 @@ def render_to_file(
output_path=tmp_png, output_path=tmp_png,
engine=actual_engine, engine=actual_engine,
samples=actual_samples, samples=actual_samples,
smooth_angle=int(settings["blender_smooth_angle"]), smooth_angle=actual_smooth_angle,
cycles_device=actual_cycles_device, cycles_device=actual_cycles_device,
width=w, height=h, width=w, height=h,
transparent_bg=transparent_bg, transparent_bg=transparent_bg,
@@ -1373,6 +1502,7 @@ def render_to_file(
focal_length_mm=focal_length_mm, focal_length_mm=focal_length_mm,
sensor_width_mm=sensor_width_mm, sensor_width_mm=sensor_width_mm,
material_override=material_override, material_override=material_override,
template_inputs=template_inputs,
) )
rendered_png = tmp_png if tmp_png.exists() else None rendered_png = tmp_png if tmp_png.exists() else None
except Exception as exc: except Exception as exc:
@@ -1400,7 +1530,7 @@ def render_to_file(
def _convert_to_gltf(step_path: Path, cad_file_id: str, upload_dir: str) -> Path | None: def _convert_to_gltf(step_path: Path, cad_file_id: str, upload_dir: str) -> Path | None:
"""Convert STEP to glTF for browser 3D viewer.""" """Convert STEP to glTF for browser 3D viewer."""
out_dir = Path(upload_dir) / "gltf" out_dir = Path(upload_dir) / "gltf"
out_dir.mkdir(parents=True, exist_ok=True) ensure_group_writable_dir(out_dir)
out_path = out_dir / f"{cad_file_id}.gltf" out_path = out_dir / f"{cad_file_id}.gltf"
try: try:
+17 -3
View File
@@ -15,6 +15,7 @@ import logging
from sqlalchemy import create_engine, select, and_, exists from sqlalchemy import create_engine, select, and_, exists
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
from app.domains.materials.library_paths import resolve_asset_library_blend_path
from app.models.render_template import RenderTemplate from app.models.render_template import RenderTemplate
from app.models.system_setting import SystemSetting from app.models.system_setting import SystemSetting
from app.domains.rendering.models import render_template_output_types from app.domains.rendering.models import render_template_output_types
@@ -121,14 +122,27 @@ def get_material_library_path_for_session(session: Session) -> str | None:
row = session.execute( row = session.execute(
select(AssetLibrary).where(AssetLibrary.is_active == True).limit(1) # noqa: E712 select(AssetLibrary).where(AssetLibrary.is_active == True).limit(1) # noqa: E712
).scalar_one_or_none() ).scalar_one_or_none()
if row and row.blend_file_path: if row:
return row.blend_file_path resolved_path = resolve_asset_library_blend_path(
blend_file_path=row.blend_file_path,
asset_library_id=row.id,
)
if resolved_path:
if row.blend_file_path and resolved_path != row.blend_file_path:
logger.warning(
"Active asset library %s points to missing file %s; using %s instead",
row.id,
row.blend_file_path,
resolved_path,
)
return resolved_path
row = session.execute( row = session.execute(
select(SystemSetting).where(SystemSetting.key == "material_library_path") select(SystemSetting).where(SystemSetting.key == "material_library_path")
).scalar_one_or_none() ).scalar_one_or_none()
if row and row.value and row.value.strip(): if row and row.value and row.value.strip():
return row.value.strip() resolved_path = resolve_asset_library_blend_path(blend_file_path=row.value.strip())
return resolved_path or row.value.strip()
return None return None
+10 -1
View File
@@ -33,7 +33,16 @@ celery_app.conf.update(
"app.domains.rendering.tasks.*": {"queue": "asset_pipeline"}, "app.domains.rendering.tasks.*": {"queue": "asset_pipeline"},
"app.tasks.beat_tasks.*": {"queue": "step_processing"}, "app.tasks.beat_tasks.*": {"queue": "step_processing"},
"app.tasks.ai_tasks.*": {"queue": "ai_validation"}, "app.tasks.ai_tasks.*": {"queue": "ai_validation"},
# Legacy task names (shim) — keep until old queued tasks drain # Legacy task names (shim) — preserve the runtime queue split while
# old workflow configs and queued tasks still address app.tasks.step_tasks.*.
"app.tasks.step_tasks.render_step_thumbnail": {"queue": "asset_pipeline"},
"app.tasks.step_tasks.render_graph_thumbnail": {"queue": "asset_pipeline"},
"app.tasks.step_tasks.regenerate_thumbnail": {"queue": "asset_pipeline"},
"app.tasks.step_tasks.generate_gltf_geometry_task": {"queue": "asset_pipeline"},
"app.tasks.step_tasks.generate_usd_master_task": {"queue": "asset_pipeline"},
"app.tasks.step_tasks.reextract_rich_metadata_task": {"queue": "asset_pipeline"},
"app.tasks.step_tasks.reextract_cad_metadata": {"queue": "asset_pipeline"},
"app.tasks.step_tasks.render_order_line_task": {"queue": "asset_pipeline"},
"app.tasks.step_tasks.*": {"queue": "step_processing"}, "app.tasks.step_tasks.*": {"queue": "step_processing"},
}, },
beat_schedule={ beat_schedule={
+1
View File
@@ -11,6 +11,7 @@ from app.domains.pipeline.tasks.extract_metadata import ( # noqa: F401
reextract_rich_metadata_task, reextract_rich_metadata_task,
) )
from app.domains.pipeline.tasks.render_thumbnail import ( # noqa: F401 from app.domains.pipeline.tasks.render_thumbnail import ( # noqa: F401
render_graph_thumbnail,
render_step_thumbnail, render_step_thumbnail,
regenerate_thumbnail, regenerate_thumbnail,
) )
+1
View File
@@ -49,6 +49,7 @@ cad = [
[tool.pytest.ini_options] [tool.pytest.ini_options]
asyncio_mode = "auto" asyncio_mode = "auto"
cache_dir = "/tmp/pytest_cache"
testpaths = ["tests"] testpaths = ["tests"]
markers = [ markers = [
"integration: marks tests requiring running services", "integration: marks tests requiring running services",
+6 -1
View File
@@ -7,4 +7,9 @@ echo "Seeding templates and admin user..."
python seed.py python seed.py
echo "Starting API server..." echo "Starting API server..."
exec uvicorn app.main:app --host 0.0.0.0 --port 8888 --reload exec uvicorn app.main:app \
--host 0.0.0.0 \
--port 8888 \
--reload \
--reload-dir /app/app \
--reload-dir /app/alembic
+23 -10
View File
@@ -113,32 +113,44 @@ def parsed_anschlagplatten(parsed_excel_all):
# ── Test-DB (nutzt separate Test-Datenbank) ────────────────────────────────── # ── Test-DB (nutzt separate Test-Datenbank) ──────────────────────────────────
import os
import uuid import uuid
import pytest_asyncio import pytest_asyncio
from typing import AsyncGenerator from typing import AsyncGenerator
from httpx import AsyncClient, ASGITransport from httpx import AsyncClient, ASGITransport
from sqlalchemy.engine import make_url
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
TEST_DB_URL = os.environ.get( from app.config import settings
"TEST_DATABASE_URL", from tests.db_test_utils import reset_public_schema_async, resolve_test_db_url
"postgresql+asyncpg://hartomat:hartomat@localhost:5432/hartomat_test"
)
def _resolve_test_db_url() -> str:
return resolve_test_db_url(async_driver=True)
def _sync_settings_to_test_database() -> None:
resolved = make_url(resolve_test_db_url(async_driver=False))
settings.postgres_host = resolved.host or settings.postgres_host
settings.postgres_port = int(resolved.port or settings.postgres_port)
settings.postgres_user = resolved.username or settings.postgres_user
settings.postgres_password = resolved.password or settings.postgres_password
settings.postgres_db = resolved.database or settings.postgres_db
_sync_settings_to_test_database()
@pytest_asyncio.fixture @pytest_asyncio.fixture
async def test_engine(): async def test_engine():
from app.database import Base from app.database import Base
from sqlalchemy import text
import app.models # noqa - register all models import app.models # noqa - register all models
engine = create_async_engine(TEST_DB_URL, echo=False) engine = create_async_engine(_resolve_test_db_url(), echo=False)
async with engine.begin() as conn: async with engine.begin() as conn:
await reset_public_schema_async(conn)
await conn.run_sync(Base.metadata.create_all) await conn.run_sync(Base.metadata.create_all)
yield engine yield engine
# Use CASCADE to handle circular FK dependencies in drop
async with engine.begin() as conn: async with engine.begin() as conn:
await conn.execute(text("DROP SCHEMA public CASCADE")) await reset_public_schema_async(conn)
await conn.execute(text("CREATE SCHEMA public"))
await engine.dispose() await engine.dispose()
@@ -229,6 +241,7 @@ def mock_celery_tasks(monkeypatch):
task_paths = [ task_paths = [
"app.domains.materials.tasks.refresh_asset_library_catalog", "app.domains.materials.tasks.refresh_asset_library_catalog",
"app.tasks.step_tasks.process_step_file", "app.tasks.step_tasks.process_step_file",
"app.tasks.step_tasks.render_graph_thumbnail",
"app.tasks.step_tasks.render_step_thumbnail", "app.tasks.step_tasks.render_step_thumbnail",
"app.domains.imports.tasks.validate_excel_import", "app.domains.imports.tasks.validate_excel_import",
"app.domains.rendering.tasks.render_still_task", "app.domains.rendering.tasks.render_still_task",
+85
View File
@@ -0,0 +1,85 @@
from __future__ import annotations
from contextlib import contextmanager
import importlib
import os
from typing import Iterator
from sqlalchemy import text
from sqlalchemy.engine import make_url
from sqlalchemy.orm import Session
from sqlalchemy import create_engine
from app.database import Base
def resolve_test_db_url(*, async_driver: bool) -> str:
explicit_url = os.environ.get("TEST_DATABASE_URL")
if explicit_url:
db_url = explicit_url
else:
host = os.environ.get("TEST_POSTGRES_HOST") or os.environ.get("POSTGRES_HOST") or "localhost"
port = os.environ.get("TEST_POSTGRES_PORT") or os.environ.get("POSTGRES_PORT") or "5432"
user = os.environ.get("TEST_POSTGRES_USER") or os.environ.get("POSTGRES_USER") or "hartomat"
password = os.environ.get("TEST_POSTGRES_PASSWORD") or os.environ.get("POSTGRES_PASSWORD") or "hartomat"
default_db = f"{os.environ.get('POSTGRES_DB', 'hartomat')}_test"
database = os.environ.get("TEST_POSTGRES_DB") or os.environ.get("TEST_DB_NAME") or default_db
driver = "postgresql+asyncpg" if async_driver else "postgresql"
db_url = f"{driver}://{user}:{password}@{host}:{port}/{database}"
normalized_url = db_url if async_driver else db_url.replace("+asyncpg", "")
database_name = make_url(normalized_url).database or ""
if not database_name.endswith("_test"):
raise RuntimeError(
f"Refusing to run destructive test database setup against non-test database '{database_name}'."
)
return normalized_url
def reset_public_schema_sync(connection) -> None:
connection.execute(text("DROP SCHEMA IF EXISTS public CASCADE"))
connection.execute(text("CREATE SCHEMA public"))
async def reset_public_schema_async(connection) -> None:
await connection.execute(text("DROP SCHEMA IF EXISTS public CASCADE"))
await connection.execute(text("CREATE SCHEMA public"))
def import_all_model_modules() -> None:
module_names = (
"app.domains.tenants.models",
"app.domains.auth.models",
"app.domains.imports.models",
"app.domains.products.models",
"app.domains.orders.models",
"app.domains.notifications.models",
"app.domains.billing.models",
"app.domains.rendering.models",
"app.domains.materials.models",
"app.domains.media.models",
"app.domains.admin.models",
"app.models.system_setting",
"app.models.worker_config",
"app.models.chat",
)
for module_name in module_names:
importlib.import_module(module_name)
@contextmanager
def sync_test_session() -> Iterator[Session]:
import_all_model_modules()
engine = create_engine(resolve_test_db_url(async_driver=False))
with engine.begin() as conn:
reset_public_schema_sync(conn)
Base.metadata.create_all(conn)
session = Session(engine)
try:
yield session
finally:
session.close()
with engine.begin() as conn:
reset_public_schema_sync(conn)
engine.dispose()
@@ -1,6 +1,10 @@
"""Tests for notification config service.""" """Tests for notification config service."""
import pytest import pytest
from sqlalchemy import select
from app.domains.notifications.models import AuditLog
from app.domains.notifications.service import ( from app.domains.notifications.service import (
emit_notification,
upsert_notification_config, upsert_notification_config,
get_notification_configs, get_notification_configs,
) )
@@ -25,3 +29,25 @@ async def test_upsert_updates_existing(db, admin_user):
cfg = next((c for c in configs if c.event_type == "order_submitted"), None) cfg = next((c for c in configs if c.event_type == "order_submitted"), None)
assert cfg is not None assert cfg is not None
assert cfg.enabled is False assert cfg.enabled is False
@pytest.mark.asyncio
async def test_emit_notification_persists_naive_utc_timestamp(db, admin_user):
"""Notification writes must match the legacy naive Postgres timestamp columns."""
await emit_notification(
db,
actor_user_id=admin_user.id,
target_user_id=admin_user.id,
action="order.submitted",
entity_type="order",
entity_id="order-123",
details={"order_number": "SA-2026-00001"},
)
row = (
await db.execute(
select(AuditLog).where(AuditLog.action == "order.submitted")
)
).scalar_one()
assert row.timestamp.tzinfo is None
@@ -34,9 +34,138 @@ async def test_create_output_type_infers_artifact_kind_from_format_and_animation
payload = response.json() payload = response.json()
assert payload["workflow_family"] == "order_line" assert payload["workflow_family"] == "order_line"
assert payload["artifact_kind"] == "turntable_video" assert payload["artifact_kind"] == "turntable_video"
assert payload["workflow_rollout_mode"] == "legacy_only"
assert payload["invocation_overrides"] == {} assert payload["invocation_overrides"] == {}
@pytest.mark.asyncio
async def test_output_type_contract_catalog_exposes_backend_authored_rules(
client,
auth_headers,
):
response = await client.get(
"/api/output-types/contract-catalog",
headers=auth_headers,
)
assert response.status_code == 200, response.text
payload = response.json()
assert payload["workflow_families"] == ["order_line", "cad_file"]
assert payload["workflow_rollout_modes"] == ["legacy_only", "shadow", "graph"]
assert payload["artifact_kinds"] == [
"still_image",
"turntable_video",
"model_export",
"thumbnail_image",
"blend_asset",
"package",
"custom",
]
assert payload["allowed_artifact_kinds_by_family"]["cad_file"] == [
"model_export",
"thumbnail_image",
"package",
"custom",
]
assert payload["allowed_output_formats_by_family"]["order_line"] == [
"png",
"jpg",
"jpeg",
"webp",
"mp4",
"webm",
"mov",
"blend",
]
assert payload["allowed_invocation_override_keys_by_artifact_kind"]["turntable_video"] == [
"width",
"height",
"engine",
"samples",
"bg_color",
"noise_threshold",
"denoiser",
"denoising_input_passes",
"denoising_prefilter",
"denoising_quality",
"denoising_use_gpu",
"frame_count",
"fps",
"turntable_axis",
]
assert payload["default_output_format_by_artifact_kind"]["blend_asset"] == "blend"
assert payload["parameter_ownership"]["output_type_profile_keys"] == [
"transparent_bg",
"cycles_device",
"material_override",
]
assert payload["parameter_ownership"]["template_runtime_keys"] == [
"target_collection",
"lighting_only",
"shadow_catcher",
"camera_orbit",
"template_inputs",
]
assert payload["parameter_ownership"]["workflow_node_keys_by_step"]["resolve_template"] == [
"template_id_override",
"require_template",
"material_library_path",
"disable_materials",
"target_collection",
"material_replace_mode",
"lighting_only_mode",
"shadow_catcher_mode",
"camera_orbit_mode",
]
assert "target_collection" in payload["parameter_ownership"]["workflow_node_keys_by_step"]["blender_still"]
assert "camera_orbit" in payload["parameter_ownership"]["workflow_node_keys_by_step"]["blender_turntable"]
@pytest.mark.asyncio
async def test_create_output_type_infers_blend_asset_from_blend_format(
client,
auth_headers,
):
response = await client.post(
"/api/output-types",
json={
"name": f"Blend {uuid.uuid4().hex[:8]}",
"renderer": "blender",
"output_format": "blend",
"render_backend": "celery",
"workflow_family": "order_line",
},
headers=auth_headers,
)
assert response.status_code == 201, response.text
payload = response.json()
assert payload["workflow_family"] == "order_line"
assert payload["artifact_kind"] == "blend_asset"
@pytest.mark.asyncio
async def test_create_output_type_rejects_non_blend_artifact_for_blend_format(
client,
auth_headers,
):
response = await client.post(
"/api/output-types",
json={
"name": f"Bad Blend {uuid.uuid4().hex[:8]}",
"renderer": "blender",
"output_format": "blend",
"render_backend": "celery",
"workflow_family": "order_line",
"artifact_kind": "still_image",
},
headers=auth_headers,
)
assert response.status_code == 400, response.text
assert response.json()["detail"] == "Output format 'blend' requires artifact kind 'blend_asset'"
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_create_output_type_rejects_workflow_family_mismatch( async def test_create_output_type_rejects_workflow_family_mismatch(
client, client,
@@ -69,6 +198,52 @@ async def test_create_output_type_rejects_workflow_family_mismatch(
assert "Workflow family mismatch" in response.json()["detail"] assert "Workflow family mismatch" in response.json()["detail"]
@pytest.mark.asyncio
async def test_create_output_type_rejects_workflow_artifact_mismatch(
client,
db,
auth_headers,
):
workflow = WorkflowDefinition(
name=f"Blend Export {uuid.uuid4().hex[:8]}",
config={
"version": 1,
"ui": {"preset": "custom", "execution_mode": "graph"},
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "template", "step": "resolve_template", "params": {}},
{"id": "blend", "step": "export_blend", "params": {}},
],
"edges": [
{"from": "setup", "to": "template"},
{"from": "template", "to": "blend"},
],
},
is_active=True,
)
db.add(workflow)
await db.commit()
await db.refresh(workflow)
response = await client.post(
"/api/output-types",
json={
"name": f"Still {uuid.uuid4().hex[:8]}",
"renderer": "blender",
"output_format": "png",
"render_backend": "celery",
"workflow_family": "order_line",
"artifact_kind": "still_image",
"workflow_definition_id": str(workflow.id),
},
headers=auth_headers,
)
assert response.status_code == 400, response.text
assert "Workflow artifact mismatch" in response.json()["detail"]
assert "blend_asset" in response.json()["detail"]
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_create_output_type_rejects_artifact_kind_incompatible_with_family( async def test_create_output_type_rejects_artifact_kind_incompatible_with_family(
client, client,
@@ -91,6 +266,53 @@ async def test_create_output_type_rejects_artifact_kind_incompatible_with_family
assert "not allowed for workflow_family" in response.json()["detail"] assert "not allowed for workflow_family" in response.json()["detail"]
@pytest.mark.asyncio
async def test_create_output_type_rejects_output_format_incompatible_with_family(
client,
auth_headers,
):
response = await client.post(
"/api/output-types",
json={
"name": f"Bad CAD Blend {uuid.uuid4().hex[:8]}",
"renderer": "blender",
"output_format": "blend",
"render_backend": "celery",
"workflow_family": "cad_file",
"artifact_kind": "custom",
},
headers=auth_headers,
)
assert response.status_code == 400, response.text
assert "Output format 'blend' is not allowed for workflow_family 'cad_file'" in response.json()["detail"]
@pytest.mark.asyncio
async def test_create_output_type_preserves_legacy_safe_custom_png_output_type(
client,
auth_headers,
):
response = await client.post(
"/api/output-types",
json={
"name": f"Legacy Custom Still {uuid.uuid4().hex[:8]}",
"renderer": "blender",
"output_format": "png",
"render_backend": "celery",
"workflow_family": "order_line",
"artifact_kind": "custom",
},
headers=auth_headers,
)
assert response.status_code == 201, response.text
payload = response.json()
assert payload["workflow_family"] == "order_line"
assert payload["artifact_kind"] == "custom"
assert payload["output_format"] == "png"
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_create_output_type_rejects_turntable_video_without_animation( async def test_create_output_type_rejects_turntable_video_without_animation(
client, client,
@@ -159,6 +381,99 @@ async def test_update_output_type_rejects_mixed_family_workflow(
assert response.json()["detail"] == "Output types cannot link mixed-family workflows" assert response.json()["detail"] == "Output types cannot link mixed-family workflows"
@pytest.mark.asyncio
async def test_patch_output_type_rejects_workflow_artifact_mismatch(
client,
db,
auth_headers,
):
output_type_response = await client.post(
"/api/output-types",
json={
"name": f"Still {uuid.uuid4().hex[:8]}",
"renderer": "blender",
"output_format": "png",
"render_backend": "celery",
"workflow_family": "order_line",
"artifact_kind": "still_image",
},
headers=auth_headers,
)
assert output_type_response.status_code == 201, output_type_response.text
output_type = output_type_response.json()
workflow = WorkflowDefinition(
name=f"Blend Export {uuid.uuid4().hex[:8]}",
config={
"version": 1,
"ui": {"preset": "custom", "execution_mode": "graph"},
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "template", "step": "resolve_template", "params": {}},
{"id": "blend", "step": "export_blend", "params": {}},
],
"edges": [
{"from": "setup", "to": "template"},
{"from": "template", "to": "blend"},
],
},
is_active=True,
)
db.add(workflow)
await db.commit()
await db.refresh(workflow)
response = await client.patch(
f"/api/output-types/{output_type['id']}",
json={"workflow_definition_id": str(workflow.id)},
headers=auth_headers,
)
assert response.status_code == 400, response.text
assert "Workflow artifact mismatch" in response.json()["detail"]
assert "blend_asset" in response.json()["detail"]
@pytest.mark.asyncio
async def test_patch_output_type_updates_workflow_rollout_mode(
client,
db,
auth_headers,
):
workflow = WorkflowDefinition(
name=f"Still Graph {uuid.uuid4().hex[:8]}",
config=build_preset_workflow_config("still_graph"),
is_active=True,
)
db.add(workflow)
await db.commit()
await db.refresh(workflow)
create_response = await client.post(
"/api/output-types",
json={
"name": f"Rollout {uuid.uuid4().hex[:8]}",
"renderer": "blender",
"output_format": "png",
"render_backend": "celery",
"workflow_family": "order_line",
"workflow_definition_id": str(workflow.id),
},
headers=auth_headers,
)
assert create_response.status_code == 201, create_response.text
output_type = create_response.json()
patch_response = await client.patch(
f"/api/output-types/{output_type['id']}",
json={"workflow_rollout_mode": "graph"},
headers=auth_headers,
)
assert patch_response.status_code == 200, patch_response.text
assert patch_response.json()["workflow_rollout_mode"] == "graph"
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_create_output_type_backfills_invocation_overrides_from_legacy_render_settings( async def test_create_output_type_backfills_invocation_overrides_from_legacy_render_settings(
client, client,
@@ -189,6 +504,25 @@ async def test_create_output_type_backfills_invocation_overrides_from_legacy_ren
"height": 900, "height": 900,
"engine": "cycles", "engine": "cycles",
} }
assert payload["invocation_profile"]["artifact_kind"] == "still_image"
assert payload["invocation_profile"]["allowed_override_keys"] == [
"width",
"height",
"engine",
"samples",
"bg_color",
"noise_threshold",
"denoiser",
"denoising_input_passes",
"denoising_prefilter",
"denoising_quality",
"denoising_use_gpu",
]
assert payload["invocation_profile"]["invocation_overrides"] == {
"width": 1600,
"height": 900,
"engine": "cycles",
}
assert payload["render_settings"]["width"] == 1600 assert payload["render_settings"]["width"] == 1600
assert payload["render_settings"]["height"] == 900 assert payload["render_settings"]["height"] == 900
assert payload["render_settings"]["engine"] == "cycles" assert payload["render_settings"]["engine"] == "cycles"
@@ -235,6 +569,57 @@ async def test_patch_output_type_invocation_overrides_syncs_legacy_render_settin
assert payload["render_settings"]["engine"] == "cycles" assert payload["render_settings"]["engine"] == "cycles"
@pytest.mark.asyncio
async def test_create_output_type_rejects_unknown_invocation_override_key(
client,
auth_headers,
):
response = await client.post(
"/api/output-types",
json={
"name": f"Bad Override {uuid.uuid4().hex[:8]}",
"renderer": "blender",
"output_format": "png",
"render_backend": "celery",
"workflow_family": "order_line",
"invocation_overrides": {
"width": 1600,
"bogus": "value",
},
},
headers=auth_headers,
)
assert response.status_code == 400, response.text
assert "Unsupported invocation override keys" in response.json()["detail"]
@pytest.mark.asyncio
async def test_create_output_type_rejects_disallowed_invocation_override_for_blend_asset(
client,
auth_headers,
):
response = await client.post(
"/api/output-types",
json={
"name": f"Blend Override {uuid.uuid4().hex[:8]}",
"renderer": "blender",
"output_format": "blend",
"render_backend": "celery",
"workflow_family": "order_line",
"invocation_overrides": {
"width": 1600,
},
},
headers=auth_headers,
)
assert response.status_code == 400, response.text
assert response.json()["detail"] == (
"Invocation overrides not allowed for artifact kind 'blend_asset': width"
)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_patch_output_type_recomputes_artifact_kind_when_switching_family( async def test_patch_output_type_recomputes_artifact_kind_when_switching_family(
client, client,
@@ -266,3 +651,35 @@ async def test_patch_output_type_recomputes_artifact_kind_when_switching_family(
payload = response.json() payload = response.json()
assert payload["workflow_family"] == "cad_file" assert payload["workflow_family"] == "cad_file"
assert payload["artifact_kind"] == "thumbnail_image" assert payload["artifact_kind"] == "thumbnail_image"
@pytest.mark.asyncio
async def test_patch_output_type_rejects_output_format_incompatible_with_family(
client,
auth_headers,
):
output_type_response = await client.post(
"/api/output-types",
json={
"name": f"Still {uuid.uuid4().hex[:8]}",
"renderer": "blender",
"output_format": "png",
"render_backend": "celery",
"workflow_family": "order_line",
},
headers=auth_headers,
)
assert output_type_response.status_code == 201, output_type_response.text
output_type = output_type_response.json()
response = await client.patch(
f"/api/output-types/{output_type['id']}",
json={
"output_format": "gltf",
"artifact_kind": "custom",
},
headers=auth_headers,
)
assert response.status_code == 400, response.text
assert "Output format 'gltf' is not allowed for workflow_family 'order_line'" in response.json()["detail"]
@@ -0,0 +1,574 @@
from __future__ import annotations
import importlib.util
import selectors
import sys
from pathlib import Path
from types import SimpleNamespace
import pytest
def test_resolve_render_samples_uses_system_settings_when_omitted(monkeypatch):
from app.services.render_blender import _resolve_render_samples
monkeypatch.setattr(
"app.services.step_processor._get_all_settings",
lambda: {
"blender_cycles_samples": "32",
"blender_eevee_samples": "12",
},
)
assert _resolve_render_samples("cycles", None) == 32
assert _resolve_render_samples("eevee", None) == 12
assert _resolve_render_samples("cycles", 48) == 48
def test_resolve_tessellation_settings_uses_profile_specific_values(monkeypatch):
from app.services.render_blender import resolve_tessellation_settings
monkeypatch.setattr(
"app.services.step_processor._get_all_settings",
lambda: {
"tessellation_engine": "occ",
"scene_linear_deflection": "0.1",
"scene_angular_deflection": "0.1",
"render_linear_deflection": "0.03",
"render_angular_deflection": "0.05",
},
)
assert resolve_tessellation_settings("scene") == (0.1, 0.1, "occ")
assert resolve_tessellation_settings("render") == (0.03, 0.05, "occ")
def test_render_still_passes_resolved_samples_to_blender_cli(tmp_path, monkeypatch):
    """render_still forwards the resolved sample count to the Blender CLI.

    The resolved value must show up both as the samples positional argument
    (cmd index 10) and as BLENDER_DEFAULT_SAMPLES in the subprocess env.
    """
    from app.services.render_blender import build_tessellated_glb_path, render_still
    step_path = tmp_path / "bearing.step"
    step_path.write_text("STEP", encoding="utf-8")
    # Pre-create the cached GLB so render_still skips tessellation.
    glb_path = build_tessellated_glb_path(step_path, "render", "occ", 0.03, 0.05)
    glb_path.parent.mkdir(parents=True, exist_ok=True)
    glb_path.write_text("GLB", encoding="utf-8")
    output_path = tmp_path / "render.png"
    output_path.write_text("PNG", encoding="utf-8")
    scripts_dir = tmp_path / "render-scripts"
    scripts_dir.mkdir()
    (scripts_dir / "blender_render.py").write_text("# test stub\n", encoding="utf-8")
    captured: dict[str, object] = {}
    # Minimal stand-in for subprocess.Popen's return value.
    class _FakeProc:
        def __init__(self) -> None:
            self.stdout = object()
            self.stderr = object()
            self.pid = 1234
            self.returncode = 0
        # Fixed: the original defined wait() three times in a row; Python
        # keeps only the last definition, so the duplicates were dead code.
        def wait(self, timeout: int | None = None) -> int:
            del timeout
            return self.returncode
    # No-op selector so the log-streaming loop exits immediately.
    class _FakeSelector:
        def register(self, *_args, **_kwargs) -> None:
            return None
        def get_map(self) -> dict:
            return {}
        def close(self) -> None:
            return None
    def _fake_popen(cmd, stdout, stderr, text, env, start_new_session):
        captured["cmd"] = cmd
        captured["env"] = env
        return _FakeProc()
    monkeypatch.setenv("RENDER_SCRIPTS_DIR", str(scripts_dir))
    monkeypatch.setattr("app.services.render_blender.find_blender", lambda: "/usr/bin/blender")
    monkeypatch.setattr("app.services.render_blender.ensure_group_writable_dir", lambda _path: None)
    monkeypatch.setattr("app.services.render_blender._resolve_render_samples", lambda engine, samples: 32)
    monkeypatch.setattr("app.services.render_blender.subprocess.Popen", _fake_popen)
    monkeypatch.setattr(selectors, "DefaultSelector", _FakeSelector)
    result = render_still(
        step_path=step_path,
        output_path=output_path,
        engine="cycles",
        samples=None,
        width=640,
        height=480,
    )
    assert captured["cmd"][10] == "32"
    assert captured["env"]["BLENDER_DEFAULT_SAMPLES"] == "32"
    assert result["engine_used"] == "cycles"
def test_render_still_passes_template_inputs_to_blender_cli(tmp_path, monkeypatch):
    """render_still serializes template_inputs to JSON and passes them to the
    Blender CLI via the --template-inputs flag."""
    from app.services.render_blender import build_tessellated_glb_path, render_still
    step_path = tmp_path / "bearing.step"
    step_path.write_text("STEP", encoding="utf-8")
    # Pre-create the cached GLB so no tessellation is attempted.
    glb_path = build_tessellated_glb_path(step_path, "render", "occ", 0.03, 0.05)
    glb_path.parent.mkdir(parents=True, exist_ok=True)
    glb_path.write_text("GLB", encoding="utf-8")
    output_path = tmp_path / "render.png"
    output_path.write_text("PNG", encoding="utf-8")
    scripts_dir = tmp_path / "render-scripts"
    scripts_dir.mkdir()
    (scripts_dir / "blender_render.py").write_text("# test stub\n", encoding="utf-8")
    captured: dict[str, object] = {}
    # Minimal stand-in for subprocess.Popen's return value.
    class _FakeProc:
        def __init__(self) -> None:
            self.stdout = object()
            self.stderr = object()
            self.pid = 1234
            self.returncode = 0
        def wait(self, timeout: int = 10) -> int:
            return self.returncode
    # No-op selector so the log-streaming loop exits immediately.
    class _FakeSelector:
        def register(self, *_args, **_kwargs) -> None:
            return None
        def get_map(self) -> dict:
            return {}
        def close(self) -> None:
            return None
    def _fake_popen(cmd, stdout, stderr, text, env, start_new_session):
        captured["cmd"] = cmd
        return _FakeProc()
    monkeypatch.setenv("RENDER_SCRIPTS_DIR", str(scripts_dir))
    monkeypatch.setattr("app.services.render_blender.find_blender", lambda: "/usr/bin/blender")
    monkeypatch.setattr("app.services.render_blender.ensure_group_writable_dir", lambda _path: None)
    monkeypatch.setattr("app.services.render_blender._resolve_render_samples", lambda engine, samples: 32)
    monkeypatch.setattr("app.services.render_blender.subprocess.Popen", _fake_popen)
    monkeypatch.setattr(selectors, "DefaultSelector", _FakeSelector)
    render_still(
        step_path=step_path,
        output_path=output_path,
        engine="cycles",
        samples=None,
        width=640,
        height=480,
        template_inputs={"studio_variant": "warm"},
    )
    # The flag must be present, immediately followed by the JSON payload.
    assert "--template-inputs" in captured["cmd"]
    idx = captured["cmd"].index("--template-inputs")
    assert captured["cmd"][idx + 1] == '{"studio_variant": "warm"}'
def test_render_still_uses_settings_sensitive_render_glb_path(tmp_path, monkeypatch):
    """render_still derives the cached GLB path from the current tessellation
    settings and hands exactly that path to the Blender CLI (cmd index 5)."""
    from app.services.render_blender import build_tessellated_glb_path, render_still
    step_path = tmp_path / "bearing.step"
    step_path.write_text("STEP", encoding="utf-8")
    output_path = tmp_path / "render.png"
    output_path.write_text("PNG", encoding="utf-8")
    scripts_dir = tmp_path / "render-scripts"
    scripts_dir.mkdir()
    (scripts_dir / "blender_render.py").write_text("# test stub\n", encoding="utf-8")
    captured: dict[str, object] = {}
    # Minimal stand-in for subprocess.Popen's return value.
    class _FakeProc:
        def __init__(self) -> None:
            self.stdout = object()
            self.stderr = object()
            self.pid = 1234
            self.returncode = 0
        def wait(self, timeout: int = 10) -> int:
            return self.returncode
    # No-op selector so the log-streaming loop exits immediately.
    class _FakeSelector:
        def register(self, *_args, **_kwargs) -> None:
            return None
        def get_map(self) -> dict:
            return {}
        def close(self) -> None:
            return None
    # Records the GLB path/engine/profile render_still asks for and creates
    # the file so any subsequent existence check passes.
    def _fake_glb_from_step(step_path, glb_path, tessellation_engine="occ", tessellation_profile="render"):
        captured["glb_path"] = glb_path
        captured["tessellation_engine"] = tessellation_engine
        captured["tessellation_profile"] = tessellation_profile
        glb_path.write_text("GLB", encoding="utf-8")
    def _fake_popen(cmd, stdout, stderr, text, env, start_new_session):
        captured["cmd"] = cmd
        return _FakeProc()
    monkeypatch.setenv("RENDER_SCRIPTS_DIR", str(scripts_dir))
    monkeypatch.setattr("app.services.render_blender.find_blender", lambda: "/usr/bin/blender")
    monkeypatch.setattr("app.services.render_blender.ensure_group_writable_dir", lambda _path: None)
    monkeypatch.setattr("app.services.render_blender._resolve_render_samples", lambda engine, samples: 32)
    monkeypatch.setattr(
        "app.services.step_processor._get_all_settings",
        lambda: {
            "tessellation_engine": "occ",
            "render_linear_deflection": "0.03",
            "render_angular_deflection": "0.05",
            "blender_cycles_samples": "32",
            "blender_eevee_samples": "12",
        },
    )
    monkeypatch.setattr("app.services.render_blender._glb_from_step", _fake_glb_from_step)
    monkeypatch.setattr("app.services.render_blender.subprocess.Popen", _fake_popen)
    monkeypatch.setattr(selectors, "DefaultSelector", _FakeSelector)
    render_still(
        step_path=step_path,
        output_path=output_path,
        engine="cycles",
        samples=None,
        width=640,
        height=480,
    )
    # Expected path encodes profile, engine, and both deflection values.
    expected_glb_path = build_tessellated_glb_path(step_path, "render", "occ", 0.03, 0.05)
    assert captured["glb_path"] == expected_glb_path
    assert captured["tessellation_profile"] == "render"
    assert captured["cmd"][5] == str(expected_glb_path)
def test_render_turntable_passes_template_inputs_to_blender_cli(tmp_path, monkeypatch):
    """render_turntable_to_file forwards template_inputs as JSON via the
    --template-inputs CLI flag."""
    from app.services.render_blender import build_tessellated_glb_path, render_turntable_to_file
    step_path = tmp_path / "bearing.step"
    step_path.write_text("STEP", encoding="utf-8")
    # Pre-create the cached GLB so no tessellation is attempted.
    glb_path = build_tessellated_glb_path(step_path, "render", "occ", 0.03, 0.05)
    glb_path.parent.mkdir(parents=True, exist_ok=True)
    glb_path.write_text("GLB", encoding="utf-8")
    output_path = tmp_path / "turntable.mp4"
    output_path.parent.mkdir(parents=True, exist_ok=True)
    scripts_dir = tmp_path / "render-scripts"
    scripts_dir.mkdir()
    (scripts_dir / "turntable_render.py").write_text("# test stub\n", encoding="utf-8")
    captured: dict[str, object] = {}
    class _FakeProc:
        def __init__(self) -> None:
            self.pid = 1234
            self.returncode = 0
        # Materializes one frame in the frames dir (cmd index 6) so the
        # post-render frame check succeeds.
        def communicate(self, timeout: int | None = None) -> tuple[str, str]:
            frames_dir = Path(captured["cmd"][6])
            frames_dir.mkdir(parents=True, exist_ok=True)
            (frames_dir / "frame_0001.png").write_text("PNG", encoding="utf-8")
            return ("[turntable_render] ok\n", "")
    def _fake_popen(cmd, stdout, stderr, text, env, start_new_session):
        captured["cmd"] = cmd
        return _FakeProc()
    # Stands in for the ffmpeg encode step and materializes the MP4.
    def _fake_ffmpeg(cmd, capture_output, text, timeout):
        output_path.write_text("MP4", encoding="utf-8")
        return SimpleNamespace(returncode=0, stdout="", stderr="")
    monkeypatch.setenv("RENDER_SCRIPTS_DIR", str(scripts_dir))
    monkeypatch.setattr("app.services.render_blender.find_blender", lambda: "/usr/bin/blender")
    monkeypatch.setattr("app.services.render_blender.ensure_group_writable_dir", lambda _path: None)
    monkeypatch.setattr("app.services.render_blender.subprocess.Popen", _fake_popen)
    monkeypatch.setattr("app.services.render_blender.subprocess.run", _fake_ffmpeg)
    monkeypatch.setattr("app.services.render_blender.build_turntable_ffmpeg_cmd", lambda *args, **kwargs: ["ffmpeg", str(output_path)])
    monkeypatch.setattr("app.services.render_blender.resolve_tessellation_settings", lambda *args, **kwargs: (0.03, 0.05, "occ"))
    render_turntable_to_file(
        step_path=step_path,
        output_path=output_path,
        engine="cycles",
        samples=32,
        template_inputs={"studio_variant": "warm"},
    )
    # The flag must be present, immediately followed by the JSON payload.
    assert "--template-inputs" in captured["cmd"]
    idx = captured["cmd"].index("--template-inputs")
    assert captured["cmd"][idx + 1] == '{"studio_variant": "warm"}'
def test_render_cinematic_passes_template_inputs_to_blender_cli(tmp_path, monkeypatch):
    """render_cinematic_to_file forwards template_inputs as JSON via the
    --template-inputs CLI flag, exercising the streaming log-reader path."""
    from app.services.render_blender import build_tessellated_glb_path, render_cinematic_to_file
    step_path = tmp_path / "bearing.step"
    step_path.write_text("STEP", encoding="utf-8")
    # Pre-create the cached GLB so no tessellation is attempted.
    glb_path = build_tessellated_glb_path(step_path, "render", "occ", 0.03, 0.05)
    glb_path.parent.mkdir(parents=True, exist_ok=True)
    glb_path.write_text("GLB", encoding="utf-8")
    output_path = tmp_path / "cinematic.mp4"
    output_path.parent.mkdir(parents=True, exist_ok=True)
    scripts_dir = tmp_path / "render-scripts"
    scripts_dir.mkdir()
    (scripts_dir / "cinematic_render.py").write_text("# test stub\n", encoding="utf-8")
    captured: dict[str, object] = {}
    class _FakeProc:
        def __init__(self) -> None:
            self.stdout = object()
            self.stderr = object()
            self.pid = 1234
            self.returncode = 0
        def wait(self, timeout: int | None = None) -> int:
            del timeout
            return self.returncode
    # Stateful selector fake: delivers each registered stream exactly once,
    # then drains everything so the caller's read loop terminates.
    class _FakeSelector:
        def __init__(self) -> None:
            self._registered: list[object] = []
            self._delivered = False
        def register(self, fileobj, _event, data):
            self._registered.append((fileobj, data))
        def unregister(self, fileobj):
            self._registered = [item for item in self._registered if item[0] is not fileobj]
        def get_map(self) -> dict[int, object]:
            return {idx: item for idx, item in enumerate(self._registered)}
        def select(self, timeout=None):
            del timeout
            if self._delivered:
                # Second call: force EOF on every stream and empty the map.
                for fileobj, _data in list(self._registered):
                    if hasattr(fileobj, "readline"):
                        fileobj.readline = lambda: ""
                self._registered.clear()
                return []
            self._delivered = True
            events = []
            # First call: report every registered stream as readable.
            for fileobj, data in list(self._registered):
                events.append((SimpleNamespace(fileobj=fileobj, data=data), None))
            return events
        def close(self):
            return None
    # Line-buffered stream fake; returns "" (EOF) once exhausted.
    class _FakeStream:
        def __init__(self, lines: list[str]) -> None:
            self._lines = list(lines)
        def readline(self) -> str:
            if not self._lines:
                return ""
            return self._lines.pop(0)
    # Captures the CLI command and seeds a frame (cmd index 6 is the frames
    # dir) so the post-render frame check succeeds.
    def _fake_popen(cmd, stdout, stderr, text, env, start_new_session):
        captured["cmd"] = cmd
        frames_dir = Path(cmd[6])
        frames_dir.mkdir(parents=True, exist_ok=True)
        (frames_dir / "frame_0001.png").write_text("PNG", encoding="utf-8")
        proc = _FakeProc()
        proc.stdout = _FakeStream(["[cinematic_render] ok\n"])
        proc.stderr = _FakeStream([])
        return proc
    # Stands in for the ffmpeg encode step and materializes the MP4.
    def _fake_ffmpeg(cmd, capture_output, text, timeout):
        output_path.write_text("MP4", encoding="utf-8")
        return SimpleNamespace(returncode=0, stdout="", stderr="")
    monkeypatch.setenv("RENDER_SCRIPTS_DIR", str(scripts_dir))
    monkeypatch.setattr("app.services.render_blender.find_blender", lambda: "/usr/bin/blender")
    monkeypatch.setattr("app.services.render_blender.ensure_group_writable_dir", lambda _path: None)
    monkeypatch.setattr("app.services.render_blender.subprocess.Popen", _fake_popen)
    monkeypatch.setattr("app.services.render_blender.subprocess.run", _fake_ffmpeg)
    monkeypatch.setattr("app.services.render_blender.build_turntable_ffmpeg_cmd", lambda *args, **kwargs: ["ffmpeg", str(output_path)])
    monkeypatch.setattr("app.services.render_blender.resolve_tessellation_settings", lambda *args, **kwargs: (0.03, 0.05, "occ"))
    monkeypatch.setattr("selectors.DefaultSelector", _FakeSelector)
    render_cinematic_to_file(
        step_path=step_path,
        output_path=output_path,
        engine="cycles",
        samples=32,
        template_inputs={"studio_variant": "warm"},
    )
    # The flag must be present, immediately followed by the JSON payload.
    assert "--template-inputs" in captured["cmd"]
    idx = captured["cmd"].index("--template-inputs")
    assert captured["cmd"][idx + 1] == '{"studio_variant": "warm"}'
def test_render_still_task_keeps_samples_unset_until_render_service(tmp_path, monkeypatch):
    """The Celery task must not resolve samples itself; it forwards None so
    the render service applies system defaults."""
    from app.domains.rendering.tasks import render_still_task

    source_step = tmp_path / "bearing.step"
    source_step.write_text("STEP", encoding="utf-8")
    target_png = tmp_path / "render.png"
    seen_kwargs: dict[str, object] = {}

    def _record_render_call(**kwargs):
        seen_kwargs.update(kwargs)
        return {"total_duration_s": 0.1}

    def _raise_retry(*, exc, countdown):
        raise exc

    monkeypatch.setattr("app.domains.rendering.tasks.log_task_event", lambda *args, **kwargs: None)
    monkeypatch.setattr("app.services.render_blender.render_still", _record_render_call)
    fake_task = SimpleNamespace(
        request=SimpleNamespace(id="task-still"),
        retry=_raise_retry,
    )
    # Invoke the bound task body directly, bypassing Celery dispatch.
    outcome = render_still_task.run.__func__(fake_task, str(source_step), str(target_png))
    assert seen_kwargs["samples"] is None
    assert outcome["total_duration_s"] == 0.1
def test_blender_args_prefers_backend_default_samples_env(monkeypatch):
    """An empty samples CLI slot falls back to BLENDER_DEFAULT_SAMPLES."""
    # _blender_args.py lives in the render-worker checkout, which may be
    # absent in some runtimes.
    scripts_root = Path(__file__).resolve().parents[2] / "render-worker" / "scripts"
    module_path = scripts_root / "_blender_args.py"
    if not module_path.exists():
        pytest.skip(f"{module_path} not present in this runtime")
    spec = importlib.util.spec_from_file_location("test_blender_args_module", module_path)
    assert spec is not None
    assert spec.loader is not None
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    monkeypatch.setenv("BLENDER_DEFAULT_SAMPLES", "32")
    # Positional argv as Blender passes it after "--"; the trailing empty
    # string is the (omitted) samples slot.
    fake_argv = ["blender_render.py", "--", "input.glb", "output.png", "512", "512", "cycles", ""]
    monkeypatch.setattr(sys, "argv", fake_argv)
    args = module.parse_args()
    assert args.samples == 32
def test_blender_args_parses_template_inputs(monkeypatch):
    """The standalone arg parser decodes --template-inputs JSON into a dict."""
    # _blender_args.py lives in the render-worker checkout, which may be
    # absent in some runtimes (hence the skip below).
    module_path = (
        Path(__file__).resolve().parents[2]
        / "render-worker"
        / "scripts"
        / "_blender_args.py"
    )
    if not module_path.exists():
        pytest.skip(f"{module_path} not present in this runtime")
    spec = importlib.util.spec_from_file_location("test_blender_args_module_template_inputs", module_path)
    assert spec is not None
    assert spec.loader is not None
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    # Full positional argv as Blender passes it after the "--" separator;
    # empty strings are intentionally-blank optional slots.
    monkeypatch.setattr(
        sys,
        "argv",
        [
            "blender_render.py",
            "--",
            "input.glb",
            "output.png",
            "512",
            "512",
            "cycles",
            "64",
            "30",
            "auto",
            "0",
            "",
            "Product",
            "",
            "{}",
            "[]",
            "0",
            "0",
            "0",
            "0",
            "",
            "",
            "",
            "",
            "",
            "",
            "--template-inputs",
            '{"studio_variant":"warm"}',
        ],
    )
    args = module.parse_args()
    # The JSON string becomes a parsed mapping on the namespace.
    assert args.template_inputs == {"studio_variant": "warm"}
def test_render_to_file_preserves_explicit_zero_samples(tmp_path, monkeypatch):
    """An explicit samples=0 must reach the renderer and the log unchanged
    (0 is falsy, so a naive `samples or default` would clobber it)."""
    from app.services.step_processor import render_to_file
    step_path = tmp_path / "bearing.step"
    step_path.write_text("STEP", encoding="utf-8")
    output_path = tmp_path / "render.png"
    captured: dict[str, object] = {}
    monkeypatch.setattr(
        "app.services.step_processor._get_all_settings",
        lambda: {
            "thumbnail_renderer": "blender",
            "thumbnail_format": "png",
            "blender_engine": "cycles",
            "blender_cycles_samples": "32",
            "blender_eevee_samples": "12",
            "cycles_device": "auto",
            "blender_smooth_angle": "30",
            "tessellation_engine": "occ",
        },
    )
    monkeypatch.setattr("app.services.step_processor.ensure_group_writable_dir", lambda _path: None)
    monkeypatch.setattr("app.services.render_blender.is_blender_available", lambda: True)
    # Captures forwarded kwargs and writes the output file so the success
    # check in render_to_file passes.
    def _fake_render_still(**kwargs):
        captured.update(kwargs)
        kwargs["output_path"].write_text("PNG", encoding="utf-8")
        return {"total_duration_s": 0.1, "engine_used": kwargs["engine"]}
    monkeypatch.setattr("app.services.render_blender.render_still", _fake_render_still)
    success, render_log = render_to_file(
        str(step_path),
        str(output_path),
        samples=0,
    )
    assert success is True
    assert captured["samples"] == 0
    assert render_log["samples"] == 0
@@ -0,0 +1,152 @@
from __future__ import annotations
import os
import uuid
from contextlib import contextmanager
from pathlib import Path
import pytest
from sqlalchemy import select, text
from sqlalchemy.orm import Session
from app.domains.auth.models import User, UserRole
from app.domains.media.models import MediaAsset, MediaAssetType
from app.domains.orders.models import Order, OrderLine, OrderStatus
from app.domains.products.models import CadFile, Product
from app.domains.rendering.models import OutputType
from tests.db_test_utils import sync_test_session as sync_test_session_ctx
@pytest.fixture
def sync_session():
    """Yield a synchronous SQLAlchemy session bound to the test database;
    closed/cleaned up by the context manager on teardown."""
    with sync_test_session_ctx() as session:
        yield session
def _seed_order_line(session: Session, tmp_path: Path) -> OrderLine:
    """Create and commit the full object graph an order line needs — user,
    CAD file (backed by a real STEP file on disk), product, output type,
    order, and the line itself — and return the committed line."""
    step_path = tmp_path / "parts" / "bearing.step"
    step_path.parent.mkdir(parents=True, exist_ok=True)
    step_path.write_text("STEP", encoding="utf-8")
    # Random email/hash suffixes keep reruns collision-free on a shared DB.
    user = User(
        id=uuid.uuid4(),
        email=f"publish-{uuid.uuid4().hex[:8]}@test.local",
        password_hash="hash",
        full_name="Publish Tester",
        role=UserRole.admin,
        is_active=True,
    )
    cad_file = CadFile(
        id=uuid.uuid4(),
        original_name="bearing.step",
        stored_path=str(step_path),
        file_hash=f"hash-{uuid.uuid4().hex}",
    )
    product = Product(
        id=uuid.uuid4(),
        pim_id="P-2000",
        name="Bearing Publish",
        category_key="bearings",
        cad_file_id=cad_file.id,
        cad_file=cad_file,
    )
    output_type = OutputType(
        id=uuid.uuid4(),
        name="HQ Still",
        renderer="blender",
        output_format="png",
        render_settings={"width": 1600, "height": 900},
    )
    order = Order(
        id=uuid.uuid4(),
        order_number=f"ORD-{uuid.uuid4().hex[:8]}",
        status=OrderStatus.processing,
        created_by=user.id,
    )
    line = OrderLine(
        id=uuid.uuid4(),
        order_id=order.id,
        product_id=product.id,
        product=product,
        output_type_id=output_type.id,
        output_type=output_type,
        render_status="processing",
    )
    session.add_all([user, cad_file, product, output_type, order, line])
    session.commit()
    return line
def test_publish_asset_canonicalizes_still_outputs(sync_session, tmp_path, monkeypatch):
    """publish_asset moves a still render into the canonical uploads layout
    (renders/<line_id>/<Product>_<OutputType>.png), updates the order line's
    result_path, and records a MediaAsset of type `still`."""
    from app.config import settings
    from app.domains.rendering.tasks import publish_asset
    upload_dir = tmp_path / "uploads"
    monkeypatch.setattr(settings, "upload_dir", str(upload_dir))
    line = _seed_order_line(sync_session, tmp_path)
    source_output = tmp_path / "parts" / "renders" / "line.png"
    source_output.parent.mkdir(parents=True, exist_ok=True)
    source_output.write_bytes(b"png")
    # Route the task's session factory to this test's session.
    @contextmanager
    def _session_ctx():
        yield sync_session
    monkeypatch.setattr("app.core.db_utils.get_sync_session", _session_ctx)
    asset_id = publish_asset.run(
        str(line.id),
        "still",
        str(source_output),
        render_config={"renderer": "blender", "engine_used": "cycles"},
    )
    sync_session.expire_all()
    stored_line = sync_session.get(OrderLine, line.id)
    stored_asset = sync_session.execute(
        select(MediaAsset).where(MediaAsset.id == uuid.UUID(asset_id))
    ).scalar_one()
    # Canonical filename derives from product name + output type name.
    assert stored_line.result_path == f"{upload_dir}/renders/{line.id}/Bearing_Publish_HQ_Still.png"
    assert Path(stored_line.result_path).is_file()
    assert stored_asset.storage_key == f"renders/{line.id}/Bearing_Publish_HQ_Still.png"
    assert stored_asset.asset_type == MediaAssetType.still
def test_publish_asset_canonicalizes_blend_storage_key_without_touching_order_line(sync_session, tmp_path, monkeypatch):
    """A production .blend is registered as a MediaAsset keyed by its original
    on-disk path and must leave OrderLine.result_path untouched (None)."""
    from app.config import settings
    from app.domains.rendering.tasks import publish_asset
    upload_dir = tmp_path / "uploads"
    monkeypatch.setattr(settings, "upload_dir", str(upload_dir))
    line = _seed_order_line(sync_session, tmp_path)
    source_output = tmp_path / "parts" / "bearing_production.blend"
    source_output.parent.mkdir(parents=True, exist_ok=True)
    source_output.write_bytes(b"blend")
    # Route the task's session factory to this test's session.
    @contextmanager
    def _session_ctx():
        yield sync_session
    monkeypatch.setattr("app.core.db_utils.get_sync_session", _session_ctx)
    asset_id = publish_asset.run(
        str(line.id),
        "blend_production",
        str(source_output),
        render_config={"artifact_type": "blend_production"},
    )
    sync_session.expire_all()
    stored_line = sync_session.get(OrderLine, line.id)
    stored_asset = sync_session.execute(
        select(MediaAsset).where(MediaAsset.id == uuid.UUID(asset_id))
    ).scalar_one()
    # Blend artifacts never become the line's primary result.
    assert stored_line.result_path is None
    assert stored_asset.storage_key == str(source_output)
    assert stored_asset.asset_type == MediaAssetType.blend_production
File diff suppressed because it is too large Load Diff
@@ -0,0 +1,77 @@
from app.domains.rendering.template_input_audit import (
extract_template_input_marker,
suggest_workflow_input_schema,
)
def test_extract_template_input_marker_from_combined_property() -> None:
    """A 'key=value' combined property is split into a (key, value) pair."""
    props = {"template_input": "studio_variant=warm"}
    assert extract_template_input_marker(props=props) == ("studio_variant", "warm")
def test_extract_template_input_marker_from_json_property() -> None:
    """A JSON-encoded marker property yields its key/value pair."""
    payload = '{"key":"lighting_profile","value":"shadow"}'
    result = extract_template_input_marker(props={"hartomat_template_input": payload})
    assert result == ("lighting_profile", "shadow")
def test_extract_template_input_marker_from_split_properties() -> None:
    """Separate key/value properties are combined into one marker."""
    split_props = {
        "template_input_key": "alpha_mode",
        "template_input_value": "transparent",
    }
    assert extract_template_input_marker(props=split_props) == ("alpha_mode", "transparent")
def test_extract_template_input_marker_from_name_pattern() -> None:
    """An object name of the form 'template-input:key=value' is parsed."""
    result = extract_template_input_marker(name="template-input:studio_variant=warm")
    assert result == ("studio_variant", "warm")
def test_suggest_workflow_input_schema_builds_select_fields() -> None:
    """Markers with multiple string values become one select field per key;
    the expected output shows keys and options sorted alphabetically, with
    the first option as default and title-cased labels."""
    schema = suggest_workflow_input_schema(
        [
            ("studio_variant", "warm"),
            ("studio_variant", "cool"),
            ("alpha_mode", "transparent"),
            ("alpha_mode", "opaque"),
        ]
    )
    assert schema == [
        {
            "default": "opaque",
            "key": "alpha_mode",
            "label": "Alpha Mode",
            "options": [
                {"label": "Opaque", "value": "opaque"},
                {"label": "Transparent", "value": "transparent"},
            ],
            "section": "Template Inputs",
            "type": "select",
        },
        {
            "default": "cool",
            "key": "studio_variant",
            "label": "Studio Variant",
            "options": [
                {"label": "Cool", "value": "cool"},
                {"label": "Warm", "value": "warm"},
            ],
            "section": "Template Inputs",
            "type": "select",
        },
    ]
def test_suggest_workflow_input_schema_builds_boolean_field() -> None:
    """Markers whose values are only true/false collapse into a boolean field."""
    markers = [("shadow_pass", "true"), ("shadow_pass", "false")]
    expected_field = {
        "default": False,
        "key": "shadow_pass",
        "label": "Shadow Pass",
        "section": "Template Inputs",
        "type": "boolean",
    }
    assert suggest_workflow_input_schema(markers) == [expected_field]
@@ -40,8 +40,9 @@ def test_build_preset_workflow_config_creates_graph_still_variant():
assert config["ui"]["execution_mode"] == "graph" assert config["ui"]["execution_mode"] == "graph"
assert [node["step"] for node in config["nodes"]] == [ assert [node["step"] for node in config["nodes"]] == [
"order_line_setup", "order_line_setup",
"auto_populate_materials",
"resolve_template", "resolve_template",
"auto_populate_materials",
"glb_bbox",
"material_map_resolve", "material_map_resolve",
"blender_still", "blender_still",
"output_save", "output_save",
@@ -51,6 +52,7 @@ def test_build_preset_workflow_config_creates_graph_still_variant():
assert render_node["params"]["width"] == 1600 assert render_node["params"]["width"] == 1600
assert render_node["params"]["height"] == 900 assert render_node["params"]["height"] == 900
assert render_node["params"]["samples"] == 128 assert render_node["params"]["samples"] == 128
assert render_node["params"]["use_custom_render_settings"] is False
def test_canonicalize_workflow_config_migrates_legacy_preset(): def test_canonicalize_workflow_config_migrates_legacy_preset():
@@ -215,6 +217,111 @@ def test_canonicalize_workflow_config_defaults_execution_mode_for_canonical_conf
assert canonical["ui"]["execution_mode"] == "legacy" assert canonical["ui"]["execution_mode"] == "legacy"
def test_canonicalize_workflow_config_rebuilds_canonical_still_graph_preset():
    """Canonicalizing a still_graph preset rebuilds the node list into the
    canonical order (adding auto_populate_materials and glb_bbox) while
    preserving the user's render parameters (width/height/samples)."""
    canonical = canonicalize_workflow_config(
        {
            "version": 1,
            "ui": {"preset": "still_graph", "execution_mode": "graph"},
            "nodes": [
                {"id": "setup", "step": "order_line_setup", "params": {}},
                {"id": "resolve_materials", "step": "material_map_resolve", "params": {}},
                {"id": "template", "step": "resolve_template", "params": {}},
                {"id": "render", "step": "blender_still", "params": {"width": 1280, "height": 720, "samples": 32}},
                {"id": "output", "step": "output_save", "params": {}},
                {"id": "notify", "step": "notify", "params": {}},
            ],
            "edges": [
                {"from": "setup", "to": "resolve_materials"},
                {"from": "resolve_materials", "to": "template"},
                {"from": "template", "to": "render"},
                {"from": "render", "to": "output"},
                {"from": "render", "to": "notify"},
            ],
        }
    )
    assert canonical["ui"]["preset"] == "still_graph"
    assert canonical["ui"]["execution_mode"] == "graph"
    assert [node["step"] for node in canonical["nodes"]] == [
        "order_line_setup",
        "resolve_template",
        "auto_populate_materials",
        "glb_bbox",
        "material_map_resolve",
        "blender_still",
        "output_save",
        "notify",
    ]
    # User-set render params survive the rebuild; the custom-settings flag
    # is forced off.
    render_node = next(node for node in canonical["nodes"] if node["step"] == "blender_still")
    assert render_node["params"]["width"] == 1280
    assert render_node["params"]["height"] == 720
    assert render_node["params"]["samples"] == 32
    assert render_node["params"]["use_custom_render_settings"] is False
def test_build_workflow_blueprint_config_cad_intake_supplies_bbox_to_threejs_thumbnail():
    """The cad_intake blueprint inserts glb_bbox after GLB export and wires
    its output into the three.js thumbnail node."""
    config = build_workflow_blueprint_config("cad_intake")
    assert config["ui"]["family"] == "cad_file"
    # Two thumbnail_save nodes: one per renderer output.
    assert [node["step"] for node in config["nodes"]] == [
        "resolve_step_path",
        "occ_object_extract",
        "occ_glb_export",
        "glb_bbox",
        "stl_cache_generate",
        "blender_render",
        "threejs_render",
        "thumbnail_save",
        "thumbnail_save",
    ]
    assert {"from": "export_glb", "to": "bbox"} in config["edges"]
    assert {"from": "bbox", "to": "threejs_thumb"} in config["edges"]
def test_canonicalize_workflow_config_rebuilds_reference_blueprints():
    """A sparse config tagged with a reference blueprint is rebuilt in full."""
    sparse_config = {
        "version": 1,
        "ui": {"preset": "custom", "execution_mode": "legacy", "blueprint": "order_rendering"},
        "nodes": [
            {"id": "setup", "step": "order_line_setup", "params": {}},
        ],
        "edges": [],
    }
    canonical = canonicalize_workflow_config(sparse_config)
    assert canonical["ui"]["blueprint"] == "order_rendering"
    assert canonical["ui"]["family"] == "order_line"
    # The rebuilt graph regains the turntable and blend-export stages.
    steps = {node["step"] for node in canonical["nodes"]}
    assert "blender_turntable" in steps
    assert "export_blend" in steps
def test_canonicalize_workflow_config_rebuilds_starter_blueprints():
    """An empty config tagged starter_cad_intake is rebuilt to the single
    seeded resolve_step_path node, complete with UI placement metadata."""
    canonical = canonicalize_workflow_config(
        {
            "version": 1,
            "ui": {"preset": "custom", "execution_mode": "legacy", "blueprint": "starter_cad_intake"},
            "nodes": [],
            "edges": [],
        }
    )
    assert canonical["ui"]["blueprint"] == "starter_cad_intake"
    assert canonical["ui"]["family"] == "cad_file"
    assert canonical["nodes"] == [
        {
            "id": "resolve_step",
            "step": "resolve_step_path",
            "params": {},
            "ui": {
                "type": "inputNode",
                "position": {"x": 120, "y": 140},
                "label": "Resolve STEP Path",
            },
        }
    ]
def test_workflow_config_requires_canonicalization_for_legacy_payloads(): def test_workflow_config_requires_canonicalization_for_legacy_payloads():
assert workflow_config_requires_canonicalization( assert workflow_config_requires_canonicalization(
{ {
@@ -235,11 +342,13 @@ def test_build_workflow_blueprint_config_creates_cad_intake_family_graph():
assert config["version"] == 1 assert config["version"] == 1
assert config["ui"]["preset"] == "custom" assert config["ui"]["preset"] == "custom"
assert config["ui"]["family"] == "cad_file"
assert config["ui"]["blueprint"] == "cad_intake" assert config["ui"]["blueprint"] == "cad_intake"
assert [node["step"] for node in config["nodes"]] == [ assert [node["step"] for node in config["nodes"]] == [
"resolve_step_path", "resolve_step_path",
"occ_object_extract", "occ_object_extract",
"occ_glb_export", "occ_glb_export",
"glb_bbox",
"stl_cache_generate", "stl_cache_generate",
"blender_render", "blender_render",
"threejs_render", "threejs_render",
@@ -253,6 +362,7 @@ def test_build_workflow_blueprint_config_creates_order_rendering_family_graph():
assert config["version"] == 1 assert config["version"] == 1
assert config["ui"]["preset"] == "custom" assert config["ui"]["preset"] == "custom"
assert config["ui"]["family"] == "order_line"
assert config["ui"]["blueprint"] == "order_rendering" assert config["ui"]["blueprint"] == "order_rendering"
assert any(node["step"] == "blender_still" for node in config["nodes"]) assert any(node["step"] == "blender_still" for node in config["nodes"])
assert any(node["step"] == "blender_turntable" for node in config["nodes"]) assert any(node["step"] == "blender_turntable" for node in config["nodes"])
@@ -260,11 +370,34 @@ def test_build_workflow_blueprint_config_creates_order_rendering_family_graph():
assert sum(1 for node in config["nodes"] if node["step"] == "notify") == 3 assert sum(1 for node in config["nodes"] if node["step"] == "notify") == 3
def test_build_workflow_blueprint_config_creates_still_graph_reference():
    """The still_graph_reference blueprint yields a graph-mode order_line
    pipeline in canonical node order with custom render settings disabled."""
    config = build_workflow_blueprint_config("still_graph_reference")
    assert config["version"] == 1
    assert config["ui"]["preset"] == "custom"
    assert config["ui"]["family"] == "order_line"
    assert config["ui"]["blueprint"] == "still_graph_reference"
    assert config["ui"]["execution_mode"] == "graph"
    assert [node["step"] for node in config["nodes"]] == [
        "order_line_setup",
        "resolve_template",
        "auto_populate_materials",
        "glb_bbox",
        "material_map_resolve",
        "blender_still",
        "output_save",
        "notify",
    ]
    render_node = next(node for node in config["nodes"] if node["step"] == "blender_still")
    assert render_node["params"]["use_custom_render_settings"] is False
def test_build_starter_workflow_config_creates_minimal_valid_custom_graph(): def test_build_starter_workflow_config_creates_minimal_valid_custom_graph():
config = build_starter_workflow_config() config = build_starter_workflow_config()
assert config["version"] == 1 assert config["version"] == 1
assert config["ui"]["preset"] == "custom" assert config["ui"]["preset"] == "custom"
assert config["ui"]["family"] == "order_line"
assert config["ui"]["blueprint"] == "starter_order_rendering" assert config["ui"]["blueprint"] == "starter_order_rendering"
assert config["nodes"] == [ assert config["nodes"] == [
{ {
@@ -7,6 +7,7 @@ from types import SimpleNamespace
import pytest import pytest
from PIL import Image, PngImagePlugin from PIL import Image, PngImagePlugin
from sqlalchemy import select from sqlalchemy import select
from sqlalchemy.engine import make_url
from sqlalchemy.orm import selectinload from sqlalchemy.orm import selectinload
from app.config import settings from app.config import settings
@@ -18,15 +19,128 @@ from app.domains.rendering.workflow_comparison_service import (
_build_artifact, _build_artifact,
evaluate_rollout_gate, evaluate_rollout_gate,
) )
from app.domains.rendering.workflow_config_utils import build_preset_workflow_config from app.domains.rendering.workflow_config_utils import (
build_preset_workflow_config,
build_workflow_blueprint_config,
)
from tests.db_test_utils import resolve_test_db_url
def _use_test_database(monkeypatch) -> None: def _use_test_database(monkeypatch) -> None:
monkeypatch.setattr(settings, "postgres_host", "postgres") resolved = make_url(resolve_test_db_url(async_driver=False))
monkeypatch.setattr(settings, "postgres_port", 5432) monkeypatch.setattr(settings, "postgres_host", resolved.host or settings.postgres_host)
monkeypatch.setattr(settings, "postgres_user", "hartomat") monkeypatch.setattr(settings, "postgres_port", int(resolved.port or settings.postgres_port))
monkeypatch.setattr(settings, "postgres_password", "hartomat") monkeypatch.setattr(settings, "postgres_user", resolved.username or settings.postgres_user)
monkeypatch.setattr(settings, "postgres_db", "hartomat_test") monkeypatch.setattr(settings, "postgres_password", resolved.password or settings.postgres_password)
monkeypatch.setattr(settings, "postgres_db", resolved.database or settings.postgres_db)
def _build_valid_custom_still_graph(
    *,
    execution_mode: str = "graph",
    width: int = 1024,
    height: int = 768,
    include_output: bool = False,
    include_notify: bool = False,
) -> dict[str, object]:
    """Build a minimal valid custom still-render workflow config.

    setup fans out to template and material population, both of which feed
    material resolution, and template + resolved materials feed the render
    node.  output_save / notify nodes are appended downstream of the render
    node when requested.
    """
    nodes: list[dict[str, object]] = [
        {"id": "setup", "step": "order_line_setup", "params": {}},
        {"id": "template", "step": "resolve_template", "params": {}},
        {"id": "populate_materials", "step": "auto_populate_materials", "params": {}},
        {"id": "resolve_materials", "step": "material_map_resolve", "params": {}},
        {"id": "render", "step": "blender_still", "params": {"width": width, "height": height}},
    ]
    edges: list[dict[str, str]] = [
        {"from": "setup", "to": "template"},
        {"from": "setup", "to": "populate_materials"},
        {"from": "template", "to": "resolve_materials"},
        {"from": "populate_materials", "to": "resolve_materials"},
        {"from": "template", "to": "render"},
        {"from": "resolve_materials", "to": "render"},
    ]
    if include_output:
        nodes.append({"id": "output", "step": "output_save", "params": {}})
        edges.append({"from": "render", "to": "output"})
    if include_notify:
        nodes.append({"id": "notify", "step": "notify", "params": {}})
        edges.append({"from": "render", "to": "notify"})
    return {
        "version": 1,
        "ui": {"preset": "custom", "execution_mode": execution_mode},
        "nodes": nodes,
        "edges": edges,
    }
def _build_valid_custom_turntable_graph(
*,
execution_mode: str = "graph",
fps: int = 24,
frame_count: int = 96,
include_output: bool = False,
include_notify: bool = False,
) -> dict[str, object]:
duration_s = frame_count / fps
nodes: list[dict[str, object]] = [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "template", "step": "resolve_template", "params": {}},
{"id": "populate_materials", "step": "auto_populate_materials", "params": {}},
{"id": "bbox", "step": "glb_bbox", "params": {}},
{"id": "resolve_materials", "step": "material_map_resolve", "params": {}},
{"id": "turntable", "step": "blender_turntable", "params": {"fps": fps, "duration_s": duration_s}},
]
edges: list[dict[str, str]] = [
{"from": "setup", "to": "template"},
{"from": "setup", "to": "populate_materials"},
{"from": "setup", "to": "bbox"},
{"from": "template", "to": "resolve_materials"},
{"from": "populate_materials", "to": "resolve_materials"},
{"from": "bbox", "to": "turntable"},
{"from": "template", "to": "turntable"},
{"from": "resolve_materials", "to": "turntable"},
]
if include_output:
nodes.append({"id": "output", "step": "output_save", "params": {}})
edges.append({"from": "turntable", "to": "output"})
if include_notify:
nodes.append({"id": "notify", "step": "notify", "params": {}})
edges.append({"from": "turntable", "to": "notify"})
return {
"version": 1,
"ui": {"preset": "custom", "execution_mode": execution_mode},
"nodes": nodes,
"edges": edges,
}
def _build_valid_custom_blend_graph(*, include_output: bool = False) -> dict[str, object]:
nodes: list[dict[str, object]] = [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "template", "step": "resolve_template", "params": {}},
{"id": "blend", "step": "export_blend", "params": {}},
]
edges: list[dict[str, str]] = [
{"from": "setup", "to": "template"},
{"from": "template", "to": "blend"},
]
if include_output:
nodes.append({"id": "output", "step": "output_save", "params": {}})
edges.append({"from": "blend", "to": "output"})
return {
"version": 1,
"ui": {"preset": "custom", "execution_mode": "graph"},
"nodes": nodes,
"edges": edges,
}
def _derive_rollout_mode_from_config(workflow_config: dict | None) -> str:
execution_mode = ((workflow_config or {}).get("ui") or {}).get("execution_mode")
if execution_mode == "graph":
return "graph"
if execution_mode == "shadow":
return "shadow"
return "legacy_only"
async def _seed_order_line( async def _seed_order_line(
@@ -61,6 +175,7 @@ async def _seed_order_line(
db.add(workflow_definition) db.add(workflow_definition)
await db.flush() await db.flush()
output_type.workflow_definition_id = workflow_definition.id output_type.workflow_definition_id = workflow_definition.id
output_type.workflow_rollout_mode = _derive_rollout_mode_from_config(workflow_config)
order_line = OrderLine( order_line = OrderLine(
order_id=order.id, order_id=order.id,
@@ -148,6 +263,54 @@ async def test_dispatch_render_with_workflow_falls_back_to_legacy_without_workfl
assert runs == [] assert runs == []
@pytest.mark.asyncio
async def test_dispatch_render_with_workflow_falls_back_on_artifact_contract_mismatch(
    db,
    admin_user,
    monkeypatch,
):
    """Dispatch falls back to legacy when the graph's terminal artifact
    disagrees with the output type's declared artifact kind.

    The seeded graph ends in export_blend (a blend asset) while the output
    type declares still_image; the rollout gate must report the mismatch,
    route to the legacy backend, and create no WorkflowRun rows.
    """
    _use_test_database(monkeypatch)
    # Graph-mode config whose terminal step is export_blend, not a still render.
    seeded = await _seed_order_line(
        db,
        admin_user,
        workflow_config={
            "version": 1,
            "ui": {"preset": "custom", "execution_mode": "graph"},
            "nodes": [
                {"id": "setup", "step": "order_line_setup", "params": {}},
                {"id": "template", "step": "resolve_template", "params": {}},
                {"id": "blend", "step": "export_blend", "params": {}},
            ],
            "edges": [
                {"from": "setup", "to": "template"},
                {"from": "template", "to": "blend"},
            ],
        },
    )
    output_type = seeded["output_type"]
    # Force the contract mismatch: output type expects a still image.
    output_type.artifact_kind = "still_image"
    await db.commit()

    # Stub the legacy path so we can observe the fallback without side effects.
    monkeypatch.setattr(
        "app.domains.rendering.dispatch_service._legacy_dispatch",
        lambda order_line_id: {"backend": "legacy", "order_line_id": order_line_id},
    )

    result = dispatch_render_with_workflow(str(seeded["order_line"].id))
    await db.rollback()

    assert result["backend"] == "legacy"
    assert result["order_line_id"] == str(seeded["order_line"].id)
    assert result["rollout_gate_status"] == "workflow_contract_mismatch"
    assert result["workflow_rollout_ready"] is False
    assert result["output_type_rollout_ready"] is False
    # Gate reasons should name both sides of the mismatch.
    assert any("Expected artifact kind: still_image." in reason for reason in result["rollout_gate_reasons"])
    assert any("blend_asset" in reason for reason in result["rollout_gate_reasons"])

    # A contract mismatch must not leave behind any workflow run rows.
    runs = (await db.execute(select(WorkflowRun))).scalars().all()
    assert runs == []
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_dispatch_render_with_workflow_creates_run_and_node_results_for_preset_dispatch( async def test_dispatch_render_with_workflow_creates_run_and_node_results_for_preset_dispatch(
db, db,
@@ -203,15 +366,11 @@ async def test_dispatch_render_with_workflow_falls_back_when_workflow_runtime_pr
seeded = await _seed_order_line( seeded = await _seed_order_line(
db, db,
admin_user, admin_user,
workflow_config={ workflow_config=build_preset_workflow_config("still", {"width": 640, "height": 640}),
"version": 1, )
"nodes": [ monkeypatch.setattr(
{"id": "render", "step": "blender_still", "params": {}}, "app.domains.rendering.workflow_executor.prepare_workflow_context",
], lambda *_args, **_kwargs: (_ for _ in ()).throw(RuntimeError("prep exploded")),
"edges": [
{"from": "missing", "to": "render"},
],
},
) )
monkeypatch.setattr( monkeypatch.setattr(
@@ -248,19 +407,7 @@ async def test_dispatch_render_with_workflow_graph_mode_dispatches_supported_cus
workflow_definition = WorkflowDefinition( workflow_definition = WorkflowDefinition(
name=f"Graph Workflow {uuid.uuid4().hex[:8]}", name=f"Graph Workflow {uuid.uuid4().hex[:8]}",
output_type_id=order_line.output_type_id, output_type_id=order_line.output_type_id,
config={ config=_build_valid_custom_still_graph(execution_mode="graph"),
"version": 1,
"ui": {"preset": "custom", "execution_mode": "graph"},
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "template", "step": "resolve_template", "params": {}},
{"id": "render", "step": "blender_still", "params": {"width": 1024, "height": 768}},
],
"edges": [
{"from": "setup", "to": "template"},
{"from": "template", "to": "render"},
],
},
is_active=True, is_active=True,
) )
db.add(workflow_definition) db.add(workflow_definition)
@@ -268,6 +415,7 @@ async def test_dispatch_render_with_workflow_graph_mode_dispatches_supported_cus
output_type = await db.get(OutputType, order_line.output_type_id) output_type = await db.get(OutputType, order_line.output_type_id)
assert output_type is not None assert output_type is not None
output_type.workflow_definition_id = workflow_definition.id output_type.workflow_definition_id = workflow_definition.id
output_type.workflow_rollout_mode = "graph"
await db.commit() await db.commit()
monkeypatch.setattr( monkeypatch.setattr(
@@ -315,21 +463,7 @@ async def test_dispatch_render_with_workflow_graph_mode_uses_output_save_as_auth
workflow_definition = WorkflowDefinition( workflow_definition = WorkflowDefinition(
name=f"Graph Output Save {uuid.uuid4().hex[:8]}", name=f"Graph Output Save {uuid.uuid4().hex[:8]}",
output_type_id=order_line.output_type_id, output_type_id=order_line.output_type_id,
config={ config=_build_valid_custom_still_graph(execution_mode="graph", include_output=True),
"version": 1,
"ui": {"preset": "custom", "execution_mode": "graph"},
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "template", "step": "resolve_template", "params": {}},
{"id": "render", "step": "blender_still", "params": {"width": 1024, "height": 768}},
{"id": "output", "step": "output_save", "params": {}},
],
"edges": [
{"from": "setup", "to": "template"},
{"from": "template", "to": "render"},
{"from": "render", "to": "output"},
],
},
is_active=True, is_active=True,
) )
db.add(workflow_definition) db.add(workflow_definition)
@@ -337,6 +471,7 @@ async def test_dispatch_render_with_workflow_graph_mode_uses_output_save_as_auth
output_type = await db.get(OutputType, order_line.output_type_id) output_type = await db.get(OutputType, order_line.output_type_id)
assert output_type is not None assert output_type is not None
output_type.workflow_definition_id = workflow_definition.id output_type.workflow_definition_id = workflow_definition.id
output_type.workflow_rollout_mode = "graph"
await db.commit() await db.commit()
calls: list[tuple[str, list[str], dict]] = [] calls: list[tuple[str, list[str], dict]] = []
@@ -367,8 +502,10 @@ async def test_dispatch_render_with_workflow_graph_mode_uses_output_save_as_auth
assert calls[0][2]["publish_asset_enabled"] is False assert calls[0][2]["publish_asset_enabled"] is False
assert calls[0][2]["graph_authoritative_output_enabled"] is True assert calls[0][2]["graph_authoritative_output_enabled"] is True
assert calls[0][2]["graph_output_node_ids"] == ["output"] assert calls[0][2]["graph_output_node_ids"] == ["output"]
assert node_results["output"].status == "completed" assert node_results["output"].status == "pending"
assert node_results["output"].output["publication_mode"] == "awaiting_graph_authoritative_save" assert node_results["output"].output["publication_mode"] == "awaiting_graph_authoritative_save"
assert node_results["output"].output["handoff_state"] == "armed"
assert node_results["output"].output["handoff_node_ids"] == ["render"]
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -395,6 +532,7 @@ async def test_dispatch_render_with_workflow_graph_mode_canonicalizes_legacy_pre
output_type = await db.get(OutputType, order_line.output_type_id) output_type = await db.get(OutputType, order_line.output_type_id)
assert output_type is not None assert output_type is not None
output_type.workflow_definition_id = workflow_definition.id output_type.workflow_definition_id = workflow_definition.id
output_type.workflow_rollout_mode = "graph"
await db.commit() await db.commit()
monkeypatch.setattr( monkeypatch.setattr(
@@ -421,7 +559,7 @@ async def test_dispatch_render_with_workflow_graph_mode_canonicalizes_legacy_pre
assert node_results["setup"].status == "completed" assert node_results["setup"].status == "completed"
assert node_results["template"].status == "completed" assert node_results["template"].status == "completed"
assert node_results["render"].status == "queued" assert node_results["render"].status == "queued"
assert node_results["output"].status == "completed" assert node_results["output"].status == "pending"
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -436,21 +574,7 @@ async def test_dispatch_render_with_workflow_graph_mode_falls_back_to_legacy_on_
workflow_definition = WorkflowDefinition( workflow_definition = WorkflowDefinition(
name=f"Graph Workflow {uuid.uuid4().hex[:8]}", name=f"Graph Workflow {uuid.uuid4().hex[:8]}",
output_type_id=order_line.output_type_id, output_type_id=order_line.output_type_id,
config={ config=_build_valid_custom_still_graph(execution_mode="graph"),
"version": 1,
"ui": {"preset": "custom", "execution_mode": "graph"},
"nodes": [
{
"id": "setup",
"step": "order_line_setup",
"params": {"failure_policy": {"fallback_to_legacy": True}},
},
{"id": "render", "step": "blender_still", "params": {"width": 1024, "height": 768}},
],
"edges": [
{"from": "setup", "to": "render"},
],
},
is_active=True, is_active=True,
) )
db.add(workflow_definition) db.add(workflow_definition)
@@ -458,6 +582,7 @@ async def test_dispatch_render_with_workflow_graph_mode_falls_back_to_legacy_on_
output_type = await db.get(OutputType, order_line.output_type_id) output_type = await db.get(OutputType, order_line.output_type_id)
assert output_type is not None assert output_type is not None
output_type.workflow_definition_id = workflow_definition.id output_type.workflow_definition_id = workflow_definition.id
output_type.workflow_rollout_mode = "graph"
await db.commit() await db.commit()
monkeypatch.setattr( monkeypatch.setattr(
@@ -490,6 +615,40 @@ async def test_dispatch_render_with_workflow_graph_mode_falls_back_to_legacy_on_
assert run.error_message == "graph dispatch exploded" assert run.error_message == "graph dispatch exploded"
@pytest.mark.asyncio
async def test_dispatch_render_with_graph_capable_workflow_respects_legacy_only_rollout_mode(
    db,
    admin_user,
    monkeypatch,
):
    """A graph-capable workflow is still dispatched via legacy when the output
    type's rollout mode is pinned to "legacy_only".

    The per-output-type rollout mode must override the graph execution mode
    configured on the workflow itself.
    """
    _use_test_database(monkeypatch)
    seeded = await _seed_order_line(
        db,
        admin_user,
        workflow_config=_build_valid_custom_still_graph(execution_mode="graph"),
    )
    output_type = seeded["output_type"]
    # Pin the rollout mode to legacy despite the graph-capable config.
    output_type.workflow_rollout_mode = "legacy_only"
    await db.commit()

    # Stub the legacy path so we can observe the routing decision.
    monkeypatch.setattr(
        "app.domains.rendering.dispatch_service._legacy_dispatch",
        lambda order_line_id: {"backend": "legacy", "order_line_id": order_line_id},
    )

    result = dispatch_render_with_workflow(str(seeded["order_line"].id))
    await db.rollback()

    assert result["backend"] == "legacy"
    assert result["order_line_id"] == str(seeded["order_line"].id)
    assert result["workflow_rollout_mode"] == "legacy_only"
    # The configured mode is surfaced for observability even though it is not used.
    assert result["configured_execution_mode"] == "graph"
    assert result["rollout_gate_status"] == "rollout_legacy_only"
    assert result["workflow_rollout_ready"] is False
    assert result["output_type_rollout_ready"] is False
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_dispatch_render_with_workflow_shadow_mode_keeps_legacy_authoritative_and_dispatches_graph_observer( async def test_dispatch_render_with_workflow_shadow_mode_keeps_legacy_authoritative_and_dispatches_graph_observer(
db, db,
@@ -502,19 +661,7 @@ async def test_dispatch_render_with_workflow_shadow_mode_keeps_legacy_authoritat
workflow_definition = WorkflowDefinition( workflow_definition = WorkflowDefinition(
name=f"Shadow Workflow {uuid.uuid4().hex[:8]}", name=f"Shadow Workflow {uuid.uuid4().hex[:8]}",
output_type_id=order_line.output_type_id, output_type_id=order_line.output_type_id,
config={ config=_build_valid_custom_still_graph(execution_mode="shadow"),
"version": 1,
"ui": {"preset": "custom", "execution_mode": "shadow"},
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "template", "step": "resolve_template", "params": {}},
{"id": "render", "step": "blender_still", "params": {"width": 1024, "height": 768}},
],
"edges": [
{"from": "setup", "to": "template"},
{"from": "template", "to": "render"},
],
},
is_active=True, is_active=True,
) )
db.add(workflow_definition) db.add(workflow_definition)
@@ -522,6 +669,7 @@ async def test_dispatch_render_with_workflow_shadow_mode_keeps_legacy_authoritat
output_type = await db.get(OutputType, order_line.output_type_id) output_type = await db.get(OutputType, order_line.output_type_id)
assert output_type is not None assert output_type is not None
output_type.workflow_definition_id = workflow_definition.id output_type.workflow_definition_id = workflow_definition.id
output_type.workflow_rollout_mode = "shadow"
await db.commit() await db.commit()
calls: list[tuple[str, list[str], dict]] = [] calls: list[tuple[str, list[str], dict]] = []
@@ -592,6 +740,7 @@ async def test_dispatch_render_with_workflow_shadow_mode_canonicalizes_legacy_pr
output_type = await db.get(OutputType, order_line.output_type_id) output_type = await db.get(OutputType, order_line.output_type_id)
assert output_type is not None assert output_type is not None
output_type.workflow_definition_id = workflow_definition.id output_type.workflow_definition_id = workflow_definition.id
output_type.workflow_rollout_mode = "shadow"
await db.commit() await db.commit()
calls: list[tuple[str, list[str], dict]] = [] calls: list[tuple[str, list[str], dict]] = []
@@ -610,21 +759,13 @@ async def test_dispatch_render_with_workflow_shadow_mode_canonicalizes_legacy_pr
await db.rollback() await db.rollback()
run_result = await db.execute(
select(WorkflowRun)
.where(WorkflowRun.id == uuid.UUID(result["shadow_workflow_run_id"]))
.options(selectinload(WorkflowRun.node_results))
)
run = run_result.scalar_one()
node_results = {node_result.node_name: node_result for node_result in run.node_results}
assert result["backend"] == "legacy" assert result["backend"] == "legacy"
assert result["execution_mode"] == "shadow" assert result["execution_mode"] == "shadow"
assert result["shadow_status"] == "dispatched" assert result["shadow_status"] == "skipped"
assert result["shadow_task_ids"] == ["legacy-shadow-task-1"] assert result["rollout_gate_status"] == "shadow_skipped"
assert run.execution_mode == "shadow" assert "shadow_workflow_run_id" not in result
assert node_results["output"].status == "completed" assert "material_assignments" in result["shadow_error"]
assert calls[0][2]["publish_asset_enabled"] is False assert calls == []
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -639,17 +780,7 @@ async def test_dispatch_render_with_workflow_shadow_mode_ignores_graph_failures_
workflow_definition = WorkflowDefinition( workflow_definition = WorkflowDefinition(
name=f"Shadow Workflow {uuid.uuid4().hex[:8]}", name=f"Shadow Workflow {uuid.uuid4().hex[:8]}",
output_type_id=order_line.output_type_id, output_type_id=order_line.output_type_id,
config={ config=_build_valid_custom_still_graph(execution_mode="shadow"),
"version": 1,
"ui": {"preset": "custom", "execution_mode": "shadow"},
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "render", "step": "blender_still", "params": {"width": 1024, "height": 768}},
],
"edges": [
{"from": "setup", "to": "render"},
],
},
is_active=True, is_active=True,
) )
db.add(workflow_definition) db.add(workflow_definition)
@@ -657,6 +788,7 @@ async def test_dispatch_render_with_workflow_shadow_mode_ignores_graph_failures_
output_type = await db.get(OutputType, order_line.output_type_id) output_type = await db.get(OutputType, order_line.output_type_id)
assert output_type is not None assert output_type is not None
output_type.workflow_definition_id = workflow_definition.id output_type.workflow_definition_id = workflow_definition.id
output_type.workflow_rollout_mode = "shadow"
await db.commit() await db.commit()
monkeypatch.setattr( monkeypatch.setattr(
@@ -730,6 +862,32 @@ def test_evaluate_rollout_gate_warns_on_small_visual_delta(tmp_path: Path):
assert any("warn threshold" in reason for reason in gate["reasons"]) assert any("warn threshold" in reason for reason in gate["reasons"])
def test_evaluate_rollout_gate_passes_near_zero_visual_delta(tmp_path: Path):
    """A tiny per-channel pixel delta below the pass threshold yields a
    "ready_for_rollout" verdict even though the images are not exact matches."""
    authoritative = tmp_path / "authoritative.png"
    observer = tmp_path / "observer.png"
    # Identical mid-gray images to start from.
    Image.new("RGBA", (1024, 1024), color=(106, 106, 106, 255)).save(authoritative)
    Image.new("RGBA", (1024, 1024), color=(106, 106, 106, 255)).save(observer)
    # Perturb two pixels of the observer by one channel unit each so the
    # comparison is not an exact match but stays visually negligible.
    with Image.open(observer) as image:
        image.putpixel((444, 137), (106, 106, 107, 255))
        image.putpixel((651, 142), (105, 106, 106, 255))
        image.save(observer)

    gate = evaluate_rollout_gate(
        authoritative_output=_build_artifact(str(authoritative)),
        observer_output=_build_artifact(str(observer)),
        exact_match=False,
        dimensions_match=True,
        # Two channels off by 1 across a 1024x1024 RGBA image, normalized to [0, 1].
        mean_pixel_delta=((1 + 1) / (1024 * 1024 * 4 * 255)),
    )

    assert gate["verdict"] == "pass"
    assert gate["ready"] is True
    assert gate["status"] == "ready_for_rollout"
    assert any("pass threshold" in reason for reason in gate["reasons"])
def test_evaluate_rollout_gate_fails_on_missing_observer(tmp_path: Path): def test_evaluate_rollout_gate_fails_on_missing_observer(tmp_path: Path):
authoritative = tmp_path / "authoritative.png" authoritative = tmp_path / "authoritative.png"
Image.new("RGBA", (16, 16), color=(0, 128, 255, 255)).save(authoritative) Image.new("RGBA", (16, 16), color=(0, 128, 255, 255)).save(authoritative)
@@ -796,7 +954,11 @@ def test_dispatch_render_with_workflow_unit_marks_shadow_dispatch_as_pending_rol
workflow_def_id = uuid.uuid4() workflow_def_id = uuid.uuid4()
fake_line = SimpleNamespace( fake_line = SimpleNamespace(
id=uuid.UUID(order_line_id), id=uuid.UUID(order_line_id),
output_type=SimpleNamespace(id=output_type_id, workflow_definition_id=workflow_def_id), output_type=SimpleNamespace(
id=output_type_id,
workflow_definition_id=workflow_def_id,
workflow_rollout_mode="shadow",
),
) )
fake_workflow_def = SimpleNamespace(id=workflow_def_id, config={"version": 1}, is_active=True) fake_workflow_def = SimpleNamespace(id=workflow_def_id, config={"version": 1}, is_active=True)
fake_run = SimpleNamespace(id=uuid.uuid4()) fake_run = SimpleNamespace(id=uuid.uuid4())
@@ -951,12 +1113,14 @@ async def test_workflow_dispatch_endpoint_returns_workflow_run_with_node_results
assert node_results["setup"]["output"]["order_line_id"] == str(order_line.id) assert node_results["setup"]["output"]["order_line_id"] == str(order_line.id)
assert node_results["template"]["status"] == "completed" assert node_results["template"]["status"] == "completed"
assert node_results["template"]["output"]["use_materials"] is False assert node_results["template"]["output"]["use_materials"] is False
assert node_results["output"]["status"] == "completed" assert node_results["output"]["status"] == "pending"
assert node_results["output"]["output"]["publication_mode"] == "awaiting_graph_authoritative_save" assert node_results["output"]["output"]["publication_mode"] == "awaiting_graph_authoritative_save"
assert node_results["output"]["output"]["handoff_state"] == "armed"
assert node_results["output"]["output"]["handoff_node_ids"] == ["render"]
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_workflow_dispatch_endpoint_arms_output_save_for_export_blend( async def test_workflow_dispatch_endpoint_rejects_output_save_for_export_blend_only_graph(
client, client,
db, db,
admin_user, admin_user,
@@ -968,18 +1132,7 @@ async def test_workflow_dispatch_endpoint_arms_output_save_for_export_blend(
order_line = await _seed_renderable_order_line(db, admin_user, tmp_path) order_line = await _seed_renderable_order_line(db, admin_user, tmp_path)
workflow_definition = WorkflowDefinition( workflow_definition = WorkflowDefinition(
name=f"Blend Output Workflow {uuid.uuid4().hex[:8]}", name=f"Blend Output Workflow {uuid.uuid4().hex[:8]}",
config={ config=_build_valid_custom_blend_graph(include_output=True),
"version": 1,
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "blend", "step": "export_blend", "params": {}},
{"id": "output", "step": "output_save", "params": {}},
],
"edges": [
{"from": "setup", "to": "blend"},
{"from": "blend", "to": "output"},
],
},
is_active=True, is_active=True,
) )
db.add(workflow_definition) db.add(workflow_definition)
@@ -1000,35 +1153,9 @@ async def test_workflow_dispatch_endpoint_arms_output_save_for_export_blend(
headers=auth_headers, headers=auth_headers,
) )
assert response.status_code == 200 assert response.status_code == 422
body = response.json() assert "output_save" in response.json()["detail"]
assert calls == []
assert body["context_id"] == context_id
assert body["execution_mode"] == "graph"
assert body["dispatched"] == 1
assert body["task_ids"] == ["task-1"]
assert calls == [
(
"app.domains.rendering.tasks.export_blend_for_order_line_task",
[context_id],
{
"workflow_run_id": body["workflow_run"]["id"],
"workflow_node_id": "blend",
"publish_asset_enabled": False,
"graph_authoritative_output_enabled": True,
"graph_output_node_ids": ["output"],
},
)
]
node_results = {node["node_name"]: node for node in body["workflow_run"]["node_results"]}
assert node_results["blend"]["status"] == "queued"
assert node_results["blend"]["output"]["predicted_asset_type"] == "blend_production"
assert node_results["blend"]["output"]["publish_asset_enabled"] is False
assert node_results["blend"]["output"]["graph_authoritative_output_enabled"] is True
assert node_results["blend"]["output"]["graph_output_node_ids"] == ["output"]
assert node_results["output"]["status"] == "completed"
assert node_results["output"]["output"]["publication_mode"] == "awaiting_graph_authoritative_save"
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -1044,18 +1171,7 @@ async def test_workflow_dispatch_endpoint_arms_output_save_for_turntable(
order_line = await _seed_renderable_order_line(db, admin_user, tmp_path) order_line = await _seed_renderable_order_line(db, admin_user, tmp_path)
workflow_definition = WorkflowDefinition( workflow_definition = WorkflowDefinition(
name=f"Turntable Output Workflow {uuid.uuid4().hex[:8]}", name=f"Turntable Output Workflow {uuid.uuid4().hex[:8]}",
config={ config=_build_valid_custom_turntable_graph(include_output=True),
"version": 1,
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "turntable", "step": "blender_turntable", "params": {"fps": 24, "frame_count": 96}},
{"id": "output", "step": "output_save", "params": {}},
],
"edges": [
{"from": "setup", "to": "turntable"},
{"from": "turntable", "to": "output"},
],
},
is_active=True, is_active=True,
) )
db.add(workflow_definition) db.add(workflow_definition)
@@ -1091,7 +1207,6 @@ async def test_workflow_dispatch_endpoint_arms_output_save_for_turntable(
assert calls[0][2]["graph_authoritative_output_enabled"] is True assert calls[0][2]["graph_authoritative_output_enabled"] is True
assert calls[0][2]["graph_output_node_ids"] == ["output"] assert calls[0][2]["graph_output_node_ids"] == ["output"]
assert calls[0][2]["fps"] == 24 assert calls[0][2]["fps"] == 24
assert calls[0][2]["frame_count"] == 96
node_results = {node["node_name"]: node for node in body["workflow_run"]["node_results"]} node_results = {node["node_name"]: node for node in body["workflow_run"]["node_results"]}
assert node_results["turntable"]["status"] == "queued" assert node_results["turntable"]["status"] == "queued"
@@ -1099,8 +1214,10 @@ async def test_workflow_dispatch_endpoint_arms_output_save_for_turntable(
assert node_results["turntable"]["output"]["publish_asset_enabled"] is False assert node_results["turntable"]["output"]["publish_asset_enabled"] is False
assert node_results["turntable"]["output"]["graph_authoritative_output_enabled"] is True assert node_results["turntable"]["output"]["graph_authoritative_output_enabled"] is True
assert node_results["turntable"]["output"]["graph_output_node_ids"] == ["output"] assert node_results["turntable"]["output"]["graph_output_node_ids"] == ["output"]
assert node_results["output"]["status"] == "completed" assert node_results["output"]["status"] == "pending"
assert node_results["output"]["output"]["publication_mode"] == "awaiting_graph_authoritative_save" assert node_results["output"]["output"]["publication_mode"] == "awaiting_graph_authoritative_save"
assert node_results["output"]["output"]["handoff_state"] == "armed"
assert node_results["output"]["output"]["handoff_node_ids"] == ["turntable"]
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -1116,18 +1233,7 @@ async def test_workflow_dispatch_endpoint_arms_notify_handoff_for_render_node(
order_line = await _seed_renderable_order_line(db, admin_user, tmp_path) order_line = await _seed_renderable_order_line(db, admin_user, tmp_path)
workflow_definition = WorkflowDefinition( workflow_definition = WorkflowDefinition(
name=f"Notify Workflow {uuid.uuid4().hex[:8]}", name=f"Notify Workflow {uuid.uuid4().hex[:8]}",
config={ config=_build_valid_custom_still_graph(include_notify=True),
"version": 1,
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "render", "step": "blender_still", "params": {}},
{"id": "notify", "step": "notify", "params": {}},
],
"edges": [
{"from": "setup", "to": "render"},
{"from": "render", "to": "notify"},
],
},
is_active=True, is_active=True,
) )
db.add(workflow_definition) db.add(workflow_definition)
@@ -1166,9 +1272,10 @@ async def test_workflow_dispatch_endpoint_arms_notify_handoff_for_render_node(
node_results = {node["node_name"]: node for node in body["workflow_run"]["node_results"]} node_results = {node["node_name"]: node for node in body["workflow_run"]["node_results"]}
assert node_results["render"]["status"] == "queued" assert node_results["render"]["status"] == "queued"
assert node_results["render"]["output"]["graph_notify_node_ids"] == ["notify"] assert node_results["render"]["output"]["graph_notify_node_ids"] == ["notify"]
assert node_results["notify"]["status"] == "completed" assert node_results["notify"]["status"] == "pending"
assert node_results["notify"]["output"]["notification_mode"] == "deferred_to_render_task" assert node_results["notify"]["output"]["notification_mode"] == "deferred_to_render_task"
assert node_results["notify"]["output"]["armed_node_ids"] == ["render"] assert node_results["notify"]["output"]["armed_node_ids"] == ["render"]
assert node_results["notify"]["output"]["handoff_state"] == "armed"
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -1246,19 +1353,7 @@ async def test_workflow_draft_dispatch_endpoint_dispatches_unsaved_render_graph(
json={ json={
"workflow_id": str(workflow_definition.id), "workflow_id": str(workflow_definition.id),
"context_id": str(order_line.id), "context_id": str(order_line.id),
"config": { "config": _build_valid_custom_still_graph(width=800, height=600),
"version": 1,
"ui": {"preset": "custom", "execution_mode": "graph"},
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}, "ui": {"label": "Setup"}},
{"id": "template", "step": "resolve_template", "params": {}, "ui": {"label": "Template"}},
{"id": "render", "step": "blender_still", "params": {"width": 800, "height": 600}, "ui": {"label": "Render"}},
],
"edges": [
{"from": "setup", "to": "template"},
{"from": "template", "to": "render"},
],
},
}, },
) )
@@ -1306,17 +1401,7 @@ async def test_workflow_draft_dispatch_endpoint_marks_submitted_order_processing
headers=auth_headers, headers=auth_headers,
json={ json={
"context_id": str(order_line.id), "context_id": str(order_line.id),
"config": { "config": _build_valid_custom_still_graph(),
"version": 1,
"ui": {"preset": "custom", "execution_mode": "graph"},
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}, "ui": {"label": "Setup"}},
{"id": "render", "step": "blender_still", "params": {}, "ui": {"label": "Render"}},
],
"edges": [
{"from": "setup", "to": "render"},
],
},
}, },
) )
@@ -1413,19 +1498,7 @@ async def test_workflow_preflight_endpoint_supports_direct_cad_file_graphs(
) )
workflow_definition = WorkflowDefinition( workflow_definition = WorkflowDefinition(
name=f"CAD Workflow {uuid.uuid4().hex[:8]}", name=f"CAD Workflow {uuid.uuid4().hex[:8]}",
config={ config=build_workflow_blueprint_config("cad_intake"),
"version": 1,
"ui": {"preset": "custom", "execution_mode": "graph"},
"nodes": [
{"id": "input", "step": "resolve_step_path", "params": {}, "ui": {"label": "Resolve STEP"}},
{"id": "render", "step": "blender_render", "params": {"width": 512, "height": 512}, "ui": {"label": "Thumbnail"}},
{"id": "save", "step": "thumbnail_save", "params": {}, "ui": {"label": "Save Thumbnail"}},
],
"edges": [
{"from": "input", "to": "render"},
{"from": "render", "to": "save"},
],
},
is_active=True, is_active=True,
) )
db.add_all([cad_file, workflow_definition]) db.add_all([cad_file, workflow_definition])
@@ -1443,7 +1516,7 @@ async def test_workflow_preflight_endpoint_supports_direct_cad_file_graphs(
assert body["context_kind"] == "cad_file" assert body["context_kind"] == "cad_file"
assert body["expected_context_kind"] == "cad_file" assert body["expected_context_kind"] == "cad_file"
assert body["execution_mode"] == "graph" assert body["execution_mode"] == "legacy"
assert body["graph_dispatch_allowed"] is True assert body["graph_dispatch_allowed"] is True
assert body["resolved_cad_file_id"] == str(cad_file.id) assert body["resolved_cad_file_id"] == str(cad_file.id)
assert all(node["status"] == "ready" for node in body["nodes"]) assert all(node["status"] == "ready" for node in body["nodes"])
@@ -1464,19 +1537,7 @@ async def test_workflow_draft_preflight_endpoint_validates_unsaved_render_graph(
headers=auth_headers, headers=auth_headers,
json={ json={
"context_id": str(order_line.id), "context_id": str(order_line.id),
"config": { "config": _build_valid_custom_still_graph(width=640, height=640),
"version": 1,
"ui": {"preset": "custom", "execution_mode": "graph"},
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}, "ui": {"label": "Setup"}},
{"id": "template", "step": "resolve_template", "params": {}, "ui": {"label": "Template"}},
{"id": "render", "step": "blender_still", "params": {"width": 640, "height": 640}, "ui": {"label": "Render"}},
],
"edges": [
{"from": "setup", "to": "template"},
{"from": "template", "to": "render"},
],
},
}, },
) )
@@ -1489,7 +1550,13 @@ async def test_workflow_draft_preflight_endpoint_validates_unsaved_render_graph(
assert body["execution_mode"] == "graph" assert body["execution_mode"] == "graph"
assert body["graph_dispatch_allowed"] is True assert body["graph_dispatch_allowed"] is True
assert body["resolved_order_line_id"] == str(order_line.id) assert body["resolved_order_line_id"] == str(order_line.id)
assert [node["node_id"] for node in body["nodes"]] == ["setup", "template", "render"] assert [node["node_id"] for node in body["nodes"]] == [
"setup",
"template",
"populate_materials",
"resolve_materials",
"render",
]
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -1646,7 +1713,9 @@ async def test_workflow_run_comparison_endpoint_reports_metadata_only_difference
assert body["exact_match"] is False assert body["exact_match"] is False
assert body["dimensions_match"] is True assert body["dimensions_match"] is True
assert body["mean_pixel_delta"] == 0.0 assert body["mean_pixel_delta"] == 0.0
assert "metadata differs" in body["summary"] assert body["summary"] == (
"Observer output matches the authoritative legacy output within the visual pass threshold."
)
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -1695,7 +1764,9 @@ async def test_workflow_run_comparison_endpoint_finds_shadow_output_in_step_file
admin_user, admin_user,
auth_headers, auth_headers,
tmp_path, tmp_path,
monkeypatch,
): ):
monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads"))
order_line = await _seed_renderable_order_line(db, admin_user, tmp_path) order_line = await _seed_renderable_order_line(db, admin_user, tmp_path)
workflow_run = WorkflowRun( workflow_run = WorkflowRun(
order_line_id=order_line.id, order_line_id=order_line.id,
@@ -1710,7 +1781,7 @@ async def test_workflow_run_comparison_endpoint_finds_shadow_output_in_step_file
authoritative_path = render_dir / "authoritative.png" authoritative_path = render_dir / "authoritative.png"
Image.new("RGBA", (12, 12), (32, 160, 255, 255)).save(authoritative_path) Image.new("RGBA", (12, 12), (32, 160, 255, 255)).save(authoritative_path)
step_shadow_dir = Path("/app/uploads/step_files/renders") step_shadow_dir = Path(settings.upload_dir) / "step_files" / "renders" / str(order_line.id)
step_shadow_dir.mkdir(parents=True, exist_ok=True) step_shadow_dir.mkdir(parents=True, exist_ok=True)
shadow_path = step_shadow_dir / f"line_{order_line.id}_shadow-{str(workflow_run.id)[:8]}.png" shadow_path = step_shadow_dir / f"line_{order_line.id}_shadow-{str(workflow_run.id)[:8]}.png"
Image.new("RGBA", (12, 12), (32, 160, 255, 255)).save(shadow_path) Image.new("RGBA", (12, 12), (32, 160, 255, 255)).save(shadow_path)
@@ -1729,3 +1800,52 @@ async def test_workflow_run_comparison_endpoint_finds_shadow_output_in_step_file
assert body["status"] == "matched" assert body["status"] == "matched"
assert body["observer_output"]["exists"] is True assert body["observer_output"]["exists"] is True
assert body["observer_output"]["path"] == str(shadow_path) assert body["observer_output"]["path"] == str(shadow_path)
@pytest.mark.asyncio
async def test_workflow_run_comparison_endpoint_treats_near_zero_visual_delta_as_match(
client,
db,
admin_user,
auth_headers,
tmp_path,
):
order_line = await _seed_renderable_order_line(db, admin_user, tmp_path)
workflow_run = WorkflowRun(
order_line_id=order_line.id,
execution_mode="shadow",
status="completed",
)
db.add(workflow_run)
await db.flush()
render_dir = tmp_path / "comparison-near-zero" / str(order_line.id)
render_dir.mkdir(parents=True, exist_ok=True)
authoritative_path = render_dir / "authoritative.png"
shadow_path = render_dir / f"line_{order_line.id}_shadow-{str(workflow_run.id)[:8]}.png"
Image.new("RGBA", (1024, 1024), (106, 106, 106, 255)).save(authoritative_path)
Image.new("RGBA", (1024, 1024), (106, 106, 106, 255)).save(shadow_path)
with Image.open(shadow_path) as image:
image.putpixel((444, 137), (106, 106, 107, 255))
image.putpixel((651, 142), (105, 106, 106, 255))
image.save(shadow_path)
order_line.result_path = str(authoritative_path)
order_line.render_status = "completed"
await db.commit()
response = await client.get(
f"/api/workflows/runs/{workflow_run.id}/comparison",
headers=auth_headers,
)
assert response.status_code == 200
body = response.json()
assert body["status"] == "matched"
assert body["exact_match"] is False
assert body["dimensions_match"] is True
assert body["mean_pixel_delta"] is not None
assert body["mean_pixel_delta"] <= 1e-6
assert "pass threshold" in body["summary"]
@@ -6,10 +6,10 @@ from pathlib import Path
from types import SimpleNamespace from types import SimpleNamespace
import pytest import pytest
from sqlalchemy import create_engine, select, text from sqlalchemy import select, text
from sqlalchemy.orm import Session, selectinload from sqlalchemy.orm import Session, selectinload
from app.database import Base from app.core.render_paths import build_order_line_export_path, build_order_line_step_render_path
from app.core.process_steps import StepName from app.core.process_steps import StepName
from app.domains.auth.models import User, UserRole from app.domains.auth.models import User, UserRole
from app.domains.materials.models import AssetLibrary from app.domains.materials.models import AssetLibrary
@@ -27,25 +27,13 @@ from app.domains.rendering.workflow_graph_runtime import (
from app.domains.rendering.workflow_run_service import create_workflow_run from app.domains.rendering.workflow_run_service import create_workflow_run
from app.domains.rendering.workflow_runtime_services import OrderLineRenderSetupResult from app.domains.rendering.workflow_runtime_services import OrderLineRenderSetupResult
import app.models # noqa: F401 from tests.db_test_utils import sync_test_session as sync_test_session_ctx
from tests.db_test_utils import reset_public_schema_sync, resolve_test_db_url
@pytest.fixture @pytest.fixture
def sync_session(): def sync_session():
engine = create_engine(resolve_test_db_url(async_driver=False)) with sync_test_session_ctx() as session:
with engine.begin() as conn:
reset_public_schema_sync(conn)
Base.metadata.create_all(conn)
session = Session(engine)
try:
yield session yield session
finally:
session.close()
with engine.begin() as conn:
reset_public_schema_sync(conn)
engine.dispose()
def _seed_renderable_order_line( def _seed_renderable_order_line(
@@ -137,6 +125,19 @@ def _seed_renderable_order_line(
target_collection="Product", target_collection="Product",
material_replace_enabled=True, material_replace_enabled=True,
lighting_only=False, lighting_only=False,
workflow_input_schema=[
{
"key": "studio_variant",
"label": "Studio Variant",
"type": "select",
"section": "Template Inputs",
"default": "default",
"options": [
{"value": "default", "label": "Default"},
{"value": "warm", "label": "Warm"},
],
}
],
is_active=True, is_active=True,
output_types=[output_type], output_types=[output_type],
) )
@@ -329,6 +330,193 @@ def test_execute_graph_workflow_routes_cad_thumbnail_save_using_upstream_threejs
assert node_results["save"].output["predicted_output_path"].endswith(f"{cad_file.id}.png") assert node_results["save"].output["predicted_output_path"].endswith(f"{cad_file.id}.png")
def test_execute_graph_workflow_serializes_template_schema_and_template_inputs(
sync_session,
tmp_path,
monkeypatch,
):
line = _seed_renderable_order_line(sync_session, tmp_path)
template = sync_session.execute(select(RenderTemplate)).unique().scalar_one()
monkeypatch.setattr(
"app.domains.rendering.workflow_runtime_services.resolve_material_map",
lambda raw_map: {key: f"resolved:{value}" for key, value in raw_map.items()},
)
workflow_context = prepare_workflow_context(
{
"version": 1,
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{
"id": "template",
"step": "resolve_template",
"params": {
"template_id_override": str(template.id),
"template_input__studio_variant": "warm",
},
},
],
"edges": [
{"from": "setup", "to": "template"},
],
},
context_id=str(line.id),
execution_mode="graph",
)
run = create_workflow_run(
sync_session,
workflow_def_id=None,
order_line_id=line.id,
workflow_context=workflow_context,
)
dispatch_result = execute_graph_workflow(sync_session, workflow_context)
sync_session.commit()
refreshed_run = sync_session.execute(
select(WorkflowRun)
.where(WorkflowRun.id == run.id)
.options(selectinload(WorkflowRun.node_results))
).scalar_one()
node_results = {node_result.node_name: node_result for node_result in refreshed_run.node_results}
assert dispatch_result.task_ids == []
assert node_results["template"].status == "completed"
assert node_results["template"].output["workflow_input_schema"] == template.workflow_input_schema
assert node_results["template"].output["template_inputs"] == {"studio_variant": "warm"}
assert node_results["template"].output["template_input_count"] == 1
def test_execute_graph_workflow_passes_template_inputs_to_still_task(
sync_session,
tmp_path,
monkeypatch,
):
line = _seed_renderable_order_line(sync_session, tmp_path)
template = sync_session.execute(select(RenderTemplate)).unique().scalar_one()
send_calls: list[tuple[str, list[str], dict[str, object]]] = []
def _fake_send_task(task_name: str, args: list[str], kwargs: dict[str, object]):
send_calls.append((task_name, args, kwargs))
return SimpleNamespace(id="task-still-template-inputs")
monkeypatch.setattr(
"app.tasks.celery_app.celery_app.send_task",
_fake_send_task,
)
workflow_context = prepare_workflow_context(
{
"version": 1,
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{
"id": "template",
"step": "resolve_template",
"params": {
"template_id_override": str(template.id),
"template_input__studio_variant": "warm",
},
},
{"id": "render", "step": "blender_still", "params": {}},
],
"edges": [
{"from": "setup", "to": "template"},
{"from": "template", "to": "render"},
],
},
context_id=str(line.id),
execution_mode="graph",
)
create_workflow_run(
sync_session,
workflow_def_id=None,
order_line_id=line.id,
workflow_context=workflow_context,
)
dispatch_result = execute_graph_workflow(sync_session, workflow_context)
sync_session.commit()
assert dispatch_result.task_ids == ["task-still-template-inputs"]
assert len(send_calls) == 1
assert send_calls[0][0] == "app.domains.rendering.tasks.render_order_line_still_task"
assert send_calls[0][1] == [str(line.id)]
assert send_calls[0][2]["template_inputs"] == {"studio_variant": "warm"}
def test_execute_graph_workflow_passes_template_inputs_and_duration_to_turntable_task(
sync_session,
tmp_path,
monkeypatch,
):
line = _seed_renderable_order_line(sync_session, tmp_path)
template = sync_session.execute(select(RenderTemplate)).unique().scalar_one()
send_calls: list[tuple[str, list[str], dict[str, object]]] = []
def _fake_send_task(task_name: str, args: list[str], kwargs: dict[str, object]):
send_calls.append((task_name, args, kwargs))
return SimpleNamespace(id="task-turntable-template-inputs")
monkeypatch.setattr(
"app.tasks.celery_app.celery_app.send_task",
_fake_send_task,
)
workflow_context = prepare_workflow_context(
{
"version": 1,
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{
"id": "template",
"step": "resolve_template",
"params": {
"template_id_override": str(template.id),
"template_input__studio_variant": "warm",
},
},
{
"id": "render",
"step": "blender_turntable",
"params": {
"fps": 12,
"duration_s": 7,
"frame_count": 999,
},
},
],
"edges": [
{"from": "setup", "to": "template"},
{"from": "template", "to": "render"},
],
},
context_id=str(line.id),
execution_mode="graph",
)
create_workflow_run(
sync_session,
workflow_def_id=None,
order_line_id=line.id,
workflow_context=workflow_context,
)
dispatch_result = execute_graph_workflow(sync_session, workflow_context)
sync_session.commit()
assert dispatch_result.task_ids == ["task-turntable-template-inputs"]
assert len(send_calls) == 1
assert send_calls[0][0] == "app.domains.rendering.tasks.render_turntable_task"
assert send_calls[0][1] == [str(line.id)]
assert send_calls[0][2]["template_inputs"] == {"studio_variant": "warm"}
assert send_calls[0][2]["duration_s"] == 7.0
assert send_calls[0][2]["fps"] == 12
assert send_calls[0][2]["frame_count"] == 84
def test_execute_graph_workflow_completes_cad_bridge_only_nodes_without_queueing( def test_execute_graph_workflow_completes_cad_bridge_only_nodes_without_queueing(
sync_session, sync_session,
tmp_path, tmp_path,
@@ -660,6 +848,108 @@ def test_build_task_kwargs_autoscales_default_samples_via_shared_render_invocati
assert kwargs["samples"] == 64 assert kwargs["samples"] == 64
def test_build_task_kwargs_ignores_authoritative_still_overrides_without_opt_in(
tmp_path,
monkeypatch,
):
step_path = tmp_path / "cad" / "bearing.step"
step_path.parent.mkdir(parents=True, exist_ok=True)
step_path.write_text("STEP", encoding="utf-8")
output_type = OutputType(
id=uuid.uuid4(),
name="Still Preview",
renderer="blender",
output_format="png",
render_settings={
"width": 2048,
"height": 1536,
"engine": "cycles",
"samples": 128,
"noise_threshold": "0.05",
},
transparent_bg=True,
cycles_device="cuda",
)
cad_file = CadFile(
id=uuid.uuid4(),
original_name="bearing.step",
stored_path=str(step_path),
file_hash="hash-graph-2",
parsed_objects={"objects": ["InnerRing", "OuterRing"]},
)
product = Product(
id=uuid.uuid4(),
pim_id="P-graph-2",
name="Bearing G2",
category_key="bearings",
cad_file_id=cad_file.id,
cad_file=cad_file,
)
line = OrderLine(
id=uuid.uuid4(),
order_id=uuid.uuid4(),
product_id=product.id,
product=product,
output_type_id=output_type.id,
output_type=output_type,
)
state = WorkflowGraphState(
setup=OrderLineRenderSetupResult(
status="ready",
order_line=line,
cad_file=cad_file,
part_colors={"InnerRing": "Steel raw"},
)
)
workflow_context = SimpleNamespace(
workflow_run_id=uuid.uuid4(),
execution_mode="graph",
ordered_nodes=[],
edges=[],
)
node = SimpleNamespace(
id="render",
step=StepName.BLENDER_STILL,
params={
"width": 1024,
"height": 768,
"samples": 16,
"render_engine": "eevee",
"transparent_bg": False,
"cycles_device": "cpu",
"noise_threshold": "0.2",
},
)
monkeypatch.setattr(
"app.domains.rendering.workflow_graph_runtime.resolve_render_position_context",
lambda _session, _line: SimpleNamespace(
rotation_x=0.0,
rotation_y=0.0,
rotation_z=0.0,
focal_length_mm=None,
sensor_width_mm=None,
),
)
kwargs = _build_task_kwargs(
session=object(),
workflow_context=workflow_context,
state=state,
node=node,
)
assert kwargs["width"] == 2048
assert kwargs["height"] == 1536
assert kwargs["engine"] == "cycles"
assert kwargs["samples"] == 128
assert kwargs["transparent_bg"] is True
assert kwargs["cycles_device"] == "cuda"
assert kwargs["noise_threshold"] == "0.05"
assert "render_engine" not in kwargs
def test_execute_graph_workflow_respects_custom_render_settings_opt_in_for_still_task( def test_execute_graph_workflow_respects_custom_render_settings_opt_in_for_still_task(
sync_session, sync_session,
tmp_path, tmp_path,
@@ -838,6 +1128,221 @@ def test_execute_graph_workflow_preserves_turntable_timing_without_custom_render
assert kwargs["output_name_suffix"].startswith("shadow-") assert kwargs["output_name_suffix"].startswith("shadow-")
def test_execute_graph_workflow_respects_custom_render_settings_opt_in_for_turntable_task(
sync_session,
tmp_path,
monkeypatch,
):
line = _seed_renderable_order_line(sync_session, tmp_path)
assert line.output_type is not None
line.output_type.render_settings = {
"width": 2048,
"height": 2048,
"engine": "cycles",
"samples": 128,
"fps": 30,
"frame_count": 180,
}
sync_session.commit()
send_calls: list[tuple[str, list[str], dict[str, object]]] = []
def _fake_send_task(task_name: str, args: list[str], kwargs: dict[str, object]):
send_calls.append((task_name, args, kwargs))
return SimpleNamespace(id="task-custom-turntable")
monkeypatch.setattr(
"app.tasks.celery_app.celery_app.send_task",
_fake_send_task,
)
workflow_context = prepare_workflow_context(
{
"version": 1,
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "template", "step": "resolve_template", "params": {}},
{
"id": "render",
"step": "blender_turntable",
"params": {
"use_custom_render_settings": True,
"width": 1024,
"height": 768,
"samples": 32,
"render_engine": "eevee",
"fps": 12,
"duration_s": 6,
},
},
],
"edges": [
{"from": "setup", "to": "template"},
{"from": "template", "to": "render"},
],
},
context_id=str(line.id),
execution_mode="graph",
)
create_workflow_run(
sync_session,
workflow_def_id=None,
order_line_id=line.id,
workflow_context=workflow_context,
)
dispatch_result = execute_graph_workflow(sync_session, workflow_context)
sync_session.commit()
assert dispatch_result.task_ids == ["task-custom-turntable"]
assert len(send_calls) == 1
task_name, args, kwargs = send_calls[0]
assert task_name == "app.domains.rendering.tasks.render_turntable_task"
assert args == [str(line.id)]
assert kwargs["width"] == 1024
assert kwargs["height"] == 768
assert kwargs["samples"] == 32
assert kwargs["render_engine"] == "eevee"
assert kwargs["engine"] == "cycles"
assert kwargs["fps"] == 12
assert kwargs["duration_s"] == 6.0
assert kwargs["frame_count"] == 72
def test_execute_graph_workflow_preserves_template_camera_orbit_without_custom_render_settings(
sync_session,
tmp_path,
monkeypatch,
):
line = _seed_renderable_order_line(sync_session, tmp_path)
template = sync_session.execute(select(RenderTemplate)).unique().scalar_one()
template.camera_orbit = False
assert line.output_type is not None
line.output_type.render_settings = {
"width": 2048,
"height": 2048,
"engine": "cycles",
"samples": 128,
"fps": 30,
"frame_count": 180,
}
sync_session.commit()
send_calls: list[tuple[str, list[str], dict[str, object]]] = []
def _fake_send_task(task_name: str, args: list[str], kwargs: dict[str, object]):
send_calls.append((task_name, args, kwargs))
return SimpleNamespace(id="task-turntable-camera-orbit")
monkeypatch.setattr(
"app.tasks.celery_app.celery_app.send_task",
_fake_send_task,
)
workflow_context = prepare_workflow_context(
{
"version": 1,
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "template", "step": "resolve_template", "params": {}},
{
"id": "render",
"step": "blender_turntable",
"params": {
"fps": 24,
"frame_count": 120,
},
},
],
"edges": [
{"from": "setup", "to": "template"},
{"from": "template", "to": "render"},
],
},
context_id=str(line.id),
execution_mode="graph",
)
create_workflow_run(
sync_session,
workflow_def_id=None,
order_line_id=line.id,
workflow_context=workflow_context,
)
dispatch_result = execute_graph_workflow(sync_session, workflow_context)
sync_session.commit()
assert dispatch_result.task_ids == ["task-turntable-camera-orbit"]
assert len(send_calls) == 1
assert send_calls[0][2]["camera_orbit"] is False
def test_execute_graph_workflow_serializes_template_override_modes(
sync_session,
tmp_path,
monkeypatch,
):
line = _seed_renderable_order_line(sync_session, tmp_path)
template = sync_session.execute(select(RenderTemplate)).unique().scalar_one()
template.target_collection = "TemplateCollection"
template.material_replace_enabled = False
template.lighting_only = False
template.shadow_catcher_enabled = False
template.camera_orbit = True
sync_session.commit()
workflow_context = prepare_workflow_context(
{
"version": 1,
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{
"id": "template",
"step": "resolve_template",
"params": {
"target_collection": "NodeCollection",
"material_library_path": "/libraries/materials.blend",
"material_replace_mode": "enabled",
"lighting_only_mode": "enabled",
"shadow_catcher_mode": "enabled",
"camera_orbit_mode": "disabled",
},
},
],
"edges": [
{"from": "setup", "to": "template"},
],
},
context_id=str(line.id),
execution_mode="graph",
)
run = create_workflow_run(
sync_session,
workflow_def_id=None,
order_line_id=line.id,
workflow_context=workflow_context,
)
dispatch_result = execute_graph_workflow(sync_session, workflow_context)
sync_session.commit()
refreshed_run = sync_session.execute(
select(WorkflowRun)
.where(WorkflowRun.id == run.id)
.options(selectinload(WorkflowRun.node_results))
).scalar_one()
node_results = {node_result.node_name: node_result for node_result in refreshed_run.node_results}
assert dispatch_result.task_ids == []
assert node_results["template"].status == "completed"
assert node_results["template"].output["target_collection"] == "NodeCollection"
assert node_results["template"].output["use_materials"] is True
assert node_results["template"].output["lighting_only"] is True
assert node_results["template"].output["shadow_catcher"] is True
assert node_results["template"].output["camera_orbit"] is False
def test_execute_graph_workflow_retries_bridge_node_and_persists_attempt_metadata( def test_execute_graph_workflow_retries_bridge_node_and_persists_attempt_metadata(
sync_session, sync_session,
monkeypatch, monkeypatch,
@@ -1010,16 +1515,22 @@ def test_execute_graph_workflow_supports_output_save_bridge_node(
assert send_calls[0][2]["graph_authoritative_output_enabled"] is True assert send_calls[0][2]["graph_authoritative_output_enabled"] is True
assert send_calls[0][2]["graph_output_node_ids"] == ["output"] assert send_calls[0][2]["graph_output_node_ids"] == ["output"]
assert node_results["render"].status == "queued" assert node_results["render"].status == "queued"
assert node_results["output"].status == "completed" assert node_results["output"].status == "pending"
assert node_results["output"].output["publication_mode"] == "awaiting_graph_authoritative_save" assert node_results["output"].output["publication_mode"] == "awaiting_graph_authoritative_save"
assert node_results["output"].output["order_line_id"] == str(line.id) assert node_results["output"].output["order_line_id"] == str(line.id)
assert node_results["output"].output["handoff_state"] == "armed"
assert node_results["output"].output["handoff_node_ids"] == ["render"]
assert node_results["output"].output["artifact_count"] == 1 assert node_results["output"].output["artifact_count"] == 1
assert node_results["output"].output["upstream_artifacts"] == [ assert node_results["output"].output["upstream_artifacts"] == [
{ {
"node_id": "render", "node_id": "render",
"artifact_role": "render_output", "artifact_role": "render_output",
"predicted_output_path": str( "predicted_output_path": str(
tmp_path / "cad" / "renders" / f"line_{line.id}.png" build_order_line_step_render_path(
line.product.cad_file.stored_path,
str(line.id),
f"line_{line.id}.png",
)
), ),
"predicted_asset_type": "still", "predicted_asset_type": "still",
"publish_asset_enabled": False, "publish_asset_enabled": False,
@@ -1086,14 +1597,16 @@ def test_execute_graph_workflow_arms_output_save_handoff_for_export_blend(
assert send_calls[0][2]["graph_authoritative_output_enabled"] is True assert send_calls[0][2]["graph_authoritative_output_enabled"] is True
assert send_calls[0][2]["graph_output_node_ids"] == ["output"] assert send_calls[0][2]["graph_output_node_ids"] == ["output"]
assert node_results["blend"].status == "queued" assert node_results["blend"].status == "queued"
assert node_results["output"].status == "completed" assert node_results["output"].status == "pending"
assert node_results["output"].output["publication_mode"] == "awaiting_graph_authoritative_save" assert node_results["output"].output["publication_mode"] == "awaiting_graph_authoritative_save"
assert node_results["output"].output["handoff_state"] == "armed"
assert node_results["output"].output["handoff_node_ids"] == ["blend"]
assert node_results["output"].output["artifact_count"] == 1 assert node_results["output"].output["artifact_count"] == 1
assert node_results["output"].output["upstream_artifacts"] == [ assert node_results["output"].output["upstream_artifacts"] == [
{ {
"node_id": "blend", "node_id": "blend",
"artifact_role": "blend_export", "artifact_role": "blend_export",
"predicted_output_path": str(tmp_path / "cad" / "bearing_production.blend"), "predicted_output_path": str(build_order_line_export_path(str(line.id), "bearing_production.blend")),
"predicted_asset_type": "blend_production", "predicted_asset_type": "blend_production",
"publish_asset_enabled": False, "publish_asset_enabled": False,
"graph_authoritative_output_enabled": True, "graph_authoritative_output_enabled": True,
@@ -1160,14 +1673,18 @@ def test_execute_graph_workflow_arms_output_save_handoff_for_turntable(
assert send_calls[0][2]["graph_output_node_ids"] == ["output"] assert send_calls[0][2]["graph_output_node_ids"] == ["output"]
assert send_calls[0][2]["workflow_node_id"] == "turntable" assert send_calls[0][2]["workflow_node_id"] == "turntable"
assert node_results["turntable"].status == "queued" assert node_results["turntable"].status == "queued"
assert node_results["output"].status == "completed" assert node_results["output"].status == "pending"
assert node_results["output"].output["publication_mode"] == "awaiting_graph_authoritative_save" assert node_results["output"].output["publication_mode"] == "awaiting_graph_authoritative_save"
assert node_results["output"].output["handoff_state"] == "armed"
assert node_results["output"].output["handoff_node_ids"] == ["turntable"]
assert node_results["output"].output["artifact_count"] == 1 assert node_results["output"].output["artifact_count"] == 1
assert node_results["output"].output["upstream_artifacts"] == [ assert node_results["output"].output["upstream_artifacts"] == [
{ {
"node_id": "turntable", "node_id": "turntable",
"artifact_role": "turntable_output", "artifact_role": "turntable_output",
"predicted_output_path": str(tmp_path / "cad" / "renders" / "turntable.mp4"), "predicted_output_path": str(
build_order_line_step_render_path(line.product.cad_file.stored_path, str(line.id), "turntable.mp4")
),
"predicted_asset_type": "turntable", "predicted_asset_type": "turntable",
"publish_asset_enabled": False, "publish_asset_enabled": False,
"graph_authoritative_output_enabled": True, "graph_authoritative_output_enabled": True,
@@ -1178,6 +1695,150 @@ def test_execute_graph_workflow_arms_output_save_handoff_for_turntable(
] ]
def test_execute_graph_workflow_arms_shadow_output_save_handoff_for_turntable(
sync_session,
tmp_path,
monkeypatch,
):
line = _seed_renderable_order_line(sync_session, tmp_path)
send_calls: list[tuple[str, list[str], dict[str, object]]] = []
monkeypatch.setattr(
"app.tasks.celery_app.celery_app.send_task",
lambda task_name, args, kwargs: send_calls.append((task_name, args, kwargs))
or SimpleNamespace(id="task-shadow-turntable-output-save"),
)
workflow_context = prepare_workflow_context(
{
"version": 1,
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "turntable", "step": "blender_turntable", "params": {"fps": 24, "frame_count": 96}},
{"id": "output", "step": "output_save", "params": {}},
],
"edges": [
{"from": "setup", "to": "turntable"},
{"from": "turntable", "to": "output"},
],
},
context_id=str(line.id),
execution_mode="shadow",
)
run = create_workflow_run(
sync_session,
workflow_def_id=None,
order_line_id=line.id,
workflow_context=workflow_context,
)
dispatch_result = execute_graph_workflow(sync_session, workflow_context)
sync_session.commit()
refreshed_run = sync_session.execute(
select(WorkflowRun)
.where(WorkflowRun.id == run.id)
.options(selectinload(WorkflowRun.node_results))
).scalar_one()
node_results = {node_result.node_name: node_result for node_result in refreshed_run.node_results}
assert dispatch_result.task_ids == ["task-shadow-turntable-output-save"]
assert len(send_calls) == 1
assert send_calls[0][0] == "app.domains.rendering.tasks.render_turntable_task"
assert send_calls[0][1] == [str(line.id)]
assert send_calls[0][2]["publish_asset_enabled"] is False
assert send_calls[0][2]["observer_output_enabled"] is True
assert send_calls[0][2]["graph_output_node_ids"] == ["output"]
assert "graph_authoritative_output_enabled" not in send_calls[0][2]
assert node_results["turntable"].status == "queued"
assert node_results["output"].status == "pending"
assert node_results["output"].output["publication_mode"] == "shadow_observer_only"
assert node_results["output"].output["handoff_state"] == "armed"
assert node_results["output"].output["handoff_node_ids"] == ["turntable"]
assert node_results["output"].output["artifact_count"] == 1
assert node_results["output"].output["upstream_artifacts"] == [
{
"node_id": "turntable",
"artifact_role": "turntable_output",
"predicted_output_path": str(
build_order_line_step_render_path(
line.product.cad_file.stored_path,
str(line.id),
f"turntable_shadow-{str(run.id)[:8]}.mp4",
)
),
"predicted_asset_type": "turntable",
"publish_asset_enabled": False,
"graph_authoritative_output_enabled": False,
"graph_output_node_ids": ["output"],
"notify_handoff_enabled": False,
"task_id": "task-shadow-turntable-output-save",
}
]
def test_execute_graph_workflow_routes_shadow_render_tasks_to_light_queue_when_available(
sync_session,
tmp_path,
monkeypatch,
):
line = _seed_renderable_order_line(sync_session, tmp_path)
send_calls: list[tuple[str, list[str], dict[str, object], dict[str, object]]] = []
monkeypatch.setattr(
"app.domains.rendering.workflow_graph_runtime._inspect_active_worker_queues",
lambda timeout=1.0: {"asset_pipeline", "asset_pipeline_light"},
)
def _fake_send_task(task_name: str, args: list[str], kwargs: dict[str, object], **task_options):
send_calls.append((task_name, args, kwargs, task_options))
return SimpleNamespace(id="task-shadow-light-queue")
monkeypatch.setattr(
"app.tasks.celery_app.celery_app.send_task",
_fake_send_task,
)
workflow_context = prepare_workflow_context(
{
"version": 1,
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "turntable", "step": "blender_turntable", "params": {"fps": 24, "frame_count": 96}},
{"id": "output", "step": "output_save", "params": {}},
],
"edges": [
{"from": "setup", "to": "turntable"},
{"from": "turntable", "to": "output"},
],
},
context_id=str(line.id),
execution_mode="shadow",
)
run = create_workflow_run(
sync_session,
workflow_def_id=None,
order_line_id=line.id,
workflow_context=workflow_context,
)
dispatch_result = execute_graph_workflow(sync_session, workflow_context)
sync_session.commit()
refreshed_run = sync_session.execute(
select(WorkflowRun)
.where(WorkflowRun.id == run.id)
.options(selectinload(WorkflowRun.node_results))
).scalar_one()
node_results = {node_result.node_name: node_result for node_result in refreshed_run.node_results}
assert dispatch_result.task_ids == ["task-shadow-light-queue"]
assert len(send_calls) == 1
assert send_calls[0][0] == "app.domains.rendering.tasks.render_turntable_task"
assert send_calls[0][3]["queue"] == "asset_pipeline_light"
assert node_results["turntable"].output["task_queue"] == "asset_pipeline_light"
def test_execute_graph_workflow_routes_output_save_handoffs_per_connected_branch( def test_execute_graph_workflow_routes_output_save_handoffs_per_connected_branch(
sync_session, sync_session,
tmp_path, tmp_path,
@@ -1240,12 +1901,21 @@ def test_execute_graph_workflow_routes_output_save_handoffs_per_connected_branch
assert send_calls[0][2]["graph_output_node_ids"] == ["still_output"] assert send_calls[0][2]["graph_output_node_ids"] == ["still_output"]
assert send_calls[1][0] == "app.domains.rendering.tasks.render_turntable_task" assert send_calls[1][0] == "app.domains.rendering.tasks.render_turntable_task"
assert send_calls[1][2]["graph_output_node_ids"] == ["turntable_output"] assert send_calls[1][2]["graph_output_node_ids"] == ["turntable_output"]
assert node_results["still_output"].status == "pending"
assert node_results["still_output"].output["handoff_state"] == "armed"
assert node_results["still_output"].output["handoff_node_ids"] == ["still"]
assert node_results["still_output"].output["artifact_count"] == 1 assert node_results["still_output"].output["artifact_count"] == 1
assert node_results["still_output"].output["upstream_artifacts"] == [ assert node_results["still_output"].output["upstream_artifacts"] == [
{ {
"node_id": "still", "node_id": "still",
"artifact_role": "render_output", "artifact_role": "render_output",
"predicted_output_path": str(tmp_path / "cad" / "renders" / f"line_{line.id}.png"), "predicted_output_path": str(
build_order_line_step_render_path(
line.product.cad_file.stored_path,
str(line.id),
f"line_{line.id}.png",
)
),
"predicted_asset_type": "still", "predicted_asset_type": "still",
"publish_asset_enabled": False, "publish_asset_enabled": False,
"graph_authoritative_output_enabled": True, "graph_authoritative_output_enabled": True,
@@ -1254,12 +1924,17 @@ def test_execute_graph_workflow_routes_output_save_handoffs_per_connected_branch
"task_id": "task-branch-1", "task_id": "task-branch-1",
} }
] ]
assert node_results["turntable_output"].status == "pending"
assert node_results["turntable_output"].output["handoff_state"] == "armed"
assert node_results["turntable_output"].output["handoff_node_ids"] == ["turntable"]
assert node_results["turntable_output"].output["artifact_count"] == 1 assert node_results["turntable_output"].output["artifact_count"] == 1
assert node_results["turntable_output"].output["upstream_artifacts"] == [ assert node_results["turntable_output"].output["upstream_artifacts"] == [
{ {
"node_id": "turntable", "node_id": "turntable",
"artifact_role": "turntable_output", "artifact_role": "turntable_output",
"predicted_output_path": str(tmp_path / "cad" / "renders" / "turntable.mp4"), "predicted_output_path": str(
build_order_line_step_render_path(line.product.cad_file.stored_path, str(line.id), "turntable.mp4")
),
"predicted_asset_type": "turntable", "predicted_asset_type": "turntable",
"publish_asset_enabled": False, "publish_asset_enabled": False,
"graph_authoritative_output_enabled": True, "graph_authoritative_output_enabled": True,
@@ -1379,9 +2054,10 @@ def test_execute_graph_workflow_arms_notify_handoff_for_graph_render_task(
assert send_calls[0][2]["emit_legacy_notifications"] is True assert send_calls[0][2]["emit_legacy_notifications"] is True
assert send_calls[0][2]["graph_notify_node_ids"] == ["notify"] assert send_calls[0][2]["graph_notify_node_ids"] == ["notify"]
assert node_results["render"].output["graph_notify_node_ids"] == ["notify"] assert node_results["render"].output["graph_notify_node_ids"] == ["notify"]
assert node_results["notify"].status == "completed" assert node_results["notify"].status == "pending"
assert node_results["notify"].output["notification_mode"] == "deferred_to_render_task" assert node_results["notify"].output["notification_mode"] == "deferred_to_render_task"
assert node_results["notify"].output["armed_node_ids"] == ["render"] assert node_results["notify"].output["armed_node_ids"] == ["render"]
assert node_results["notify"].output["handoff_state"] == "armed"
def test_execute_graph_workflow_routes_notify_handoffs_per_connected_branch( def test_execute_graph_workflow_routes_notify_handoffs_per_connected_branch(
@@ -1451,10 +2127,14 @@ def test_execute_graph_workflow_routes_notify_handoffs_per_connected_branch(
assert send_calls[1][2]["graph_notify_node_ids"] == ["turntable_notify"] assert send_calls[1][2]["graph_notify_node_ids"] == ["turntable_notify"]
assert node_results["still"].output["graph_notify_node_ids"] == ["still_notify"] assert node_results["still"].output["graph_notify_node_ids"] == ["still_notify"]
assert node_results["turntable"].output["graph_notify_node_ids"] == ["turntable_notify"] assert node_results["turntable"].output["graph_notify_node_ids"] == ["turntable_notify"]
assert node_results["still_notify"].status == "completed" assert node_results["still_notify"].status == "pending"
assert node_results["still_notify"].output["notification_mode"] == "deferred_to_render_task"
assert node_results["still_notify"].output["armed_node_ids"] == ["still"] assert node_results["still_notify"].output["armed_node_ids"] == ["still"]
assert node_results["turntable_notify"].status == "completed" assert node_results["still_notify"].output["handoff_state"] == "armed"
assert node_results["turntable_notify"].status == "pending"
assert node_results["turntable_notify"].output["notification_mode"] == "deferred_to_render_task"
assert node_results["turntable_notify"].output["armed_node_ids"] == ["turntable"] assert node_results["turntable_notify"].output["armed_node_ids"] == ["turntable"]
assert node_results["turntable_notify"].output["handoff_state"] == "armed"
def test_execute_graph_workflow_suppresses_notify_node_in_shadow_mode( def test_execute_graph_workflow_suppresses_notify_node_in_shadow_mode(
@@ -1,7 +1,9 @@
import pytest import pytest
from app.core.process_steps import StepName from app.core.process_steps import StepName
from app.domains.rendering.models import WorkflowDefinition from app.domains.rendering.models import OutputType, WorkflowDefinition, WorkflowRun
from app.domains.rendering.workflow_config_utils import build_preset_workflow_config
from app.domains.rendering.workflow_graph_runtime import _STILL_TASK_KEYS, _TURNTABLE_TASK_KEYS
from app.domains.rendering.workflow_node_registry import ( from app.domains.rendering.workflow_node_registry import (
get_node_definition, get_node_definition,
list_node_definitions, list_node_definitions,
@@ -14,11 +16,55 @@ def test_node_registry_covers_all_step_names():
expected_steps = {step.value for step in StepName} expected_steps = {step.value for step in StepName}
assert registered_steps == expected_steps assert registered_steps == expected_steps
assert all(definition.family in {"cad_file", "order_line"} for definition in definitions) assert all(definition.family in {"cad_file", "order_line", "shared"} for definition in definitions)
assert all(definition.module_key for definition in definitions) assert all(definition.module_key for definition in definitions)
assert all(definition.legacy_source for definition in definitions) assert all(definition.legacy_source for definition in definitions)
def test_node_registry_module_keys_are_unique():
definitions = list_node_definitions()
module_keys = [definition.module_key for definition in definitions]
assert len(module_keys) == len(set(module_keys))
def test_node_registry_defaults_match_declared_fields():
definitions = list_node_definitions()
for definition in definitions:
field_keys = {field.key for field in definition.fields}
default_keys = set(definition.defaults)
assert default_keys <= field_keys
def test_node_registry_contracts_have_valid_shape():
definitions = list_node_definitions()
for definition in definitions:
input_context = definition.input_contract.get("context")
output_context = definition.output_contract.get("context")
if definition.family == "shared":
assert input_context is None
assert output_context is None
else:
assert input_context == definition.family
assert output_context == definition.family
required = definition.input_contract.get("requires", [])
required_any = definition.input_contract.get("requires_any", [])
provides = definition.output_contract.get("provides", [])
assert len(required) == len(set(required))
assert len(required_any) == len(set(required_any))
assert len(provides) == len(set(provides))
assert len(definition.artifact_roles_consumed) == len(set(definition.artifact_roles_consumed))
assert len(definition.artifact_roles_produced) == len(set(definition.artifact_roles_produced))
field_keys = [field.key for field in definition.fields]
assert len(field_keys) == len(set(field_keys))
def test_turntable_node_definition_exposes_expected_schema(): def test_turntable_node_definition_exposes_expected_schema():
definition = get_node_definition(StepName.BLENDER_TURNTABLE) definition = get_node_definition(StepName.BLENDER_TURNTABLE)
@@ -27,7 +73,11 @@ def test_turntable_node_definition_exposes_expected_schema():
assert definition.module_key == "render.production.turntable" assert definition.module_key == "render.production.turntable"
assert definition.node_type == "renderFramesNode" assert definition.node_type == "renderFramesNode"
assert definition.defaults["fps"] == 24 assert definition.defaults["fps"] == 24
assert definition.defaults["frame_count"] == 120
assert definition.defaults["duration_s"] == 5 assert definition.defaults["duration_s"] == 5
assert definition.defaults["turntable_degrees"] == 360
assert definition.defaults["turntable_axis"] == "world_z"
assert definition.defaults["camera_orbit"] is True
assert definition.input_contract["context"] == "order_line" assert definition.input_contract["context"] == "order_line"
assert definition.output_contract["provides"] == ["rendered_frames", "rendered_video"] assert definition.output_contract["provides"] == ["rendered_frames", "rendered_video"]
assert "material_assignments" in definition.artifact_roles_consumed assert "material_assignments" in definition.artifact_roles_consumed
@@ -55,6 +105,22 @@ def test_turntable_node_definition_exposes_expected_schema():
} }
def test_graph_render_node_fields_are_supported_by_runtime_dispatch():
still_definition = get_node_definition(StepName.BLENDER_STILL)
turntable_definition = get_node_definition(StepName.BLENDER_TURNTABLE)
assert still_definition is not None
assert turntable_definition is not None
still_runtime_fields = {field.key for field in still_definition.fields if field.key != "use_custom_render_settings"}
turntable_runtime_fields = {
field.key for field in turntable_definition.fields if field.key != "use_custom_render_settings"
}
assert still_runtime_fields <= _STILL_TASK_KEYS
assert turntable_runtime_fields <= _TURNTABLE_TASK_KEYS
def test_order_line_setup_and_template_contracts_expose_runtime_outputs(): def test_order_line_setup_and_template_contracts_expose_runtime_outputs():
setup = get_node_definition(StepName.ORDER_LINE_SETUP) setup = get_node_definition(StepName.ORDER_LINE_SETUP)
template = get_node_definition(StepName.RESOLVE_TEMPLATE) template = get_node_definition(StepName.RESOLVE_TEMPLATE)
@@ -87,12 +153,40 @@ def test_order_line_setup_and_template_contracts_expose_runtime_outputs():
"use_materials", "use_materials",
"override_material", "override_material",
} }
assert {field.key for field in bbox.fields} == {"glb_path"} assert {field.key for field in bbox.fields} == {"glb_path", "source_preference"}
assert bbox.family == "shared"
assert bbox.input_contract == {"requires": ["glb_preview"]}
assert bbox.output_contract == {"provides": ["bbox"]}
assert {field.key for field in template.fields} == {
"template_id_override",
"require_template",
"material_library_path",
"disable_materials",
"target_collection",
"material_replace_mode",
"lighting_only_mode",
"shadow_catcher_mode",
"camera_orbit_mode",
}
assert {field.key for field in get_node_definition(StepName.MATERIAL_MAP_RESOLVE).fields} == {
"disable_materials",
"material_override",
}
assert {field.key for field in get_node_definition(StepName.AUTO_POPULATE_MATERIALS).fields} == {
"persist_updates",
"refresh_material_source",
"include_populated_products",
}
assert output.input_contract["requires"] == ["order_line_context"] assert output.input_contract["requires"] == ["order_line_context"]
assert output.input_contract["requires_any"] == ["rendered_image", "rendered_frames", "rendered_video"] assert output.input_contract["requires_any"] == ["rendered_image", "rendered_frames", "rendered_video"]
assert set(output.output_contract["provides"]) >= {"media_asset", "workflow_result"} assert set(output.output_contract["provides"]) >= {"media_asset", "workflow_result"}
assert {field.key for field in output.fields} == {
"expected_artifact_role",
"require_upstream_artifact",
}
assert export_blend.defaults["output_name_suffix"] == "" assert export_blend.defaults["output_name_suffix"] == ""
assert {field.key for field in export_blend.fields} == {"output_name_suffix"} assert {field.key for field in export_blend.fields} == {"output_name_suffix"}
assert notify.defaults == {"channel": "audit_log", "require_armed_render": False}
assert notify.input_contract["requires"] == ["order_line_context"] assert notify.input_contract["requires"] == ["order_line_context"]
assert notify.input_contract["requires_any"] == [ assert notify.input_contract["requires_any"] == [
"rendered_image", "rendered_image",
@@ -100,6 +194,58 @@ def test_order_line_setup_and_template_contracts_expose_runtime_outputs():
"rendered_video", "rendered_video",
"workflow_result", "workflow_result",
] ]
assert {field.key for field in notify.fields} == {"channel", "require_armed_render"}
def test_cad_and_export_contract_nodes_only_expose_supported_settings():
occ_glb_export = get_node_definition(StepName.OCC_GLB_EXPORT)
thumbnail_save = get_node_definition(StepName.THUMBNAIL_SAVE)
export_blend = get_node_definition(StepName.EXPORT_BLEND)
stl_cache_generate = get_node_definition(StepName.STL_CACHE_GENERATE)
assert occ_glb_export is not None
assert thumbnail_save is not None
assert export_blend is not None
assert stl_cache_generate is not None
assert occ_glb_export.family == "cad_file"
assert occ_glb_export.fields == []
assert occ_glb_export.defaults == {}
assert occ_glb_export.input_contract == {"context": "cad_file", "requires": ["step_path"]}
assert occ_glb_export.output_contract == {"context": "cad_file", "provides": ["glb_preview"]}
assert occ_glb_export.artifact_roles_consumed == ["step_path"]
assert occ_glb_export.artifact_roles_produced == ["glb_preview"]
assert "does not expose per-node overrides yet" in occ_glb_export.description
assert thumbnail_save.family == "cad_file"
assert thumbnail_save.fields == []
assert thumbnail_save.defaults == {}
assert thumbnail_save.input_contract == {"context": "cad_file", "requires": ["rendered_image"]}
assert thumbnail_save.output_contract == {"context": "cad_file", "provides": ["cad_thumbnail_media"]}
assert thumbnail_save.artifact_roles_consumed == ["rendered_image"]
assert thumbnail_save.artifact_roles_produced == ["cad_thumbnail_media"]
assert "connected upstream thumbnail request node" in thumbnail_save.description
assert export_blend.family == "order_line"
assert export_blend.defaults == {"output_name_suffix": ""}
assert {field.key for field in export_blend.fields} == {"output_name_suffix"}
assert export_blend.input_contract == {
"context": "order_line",
"requires": ["order_line_context", "render_template"],
}
assert export_blend.output_contract == {"context": "order_line", "provides": ["blend_asset"]}
assert export_blend.artifact_roles_consumed == ["order_line_context", "render_template"]
assert export_blend.artifact_roles_produced == ["blend_asset"]
assert "Only the optional filename suffix is workflow-configurable today." in export_blend.description
assert stl_cache_generate.family == "cad_file"
assert stl_cache_generate.fields == []
assert stl_cache_generate.defaults == {}
assert stl_cache_generate.input_contract == {"context": "cad_file", "requires": ["step_path"]}
assert stl_cache_generate.output_contract == {"context": "cad_file", "provides": ["stl_cache"]}
assert stl_cache_generate.artifact_roles_consumed == ["step_path"]
assert stl_cache_generate.artifact_roles_produced == ["stl_cache"]
assert "Compatibility node for legacy CAD flows." in stl_cache_generate.description
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -146,6 +292,16 @@ async def test_node_definitions_endpoint_returns_registry(client, auth_headers):
"material_override", "material_override",
} }
blender_turntable = next(
definition for definition in body["definitions"] if definition["step"] == StepName.BLENDER_TURNTABLE.value
)
assert blender_turntable["defaults"]["fps"] == 24
assert blender_turntable["defaults"]["frame_count"] == 120
assert blender_turntable["defaults"]["duration_s"] == 5
assert blender_turntable["defaults"]["turntable_degrees"] == 360
assert blender_turntable["defaults"]["turntable_axis"] == "world_z"
assert blender_turntable["defaults"]["camera_orbit"] is True
glb_bbox = next( glb_bbox = next(
definition for definition in body["definitions"] if definition["step"] == StepName.GLB_BBOX.value definition for definition in body["definitions"] if definition["step"] == StepName.GLB_BBOX.value
) )
@@ -162,7 +318,30 @@ async def test_node_definitions_endpoint_returns_registry(client, auth_headers):
"step": None, "step": None,
"unit": None, "unit": None,
"options": [], "options": [],
} "allow_blank": True,
"max_length": None,
"text_format": "absolute_glb_path",
},
{
"key": "source_preference",
"label": "Source Preference",
"type": "select",
"description": "Prefer a prepared GLB, force STEP fallback, or fail when no GLB artifact is available.",
"section": "Inputs",
"default": "auto",
"min": None,
"max": None,
"step": None,
"unit": None,
"options": [
{"value": "auto", "label": "Auto"},
{"value": "step_only", "label": "STEP Only"},
{"value": "glb_only", "label": "GLB Only"},
],
"allow_blank": True,
"max_length": None,
"text_format": "plain",
},
] ]
@@ -203,6 +382,85 @@ async def test_workflow_crud_roundtrip_preserves_execution_mode(client, auth_hea
assert fetched["config"]["ui"]["execution_mode"] == "shadow" assert fetched["config"]["ui"]["execution_mode"] == "shadow"
@pytest.mark.asyncio
async def test_workflow_crud_exposes_supported_artifact_kinds(client, auth_headers):
create_response = await client.post(
"/api/workflows",
headers=auth_headers,
json={
"name": "Still Workflow Contract",
"config": build_preset_workflow_config("still_graph"),
"is_active": True,
},
)
assert create_response.status_code == 201, create_response.text
created = create_response.json()
assert created["family"] == "order_line"
assert created["supported_artifact_kinds"] == ["still_image"]
get_response = await client.get(f"/api/workflows/{created['id']}", headers=auth_headers)
assert get_response.status_code == 200
fetched = get_response.json()
assert fetched["supported_artifact_kinds"] == ["still_image"]
@pytest.mark.asyncio
async def test_workflow_crud_exposes_rollout_summary(client, db, auth_headers):
workflow = WorkflowDefinition(
name="Shadow Rollout Workflow",
config=build_preset_workflow_config("still_graph") | {
"ui": {
**(build_preset_workflow_config("still_graph").get("ui") or {}),
"execution_mode": "shadow",
}
},
is_active=True,
)
db.add(workflow)
await db.flush()
output_type = OutputType(
name="Shadow Still Output",
workflow_definition_id=workflow.id,
workflow_family="order_line",
artifact_kind="still_image",
workflow_rollout_mode="shadow",
render_backend="celery",
)
db.add(output_type)
await db.flush()
workflow_run = WorkflowRun(
workflow_def_id=workflow.id,
execution_mode="shadow",
status="completed",
)
db.add(workflow_run)
await db.commit()
response = await client.get(f"/api/workflows/{workflow.id}", headers=auth_headers)
assert response.status_code == 200, response.text
body = response.json()
assert body["rollout_summary"]["linked_output_type_count"] == 1
assert body["rollout_summary"]["linked_output_type_names"] == ["Shadow Still Output"]
assert body["rollout_summary"]["linked_output_types"] == [
{
"id": str(output_type.id),
"name": "Shadow Still Output",
"is_active": True,
"artifact_kind": "still_image",
"workflow_rollout_mode": "shadow",
}
]
assert body["rollout_summary"]["rollout_modes"] == ["shadow"]
assert body["rollout_summary"]["has_blocking_contracts"] is False
assert body["rollout_summary"]["latest_shadow_run"]["workflow_run_id"] == str(workflow_run.id)
assert body["rollout_summary"]["latest_shadow_run"]["execution_mode"] == "shadow"
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_admin_backfill_workflows_rewrites_legacy_configs(client, db, auth_headers): async def test_admin_backfill_workflows_rewrites_legacy_configs(client, db, auth_headers):
legacy = WorkflowDefinition( legacy = WorkflowDefinition(
@@ -5,6 +5,7 @@ import uuid
from pathlib import Path from pathlib import Path
import pytest import pytest
from PIL import Image, PngImagePlugin
from sqlalchemy import select, text from sqlalchemy import select, text
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
@@ -15,6 +16,7 @@ from app.domains.orders.models import Order, OrderLine, OrderStatus
from app.domains.products.models import CadFile, Product from app.domains.products.models import CadFile, Product
from app.domains.rendering.models import OutputType, RenderTemplate from app.domains.rendering.models import OutputType, RenderTemplate
from app.domains.rendering.workflow_runtime_services import ( from app.domains.rendering.workflow_runtime_services import (
_build_effective_material_lookup,
auto_populate_materials_for_cad, auto_populate_materials_for_cad,
build_order_line_render_invocation, build_order_line_render_invocation,
emit_order_line_render_notifications, emit_order_line_render_notifications,
@@ -101,6 +103,75 @@ def _seed_order_line_graph(session: Session, tmp_path: Path) -> OrderLine:
return line return line
def _write_png_with_metadata(path: Path, *, rgba: tuple[int, int, int, int], date_text: str) -> None:
path.parent.mkdir(parents=True, exist_ok=True)
image = Image.new("RGBA", (8, 8), rgba)
metadata = PngImagePlugin.PngInfo()
metadata.add_text("Date", date_text)
metadata.add_text("Software", "Blender")
image.save(path, pnginfo=metadata)
def test_effective_material_lookup_keeps_product_assignments_authoritative_and_adds_manifest_aliases():
cad_file = CadFile(
id=uuid.uuid4(),
original_name="bearing.step",
stored_path="/tmp/bearing.step",
file_hash=f"hash-{uuid.uuid4().hex}",
resolved_material_assignments={
"inner_ring": {
"source_name": "InnerRing",
"prim_path": "/Root/Assembly/inner_ring",
"canonical_material": "HARTOMAT_010101_Steel-Bare",
},
"usd_only_part": {
"source_name": "UsdOnlyPart",
"prim_path": "/Root/Assembly/usd_only_part",
"canonical_material": "HARTOMAT_050101_Elastomer-Black",
},
},
)
effective = _build_effective_material_lookup(
cad_file,
[
{"part_name": "InnerRing", "material": "Steel raw"},
],
)
assert effective["InnerRing"] == "Steel raw"
assert effective["inner_ring"] == "Steel raw"
assert effective["UsdOnlyPart"] == "HARTOMAT_050101_Elastomer-Black"
assert effective["usd_only_part"] == "HARTOMAT_050101_Elastomer-Black"
def test_effective_material_lookup_backfills_manifest_part_keys_from_legacy_serialized_names():
cad_file = CadFile(
id=uuid.uuid4(),
original_name="bearing.step",
stored_path="/tmp/bearing.step",
file_hash=f"hash-{uuid.uuid4().hex}",
resolved_material_assignments={
"rwdr_b_f_802044_tr4_h122bk": {
"source_name": "RWDR_B_F-802044_TR4_H122BK",
"prim_path": "/Root/Assembly/rwdr_b_f_802044_tr4_h122bk",
"canonical_material": "HARTOMAT_010101_Steel-Bare",
},
},
)
effective = _build_effective_material_lookup(
cad_file,
[
{"part_name": "RWDR_B_F-802044_TR4_H122B-69186", "material": "Steel--Stahl"},
],
)
assert effective["RWDR_B_F-802044_TR4_H122B-69186"] == "Steel--Stahl"
assert effective["RWDR_B_F-802044_TR4_H122BK"] == "Steel--Stahl"
assert effective["rwdr_b_f_802044_tr4_h122bk"] == "Steel--Stahl"
def test_prepare_order_line_render_context_marks_line_processing_and_prefers_usd(sync_session, tmp_path, monkeypatch): def test_prepare_order_line_render_context_marks_line_processing_and_prefers_usd(sync_session, tmp_path, monkeypatch):
from app.config import settings from app.config import settings
@@ -118,7 +189,10 @@ def test_prepare_order_line_render_context_marks_line_processing_and_prefers_usd
} }
usd_asset_path = upload_dir / "usd" / "bearing.usd" usd_asset_path = upload_dir / "usd" / "bearing.usd"
usd_asset_path.parent.mkdir(parents=True, exist_ok=True) usd_asset_path.parent.mkdir(parents=True, exist_ok=True)
usd_asset_path.write_text("USD", encoding="utf-8") usd_asset_path.write_text(
"hartomat:canonicalMaterialName\nhartomat:partKey\n",
encoding="utf-8",
)
sync_session.add( sync_session.add(
MediaAsset( MediaAsset(
@@ -127,6 +201,9 @@ def test_prepare_order_line_render_context_marks_line_processing_and_prefers_usd
product_id=line.product_id, product_id=line.product_id,
asset_type=MediaAssetType.usd_master, asset_type=MediaAssetType.usd_master,
storage_key="usd/bearing.usd", storage_key="usd/bearing.usd",
render_config={
"cache_key": "stephash:0.03:0.05:20.0:materialhash:scriptfingerprint",
},
) )
) )
sync_session.commit() sync_session.commit()
@@ -230,6 +307,264 @@ def test_prepare_order_line_render_context_queues_refresh_for_legacy_usd(sync_se
assert line.render_status == "processing" assert line.render_status == "processing"
def test_prepare_order_line_render_context_queues_refresh_for_legacy_usd_cache_key(sync_session, tmp_path, monkeypatch):
from app.config import settings
monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads"))
upload_dir = Path(settings.upload_dir)
upload_dir.mkdir(parents=True, exist_ok=True)
line = _seed_order_line_graph(sync_session, tmp_path)
line.product.cad_file.resolved_material_assignments = {
"inner_ring": {
"source_name": "InnerRing",
"prim_path": "/Root/Assembly/inner_ring",
"canonical_material": "HARTOMAT_010101_Steel-Bare",
}
}
usd_asset_path = upload_dir / "usd" / "bearing.usd"
usd_asset_path.parent.mkdir(parents=True, exist_ok=True)
usd_asset_path.write_text(
"hartomat:canonicalMaterialName\nhartomat:partKey\n",
encoding="utf-8",
)
glb_asset_path = upload_dir / "step_files" / "bearing_thumbnail.glb"
glb_asset_path.parent.mkdir(parents=True, exist_ok=True)
glb_asset_path.write_text("GLB", encoding="utf-8")
sync_session.add_all(
[
MediaAsset(
id=uuid.uuid4(),
cad_file_id=line.product.cad_file_id,
product_id=line.product_id,
asset_type=MediaAssetType.usd_master,
storage_key="usd/bearing.usd",
render_config={
"cache_key": "stephash:0.03:0.05:20.0:materialhash",
},
),
MediaAsset(
id=uuid.uuid4(),
cad_file_id=line.product.cad_file_id,
product_id=line.product_id,
asset_type=MediaAssetType.gltf_geometry,
storage_key="step_files/bearing_thumbnail.glb",
),
]
)
sync_session.commit()
queued: list[str] = []
class _Task:
@staticmethod
def delay(cad_file_id: str) -> None:
queued.append(cad_file_id)
monkeypatch.setattr(
"app.tasks.step_tasks.generate_usd_master_task",
_Task(),
)
result = prepare_order_line_render_context(sync_session, str(line.id))
expected_glb = tmp_path / "parts" / "bearing_thumbnail.glb"
assert result.is_ready
assert result.usd_render_path is None
assert result.glb_reuse_path == expected_glb
assert expected_glb.exists()
assert queued == [str(line.product.cad_file_id)]
def test_prepare_order_line_render_context_accepts_binary_usd_without_literal_hartomat_markers(sync_session, tmp_path, monkeypatch):
from app.config import settings
monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads"))
upload_dir = Path(settings.upload_dir)
upload_dir.mkdir(parents=True, exist_ok=True)
line = _seed_order_line_graph(sync_session, tmp_path)
line.product.cad_file.resolved_material_assignments = {
"inner_ring": {
"source_name": "InnerRing",
"prim_path": "/Root/Assembly/inner_ring",
"canonical_material": "HARTOMAT_010101_Steel-Bare",
}
}
usd_asset_path = upload_dir / "usd" / "bearing.usd"
usd_asset_path.parent.mkdir(parents=True, exist_ok=True)
usd_asset_path.write_bytes(b"PXR-USDC\x00binary-usd-with-customdata-not-greppable")
sync_session.add(
MediaAsset(
id=uuid.uuid4(),
cad_file_id=line.product.cad_file_id,
product_id=line.product_id,
asset_type=MediaAssetType.usd_master,
storage_key="usd/bearing.usd",
render_config={
"cache_key": "stephash:0.03:0.05:20.0:materialhash:scriptfingerprint",
},
)
)
sync_session.commit()
queued: list[str] = []
class _Task:
@staticmethod
def delay(cad_file_id: str) -> None:
queued.append(cad_file_id)
monkeypatch.setattr(
"app.tasks.step_tasks.generate_usd_master_task",
_Task(),
)
result = prepare_order_line_render_context(sync_session, str(line.id))
assert result.is_ready
assert result.usd_render_path == usd_asset_path
assert result.glb_reuse_path is None
assert queued == []
def test_prepare_order_line_render_context_queues_refresh_for_legacy_usd_file_markers(sync_session, tmp_path, monkeypatch):
from app.config import settings
monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads"))
upload_dir = Path(settings.upload_dir)
upload_dir.mkdir(parents=True, exist_ok=True)
line = _seed_order_line_graph(sync_session, tmp_path)
line.product.cad_file.resolved_material_assignments = {
"inner_ring": {
"source_name": "InnerRing",
"prim_path": "/Root/Assembly/inner_ring",
"canonical_material": "HARTOMAT_010101_Steel-Bare",
}
}
usd_asset_path = upload_dir / "usd" / "bearing.usd"
usd_asset_path.parent.mkdir(parents=True, exist_ok=True)
usd_asset_path.write_text("legacy-usd-without-hartomat-markers", encoding="utf-8")
glb_asset_path = upload_dir / "step_files" / "bearing_thumbnail.glb"
glb_asset_path.parent.mkdir(parents=True, exist_ok=True)
glb_asset_path.write_text("GLB", encoding="utf-8")
sync_session.add_all(
[
MediaAsset(
id=uuid.uuid4(),
cad_file_id=line.product.cad_file_id,
product_id=line.product_id,
asset_type=MediaAssetType.usd_master,
storage_key="usd/bearing.usd",
render_config={
"cache_key": "stephash:0.03:0.05:20.0:materialhash:scriptfingerprint",
},
),
MediaAsset(
id=uuid.uuid4(),
cad_file_id=line.product.cad_file_id,
product_id=line.product_id,
asset_type=MediaAssetType.gltf_geometry,
storage_key="step_files/bearing_thumbnail.glb",
),
]
)
sync_session.commit()
queued: list[str] = []
class _Task:
@staticmethod
def delay(cad_file_id: str) -> None:
queued.append(cad_file_id)
monkeypatch.setattr(
"app.tasks.step_tasks.generate_usd_master_task",
_Task(),
)
result = prepare_order_line_render_context(sync_session, str(line.id))
expected_glb = tmp_path / "parts" / "bearing_thumbnail.glb"
assert result.is_ready
assert result.usd_render_path is None
assert result.glb_reuse_path == expected_glb
assert expected_glb.exists()
assert queued == [str(line.product.cad_file_id)]
def test_prepare_order_line_render_context_queues_refresh_for_legacy_usd_material_field(sync_session, tmp_path, monkeypatch):
from app.config import settings
monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads"))
upload_dir = Path(settings.upload_dir)
upload_dir.mkdir(parents=True, exist_ok=True)
line = _seed_order_line_graph(sync_session, tmp_path)
line.product.cad_file.resolved_material_assignments = {
"inner_ring": {
"source_name": "InnerRing",
"prim_path": "/Root/Assembly/inner_ring",
"material": "SCHAEFFLER_010101_Steel-Bare",
}
}
usd_asset_path = upload_dir / "usd" / "bearing.usd"
usd_asset_path.parent.mkdir(parents=True, exist_ok=True)
usd_asset_path.write_text("USD", encoding="utf-8")
glb_asset_path = upload_dir / "step_files" / "bearing_thumbnail.glb"
glb_asset_path.parent.mkdir(parents=True, exist_ok=True)
glb_asset_path.write_text("GLB", encoding="utf-8")
sync_session.add_all(
[
MediaAsset(
id=uuid.uuid4(),
cad_file_id=line.product.cad_file_id,
product_id=line.product_id,
asset_type=MediaAssetType.usd_master,
storage_key="usd/bearing.usd",
),
MediaAsset(
id=uuid.uuid4(),
cad_file_id=line.product.cad_file_id,
product_id=line.product_id,
asset_type=MediaAssetType.gltf_geometry,
storage_key="step_files/bearing_thumbnail.glb",
),
]
)
sync_session.commit()
queued: list[str] = []
class _Task:
@staticmethod
def delay(cad_file_id: str) -> None:
queued.append(cad_file_id)
monkeypatch.setattr(
"app.tasks.step_tasks.generate_usd_master_task",
_Task(),
)
result = prepare_order_line_render_context(sync_session, str(line.id))
expected_glb = tmp_path / "parts" / "bearing_thumbnail.glb"
assert result.is_ready
assert result.usd_render_path is None
assert result.glb_reuse_path == expected_glb
assert expected_glb.exists()
assert queued == [str(line.product.cad_file_id)]
def test_prepare_order_line_render_context_skips_closed_orders(sync_session, tmp_path, monkeypatch): def test_prepare_order_line_render_context_skips_closed_orders(sync_session, tmp_path, monkeypatch):
from app.config import settings from app.config import settings
@@ -322,6 +657,11 @@ def test_build_order_line_render_invocation_applies_output_and_line_overrides(tm
material_map={"InnerRing": "SteelPolished"}, material_map={"InnerRing": "SteelPolished"},
use_materials=True, use_materials=True,
override_material="Studio White", override_material="Studio White",
target_collection="Assembly",
lighting_only=True,
shadow_catcher=True,
camera_orbit=False,
template_inputs={"studio_variant": "warm"},
category_key="bearings", category_key="bearings",
output_type_id=str(output_type.id), output_type_id=str(output_type.id),
), ),
@@ -357,6 +697,7 @@ def test_build_order_line_render_invocation_applies_output_and_line_overrides(tm
assert invocation.part_names_ordered == ["InnerRing", "OuterRing"] assert invocation.part_names_ordered == ["InnerRing", "OuterRing"]
assert invocation.rotation_x == 12.0 assert invocation.rotation_x == 12.0
assert invocation.focal_length_mm == 50.0 assert invocation.focal_length_mm == 50.0
assert invocation.template_inputs == {"studio_variant": "warm"}
still_kwargs = invocation.as_still_renderer_kwargs( still_kwargs = invocation.as_still_renderer_kwargs(
step_path=str(step_path), step_path=str(step_path),
@@ -374,6 +715,7 @@ def test_build_order_line_render_invocation_applies_output_and_line_overrides(tm
assert still_kwargs["cycles_device"] == "cuda" assert still_kwargs["cycles_device"] == "cuda"
assert still_kwargs["material_library_path"] == "/libraries/materials.blend" assert still_kwargs["material_library_path"] == "/libraries/materials.blend"
assert still_kwargs["material_override"] == "Studio White" assert still_kwargs["material_override"] == "Studio White"
assert still_kwargs["template_inputs"] == {"studio_variant": "warm"}
assert still_kwargs["job_id"] == "job-1" assert still_kwargs["job_id"] == "job-1"
assert still_kwargs["order_line_id"] == "line-1" assert still_kwargs["order_line_id"] == "line-1"
@@ -437,6 +779,11 @@ def test_build_order_line_render_invocation_autoscales_samples_and_prefers_mater
material_map={"InnerRing": "TemplateSteel"}, material_map={"InnerRing": "TemplateSteel"},
use_materials=True, use_materials=True,
override_material="Template White", override_material="Template White",
target_collection="Product",
lighting_only=False,
shadow_catcher=False,
camera_orbit=True,
template_inputs={"studio_variant": "warm"},
category_key="bearings", category_key="bearings",
output_type_id=str(output_type.id), output_type_id=str(output_type.id),
), ),
@@ -480,11 +827,13 @@ def test_build_order_line_render_invocation_autoscales_samples_and_prefers_mater
assert turntable_kwargs["samples"] == 64 assert turntable_kwargs["samples"] == 64
assert turntable_kwargs["material_map"] == {"InnerRing": "ResolvedSteel"} assert turntable_kwargs["material_map"] == {"InnerRing": "ResolvedSteel"}
assert turntable_kwargs["material_library_path"] is None assert turntable_kwargs["material_library_path"] is None
assert turntable_kwargs["template_inputs"] == {"studio_variant": "warm"}
assert cinematic_kwargs["width"] == 1024 assert cinematic_kwargs["width"] == 1024
assert cinematic_kwargs["height"] == 512 assert cinematic_kwargs["height"] == 512
assert cinematic_kwargs["engine"] == "eevee" assert cinematic_kwargs["engine"] == "eevee"
assert cinematic_kwargs["samples"] == 64 assert cinematic_kwargs["samples"] == 64
assert cinematic_kwargs["material_override"] == "Resolved White" assert cinematic_kwargs["material_override"] == "Resolved White"
assert cinematic_kwargs["template_inputs"] == {"studio_variant": "warm"}
def test_resolve_order_line_template_context_uses_exact_template_and_override(sync_session, tmp_path, monkeypatch): def test_resolve_order_line_template_context_uses_exact_template_and_override(sync_session, tmp_path, monkeypatch):
@@ -584,6 +933,153 @@ def test_resolve_order_line_template_context_supports_explicit_template_and_libr
"InnerRing": "resolved:Steel raw", "InnerRing": "resolved:Steel raw",
"OuterRing": "resolved:Steel raw", "OuterRing": "resolved:Steel raw",
} }
assert result.target_collection == "ForcedCollection"
assert result.lighting_only is False
assert result.shadow_catcher is False
assert result.camera_orbit is True
def test_resolve_order_line_template_context_applies_template_override_modes(
    sync_session,
    tmp_path,
    monkeypatch,
):
    """Per-call override modes must win over the template's stored flags.

    The seeded template has material replacement, lighting-only and shadow
    catcher disabled and camera orbit enabled; each override requests the
    opposite value, and the resolved context must reflect the overrides.
    """
    from app.config import settings

    # Keep all generated files inside the pytest tmp dir.
    monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads"))
    line = _seed_order_line_graph(sync_session, tmp_path)
    template = RenderTemplate(
        id=uuid.uuid4(),
        name="Overrideable Template",
        category_key="bearings",
        blend_file_path="/templates/overrideable.blend",
        original_filename="overrideable.blend",
        target_collection="TemplateCollection",
        material_replace_enabled=False,
        lighting_only=False,
        shadow_catcher_enabled=False,
        camera_orbit=True,
        is_active=True,
        output_types=[line.output_type],
    )
    sync_session.add(template)
    sync_session.add(
        AssetLibrary(
            id=uuid.uuid4(),
            name="Default Library",
            blend_file_path="/libraries/materials.blend",
            is_active=True,
        )
    )
    sync_session.commit()
    # Stub material resolution so assertions can use predictable names.
    monkeypatch.setattr(
        "app.domains.rendering.workflow_runtime_services.resolve_material_map",
        lambda raw_map: {key: f"resolved:{value}" for key, value in raw_map.items()},
    )
    setup = prepare_order_line_render_context(sync_session, str(line.id))
    result = resolve_order_line_template_context(
        sync_session,
        setup,
        template_id_override=str(template.id),
        material_library_path_override="/libraries/materials.blend",
        target_collection_override="NodeCollection",
        material_replace_mode="enabled",
        lighting_only_mode="enabled",
        shadow_catcher_mode="enabled",
        camera_orbit_mode="disabled",
    )
    assert result.template is not None
    assert result.use_materials is True
    assert result.material_map == {
        "InnerRing": "resolved:Steel raw",
        "OuterRing": "resolved:Steel raw",
    }
    # Every flag ends up opposite to the template's stored value.
    assert result.target_collection == "NodeCollection"
    assert result.lighting_only is True
    assert result.shadow_catcher is True
    assert result.camera_orbit is False
def test_resolve_order_line_template_context_exposes_template_schema_and_invocation_inputs(
    sync_session,
    tmp_path,
    monkeypatch,
):
    """The template's input schema is exposed and overrides merge with defaults.

    'studio_variant' is overridden per call while 'camera_profile' falls back
    to its schema default; the merged inputs must reach both the template
    context and the built render invocation.
    """
    from app.config import settings

    monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads"))
    line = _seed_order_line_graph(sync_session, tmp_path)
    template = RenderTemplate(
        id=uuid.uuid4(),
        name="Schema Template",
        category_key="bearings",
        blend_file_path="/templates/schema-template.blend",
        original_filename="schema-template.blend",
        target_collection="Product",
        material_replace_enabled=True,
        lighting_only=False,
        shadow_catcher_enabled=False,
        camera_orbit=True,
        workflow_input_schema=[
            {
                "key": "studio_variant",
                "label": "Studio Variant",
                "type": "select",
                "section": "Template Inputs",
                "default": "default",
                "options": [
                    {"value": "default", "label": "Default"},
                    {"value": "warm", "label": "Warm"},
                ],
            },
            {
                "key": "camera_profile",
                "label": "Camera Profile",
                "type": "text",
                "section": "Template Inputs",
                "default": "macro",
            },
        ],
        is_active=True,
        output_types=[line.output_type],
    )
    sync_session.add(template)
    sync_session.add(
        AssetLibrary(
            id=uuid.uuid4(),
            name="Default Library",
            blend_file_path="/libraries/materials.blend",
            is_active=True,
        )
    )
    sync_session.commit()
    monkeypatch.setattr(
        "app.domains.rendering.workflow_runtime_services.resolve_material_map",
        lambda raw_map: {key: f"resolved:{value}" for key, value in raw_map.items()},
    )
    setup = prepare_order_line_render_context(sync_session, str(line.id))
    template_context = resolve_order_line_template_context(
        sync_session,
        setup,
        template_id_override=str(template.id),
        template_input_overrides={"studio_variant": "warm"},
    )
    invocation = build_order_line_render_invocation(setup, template_context=template_context)
    assert template_context.workflow_input_schema == template.workflow_input_schema
    # Override wins for studio_variant; camera_profile keeps its schema default.
    assert template_context.template_inputs == {
        "studio_variant": "warm",
        "camera_profile": "macro",
    }
    assert invocation.template_inputs == {
        "studio_variant": "warm",
        "camera_profile": "macro",
    }
def test_resolve_order_line_template_context_can_disable_material_resolution(sync_session, tmp_path, monkeypatch): def test_resolve_order_line_template_context_can_disable_material_resolution(sync_session, tmp_path, monkeypatch):
@@ -1077,6 +1573,56 @@ def test_persist_order_line_output_canonicalizes_step_file_outputs(sync_session,
assert asset.storage_key == f"renders/{line.id}/{expected_path.name}" assert asset.storage_key == f"renders/{line.id}/{expected_path.name}"
def test_png_persistence_strips_volatile_metadata_for_primary_and_observer_outputs(
    sync_session,
    tmp_path,
    monkeypatch,
):
    """Persisted PNGs must be byte-identical regardless of embedded timestamps.

    Two source PNGs differ only in their 'Date' text metadata; after
    persistence both outputs must match byte-for-byte, carry no 'Date'
    metadata, and keep their pixel data intact.
    """
    from app.config import settings

    upload_dir = tmp_path / "uploads"
    monkeypatch.setattr(settings, "upload_dir", str(upload_dir))
    line = _seed_order_line_graph(sync_session, tmp_path)
    primary_source = upload_dir / "step_files" / "renders" / f"line_{line.id}.png"
    observer_source = upload_dir / "step_files" / "renders" / f"line_{line.id}_shadow.png"
    # Same pixel content, different timestamps.
    _write_png_with_metadata(
        primary_source,
        rgba=(12, 34, 56, 255),
        date_text="2026/04/10 17:05:27",
    )
    _write_png_with_metadata(
        observer_source,
        rgba=(12, 34, 56, 255),
        date_text="2026/04/10 17:06:30",
    )
    primary_result = persist_order_line_output(
        sync_session,
        line,
        success=True,
        output_path=str(primary_source),
        render_log={"renderer": "blender", "engine_used": "cycles"},
    )
    observer_result = persist_order_line_media_asset(
        sync_session,
        line,
        success=True,
        output_path=str(observer_source),
        asset_type=MediaAssetType.still,
        render_log={"renderer": "blender", "engine_used": "cycles"},
    )
    primary_bytes = Path(primary_result.result_path or "").read_bytes()
    observer_bytes = Path(observer_result.result_path or "").read_bytes()
    # Identical bytes despite different source timestamps => metadata stripped.
    assert primary_bytes == observer_bytes
    assert b"Date" not in primary_bytes
    assert b"Date" not in observer_bytes
    # Pixel content survives the metadata strip.
    assert Image.open(primary_result.result_path).getpixel((0, 0)) == (12, 34, 56, 255)
    assert Image.open(observer_result.result_path).getpixel((0, 0)) == (12, 34, 56, 255)
def test_persist_order_line_output_classifies_blend_outputs_as_blend_assets(sync_session, tmp_path, monkeypatch): def test_persist_order_line_output_classifies_blend_outputs_as_blend_assets(sync_session, tmp_path, monkeypatch):
from app.config import settings from app.config import settings
@@ -1,6 +1,7 @@
import pytest import pytest
from pydantic import ValidationError from pydantic import ValidationError
from app.core.process_steps import StepName
from app.domains.rendering.workflow_schema import WorkflowConfig from app.domains.rendering.workflow_schema import WorkflowConfig
@@ -72,6 +73,35 @@ def test_workflow_schema_rejects_unknown_node_params():
) )
def test_workflow_schema_rejects_unregistered_nodes_from_registry(monkeypatch):
    """A node whose step has no entry in the registry must fail validation."""
    from app.domains.rendering import workflow_schema as schema_module

    real_lookup = schema_module.get_node_definition

    def _lookup_without_bbox(step):
        # Pretend GLB_BBOX was never registered; defer everything else.
        return None if step == StepName.GLB_BBOX else real_lookup(step)

    monkeypatch.setattr(schema_module, "get_node_definition", _lookup_without_bbox)

    payload = {
        "version": 1,
        "nodes": [
            {
                "id": "bbox",
                "step": StepName.GLB_BBOX.value,
                "params": {},
            },
        ],
        "edges": [],
        "ui": {"family": "order_line"},
    }
    with pytest.raises(ValidationError, match="is not registered in workflow_node_registry"):
        WorkflowConfig.model_validate(payload)
def test_workflow_schema_accepts_known_node_params(): def test_workflow_schema_accepts_known_node_params():
config = WorkflowConfig.model_validate( config = WorkflowConfig.model_validate(
{ {
@@ -92,6 +122,149 @@ def test_workflow_schema_accepts_known_node_params():
assert config.ui.family == "order_line" assert config.ui.family == "order_line"
def test_workflow_schema_rejects_invalid_glb_path_format():
    """glb_bbox must refuse glb_path values that do not end in .glb."""
    payload = {
        "version": 1,
        "nodes": [
            {"id": "bbox", "step": "glb_bbox", "params": {"glb_path": "/tmp/model.gltf"}},
        ],
        "edges": [],
    }
    with pytest.raises(ValidationError, match="must point to a .glb file"):
        WorkflowConfig.model_validate(payload)
def test_workflow_schema_rejects_invalid_template_id_override_format():
    """resolve_template must refuse a non-UUID template_id_override."""
    payload = {
        "version": 1,
        "nodes": [
            {
                "id": "template",
                "step": "resolve_template",
                "params": {"template_id_override": "not-a-uuid"},
            },
        ],
        "edges": [],
    }
    with pytest.raises(ValidationError, match="must be a valid UUID"):
        WorkflowConfig.model_validate(payload)
def test_workflow_schema_rejects_invalid_material_library_path_format():
    """resolve_template must refuse library paths that are not .blend files."""
    payload = {
        "version": 1,
        "nodes": [
            {
                "id": "template",
                "step": "resolve_template",
                "params": {"material_library_path": "/tmp/library.txt"},
            },
        ],
        "edges": [],
    }
    with pytest.raises(ValidationError, match="must point to a .blend file"):
        WorkflowConfig.model_validate(payload)
def test_workflow_schema_rejects_invalid_noise_threshold_format():
    """blender_still must refuse non-numeric noise_threshold strings."""
    payload = {
        "version": 1,
        "nodes": [
            {
                "id": "render",
                "step": "blender_still",
                "params": {"noise_threshold": "fast"},
            },
        ],
        "edges": [],
    }
    with pytest.raises(ValidationError, match="must be a valid numeric string"):
        WorkflowConfig.model_validate(payload)
def test_workflow_schema_rejects_invalid_bg_color_format():
    """blender_turntable must refuse bg_color values that are not hex colors."""
    payload = {
        "version": 1,
        "nodes": [
            {
                "id": "turntable",
                "step": "blender_turntable",
                "params": {"bg_color": "blue"},
            },
        ],
        "edges": [],
    }
    with pytest.raises(ValidationError, match="must be a hex color"):
        WorkflowConfig.model_validate(payload)
def test_workflow_schema_rejects_invalid_output_name_suffix_format():
    """export_blend must refuse suffixes with path-traversal characters."""
    payload = {
        "version": 1,
        "nodes": [
            {
                "id": "blend",
                "step": "export_blend",
                "params": {"output_name_suffix": "../unsafe"},
            },
        ],
        "edges": [],
    }
    with pytest.raises(ValidationError, match="may only contain letters, numbers"):
        WorkflowConfig.model_validate(payload)
def test_workflow_schema_accepts_empty_optional_text_overrides():
    """Empty strings for optional text params must validate as 'not set'."""
    config = WorkflowConfig.model_validate(
        {
            "version": 1,
            "nodes": [
                {
                    "id": "template",
                    "step": "resolve_template",
                    "params": {
                        "template_id_override": "",
                        "material_library_path": "",
                    },
                },
                {
                    "id": "render",
                    "step": "blender_still",
                    "params": {
                        "noise_threshold": "",
                        "material_override": "",
                    },
                },
                {
                    "id": "turntable",
                    "step": "blender_turntable",
                    "params": {"bg_color": ""},
                },
                {
                    "id": "blend",
                    "step": "export_blend",
                    "params": {"output_name_suffix": ""},
                },
            ],
            "edges": [],
            "ui": {"family": "order_line"},
        }
    )
    assert config.ui is not None
    assert config.ui.family == "order_line"
def test_workflow_schema_rejects_ui_family_mismatch(): def test_workflow_schema_rejects_ui_family_mismatch():
with pytest.raises(ValidationError, match="ui.family"): with pytest.raises(ValidationError, match="ui.family"):
WorkflowConfig.model_validate( WorkflowConfig.model_validate(
@@ -226,6 +399,32 @@ def test_workflow_schema_accepts_transitive_contract_wiring():
assert config.ui.execution_mode == "graph" assert config.ui.execution_mode == "graph"
def test_workflow_schema_accepts_cad_intake_contract_wiring_with_shared_bbox_node():
    """A cad_file graph may fan export_glb out into both bbox and the renderer."""
    config = WorkflowConfig.model_validate(
        {
            "version": 1,
            "nodes": [
                {"id": "resolve_step", "step": "resolve_step_path", "params": {}},
                {"id": "export_glb", "step": "occ_glb_export", "params": {}},
                {"id": "bbox", "step": "glb_bbox", "params": {}},
                {"id": "threejs_thumb", "step": "threejs_render", "params": {}},
                {"id": "save", "step": "thumbnail_save", "params": {}},
            ],
            "edges": [
                {"from": "resolve_step", "to": "export_glb"},
                {"from": "export_glb", "to": "bbox"},
                # threejs_thumb consumes both the GLB and the shared bbox result.
                {"from": "export_glb", "to": "threejs_thumb"},
                {"from": "bbox", "to": "threejs_thumb"},
                {"from": "threejs_thumb", "to": "save"},
            ],
            "ui": {"family": "cad_file", "execution_mode": "graph"},
        }
    )
    assert config.ui is not None
    assert config.ui.family == "cad_file"
def test_workflow_schema_rejects_mixed_family_graph_execution(): def test_workflow_schema_rejects_mixed_family_graph_execution():
with pytest.raises(ValidationError, match="single-family"): with pytest.raises(ValidationError, match="single-family"):
WorkflowConfig.model_validate( WorkflowConfig.model_validate(
@@ -0,0 +1,227 @@
from __future__ import annotations
import importlib.util
from pathlib import Path
import sys
import types
def _load_render_pipeline_script():
    """Import scripts/test_render_pipeline.py from disk, stubbing `requests`.

    The script imports `requests`, which may be absent in the test
    environment; a minimal module stub is installed before loading.
    """
    search_paths = [
        Path(__file__).resolve().parents[3] / "scripts" / "test_render_pipeline.py",
        Path("/compose/scripts/test_render_pipeline.py"),
    ]
    script_path = None
    for candidate in search_paths:
        if candidate.exists():
            script_path = candidate
            break
    assert script_path is not None

    if "requests" not in sys.modules:
        requests_stub = types.ModuleType("requests")
        requests_stub.Response = object
        requests_stub.Session = object
        requests_stub.exceptions = types.SimpleNamespace(
            ConnectionError=RuntimeError,
            ChunkedEncodingError=RuntimeError,
            ReadTimeout=RuntimeError,
        )
        sys.modules["requests"] = requests_stub

    spec = importlib.util.spec_from_file_location("test_render_pipeline_script", script_path)
    assert spec is not None
    assert spec.loader is not None
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module
def test_build_output_type_workflow_link_payload_sets_graph_rollout_mode_explicitly():
    """Linking in graph mode must state the rollout mode explicitly."""
    module = _load_render_pipeline_script()
    expected = {
        "workflow_definition_id": "workflow-graph-123",
        "workflow_rollout_mode": "graph",
        "is_active": True,
    }
    payload = module.build_output_type_workflow_link_payload(
        workflow_definition_id="workflow-graph-123",
        execution_mode="graph",
    )
    assert payload == expected
def test_build_output_type_workflow_link_payload_sets_shadow_rollout_mode_explicitly():
    """Linking in shadow mode must state the rollout mode explicitly."""
    module = _load_render_pipeline_script()
    expected = {
        "workflow_definition_id": "workflow-shadow-123",
        "workflow_rollout_mode": "shadow",
        "is_active": True,
    }
    payload = module.build_output_type_workflow_link_payload(
        workflow_definition_id="workflow-shadow-123",
        execution_mode="shadow",
    )
    assert payload == expected
def test_build_output_type_workflow_link_payload_keeps_legacy_rollout_implicit():
    """Legacy execution mode omits workflow_rollout_mode from the payload."""
    module = _load_render_pipeline_script()
    payload = module.build_output_type_workflow_link_payload(
        workflow_definition_id="workflow-legacy-123",
        execution_mode="legacy",
    )
    # No rollout-mode key at all for legacy links.
    assert payload == {
        "workflow_definition_id": "workflow-legacy-123",
        "is_active": True,
    }
def test_build_graph_still_config_matches_canonical_still_graph_contract():
    """The generated still graph must match the canonical node/edge contract."""
    module = _load_render_pipeline_script()
    config = module.build_graph_still_config(
        execution_mode="shadow",
        render_params={
            "resolution": [1920, 1080],
            "engine": "cycles",
            "samples": 128,
        },
    )
    assert config["ui"] == {
        "preset": "still_graph",
        "execution_mode": "shadow",
        "family": "order_line",
    }
    # Canonical node ordering for the still graph.
    assert [node["id"] for node in config["nodes"]] == [
        "setup",
        "template",
        "populate_materials",
        "bbox",
        "resolve_materials",
        "render",
        "output",
        "notify",
    ]
    # Canonical wiring, including the setup fan-out and the render fan-in.
    assert config["edges"] == [
        {"from": "setup", "to": "template"},
        {"from": "setup", "to": "populate_materials"},
        {"from": "setup", "to": "bbox"},
        {"from": "template", "to": "resolve_materials"},
        {"from": "populate_materials", "to": "resolve_materials"},
        {"from": "resolve_materials", "to": "render"},
        {"from": "bbox", "to": "render"},
        {"from": "template", "to": "render"},
        {"from": "render", "to": "output"},
        {"from": "render", "to": "notify"},
    ]
    render_node = next(node for node in config["nodes"] if node["id"] == "render")
    # render_params are translated into explicit renderer settings.
    assert render_node["params"] == {
        "width": 1920,
        "height": 1080,
        "render_engine": "cycles",
        "samples": 128,
        "use_custom_render_settings": False,
    }
def test_render_template_candidates_for_output_type_matches_m2m_and_legacy_fields():
    """Active templates match via M2M output_type_ids or legacy output_type_id."""
    module = _load_render_pipeline_script()
    templates = [
        {
            "id": "template-active-m2m",
            "is_active": True,
            "output_type_ids": ["ot-1", "ot-2"],
            "output_type_id": None,
        },
        {
            "id": "template-active-legacy",
            "is_active": True,
            "output_type_ids": [],
            "output_type_id": "ot-1",
        },
        {
            # Inactive templates must never match, even with a matching id.
            "id": "template-inactive",
            "is_active": False,
            "output_type_ids": ["ot-1"],
            "output_type_id": None,
        },
    ]
    matches = module.render_template_candidates_for_output_type(templates, "ot-1")
    assert [template["id"] for template in matches] == [
        "template-active-m2m",
        "template-active-legacy",
    ]
def test_build_graph_still_config_can_inherit_output_type_render_settings():
    """With custom settings disabled, the render node carries only that flag."""
    module = _load_render_pipeline_script()
    config = module.build_graph_still_config(
        execution_mode="shadow",
        use_custom_render_settings=False,
    )
    render_node = next(n for n in config["nodes"] if n["id"] == "render")
    assert render_node["params"] == {"use_custom_render_settings": False}
def test_choose_template_backed_output_type_prefers_requested_name():
    """The preferred name selects the output type; its templates come along."""
    module = _load_render_pipeline_script()
    output_types = [
        {
            "id": "ot-1",
            "name": "HQ-Blender-Alpha-HDR",
            "renderer": "blender",
            "artifact_kind": "still_image",
            "is_animation": False,
        },
        {
            "id": "ot-2",
            "name": "Turntable",
            "renderer": "blender",
            "artifact_kind": "turntable_video",
            "is_animation": True,
        },
    ]
    templates = [
        {
            "id": "template-1",
            "is_active": True,
            "output_type_ids": ["ot-1"],
            "output_type_id": None,
        }
    ]
    output_type, matches = module.choose_template_backed_output_type(
        output_types,
        templates,
        preferred_name="HQ-Blender-Alpha-HDR",
    )
    assert output_type["id"] == "ot-1"
    assert [template["id"] for template in matches] == ["template-1"]
def test_build_output_type_workflow_snapshot_keeps_restore_contract():
    """Snapshots must round-trip exactly the fields needed to restore a link."""
    module = _load_render_pipeline_script()
    snapshot = module.build_output_type_workflow_snapshot(
        {
            "workflow_definition_id": "workflow-123",
            "workflow_rollout_mode": "shadow",
            "is_active": False,
        }
    )
    expected = {
        "workflow_definition_id": "workflow-123",
        "workflow_rollout_mode": "shadow",
        "is_active": False,
    }
    assert snapshot == expected
@@ -0,0 +1,48 @@
from __future__ import annotations
import uuid
import pytest
@pytest.mark.integration
@pytest.mark.asyncio
async def test_cad_model_endpoint_falls_back_to_gltf_geometry_asset(
    client,
    db,
    auth_headers,
    tmp_path,
):
    """/api/cad/{id}/model serves a gltf_geometry MediaAsset when gltf_path is unset."""
    from app.domains.media.models import MediaAsset, MediaAssetType
    from app.domains.products.models import CadFile, ProcessingStatus

    glb_path = tmp_path / "example.glb"
    glb_path.write_bytes(b"glTF")
    cad = CadFile(
        id=uuid.uuid4(),
        original_name="example.step",
        stored_path=str(tmp_path / "example.step"),
        file_hash="cad-model-endpoint-fallback",
        file_size=123,
        processing_status=ProcessingStatus.completed,
        gltf_path=None,  # no direct path -> endpoint must use the asset fallback
    )
    db.add(cad)
    await db.flush()
    asset = MediaAsset(
        id=uuid.uuid4(),
        cad_file_id=cad.id,
        asset_type=MediaAssetType.gltf_geometry,
        storage_key=str(glb_path),
        mime_type="model/gltf-binary",
    )
    db.add(asset)
    await db.commit()
    response = await client.get(f"/api/cad/{cad.id}/model", headers=auth_headers)
    assert response.status_code == 200
    assert response.headers["content-type"] == "model/gltf-binary"
    assert response.content == b"glTF"
@@ -0,0 +1,30 @@
from __future__ import annotations
import uuid
import pytest
@pytest.mark.asyncio
async def test_batch_delete_assets_awaits_global_admin_guard(client, auth_headers, monkeypatch):
    """Batch delete must await the global-admin guard once, then delete each asset."""
    guard_calls: list[str] = []
    deleted_asset_ids: list[str] = []

    async def _guard(user):
        guard_calls.append(str(user.id))
        return user

    async def _delete_media_asset(_db, asset_id):
        deleted_asset_ids.append(str(asset_id))
        return True

    monkeypatch.setattr("app.utils.auth.require_global_admin", _guard)
    monkeypatch.setattr("app.domains.media.service.delete_media_asset", _delete_media_asset)
    asset_ids = [str(uuid.uuid4()), str(uuid.uuid4())]
    response = await client.post("/api/media/batch-delete", json=asset_ids, headers=auth_headers)
    assert response.status_code == 200, response.text
    # Guard must run exactly once for the whole batch, not per asset.
    assert len(guard_calls) == 1
    assert deleted_asset_ids == asset_ids
    assert response.json() == {"deleted": 2, "requested": 2}
@@ -0,0 +1,12 @@
from app.api.routers.admin import SETTINGS_DEFAULTS, _settings_to_out
def test_settings_to_out_uses_consistent_tessellation_fallbacks() -> None:
    """Missing angular-deflection keys must fall back to the expected defaults."""
    raw = dict(SETTINGS_DEFAULTS)
    for missing_key in ("scene_angular_deflection", "render_angular_deflection"):
        raw.pop(missing_key, None)
    settings = _settings_to_out(raw)
    assert settings.scene_angular_deflection == 0.1
    assert settings.render_angular_deflection == 0.05
+67
View File
@@ -0,0 +1,67 @@
import os
from pathlib import Path

from app.config import settings
from app.domains.materials.library_paths import (
    asset_library_dir,
    list_asset_library_blends,
    resolve_asset_library_blend_path,
)
def test_asset_library_dir_uses_upload_dir(monkeypatch, tmp_path):
    """asset_library_dir() must be anchored under settings.upload_dir."""
    upload_root = tmp_path / "uploads"
    monkeypatch.setattr(settings, "upload_dir", str(upload_root))
    assert asset_library_dir() == upload_root / "asset-libraries"
def test_resolve_asset_library_blend_path_prefers_existing_configured_path(monkeypatch, tmp_path):
    """An existing configured blend path wins over any id-based fallback."""
    upload_dir = tmp_path / "uploads"
    library_dir = upload_dir / "asset-libraries"
    library_dir.mkdir(parents=True, exist_ok=True)
    # Configured path lives outside the library dir on purpose.
    configured = tmp_path / "external" / "materials.blend"
    configured.parent.mkdir(parents=True, exist_ok=True)
    configured.write_bytes(b"blend")
    monkeypatch.setattr(settings, "upload_dir", str(upload_dir))
    resolved = resolve_asset_library_blend_path(
        blend_file_path=str(configured),
        asset_library_id="ignored",  # must not be consulted when the path exists
    )
    assert resolved == str(configured)
def test_resolve_asset_library_blend_path_falls_back_to_id_named_file(monkeypatch, tmp_path):
    """A missing configured path falls back to `<library_id>.blend` in the dir."""
    upload_dir = tmp_path / "uploads"
    library_dir = upload_dir / "asset-libraries"
    library_dir.mkdir(parents=True, exist_ok=True)
    expected = library_dir / "1234.blend"
    expected.write_bytes(b"blend")
    monkeypatch.setattr(settings, "upload_dir", str(upload_dir))
    resolved = resolve_asset_library_blend_path(
        blend_file_path=str(library_dir / "missing.blend"),  # does not exist
        asset_library_id="1234",
    )
    assert resolved == str(expected)
def test_resolve_asset_library_blend_path_falls_back_to_newest_available_file(monkeypatch, tmp_path):
    """When neither the configured path nor the id-named file exists, the
    newest .blend in the library directory is used.

    mtimes are set explicitly with os.utime so the ordering does not depend
    on filesystem timestamp resolution — a bare ``touch()`` after two quick
    writes can leave both files with identical mtimes on coarse-grained
    filesystems, making the "newest" pick nondeterministic.
    """
    upload_dir = tmp_path / "uploads"
    library_dir = upload_dir / "asset-libraries"
    library_dir.mkdir(parents=True, exist_ok=True)
    older = library_dir / "older.blend"
    newer = library_dir / "newer.blend"
    older.write_bytes(b"older")
    newer.write_bytes(b"newer")
    # Force a strict, deterministic mtime ordering: older < newer.
    base = older.stat().st_mtime
    os.utime(older, (base, base))
    os.utime(newer, (base + 10, base + 10))
    monkeypatch.setattr(settings, "upload_dir", str(upload_dir))
    resolved = resolve_asset_library_blend_path(
        blend_file_path=str(library_dir / "missing.blend"),
        asset_library_id="missing",
    )
    assert resolved == str(newer)
    # Listing is newest-first.
    assert list_asset_library_blends() == [newer, older]
+63
View File
@@ -0,0 +1,63 @@
from __future__ import annotations
import uuid
from pathlib import Path
import app.models # noqa: F401 Ensures SQLAlchemy relationships are registered.
from app.domains.media.models import MediaAsset, MediaAssetType
from app.domains.pipeline.tasks.export_glb import _usd_cache_hit_refresh_reason
from app.domains.products.models import CadFile
def _build_cad_file() -> CadFile:
    """Minimal CadFile with one resolved material assignment for cache tests."""
    assignments = {
        "inner_ring": {
            "source_name": "InnerRing",
            "prim_path": "/Root/Assembly/inner_ring",
            "canonical_material": "HARTOMAT_010101_Steel-Bare",
        }
    }
    return CadFile(
        id=uuid.uuid4(),
        original_name="bearing.step",
        stored_path="/tmp/bearing.step",
        file_hash=f"hash-{uuid.uuid4().hex}",
        resolved_material_assignments=assignments,
    )
def _build_usd_asset() -> MediaAsset:
    """MediaAsset representing a cached USD master with a fixed cache key."""
    cache_config = {
        "cache_key": "stephash:0.03:0.05:20.0:materialhash:scriptfingerprint",
    }
    return MediaAsset(
        id=uuid.uuid4(),
        cad_file_id=uuid.uuid4(),
        asset_type=MediaAssetType.usd_master,
        storage_key="step_files/bearing_master.usd",
        render_config=cache_config,
    )
def test_usd_cache_hit_refresh_reason_accepts_binary_usd_without_literal_hartomat_tokens(tmp_path: Path):
    """A cache hit with no literal hartomat tokens in the USD must not trigger a refresh."""
    cad_file = _build_cad_file()
    usd_asset = _build_usd_asset()
    usd_path = tmp_path / "bearing_master.usd"
    # Bare usda header only — no hartomat:* attribute text present.
    usd_path.write_text("#usda 1.0\n", encoding="utf-8")
    refresh_reason = _usd_cache_hit_refresh_reason(cad_file, usd_asset, usd_path)
    assert refresh_reason is None
def test_usd_cache_hit_refresh_reason_accepts_current_hartomat_usd(tmp_path: Path):
    """A USD containing current hartomat attribute names must not be refreshed."""
    cad_file = _build_cad_file()
    usd_asset = _build_usd_asset()
    usd_path = tmp_path / "bearing_master.usd"
    usd_path.write_text(
        "hartomat:canonicalMaterialName\nhartomat:partKey\n",
        encoding="utf-8",
    )
    refresh_reason = _usd_cache_hit_refresh_reason(cad_file, usd_asset, usd_path)
    assert refresh_reason is None
+220
View File
@@ -0,0 +1,220 @@
from __future__ import annotations
import importlib.util
import json
import struct
from pathlib import Path
def _load_export_module():
    """Load render-worker/scripts/export_step_to_gltf.py directly from disk."""
    search_paths = [
        Path(__file__).resolve().parents[2] / "render-worker" / "scripts" / "export_step_to_gltf.py",
        Path("/compose/render-worker/scripts/export_step_to_gltf.py"),
    ]
    module_path = None
    for candidate in search_paths:
        if candidate.exists():
            module_path = candidate
            break
    assert module_path is not None
    spec = importlib.util.spec_from_file_location("test_export_step_to_gltf", module_path)
    assert spec is not None
    assert spec.loader is not None
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module
def _write_minimal_glb(path: Path, payload: dict) -> None:
json_bytes = json.dumps(payload, separators=(",", ":")).encode()
pad = (4 - len(json_bytes) % 4) % 4
json_bytes += b" " * pad
chunk = struct.pack("<II", len(json_bytes), 0x4E4F534A) + json_bytes
header = struct.pack("<III", 0x46546C67, 2, 12 + len(chunk))
path.write_bytes(header + chunk)
def _read_glb_json(path: Path) -> dict:
data = path.read_bytes()
json_len = struct.unpack_from("<I", data, 12)[0]
return json.loads(data[20 : 20 + json_len])
def test_atomic_export_helpers_publish_temp_glb_over_existing_output(tmp_path: Path):
    """Atomic export writes to a sibling temp file, then replaces the target."""
    module = _load_export_module()
    output_path = tmp_path / "existing.glb"
    output_path.write_bytes(b"old")
    temp_path = module._prepare_atomic_export_path(output_path)
    # Temp path lives next to the output (same directory) and does not yet exist.
    assert temp_path != output_path
    assert temp_path.parent == output_path.parent
    assert not temp_path.exists()
    temp_path.write_bytes(b"new")
    module._finalize_atomic_export(temp_path, output_path)
    # Finalize replaces the old content and consumes the temp file.
    assert output_path.read_bytes() == b"new"
    assert not temp_path.exists()
def test_inject_glb_extras_preserves_exact_leaf_mesh_part_keys_when_available(tmp_path: Path):
    """The mesh leaf keeps its exact occurrence key; alias nodes get the base key."""
    module = _load_export_module()
    glb_path = tmp_path / "instance.glb"
    payload = {
        "asset": {"version": "2.0"},
        "scene": 0,
        "scenes": [{"nodes": [0]}],
        "nodes": [
            {"name": "Assembly", "children": [1, 2, 3]},
            {
                "name": "KERO_Z-575693-QP-DRH_ISB_1_AF21",
                "translation": [0.1, 0.2, 0.3],
                "rotation": [0.0, 0.0, 0.0, 1.0],
            },
            {
                # Leaf node carrying the actual mesh; same transform as AF21.
                "name": "KERO_Z-575693-QP-DRH_ISB_1_1",
                "mesh": 0,
                "translation": [0.1, 0.2, 0.3],
                "rotation": [0.0, 0.0, 0.0, 1.0],
            },
            {
                "name": "KERO_Z-575693-QP-DRH_ISB_1_AF6_",
                "translation": [0.4, 0.5, 0.6],
                "rotation": [0.0, 0.0, 0.0, 1.0],
            },
        ],
        "meshes": [{"primitives": []}],
    }
    _write_minimal_glb(glb_path, payload)
    module._inject_glb_extras(
        glb_path,
        {"partKeyMap": {}},
        part_key_map={
            "KERO_Z-575693-QP-DRH_ISB_1": "kero_z_575693_qp_drh_isb_1",
            "KERO_Z-575693-QP-DRH_ISB_1_1": "kero_z_575693_qp_drh_isb_1_1",
            "KERO_Z-575693-QP-DRH_ISB_1_AF6_": "kero_z_575693_qp_drh_isb_1_af6",
        },
        part_key_occurrences={
            "KERO_Z-575693-QP-DRH_ISB_1_1": ["kero_z_575693_qp_drh_isb_1_1"],
        },
    )
    result = _read_glb_json(glb_path)
    # Alias/instance nodes resolve to the shared base key; the mesh leaf keeps its own.
    assert result["nodes"][1]["extras"]["partKey"] == "kero_z_575693_qp_drh_isb_1"
    assert result["nodes"][2]["extras"]["partKey"] == "kero_z_575693_qp_drh_isb_1_1"
    assert result["nodes"][3]["extras"]["partKey"] == "kero_z_575693_qp_drh_isb_1"
def test_inject_glb_extras_keeps_unique_leaf_mesh_part_keys_without_semantic_siblings(tmp_path: Path):
    """A lone leaf mesh with a unique name keeps its mapped part key unchanged."""
    module = _load_export_module()
    glb_path = tmp_path / "leaf.glb"
    payload = {
        "asset": {"version": "2.0"},
        "scene": 0,
        "scenes": [{"nodes": [0]}],
        "nodes": [
            {"name": "Assembly", "children": [1]},
            {
                "name": "UNIQUE_PART_1_1",
                "mesh": 0,
                "translation": [0.0, 0.0, 0.0],
                "rotation": [0.0, 0.0, 0.0, 1.0],
            },
        ],
        "meshes": [{"primitives": []}],
    }
    _write_minimal_glb(glb_path, payload)
    module._inject_glb_extras(
        glb_path,
        {"partKeyMap": {}},
        part_key_map={
            "UNIQUE_PART_1_1": "unique_part_1_1",
        },
    )
    result = _read_glb_json(glb_path)
    assert result["nodes"][1]["extras"]["partKey"] == "unique_part_1_1"
def test_inject_glb_extras_falls_back_to_semantic_siblings_when_exact_mesh_key_is_missing_even_if_instance_transforms_differ(tmp_path: Path):
    """Without an exact key for the mesh leaf, sibling naming selects the base key.

    The leaf's transform matches neither AF21 nor AF22, so the fallback must
    rely on the shared name prefix rather than transform matching.
    """
    module = _load_export_module()
    glb_path = tmp_path / "instance-mismatch.glb"
    payload = {
        "asset": {"version": "2.0"},
        "scene": 0,
        "scenes": [{"nodes": [0]}],
        "nodes": [
            {"name": "Assembly", "children": [1, 2, 3]},
            {
                "name": "KERO_Z-575693-QP-DRH_ISB_1_AF21",
                "translation": [0.1, 0.2, 0.3],
                "rotation": [0.0, 0.0, 0.0, 1.0],
            },
            {
                "name": "KERO_Z-575693-QP-DRH_ISB_1_AF22",
                "translation": [-0.1, -0.2, 0.3],
                "rotation": [0.0, 0.0, 0.0, 1.0],
            },
            {
                # Mesh leaf with a transform that matches no instance node.
                "name": "KERO_Z-575693-QP-DRH_ISB_1_1",
                "mesh": 0,
                "translation": [0.9, 0.8, 0.3],
                "rotation": [0.0, 0.0, 0.70710678, 0.70710678],
            },
        ],
        "meshes": [{"primitives": []}],
    }
    _write_minimal_glb(glb_path, payload)
    module._inject_glb_extras(
        glb_path,
        {"partKeyMap": {}},
        part_key_map={
            # Note: no entry for the leaf name "KERO_Z-575693-QP-DRH_ISB_1_1".
            "KERO_Z-575693-QP-DRH_ISB_1": "kero_z_575693_qp_drh_isb_1",
            "KERO_Z-575693-QP-DRH_ISB_1_AF21": "kero_z_575693_qp_drh_isb_1_af21",
            "KERO_Z-575693-QP-DRH_ISB_1_AF22": "kero_z_575693_qp_drh_isb_1_af22",
        },
    )
    result = _read_glb_json(glb_path)
    assert result["nodes"][3]["extras"]["partKey"] == "kero_z_575693_qp_drh_isb_1"
def test_inject_glb_extras_assigns_distinct_occurrence_keys_to_repeated_leaf_meshes(tmp_path: Path):
    """Same-named leaf meshes receive distinct occurrence keys in listed order."""
    module = _load_export_module()
    glb_path = tmp_path / "repeated.glb"
    payload = {
        "asset": {"version": "2.0"},
        "scene": 0,
        "scenes": [{"nodes": [0]}],
        "nodes": [
            {"name": "Assembly", "children": [1, 2, 3]},
            # Three identically named leaves, each pointing at a distinct mesh.
            {"name": "KERO_Z-575693-QP-DRH_ISB_1_1", "mesh": 0},
            {"name": "KERO_Z-575693-QP-DRH_ISB_1_1", "mesh": 1},
            {"name": "KERO_Z-575693-QP-DRH_ISB_1_1", "mesh": 2},
        ],
        "meshes": [
            {"primitives": []},
            {"primitives": []},
            {"primitives": []},
        ],
    }
    _write_minimal_glb(glb_path, payload)
    module._inject_glb_extras(
        glb_path,
        {"partKeyMap": {}},
        part_key_map={
            "KERO_Z-575693-QP-DRH_ISB_1_1": "kero_z_575693_qp_drh_isb_1_1",
        },
        part_key_occurrences={
            "KERO_Z-575693-QP-DRH_ISB_1_1": [
                "kero_z_575693_qp_drh_isb_1_1",
                "kero_z_575693_qp_drh_isb_1_1_2",
                "kero_z_575693_qp_drh_isb_1_1_3",
            ],
        },
    )
    result = _read_glb_json(glb_path)
    # Occurrence keys are consumed in order, one per repeated leaf.
    assert result["nodes"][1]["extras"]["partKey"] == "kero_z_575693_qp_drh_isb_1_1"
    assert result["nodes"][2]["extras"]["partKey"] == "kero_z_575693_qp_drh_isb_1_1_2"
    assert result["nodes"][3]["extras"]["partKey"] == "kero_z_575693_qp_drh_isb_1_1_3"
+203
View File
@@ -0,0 +1,203 @@
from types import SimpleNamespace
from app.services.part_key_service import build_scene_manifest, get_effective_assignments
def test_build_scene_manifest_prefers_canonical_material_from_resolved_assignments():
    """Resolved assignments alone drive the manifest and the effective materials."""
    steel = "HARTOMAT_010101_Steel-Bare"
    cad_file = SimpleNamespace(
        id="cad-1",
        resolved_material_assignments={
            "roller_part": {
                "source_name": "KERO_Z-575693-QP-DRH_ISB_1_1",
                "prim_path": "/Root/Assembly/roller_part",
                "canonical_material": steel,
            }
        },
        manual_material_overrides=None,
        source_material_assignments=None,
        parsed_objects=None,
    )
    manifest = build_scene_manifest(cad_file)
    expected_part = {
        "part_key": "roller_part",
        "source_name": "KERO_Z-575693-QP-DRH_ISB_1_1",
        "prim_path": "/Root/Assembly/roller_part",
        "effective_material": steel,
        "assignment_provenance": "auto",
        "is_unassigned": False,
    }
    assert manifest["parts"] == [expected_part]
    assert manifest["unassigned_parts"] == []
    assert get_effective_assignments(cad_file) == {"roller_part": steel}
def test_build_scene_manifest_normalizes_legacy_schaeffler_material_names():
    """Legacy SCHAEFFLER_* material names are normalized to HARTOMAT_* names."""
    cad_file = SimpleNamespace(
        id="cad-legacy",
        resolved_material_assignments={
            "roller_part": {
                "source_name": "KERO_Z-575693-QP-DRH_ISB_1_1",
                "prim_path": "/Root/Assembly/roller_part",
                "canonical_material": "SCHAEFFLER_010101_Steel-Bare",
            }
        },
        # Legacy names in overrides/source maps must not leak through either.
        manual_material_overrides={"manual_part": "SCHAEFFLER_020101_Durotect-Blue"},
        source_material_assignments={"roller": "SCHAEFFLER_030103_Elastomer-Black"},
        parsed_objects=None,
    )
    manifest = build_scene_manifest(cad_file)
    normalized = "HARTOMAT_010101_Steel-Bare"
    assert manifest["parts"] == [
        {
            "part_key": "roller_part",
            "source_name": "KERO_Z-575693-QP-DRH_ISB_1_1",
            "prim_path": "/Root/Assembly/roller_part",
            "effective_material": normalized,
            "assignment_provenance": "auto",
            "is_unassigned": False,
        }
    ]
    assert get_effective_assignments(cad_file) == {"roller_part": normalized}
def test_build_scene_manifest_adds_semantic_alias_for_deduplicated_instance_keys():
    """A `_2`-style instance key is additionally surfaced under its semantic base key."""
    steel = "HARTOMAT_010101_Steel-Bare"
    cad_file = SimpleNamespace(
        id="cad-alias",
        resolved_material_assignments={
            "roller_part_2": {
                "source_name": "RollerPart",
                "prim_path": "/Root/Assembly/roller_part_2",
                "canonical_material": steel,
            }
        },
        manual_material_overrides=None,
        source_material_assignments=None,
        parsed_objects=None,
    )
    manifest = build_scene_manifest(cad_file)
    # Alias and instance entries share everything except the part key.
    shared_fields = {
        "source_name": "RollerPart",
        "prim_path": "/Root/Assembly/roller_part_2",
        "effective_material": steel,
        "assignment_provenance": "auto",
        "is_unassigned": False,
    }
    assert manifest["parts"] == [
        {"part_key": "roller_part_2", **shared_fields},
        {"part_key": "roller_part", **shared_fields},
    ]
    assert manifest["unassigned_parts"] == []
    assert get_effective_assignments(cad_file) == {
        "roller_part_2": steel,
        "roller_part": steel,
    }
def test_build_scene_manifest_skips_alias_when_canonical_key_already_exists():
    """No alias is injected when the canonical base key already has an assignment."""
    cad_file = SimpleNamespace(
        id="cad-existing-alias",
        resolved_material_assignments={
            "roller_part": {
                "source_name": "RollerPart",
                "prim_path": "/Root/Assembly/roller_part",
                "canonical_material": "HARTOMAT_010101_Steel-Bare",
            },
            "roller_part_2": {
                "source_name": "RollerPart",
                "prim_path": "/Root/Assembly/roller_part_2",
                "canonical_material": "HARTOMAT_020202_Steel-Bare",
            },
        },
        manual_material_overrides=None,
        source_material_assignments=None,
        parsed_objects=None,
    )
    manifest = build_scene_manifest(cad_file)
    part_keys = [part["part_key"] for part in manifest["parts"]]
    assert part_keys == ["roller_part", "roller_part_2"]
    assert get_effective_assignments(cad_file) == {
        "roller_part": "HARTOMAT_010101_Steel-Bare",
        "roller_part_2": "HARTOMAT_020202_Steel-Bare",
    }
def test_build_scene_manifest_alias_inherits_leaf_manual_override():
    """The semantic alias mirrors a manual override applied to its leaf instance key."""
    cad_file = SimpleNamespace(
        id="cad-manual-alias",
        resolved_material_assignments={
            "roller_part_3": {
                "source_name": "RollerPart",
                "prim_path": "/Root/Assembly/roller_part_3",
                "canonical_material": "HARTOMAT_010101_Steel-Bare",
            }
        },
        manual_material_overrides={
            "roller_part_3": "SCHAEFFLER_020101_Durotect-Blue",
        },
        source_material_assignments=None,
        parsed_objects=None,
    )
    manifest = build_scene_manifest(cad_file)
    aliases = [part for part in manifest["parts"] if part["part_key"] == "roller_part"]
    assert aliases and aliases[0] == {
        "part_key": "roller_part",
        "source_name": "RollerPart",
        "prim_path": "/Root/Assembly/roller_part_3",
        # Manual override wins and is normalized to the HARTOMAT name.
        "effective_material": "HARTOMAT_020101_Durotect-Blue",
        "assignment_provenance": "manual",
        "is_unassigned": False,
    }
    assert get_effective_assignments(cad_file) == {
        "roller_part_3": "HARTOMAT_020101_Durotect-Blue",
        "roller_part": "HARTOMAT_020101_Durotect-Blue",
    }
def test_build_scene_manifest_adds_semantic_alias_for_exporter_af_suffix_keys():
    """Exporter-style `_af*` variant keys gain an alias under the semantic base key."""
    steel = "HARTOMAT_010101_Steel-Bare"
    cad_file = SimpleNamespace(
        id="cad-af-alias",
        resolved_material_assignments={
            "kero_z_575693_qp_drh_isb_1_af6": {
                "source_name": "KERO_Z-575693-QP-DRH_ISB_1_AF6_",
                "prim_path": "/Root/Assembly/kero_z_575693_qp_drh_isb_1_af6",
                "canonical_material": steel,
            }
        },
        manual_material_overrides=None,
        source_material_assignments=None,
        parsed_objects=None,
    )
    manifest = build_scene_manifest(cad_file)
    assert [part["part_key"] for part in manifest["parts"]] == [
        "kero_z_575693_qp_drh_isb_1_af6",
        "kero_z_575693_qp_drh_isb_1",
    ]
    assert get_effective_assignments(cad_file) == {
        "kero_z_575693_qp_drh_isb_1_af6": steel,
        "kero_z_575693_qp_drh_isb_1": steel,
    }
+81
View File
@@ -0,0 +1,81 @@
from pathlib import Path
from app.config import settings
from app.core.render_paths import (
ensure_group_writable_dir,
resolve_public_asset_url,
resolve_result_path,
result_path_to_storage_key,
result_path_to_public_url,
)
def test_result_path_to_public_url_for_canonical_render(monkeypatch, tmp_path):
    """A file under `uploads/renders` maps to a `/renders/...` URL and resolves back."""
    upload_dir = tmp_path / "uploads"
    monkeypatch.setattr(settings, "upload_dir", str(upload_dir))
    render_file = upload_dir / "renders" / "line-1" / "bearing.png"
    render_file.parent.mkdir(parents=True, exist_ok=True)
    render_file.write_bytes(b"png")
    result_path = str(render_file)
    public_url = result_path_to_public_url(result_path, require_exists=True)
    assert public_url == "/renders/line-1/bearing.png"
    assert resolve_result_path(result_path) == render_file
    assert resolve_public_asset_url("/renders/line-1/bearing.png") == render_file
def test_result_path_to_public_url_for_legacy_shared_render(monkeypatch, tmp_path):
    """Legacy `/shared/renders/...` paths resolve and normalize to `/renders/...`."""
    upload_dir = tmp_path / "uploads"
    monkeypatch.setattr(settings, "upload_dir", str(upload_dir))
    render_file = upload_dir / "renders" / "line-2" / "legacy.png"
    render_file.parent.mkdir(parents=True, exist_ok=True)
    render_file.write_bytes(b"png")
    legacy_path = "/shared/renders/line-2/legacy.png"
    assert resolve_result_path(legacy_path) == render_file
    assert result_path_to_public_url(legacy_path, require_exists=True) == "/renders/line-2/legacy.png"
def test_result_path_to_public_url_hides_missing_or_non_public_paths(monkeypatch, tmp_path):
    """Non-public render locations and missing files yield no public URL."""
    upload_dir = tmp_path / "uploads"
    monkeypatch.setattr(settings, "upload_dir", str(upload_dir))
    step_render = upload_dir / "step_files" / "renders" / "line_123.png"
    step_render.parent.mkdir(parents=True, exist_ok=True)
    step_render.write_bytes(b"png")
    # Exists, but lives outside the public renders tree.
    assert result_path_to_public_url(str(step_render), require_exists=True) is None
    # Would be public, but the file does not exist on disk.
    missing_public = str(upload_dir / "renders" / "line-3" / "missing.png")
    assert result_path_to_public_url(missing_public, require_exists=True) is None
def test_result_path_to_storage_key_normalizes_legacy_and_public_variants(monkeypatch, tmp_path):
    """Absolute, legacy `/shared`, and public path variants map to one storage key."""
    upload_dir = tmp_path / "uploads"
    monkeypatch.setattr(settings, "upload_dir", str(upload_dir))
    render_file = upload_dir / "renders" / "line-4" / "normalized.png"
    render_file.parent.mkdir(parents=True, exist_ok=True)
    render_file.write_bytes(b"png")
    expected_key = "renders/line-4/normalized.png"
    for variant in (
        str(render_file),
        "/shared/renders/line-4/normalized.png",
        "/renders/line-4/normalized.png",
    ):
        assert result_path_to_storage_key(variant) == expected_key
def test_ensure_group_writable_dir_normalizes_existing_upload_tree(monkeypatch, tmp_path):
    """Group write/execute and setgid bits are applied along the whole upload tree."""
    upload_dir = tmp_path / "uploads"
    target = upload_dir / "step_files" / "renders"
    target.mkdir(parents=True, exist_ok=True)
    tree = (upload_dir, upload_dir / "step_files", target)
    # Start from a tree without group-write or setgid bits.
    for path in tree:
        path.chmod(0o755)
    monkeypatch.setattr(settings, "upload_dir", str(upload_dir))
    assert ensure_group_writable_dir(target) == target
    for path in tree:
        mode = path.stat().st_mode & 0o7777
        assert mode & 0o020  # group write
        assert mode & 0o010  # group execute (traverse)
        assert mode & 0o2000  # setgid so children inherit the group
+220
View File
@@ -0,0 +1,220 @@
from __future__ import annotations
import uuid
from contextlib import contextmanager
from pathlib import Path
from types import SimpleNamespace
def _patch_render_thumbnail_dependencies(monkeypatch, tmp_path: Path):
    # Builds a fully stubbed environment for the render-thumbnail Celery tasks:
    # fake DB session, no-op sample cap, and monkeypatched collaborators that
    # record their calls into lists returned to the caller for assertions.
    from app.domains.pipeline.tasks.render_thumbnail import render_graph_thumbnail, render_step_thumbnail
    # On-disk stand-ins for the CAD source and its converted USD asset.
    step_path = tmp_path / "bearing.step"
    step_path.write_text("STEP", encoding="utf-8")
    usd_path = tmp_path / "bearing.usdc"
    usd_path.write_text("USD", encoding="utf-8")
    # Minimal CAD-file record the fake session hands back for any lookup.
    cad_file = SimpleNamespace(
        id=uuid.uuid4(),
        stored_path=str(step_path),
        step_file_hash=None,
        mesh_attributes={},
        tenant_id=uuid.uuid4(),
    )

    class _FakeSession:
        # Stand-in for the pipeline DB session used by the task.
        def get(self, _model, _object_id):
            # Always return the single prepared CAD file.
            return cad_file

        def commit(self):
            return None

    @contextmanager
    def _fake_pipeline_session(_tenant_id=None):
        # Replaces the tenant-scoped session factory with the fake above.
        yield _FakeSession()

    @contextmanager
    def _fake_sample_cap():
        # No-op replacement for the thumbnail sample-capping context manager.
        yield

    # Call recorders returned to the test for assertions.
    queued_glb_exports: list[str] = []
    workflow_updates: list[tuple[str, str, str | None, str | None]] = []
    postprocess_calls: list[str] = []
    regenerate_calls: list[tuple[tuple, dict]] = []
    monkeypatch.setattr(
        "app.domains.pipeline.tasks.render_thumbnail._pipeline_session",
        _fake_pipeline_session,
    )
    monkeypatch.setattr(
        "app.domains.pipeline.tasks.render_thumbnail._capped_thumbnail_samples",
        _fake_sample_cap,
    )
    monkeypatch.setattr(
        "app.core.tenant_context.resolve_tenant_id_for_cad",
        lambda cad_file_id: "tenant-1",
    )
    monkeypatch.setattr(
        "app.domains.products.cache_service.compute_step_hash",
        lambda _path: "hash-123",
    )
    # Record each regenerate call and report success (True) to the task.
    monkeypatch.setattr(
        "app.services.step_processor.regenerate_cad_thumbnail",
        lambda *args, **kwargs: (regenerate_calls.append((args, kwargs)), True)[1],
    )
    # Fixed render context so regenerate kwargs can be asserted exactly.
    monkeypatch.setattr(
        "app.domains.pipeline.tasks.render_thumbnail._resolve_thumbnail_render_context",
        lambda _session, _cad: {
            "material_library_path": "/tmp/materials.blend",
            "material_map": {"part-a": "HARTOMAT_010101_Steel-Bare"},
            "part_names_ordered": ["part-a", "part-b"],
            "usd_path": usd_path,
        },
    )
    monkeypatch.setattr(
        "app.services.step_processor.extract_mesh_edge_data",
        lambda _step_path: {"sharp_edge_pairs": []},
    )
    # Record the bbox post-processing step and return a canned bbox result.
    monkeypatch.setattr(
        "app.domains.rendering.workflow_runtime_services.resolve_cad_bbox",
        lambda step_path, glb_path=None: (
            postprocess_calls.append("bbox"),
            SimpleNamespace(
                bbox_data={
                    "dimensions_mm": {"x": 1.0, "y": 2.0, "z": 3.0},
                    "bbox_center_mm": {"x": 0.5, "y": 1.0, "z": 1.5},
                }
            ),
        )[1],
    )
    monkeypatch.setattr(
        "app.domains.pipeline.tasks.extract_metadata._auto_populate_materials_for_cad",
        lambda cad_file_id, tenant_id=None: postprocess_calls.append("auto_populate"),
    )
    monkeypatch.setattr(
        "app.core.websocket.publish_event_sync",
        lambda tenant_id, payload: postprocess_calls.append("websocket"),
    )
    # Capture legacy GLB export follow-ups instead of enqueuing real Celery work.
    monkeypatch.setattr(
        "app.domains.pipeline.tasks.export_glb.generate_gltf_geometry_task.delay",
        lambda cad_file_id: queued_glb_exports.append(cad_file_id),
    )
    # Capture workflow status transitions as (order_line_id, status, run_id, node_id).
    monkeypatch.setattr(
        "app.domains.rendering.tasks._update_workflow_run_status",
        lambda order_line_id, status, error=None, *, workflow_run_id=None, workflow_node_id=None: workflow_updates.append(
            (order_line_id, status, workflow_run_id, workflow_node_id)
        ),
    )
    # Tasks first, then the recorder lists in a fixed, documented order.
    return (
        render_step_thumbnail,
        render_graph_thumbnail,
        queued_glb_exports,
        workflow_updates,
        postprocess_calls,
        regenerate_calls,
    )
def test_render_step_thumbnail_skips_legacy_glb_follow_up_for_graph_runs(monkeypatch, tmp_path):
    """Graph-run invocations must not queue the legacy GLB export follow-up."""
    fixtures = _patch_render_thumbnail_dependencies(monkeypatch, tmp_path)
    render_step_thumbnail, _, queued_glb_exports, workflow_updates, postprocess_calls, regenerate_calls = fixtures
    render_step_thumbnail.run(
        "cad-123",
        workflow_run_id="run-123",
        workflow_node_id="save-thumb",
        renderer="threejs",
        width=512,
        height=512,
        transparent_bg=True,
    )
    assert queued_glb_exports == []
    assert workflow_updates == [("cad-123", "completed", "run-123", "save-thumb")]
    assert postprocess_calls == ["bbox", "auto_populate", "websocket"]
    expected_kwargs = {
        "part_colors": {},
        "renderer": "threejs",
        "render_engine": None,
        "samples": None,
        "width": 512,
        "height": 512,
        "transparent_bg": True,
        "material_library_path": "/tmp/materials.blend",
        "material_map": {"part-a": "HARTOMAT_010101_Steel-Bare"},
        "part_names_ordered": ["part-a", "part-b"],
        "usd_path": tmp_path / "bearing.usdc",
    }
    assert regenerate_calls == [(("cad-123",), expected_kwargs)]
def test_render_step_thumbnail_keeps_legacy_glb_follow_up_without_workflow_run(monkeypatch, tmp_path):
    """Plain (non-graph) invocations still queue the legacy GLB export follow-up."""
    fixtures = _patch_render_thumbnail_dependencies(monkeypatch, tmp_path)
    render_step_thumbnail, _, queued_glb_exports, workflow_updates, postprocess_calls, _ = fixtures
    render_step_thumbnail.run("cad-123")
    assert queued_glb_exports == ["cad-123"]
    assert workflow_updates == [("cad-123", "completed", None, None)]
    assert postprocess_calls == ["bbox", "auto_populate", "websocket"]
def test_render_graph_thumbnail_skips_legacy_postprocess_and_glb_follow_up(monkeypatch, tmp_path):
    """The graph task renders without legacy post-processing or GLB follow-up."""
    fixtures = _patch_render_thumbnail_dependencies(monkeypatch, tmp_path)
    _, render_graph_thumbnail, queued_glb_exports, workflow_updates, postprocess_calls, _ = fixtures
    render_graph_thumbnail.run(
        "cad-123",
        workflow_run_id="run-123",
        workflow_node_id="save-thumb",
        renderer="threejs",
        width=512,
        height=512,
        transparent_bg=True,
    )
    assert queued_glb_exports == []
    assert workflow_updates == [("cad-123", "completed", "run-123", "save-thumb")]
    assert postprocess_calls == []
def test_regenerate_thumbnail_skips_retry_for_missing_cad_resource(monkeypatch):
    """A missing CAD resource is terminal: the task must not schedule a retry."""
    from app.domains.pipeline.tasks.render_thumbnail import regenerate_thumbnail
    from app.services.step_processor import MissingCadResourceError

    retry_calls: list[tuple[tuple, dict]] = []

    def _raise_missing(*_args, **_kwargs):
        raise MissingCadResourceError("CAD file not found: cad-123")

    monkeypatch.setattr("app.core.task_logs.log_task_event", lambda *args, **kwargs: None)
    monkeypatch.setattr(
        "app.core.tenant_context.resolve_tenant_id_for_cad",
        lambda cad_file_id: "tenant-1",
    )
    monkeypatch.setattr(
        "app.services.step_processor.regenerate_cad_thumbnail",
        _raise_missing,
    )
    monkeypatch.setattr(
        regenerate_thumbnail,
        "retry",
        lambda *args, **kwargs: retry_calls.append((args, kwargs)),
    )
    regenerate_thumbnail.run(
        "cad-123",
        {},
        renderer="blender",
        width=512,
        height=512,
    )
    assert retry_calls == []
@@ -0,0 +1,83 @@
from __future__ import annotations
import importlib.util
from pathlib import Path
def _load_blender_materials_module():
    """Import the render-worker materials helper directly from its script file."""
    search_paths = (
        Path(__file__).resolve().parents[1] / "render-worker" / "scripts" / "_blender_materials.py",
        Path("/compose/render-worker/scripts/_blender_materials.py"),
    )
    module_path = None
    for candidate in search_paths:
        if candidate.exists():
            module_path = candidate
            break
    assert module_path is not None
    spec = importlib.util.spec_from_file_location("test_blender_materials", module_path)
    assert spec is not None
    assert spec.loader is not None
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module
def test_lookup_material_name_matches_usd_part_keys_without_serial_suffixes():
    """Serial/AF-suffixed map entries still resolve suffix-free USD object names."""
    module = _load_blender_materials_module()
    mat_map = module.build_mat_map_lower(
        {
            "RWDR_B_F-802044_TR4_H122B-69186": "Steel--Stahl",
            "RWDR_B_F-802044_TR4_H122B-72661": "Steel--Stahl",
            "O_RING_RG_F-802044_TR4_H-120220": "Eslastomer_black--Elastomer_schwarz",
            "O_RING_RG_F-802044_TR4_H-120399": "Eslastomer_black--Elastomer_schwarz",
            "F-802044-3001_IR_TR2-H_A1-25921_AF0": "Steel--Stahl",
            "F-802044-3001_IR_TR2-H_A1-53810_AF0": "Steel--Stahl",
        }
    )
    cases = [
        ("RWDR_B_F-802044_TR4_H122BK", "rwdr_b_f_802044_tr4_h122bk", "Steel--Stahl"),
        (
            "O_RING_RG_F-802044_TR4_H122BK_1",
            "o_ring_rg_f_802044_tr4_h122bk_1",
            "Eslastomer_black--Elastomer_schwarz",
        ),
        ("F-802044-3001_IR_TR2-H_A1_04", "f_802044_3001_ir_tr2_h_a1_04", "Steel--Stahl"),
    ]
    for object_name, part_key, expected in cases:
        assert module.lookup_material_name(object_name, mat_map, part_key) == expected
def test_lookup_material_name_keeps_ambiguous_fuzzy_matches_unresolved():
    """Two equally plausible fuzzy candidates must not be picked arbitrarily."""
    module = _load_blender_materials_module()
    mat_map = module.build_mat_map_lower(
        {
            "PART_ALPHA-11111": "Steel--Stahl",
            "PART_ALPHA-22222": "Bronze--Bronze",
        }
    )
    assert module.lookup_material_name("PART_ALPHA", mat_map) is None
def test_iter_object_name_variants_strips_blender_duplicate_suffix():
    """Blender `.001` duplicate suffixes yield the base name as a second variant."""
    module = _load_blender_materials_module()
    with_suffix = list(module._iter_object_name_variants("BearingHousing.001"))
    assert with_suffix == ["BearingHousing.001", "BearingHousing"]
    without_suffix = list(module._iter_object_name_variants("BearingHousing"))
    assert without_suffix == ["BearingHousing"]
+2470
View File
File diff suppressed because it is too large Load Diff
+33 -3
View File
@@ -50,6 +50,8 @@ services:
dockerfile: Dockerfile dockerfile: Dockerfile
command: /start.sh command: /start.sh
environment: environment:
- PYTHONDONTWRITEBYTECODE=${PYTHONDONTWRITEBYTECODE:-1}
- PYTHONPYCACHEPREFIX=${PYTHONPYCACHEPREFIX:-/tmp/pycache}
- POSTGRES_DB=${POSTGRES_DB:-hartomat} - POSTGRES_DB=${POSTGRES_DB:-hartomat}
- POSTGRES_USER=${POSTGRES_USER:-hartomat} - POSTGRES_USER=${POSTGRES_USER:-hartomat}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-hartomat} - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-hartomat}
@@ -89,8 +91,12 @@ services:
build: build:
context: ./backend context: ./backend
dockerfile: Dockerfile dockerfile: Dockerfile
user: "${APP_UID:-1000}:0"
command: celery -A app.tasks.celery_app worker --loglevel=info -Q step_processing,ai_validation --autoscale=${MAX_CONCURRENCY:-8},${MIN_CONCURRENCY:-2} --concurrency=${MIN_CONCURRENCY:-2} command: celery -A app.tasks.celery_app worker --loglevel=info -Q step_processing,ai_validation --autoscale=${MAX_CONCURRENCY:-8},${MIN_CONCURRENCY:-2} --concurrency=${MIN_CONCURRENCY:-2}
environment: environment:
- PYTHONDONTWRITEBYTECODE=${PYTHONDONTWRITEBYTECODE:-1}
- PYTHONPYCACHEPREFIX=${PYTHONPYCACHEPREFIX:-/tmp/pycache}
- HOME=/tmp
- POSTGRES_DB=${POSTGRES_DB:-hartomat} - POSTGRES_DB=${POSTGRES_DB:-hartomat}
- POSTGRES_USER=${POSTGRES_USER:-hartomat} - POSTGRES_USER=${POSTGRES_USER:-hartomat}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-hartomat} - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-hartomat}
@@ -123,8 +129,15 @@ services:
dockerfile: render-worker/Dockerfile dockerfile: render-worker/Dockerfile
args: args:
- BLENDER_VERSION=${BLENDER_VERSION:-5.0.1} - BLENDER_VERSION=${BLENDER_VERSION:-5.0.1}
user: "${APP_UID:-1000}:0"
group_add:
- "44"
- "110"
command: bash -c "python3 /check_version.py && celery -A app.tasks.celery_app worker --loglevel=info -Q asset_pipeline --autoscale=1,1 --concurrency=1" command: bash -c "python3 /check_version.py && celery -A app.tasks.celery_app worker --loglevel=info -Q asset_pipeline --autoscale=1,1 --concurrency=1"
environment: environment:
- PYTHONDONTWRITEBYTECODE=${PYTHONDONTWRITEBYTECODE:-1}
- PYTHONPYCACHEPREFIX=${PYTHONPYCACHEPREFIX:-/tmp/pycache}
- HOME=/tmp
- POSTGRES_DB=${POSTGRES_DB:-hartomat} - POSTGRES_DB=${POSTGRES_DB:-hartomat}
- POSTGRES_USER=${POSTGRES_USER:-hartomat} - POSTGRES_USER=${POSTGRES_USER:-hartomat}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-hartomat} - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-hartomat}
@@ -135,13 +148,16 @@ services:
- UPLOAD_DIR=/app/uploads - UPLOAD_DIR=/app/uploads
- BLENDER_BIN=/opt/blender/blender - BLENDER_BIN=/opt/blender/blender
- RENDER_SCRIPTS_DIR=/render-scripts - RENDER_SCRIPTS_DIR=/render-scripts
- CYCLES_DEVICE=${CYCLES_DEVICE:-auto} - CYCLES_DEVICE=${CYCLES_DEVICE:-gpu}
- NVIDIA_VISIBLE_DEVICES=all
- NVIDIA_DRIVER_CAPABILITIES=compute,utility,graphics
- MINIO_URL=${MINIO_URL:-http://minio:9000} - MINIO_URL=${MINIO_URL:-http://minio:9000}
- MINIO_USER=${MINIO_USER:-minioadmin} - MINIO_USER=${MINIO_USER:-minioadmin}
- MINIO_PASSWORD=${MINIO_PASSWORD:-minioadmin} - MINIO_PASSWORD=${MINIO_PASSWORD:-minioadmin}
- MINIO_BUCKET=${MINIO_BUCKET:-uploads} - MINIO_BUCKET=${MINIO_BUCKET:-uploads}
volumes: volumes:
- ./backend:/app - ./backend:/app
- ./render-worker/scripts:/render-scripts
- uploads:/app/uploads - uploads:/app/uploads
- /opt/blender:/opt/blender:ro - /opt/blender:/opt/blender:ro
- optix-cache:/var/tmp/OptixCache_root # persist OptiX kernel cache across container restarts - optix-cache:/var/tmp/OptixCache_root # persist OptiX kernel cache across container restarts
@@ -165,8 +181,15 @@ services:
dockerfile: render-worker/Dockerfile dockerfile: render-worker/Dockerfile
args: args:
- BLENDER_VERSION=${BLENDER_VERSION:-5.0.1} - BLENDER_VERSION=${BLENDER_VERSION:-5.0.1}
user: "${APP_UID:-1000}:0"
group_add:
- "44"
- "110"
command: bash -c "python3 /check_version.py && celery -A app.tasks.celery_app worker --loglevel=info -Q asset_pipeline_light --autoscale=2,2 --concurrency=2" command: bash -c "python3 /check_version.py && celery -A app.tasks.celery_app worker --loglevel=info -Q asset_pipeline_light --autoscale=2,2 --concurrency=2"
environment: environment:
- PYTHONDONTWRITEBYTECODE=${PYTHONDONTWRITEBYTECODE:-1}
- PYTHONPYCACHEPREFIX=${PYTHONPYCACHEPREFIX:-/tmp/pycache}
- HOME=/tmp
- POSTGRES_DB=${POSTGRES_DB:-hartomat} - POSTGRES_DB=${POSTGRES_DB:-hartomat}
- POSTGRES_USER=${POSTGRES_USER:-hartomat} - POSTGRES_USER=${POSTGRES_USER:-hartomat}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-hartomat} - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-hartomat}
@@ -177,13 +200,16 @@ services:
- UPLOAD_DIR=/app/uploads - UPLOAD_DIR=/app/uploads
- BLENDER_BIN=/opt/blender/blender - BLENDER_BIN=/opt/blender/blender
- RENDER_SCRIPTS_DIR=/render-scripts - RENDER_SCRIPTS_DIR=/render-scripts
- CYCLES_DEVICE=${CYCLES_DEVICE:-auto} - CYCLES_DEVICE=${CYCLES_DEVICE:-gpu}
- NVIDIA_VISIBLE_DEVICES=all
- NVIDIA_DRIVER_CAPABILITIES=compute,utility,graphics
- MINIO_URL=${MINIO_URL:-http://minio:9000} - MINIO_URL=${MINIO_URL:-http://minio:9000}
- MINIO_USER=${MINIO_USER:-minioadmin} - MINIO_USER=${MINIO_USER:-minioadmin}
- MINIO_PASSWORD=${MINIO_PASSWORD:-minioadmin} - MINIO_PASSWORD=${MINIO_PASSWORD:-minioadmin}
- MINIO_BUCKET=${MINIO_BUCKET:-uploads} - MINIO_BUCKET=${MINIO_BUCKET:-uploads}
volumes: volumes:
- ./backend:/app - ./backend:/app
- ./render-worker/scripts:/render-scripts
- uploads:/app/uploads - uploads:/app/uploads
- /opt/blender:/opt/blender:ro - /opt/blender:/opt/blender:ro
- optix-cache:/var/tmp/OptixCache_root - optix-cache:/var/tmp/OptixCache_root
@@ -204,8 +230,12 @@ services:
build: build:
context: ./backend context: ./backend
dockerfile: Dockerfile dockerfile: Dockerfile
command: celery -A app.tasks.celery_app beat --loglevel=info user: "${APP_UID:-1000}:0"
command: celery -A app.tasks.celery_app beat --loglevel=info --schedule=/tmp/celerybeat-schedule
environment: environment:
- PYTHONDONTWRITEBYTECODE=${PYTHONDONTWRITEBYTECODE:-1}
- PYTHONPYCACHEPREFIX=${PYTHONPYCACHEPREFIX:-/tmp/pycache}
- HOME=/tmp
- POSTGRES_DB=${POSTGRES_DB:-hartomat} - POSTGRES_DB=${POSTGRES_DB:-hartomat}
- POSTGRES_USER=${POSTGRES_USER:-hartomat} - POSTGRES_USER=${POSTGRES_USER:-hartomat}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-hartomat} - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-hartomat}
+104 -2
View File
@@ -147,11 +147,101 @@ Ergebnis:
- Abgeschlossen: Block 6 - Abgeschlossen: Block 6
- Abgeschlossen: Block 7 - Abgeschlossen: Block 7
- Abgeschlossen: Block 8 - Abgeschlossen: Block 8
- In Arbeit: Block 9 - Abgeschlossen: Block 9
- Nächster geplanter Folgeblock: Block 9 - Parallel in Arbeit: Block 11
- Vorbereitet: Block 12
- Nächster geplanter Folgeblock: Block 11
## Nächste Orchestrierte Batch-Wellen
Diese Wellen priorisieren Root-Cause-Arbeit vor weiterer UI-Politur und halten Legacy jederzeit parallel funktionsfähig.
### Welle P1: Vertrags- und Produktionsmodell-Schließung
Muss zuerst laufen:
- `P1-A` Node-Contract Closure
- Backend-Registry und Schema als harte Source of Truth schließen
- Fokus: Family-Konsistenz, param-key-Validierung, vollständige node settings contracts
- `P1-B` Output-Type / Invocation Model Closure
- Output Types als sauberes Workflow-Invocation-Modell abschließen
- Fokus: artifact/family compatibility, editor/API contract clarity, sichere Erstellung neuer Output Types
- `P1-C` Render-Template- und Asset-Library-Inputs als echte Produktionsinputs modellieren
- Fokus: template/material-library/input contracts statt versteckter Defaults
Warum zuerst:
- diese drei Blöcke definieren die autoritativen Verträge, an denen Editor, Runtime und Golden-Gates hängen
- weitere Runtime- und E2E-Arbeit bleibt sonst drift-anfällig
### Welle P2: Runtime-Parität und Graph/Legacy-Unifikation
Parallelisierbar nach P1-A:
- `P2-A` Legacy/Graph Module Unification
- gleiche Produktionsmodule in Legacy- und Graph-Pfad verwenden
- Fokus: template resolution, samples/defaults, dispatch parity
- `P2-B` Canonical Graphs / Starter Blueprints / Seed Normalization
- eine kanonische Graph-Quelle statt Drift zwischen backend blueprints, bundles und frontend starters
- `P2-C` Run Inspection Completion
- Preflight, dispatch, comparison und node outputs operativ debugbar machen
### Welle P3: CAD-/Material-Parität und Editor-Führung
Parallelisierbar nach P2-A:
- `P3-A` CAD / Material Parity
- instance-aware part/material truth zwischen exporter, viewer und render path schließen
- `P3-B` Editor Organization Around Modules / Families / Input Paths
- gemeinsame Authoring-Surface weiter auf modulare Produktionspfade zuschneiden
- `P3-C` Context Flow Simplification Follow-up
- Kontextauswahl und Output-Type-Einstieg auf die neuen Contracts ausrichten
### Welle P4: Operative Freigabe und Hygiene
Nach P2 und P3:
- `P4-A` Shadow / Graph Rollout Hardening
- pro workflow / output type steuerbar, mit klarem Rückfallpfad
- `P4-B` Sequential Golden / Smoke / E2E Gates
- echte Produktionsfälle mit Templates, Varianten und Output-Types sequenziell absichern
- `P4-C` Test-Infrastruktur / Low-RAM Gates
- reproduzierbare sequentielle Verifikation
- `P4-D` Repo Hygiene / Generated Artifact Root Cause
- Ownership-, Pycache- und generated-file-Ursachen bereinigen
## Sofort Nächste Disjunkte Arbeitsblöcke
Für die aktuelle nächste Ausführung werden diese drei Blöcke als kleinste sinnvolle Parallel-Batch vorbereitet:
- Batch `NB-1`: Node-Contract Closure
- Status: verifiziert
- Fokus: Registry- und Schema-Contracts als harte Source of Truth
- Verifikation: `backend/tests/domains/test_workflow_schema.py`, `backend/tests/domains/test_workflow_node_registry.py`
- Batch `NB-2`: Output-Type / Invocation Model Closure
- Status: verifiziert
- Fokus: Artifact-/Family-/Invocation-Contracts für Output Types
- Verifikation: `backend/tests/domains/test_output_types_api.py`, `frontend/src/__tests__/api/outputTypes.test.ts`
- Batch `NB-3`: CAD / Material parity root-cause closure
- Status: verifiziert
- Fokus: part-key-/instance-stabile Materialidentität zwischen Exporter, Manifest und Viewer
- Teilfortschritt April 10, 2026: scene-manifest aliasiert jetzt auch exporter-variant keys wie `_af6` auf ihren kanonischen semantischen part key; der Viewer kann damit dieselbe autoritative Materialidentität konsumieren wie der Manifest-Pfad
- Verifikation: gezielte low-RAM CAD-/Viewer-Tests nach Root-Cause-Fix
- Abschluss April 11, 2026: live HartOMat-Export für `7c214057-9982-4d6e-aa87-43bfabfdb709` liefert jetzt `146` Manifest-Parts, `146` Mesh-Nodes, `146` eindeutige `partKey`s, `0` fehlende und `0` duplizierte Zuordnungen; Root Cause war die Kombination aus stale GLB cache plus nicht-atomarem OCC-Overwrite beim Re-Export
Merge-Reihenfolge:
1. `NB-1`
2. `NB-2`
3. `NB-3`
4. danach erst weitere Runtime-/Editor-Folgeblöcke
## Letzte Verifikation ## Letzte Verifikation
- `./scripts/repo_hygiene.sh`
- Ergebnis: Dry-run listet bereinigbare Cache-/Bytecode-Artefakte plus nicht dem aktuellen Nutzer gehörende Generated Files; die Repo-Hygiene deckt jetzt auch `render-worker/scripts/__pycache__` explizit ab
- `find . \! -user "$USER" -not -path './.git/*' -ls | sed -n '1,120p'`
- Ergebnis: verbleibende Ownership-Reste liegen im `render-worker`-Pycache; Compose-Härtung wird nun über `PYTHONPYCACHEPREFIX=/tmp/pycache` auf die Ursache angesetzt
- `backend/.venv/bin/pytest backend/tests/test_config_runtime_resolution.py -q` - `backend/.venv/bin/pytest backend/tests/test_config_runtime_resolution.py -q`
- Ergebnis: 3 Tests grün; Host-Runtime normalisiert Docker-Service-Aliase (`postgres`, `redis`) außerhalb von Containern nun automatisch auf `localhost`, Container-Runtime bleibt unverändert - Ergebnis: 3 Tests grün; Host-Runtime normalisiert Docker-Service-Aliase (`postgres`, `redis`) außerhalb von Containern nun automatisch auf `localhost`, Container-Runtime bleibt unverändert
- `backend/.venv/bin/pytest backend/tests/domains/test_workflow_runtime_services.py -q -x` - `backend/.venv/bin/pytest backend/tests/domains/test_workflow_runtime_services.py -q -x`
@@ -166,5 +256,17 @@ Ergebnis:
- Ergebnis: 5 Tests grün; autoritative Scene-Manifest-Zuweisungen werden nun im Workflow-Renderpfad auf `part_key` und `source_name` gespiegelt, Legacy-Fallback bleibt unverändert - Ergebnis: 5 Tests grün; autoritative Scene-Manifest-Zuweisungen werden nun im Workflow-Renderpfad auf `part_key` und `source_name` gespiegelt, Legacy-Fallback bleibt unverändert
- `./backend/.venv/bin/pytest backend/tests/test_part_key_service.py -q` - `./backend/.venv/bin/pytest backend/tests/test_part_key_service.py -q`
- Ergebnis: 1 Test grün; part-key-basierte Manifest-Auflösung bleibt konsistent - Ergebnis: 1 Test grün; part-key-basierte Manifest-Auflösung bleibt konsistent
- `python3 scripts/compare_live_cad_parity.py --cad-id 7c214057-9982-4d6e-aa87-43bfabfdb709`
- Ergebnis: Live-CAD-Parität grün; Manifest, ausgeliefertes GLB und Viewer-`partKey`-Grundlage stimmen für alle 146 renderbaren Teile exakt überein
- `cd frontend && npx vitest run src/__tests__/components/workflowEditorUi.test.tsx src/__tests__/api/outputTypes.test.ts --pool forks --poolOptions.forks.singleFork=true` - `cd frontend && npx vitest run src/__tests__/components/workflowEditorUi.test.tsx src/__tests__/api/outputTypes.test.ts --pool forks --poolOptions.forks.singleFork=true`
- Ergebnis: 20 Tests grün, sequenziell ausgeführt - Ergebnis: 20 Tests grün, sequenziell ausgeführt
- `cd frontend && npm test -- src/__tests__/components/workflowEditorUi.test.tsx src/__tests__/components/workflowAuthoringGuidance.test.ts`
- Ergebnis: 17 Tests grün; die gemeinsame Authoring-Surface bleibt nach dem jüngsten Wiring-Refactor stabil
- `cd frontend && npm run build`
- Ergebnis: Build grün; `/workflows` bleibt kompilierbar nach dem Authoring-Refactor
- `./backend/.venv/bin/pytest backend/tests/test_part_key_service.py -q`
- Ergebnis: 6 Tests grün; scene-manifest deckt jetzt neben `_2`/`_3` auch exporter-style `_af*`-Varianten über semantische Alias-Keys ab
- `./backend/.venv/bin/pytest backend/tests/test_export_step_to_gltf.py -q`
- Ergebnis: 3 Tests grün; GLB partKey stamping bleibt mit semantic-sibling-Auflösung stabil
- `cd frontend && npm test -- src/__tests__/components/cadUtils.test.ts`
- Ergebnis: 11 Tests grün; Viewer-seitige part-key-Auflösung bleibt nach dem Manifest-Alias-Fix konsistent
+252
View File
@@ -0,0 +1,252 @@
# First Wave Execution Plan
## Goal
Translate the worker orchestration into concrete first patch slices that can be executed in parallel without breaking legacy rendering.
This first wave is intentionally conservative:
- close contract holes before broad UI refactors
- avoid cross-cutting runtime rewrites in the same slice
- keep all graph changes legacy-safe
## Current Codebase Snapshot
### Block A: Node Contracts
Already in place:
- node definitions already expose `family`, `module_key`, `execution_kind`, `input_contract`, `output_contract`, `artifact_roles_*`, and `legacy_source`
- frontend already consumes node definitions and uses family-aware palette grouping
- `glb_bbox` already gained a real editor field for `glb_path`
Still missing:
- backend workflow schema validation is still DAG-structural only
- backend does not yet validate graph family consistency against the registry
- backend does not yet validate node params against registry-owned field definitions
- editor-visible nodes with weak or absent settings still need an explicit audit, especially export/CAD bridge nodes
- status note: the canonical still-path bridge nodes have now been expanded with real settings for template override, material override/disable, auto-populate persistence controls, GLB source preference, output-save artifact expectations, and notify arming. The remaining audit emphasis is export/CAD parity rather than still-path hidden defaults.
### Block D: Output-Type Invocation Contracts
Already in place:
- `workflow_family`, `artifact_kind`, and `invocation_overrides` exist
- backend already blocks mixed-family workflow links and direct family mismatches
- frontend admin UI already exposes family, artifact kind, and workflow selection
Still missing:
- workflow selection can still communicate contract semantics more clearly
- status note: backend now rejects workflow-family mismatches, mixed-family workflow links, and workflow/artifact mismatches against the linked workflow graph. The admin form has also been re-ordered into workflow-contract and invocation-profile sections so renderer details no longer dominate the first screenful.
### Block E: Editor Organization
Already in place:
- right-click canvas insertion exists
- searchable node command menu exists
- split of `legacy` / `bridge` / `graph` node groupings exists
- auto-align exists
- edge deletion exists via button, Delete key, right-click, and double-click
- the workflow toolbar has been compressed so context, mode, authoring actions, and run actions sit in one compact canvas-adjacent strip
Still missing:
- the editor is feature-rich but still structurally busy
- node insertion, run inspection, and inspector controls need clearer hierarchy
- UX cleanup should follow contract cleanup so the editor does not encode unstable assumptions
### Block I: Rollout And Regression Gates
Already in place:
- `legacy`, `graph`, and `shadow` dispatch modes exist
- graph mode falls back to legacy on failure
- shadow mode keeps legacy authoritative
- workflow comparison endpoint exists with hash, dimensions, and mean pixel delta reporting
- there is meaningful backend coverage for shadow dispatch and comparison behavior
- the live rollout harness now exposes `--workflow-still-smoke` and `--workflow-golden-suite` paths for canonical still and representative graph cases
Still missing:
- per-workflow and per-output-type rollout enablement is still an operational step, not yet a guided product workflow
- smoke and golden harnesses still depend on live stack health and seeded render fixtures, so parity coverage is not yet CI-grade deterministic
## Parallel Patch Slices
### Slice A1: Registry-Backed Schema Validation
Owner:
- Block A worker
Files:
- `backend/app/domains/rendering/workflow_schema.py`
- `backend/tests/domains/test_workflow_schema.py`
- `backend/tests/domains/test_workflow_node_registry.py`
Patch scope:
- add backend validation that all nodes in a graph belong to one family unless explicitly allowed for migration
- validate that node params only use keys declared by the node registry
- return clear validation errors that name the offending node id, step, and param key
Why this slice first:
- it closes the largest backend contract gap without changing runtime execution
- it gives Block D and Block E a stable source of truth to build on
Acceptance:
- a mixed-family graph without migration exemption is rejected
- unknown node param keys are rejected
- current canonical still graph remains valid
### Slice A2: Export/CAD Contract Audit
Owner:
- Block A worker
Files:
- `backend/app/domains/rendering/workflow_node_registry.py`
- `backend/tests/domains/test_workflow_node_registry.py`
Patch scope:
- audit `occ_glb_export`, `export_blend`, `thumbnail_save`, and `stl_cache_generate`
- add real field definitions only where runtime behavior genuinely supports editable inputs
- avoid fake settings just to make the editor look complete
Acceptance:
- each audited node either has a justified field schema or an explicit no-settings contract backed by tests
### Slice D1: Artifact/Family Validation Tightening
Owner:
- Block D worker
Files:
- `backend/app/domains/rendering/output_type_contracts.py`
- `backend/app/api/routers/output_types.py`
- `backend/tests/domains/test_output_types_api.py`
Patch scope:
- define artifact-kind compatibility rules per workflow family
- reject impossible combinations early in create/edit APIs
- keep legacy output types renderable if they predate strict linkage
Acceptance:
- `cad_file` output types cannot declare order-line-only artifact kinds
- invalid create/edit payloads fail with actionable errors
- existing valid output types still load and render
### Slice D2: Workflow-First Output-Type Form Cleanup
Owner:
- Block D worker with light coordination from Block E
Files:
- `frontend/src/components/admin/OutputTypeTable.tsx`
- `frontend/src/api/outputTypes.ts`
Patch scope:
- reorder the form to lead with family, workflow, artifact kind, then invocation overrides
- visually separate invocation profile fields from legacy compatibility fields
- preserve current API payload shape
Acceptance:
- a new output type can be created from top to bottom as a workflow invocation profile
- renderer-specific detail fields no longer dominate the first screenful
- status: completed with a four-section admin form (`Workflow Contract`, `Invocation Profile`, `Renderer Compatibility / Legacy Details`, `Catalog / Business`) while preserving the existing API payload and legacy fallback behavior
### Slice E1: Workflow Editor Header Simplification
Owner:
- Block E worker
Files:
- `frontend/src/components/workflows/WorkflowCanvasToolbar.tsx`
- `frontend/src/pages/WorkflowEditor.tsx`
- `frontend/src/__tests__/components/workflowEditorUi.test.tsx`
Patch scope:
- compress the oversized top area
- move non-primary metadata into secondary badges or sidebar context
- keep node insertion, align, save, dry-run, and run controls near the canvas
Acceptance:
- above-the-fold editor space is materially reduced
- primary actions remain visible without scrolling
- existing right-click and edge deletion behaviors remain intact
### Slice I1: Golden-Case Rollout Harness
Owner:
- Block I worker
Files:
- `scripts/test_render_pipeline.py`
- `backend/tests/domains/test_workflow_dispatch_service.py`
- `docs/workflows/WORKFLOW_DELIVERY_CHECKLIST.md`
Patch scope:
- add a canonical non-legacy still workflow smoke path to the render test script
- record whether the run was legacy, graph, or shadow and surface comparison output when shadow is used
- document the rollout gate needed before enabling graph mode on real output types
Acceptance:
- one command can exercise the canonical graph still path end to end
- the script clearly reports whether rollout conditions were met or blocked
## Merge Order Inside First Wave
1. Slice A1
2. Slice D1
3. Slice A2
4. Slice D2
5. Slice E1
6. Slice I1
## Integration Gates
### Gate FW-1
- Slice A1 merged
- schema validation errors are deterministic and test-covered
### Gate FW-2
- Slice D1 merged
- output-type API rules align with the tightened workflow contracts
### Gate FW-3
- Slice E1 merged
- editor remains functionally complete after toolbar simplification
### Gate FW-4
- Slice I1 merged
- canonical graph still smoke path is runnable and documented
+399
View File
@@ -0,0 +1,399 @@
# Next Batch Orchestration
## Goal
Define the next sensible implementation batch after the export/CAD contract audit, with work split for parallel execution and an integration order that keeps the legacy workflow operational.
## Current Batch
### Batch B1: Test Infrastructure Recovery
Purpose:
Restore deterministic backend test execution so workflow parity work can be validated against real DB-backed tests again.
Why now:
- current targeted DB-backed tests fail on missing tables such as `users` and `cad_files`
- this blocks trustworthy validation for further workflow runtime work
Primary ownership:
- `backend/tests/**`
- `backend/app/database.py`
- `backend/app/config.py` only if required for test bootstrapping
Acceptance:
- targeted workflow tests create their schema reliably
- DB-backed pytest runs do not fail due to missing core tables
- no production runtime behavior changes unless strictly required for test setup correctness
### Batch B2: Workflow Editor Authoring Organization
Purpose:
Reduce authoring friction in `/workflows` by tightening node organization around family, module, and execution role while reclaiming canvas space.
Why now:
- the editor already has the needed primitives
- the remaining gap is structural clarity, not missing mechanics
Primary ownership:
- `frontend/src/components/workflows/**`
- `frontend/src/pages/WorkflowEditor.tsx`
- `frontend/src/__tests__/components/workflowEditorUi.test.tsx`
Acceptance:
- top-area clutter is reduced
- node discovery is cleaner by family/module grouping
- right-click insertion, edge deletion, align, dry-run, and run inspection still work
### Batch B3: Canonical Still Path Smoke-Harness Closure
Purpose:
Move the non-legacy still workflow closer to a runnable, documented smoke path without weakening legacy fallback.
Why now:
- backend graph/runtime coverage is already substantial
- the next risk is proving that the canonical still graph can be exercised as a stable rollout candidate
Primary ownership:
- `backend/app/domains/rendering/**`
- `backend/app/domains/pipeline/tasks/**`
- `backend/tests/domains/test_workflow_*.py`
- `scripts/test_render_pipeline.py`
- `docs/workflows/**` where needed
Acceptance:
- canonical still graph path has a bounded smoke route
- graph-vs-legacy safety remains explicit
- remaining blockers are documented as concrete runtime or fixture issues, not vague parity claims
## Updated Immediate Next Batch
The next implementation batch should now be cut along contract and root-cause boundaries instead of UI-only slices.
### Batch N1: Node-Contract Closure
Purpose:
Make the backend node registry and workflow schema the authoritative source for graph family safety, parameter validity, and editor-visible node settings.
Why now:
- authoring UX is already good enough to build on
- remaining parity work depends on trustworthy backend-owned contracts
Primary ownership:
- `backend/app/domains/rendering/workflow_node_registry.py`
- `backend/app/domains/rendering/workflow_schema.py`
- `backend/tests/domains/test_workflow_node_registry.py`
- `backend/tests/domains/test_workflow_schema.py`
Acceptance:
- unknown node param keys are rejected deterministically
- family drift is blocked by schema validation
- every production-facing node has either a justified field schema or an explicit no-settings contract
- existing canonical still graph remains valid
### Batch N2: Output-Type / Invocation Model Closure
Purpose:
Finish the shift from legacy renderer flags to a real workflow invocation model with explicit family, artifact, and override semantics.
Why now:
- new output types and workflow-linked variants still depend on this contract being airtight
- this is the clean boundary between product configuration and runtime dispatch
Primary ownership:
- `backend/app/domains/rendering/output_type_contracts.py`
- `backend/app/api/routers/output_types.py`
- `backend/app/domains/rendering/schemas.py`
- `backend/app/domains/rendering/models.py`
- `frontend/src/api/outputTypes.ts`
- `frontend/src/components/admin/OutputTypeTable.tsx`
Acceptance:
- impossible workflow/artifact/family combinations are rejected early
- new output types can be created top-to-bottom as invocation profiles
- legacy-safe output types continue to render
### Batch N3: CAD / Material Parity Root-Cause Closure
Purpose:
Close the remaining instance- and part-key-related drift between CAD exporter, GLTF viewer, preview, and downstream render consumption.
Why now:
- this is still a real production blocker, not a polish item
- workflow parity stays superficial until geometry/material identity is stable
Primary ownership:
- `backend/app/domains/pipeline/tasks/export_glb.py`
- `backend/app/domains/pipeline/tasks/extract_metadata.py`
- `backend/app/services/part_key_service.py`
- `frontend/src/components/cad/cadUtils.ts`
- `frontend/src/components/cad/ThreeDViewer.tsx`
- `frontend/src/components/cad/InlineCadViewer.tsx`
Acceptance:
- viewer and manifest resolve the same authoritative material identity
- unresolved nodes are surfaced explicitly instead of silently using pseudo keys
- legacy preview and render behavior remain intact
## Parallelization Rule
These three blocks should be prepared in parallel, but merged in order:
1. `N1` first, because it establishes the source of truth for the other two.
2. `N2` second, because it builds directly on those contracts.
3. `N3` can be investigated in parallel, but should merge after `N1` unless it proves fully isolated.
## Gate For The Following Batch
For the updated immediate next batch, the following gate applies:
- `N1` has deterministic backend validation and focused tests
- `N2` preserves legacy-safe output types while tightening impossible combinations
- `N3` proves the authoritative part/material identity chain with a focused low-RAM verification sequence
## Current Execution Status
- `N1` verified
- focused checks green on April 10, 2026:
- `backend/tests/domains/test_workflow_schema.py`
- `backend/tests/domains/test_workflow_node_registry.py`
- `N2` verified
- focused checks green on April 10, 2026:
- `backend/tests/domains/test_output_types_api.py`
- `frontend/src/__tests__/api/outputTypes.test.ts`
- `N3` in progress
- April 10, 2026:
- scene-manifest alias coverage expanded for exporter `_af*` suffix keys
- order-line runtime now prefers authoritative scene-manifest assignments where manifest metadata exists
- inline and fullscreen viewers now share the same manifest-plus-fallback merge contract
- unresolved meshes are now surfaced explicitly in both viewers instead of silently disappearing behind pseudo keys
- output-type authoring now consumes a backend-authored contract catalog for family/artifact/format/override constraints
- next action: manual product-level parity check plus B04 node/module contract completion
## Executable Block List
Die naechste sinnvolle Abarbeitung ist nicht mehr nach einzelnen Features, sondern nach stabilen Vertrags- und Produktionsgrenzen geschnitten.
### Batch Wave W1: Identity And Contract Closure
- `B01` CAD manifest alias closure
- Ziel: scene-manifest und viewer auf denselben kanonischen semantischen part keys bringen
- Fokus: exporter-style suffixe wie `_af6`, `_af0_asm`, dedup keys, alias inheritance
- Status: abgeschlossen am April 10, 2026
- Gate: `backend/tests/test_part_key_service.py`, `backend/tests/test_export_step_to_gltf.py`, `frontend/src/__tests__/components/cadUtils.test.ts`
- `B02` CAD viewer/manifest convergence
- Ziel: unresolved parts explizit sichtbar halten, aber alle autoritativ aufloesbaren parts im Viewer korrekt materialisieren
- Fokus: `ThreeDViewer`, `InlineCadViewer`, logical part keys, reconciliation UX
- Parallel zu: `B03`, `B04`
- Status: abgeschlossen am April 10, 2026
- April 10, 2026:
- inline und fullscreen viewer auf denselben `buildEffectiveViewerMaterials(...)` contract gezogen
- unresolved meshes werden explizit gezaehlt und sichtbar angezeigt statt pseudo-keys zu synthetisieren
- focused gates gruen:
- `frontend/src/__tests__/components/cadUtils.test.ts`
- `npm run build`
- `./scripts/workflow_sequential_gates.sh`
- Gate: manueller Produktcheck gegen reales CAD-Beispiel
- `B03` Output-type authoring closure
- Ziel: neue output types workflow-first und ohne hidden legacy assumptions anlegbar machen
- Fokus: form-state, invocation overrides, artifact/family guards, defaults
- Parallel zu: `B02`, `B04`
- Status: abgeschlossen am April 10, 2026
- April 10, 2026:
- backend publiziert `GET /api/output-types/contract-catalog` als read-only Source of Truth fuer Family-, Artifact-, Format- und Override-Regeln
- `frontend/src/api/outputTypes.ts` nutzt den Catalog mit lokalem Fallback statt Hardcode-Regeltabellen als primaere Truth
- `OutputTypeTable` speist Family-, Artifact- und Rollout-Auswahl jetzt aus dem Backend-Catalog
- focused gates gruen:
- `backend/tests/domains/test_output_types_api.py`
- `frontend/src/__tests__/api/outputTypes.test.ts`
      - `cd frontend && npm run build`
- `./scripts/workflow_sequential_gates.sh`
- Gate: `backend/tests/domains/test_output_types_api.py`, `frontend/src/__tests__/api/outputTypes.test.ts`
- `B04` Node/module contract completion
- Ziel: jede produktionsrelevante node hat einen klaren backend-owned settings/input/output contract
- Fokus: registry metadata, no-settings contracts, family-safe validation
- Gate: `backend/tests/domains/test_workflow_schema.py`, `backend/tests/domains/test_workflow_node_registry.py`
### Batch Wave W2: Canonical Authoring Surface
- `B05` Family-scoped node organization
- Ziel: CAD, Bridge und Graph nodes im Editor klar trennen und suchbar halten
- Fokus: family/module grouping, right-click search, low-noise discovery
- Abhaengigkeit: `B04`
- Status: abgeschlossen am April 10, 2026
- April 10, 2026:
- raw node catalog ist jetzt family-first organisiert: family -> module -> stage -> category
- family/module runtime chips und stage scopes bleiben sichtbar, ohne zur stage-first Navigation zurueckzufallen
- focused gates gruen:
- `frontend/src/__tests__/components/workflowNodeCatalog.test.ts`
- `frontend/src/__tests__/components/workflowEditorUi.test.tsx`
    - `cd frontend && npm run build`
- `B06` Authoring surface simplification
- Ziel: eine gemeinsame authoring surface fuer canvas-menu, sidebar und starter paths
- Fokus: shared controller/model statt mehrfacher UI-Sonderlogik
- Abhaengigkeit: `B04`
- Status: abgeschlossen am April 10, 2026
- April 10, 2026:
- `workflowAuthoringSurface.ts` kapselt jetzt shared section resolution, active-section validity und insert bindings als gemeinsame Surface-Controller-Logik
- `NodeCommandMenu` und `NodeDefinitionsPanel` nutzen denselben Controller statt paralleler lokaler Section-/Insert-State-Implementierungen
- focused gates gruen:
- `frontend/src/__tests__/components/workflowEditorUi.test.tsx`
- `frontend/src/__tests__/components/workflowAuthoringGuidance.test.ts`
    - `cd frontend && npm run build`
- `B07` Canvas ergonomics closure
- Ziel: edge deletion, auto-align, reduced top-area clutter, faster insertion paths sauber zusammenziehen
- Fokus: interaction consistency statt punktuelle UX-Patches
- Abhaengigkeit: `B06`
- Status: in Arbeit am April 10, 2026
- April 10, 2026:
- `WorkflowCanvasToolbar` auf kompaktere, wiederverwendbare Action-/Field-/Badge-Bausteine gezogen
- Top-Area auf zwei dichtere Informationsreihen reduziert: Identitaet/Status oben, Context/Hint-Rail unten
- focused gates gruen:
- `frontend/src/__tests__/components/workflowEditorUi.test.tsx`
    - `cd frontend && npm run build`
- `B08` Starter graph and module bundle normalization
- Ziel: starter blueprints, reference bundles und seed workflows auf dieselben kanonischen module paths ziehen
- Fokus: still graph, CAD intake graph, bundle drift verhindern
- Parallel zu: `B07`
### Batch Wave W3: Runtime And Production Parity
- `B09` Template-aware runtime unification
- Ziel: legacy und graph nutzen dieselbe template/material/output orchestration
- Fokus: template resolution, samples/transparency, publish semantics
- Abhaengigkeit: `B03`, `B04`, `B08`
- `B10` Non-legacy still smoke closure
- Ziel: der kanonische still graph wird als wiederholbarer smoke path belastbar
- Fokus: preflight, dispatch, authoritative output_save, failure visibility
- Abhaengigkeit: `B09`
- `B11` Template parity matrix
- Ziel: graph vs legacy mit echten render-templates, output-varianten und alpha/sample settings vergleichen
- Fokus: echte parity-beweise statt pillow-only checks
- Abhaengigkeit: `B09`
- `B12` CAD intake modularization
- Ziel: CAD import/extract/export/bbox/material-steps als echte workflow-module verfuegbar machen
- Fokus: node-based production fuer intake workflows
- Abhaengigkeit: `B04`, `B08`
### Batch Wave W4: Operational Rollout
- `B13` Rollout and fallback controls
- Ziel: graph/shadow/legacy pro workflow und pro output type sicher steuerbar halten
- Fokus: rollout mode, immediate rollback, operator clarity
- Abhaengigkeit: `B10`, `B11`
- `B14` Sequential E2E gates
- Ziel: low-RAM, reproduzierbare smoke/golden/browser gates fuer `/workflows`
- Fokus: sequenzielle statt parallele Verifikation
- Abhaengigkeit: `B10`, `B11`, `B13`
- `B15` Repo hygiene root-cause closure
- Ziel: generated artifacts, root-owned caches und compose-side effects ursachenseitig schliessen
- Fokus: ownership, pycache, build artifacts, helper script cleanup
- Kann parallel laufen zu: `B10` bis `B14`
## Recommended Immediate Parallel Batch
Die naechste sinnvolle Batch mit minimalen Konflikten ist:
1. `B02` lokal auf explizite unresolved-state-Fuehrung und viewer-level parity checks ziehen
2. `B03` parallel als contract-catalog dedup zwischen Backend und Admin-UI bearbeiten
3. `B15` parallel als Hygiene-Nebenstrang treiben
Danach:
1. `B04`
2. `B05` bis `B08` als Authoring-Welle
3. `B09` bis `B12` als Runtime-/Produktionswelle
## Latest Verification
- April 10, 2026:
- `./scripts/workflow_sequential_gates.sh` gruen
- backend runtime gates: `34 passed`
- frontend workflow/editor gates: `23 passed`
## Executable Next Queue
Die naechsten 12 Bloecke werden ab jetzt als eine gemeinsame Queue gefahren. Parallel bedeutet hier: Analyse, Vorbereitung und isolierte Teilpatches koennen parallel laufen. Merge und Verifikation bleiben bewusst sequentiell.
### Queue Q2: Merge Order
1. `B04-a` Node text-contract validation
- Ziel: unvalidierte produktionsrelevante Text-Inputs im Registry-/Schema-Layer schliessen
- Scope: `workflow_node_registry.py`, `workflow_schema.py`, `test_workflow_schema.py`
- Gate: neue Schema-Tests fuer UUID, absolute Pfade, float-string, hex-color, suffix-format
- Status: abgeschlossen am April 10, 2026
2. `B04-b` Node registry invariants
- Ziel: defaults/fields/module_key/input-output-contracts als Registry-Invarianten pruefen
- Scope: `test_workflow_node_registry.py`
- Gate: registryweite Invariant-Tests gruen
- Status: abgeschlossen am April 10, 2026
3. `B06` Shared authoring surface
- Ziel: gemeinsame Authoring-Schicht fuer Canvas-Menu, Sidebar und Starter-Aktionen
- Why now: verhindert doppelte UI-Logik in `NodeCommandMenu` und `NodeDefinitionsPanel`
- Status: abgeschlossen am April 10, 2026
4. `B05` Family-scoped node organization
- Ziel: modul-/family-basierte Node-Organisation auf der gemeinsamen Authoring-Schicht
- Status: abgeschlossen am April 10, 2026
5. `B07` Canvas ergonomics closure
- Ziel: reduzierte Top-Area, konsistente Edge-/Insert-Interaktionen, Auto-Align sauber abschliessen
6. `B08` Starter graph and module bundle normalization
- Ziel: Blueprints, Bundles und New-Workflow-Einstieg auf dieselben kanonischen Pfade ziehen
7. `B09` Template-aware runtime unification
- Ziel: Graph und Legacy durch dieselbe Template-/Output-Orchestrierung fuehren
8. `B10` Non-legacy still smoke closure
- Ziel: kanonischer Still-Graph als wiederholbarer Smoke-Pfad mit klarer Fehlerflaeche
9. `B11` Template parity matrix
- Ziel: echte Graph-vs-Legacy-Vergleiche mit Render-Templates, Varianten und Alpha/Samples
10. `B12` CAD intake modularization
- Ziel: CAD-Import/Extract/Export/BBox als echte Production-Module im Editor und in der Runtime
11. `B13` Rollout and fallback controls
- Ziel: Graph/Shadow/Legacy pro Workflow und Output-Type operativ steuerbar halten
12. `B14` Sequential E2E gates
- Ziel: Low-RAM Golden-/Smoke-/Browser-Gates fuer `/workflows`
13. `B15` Repo hygiene root-cause closure
- Ziel: generated artifacts, root-owned caches und compose side effects ursachenseitig schliessen
- Parallel vorbereitbar zu `B09` bis `B14`
### Queue Q2: Parallel Preparation Tracks
- Track A: Backend contracts
- aktiv: `B04-a`, danach `B04-b`
- Merge-Blocker fuer fast alle Folgearbeiten
- Track B: Frontend authoring refactor
- Vorbereitung jetzt, Merge erst nach `B04`
- Reihenfolge laut Analyse: `B06` -> `B05` -> `B07` -> `B08`
- Track C: Runtime and parity
- Investigation parallel moeglich
- Merge-Reihenfolge: `B09` -> `B10` -> `B11` -> `B12` -> `B13` -> `B14`
- Track D: Hygiene
- Root-cause Sammlung und Script-Haertung parallel
- Merge spaet, solange keine produktionskritische Blockade sichtbar wird
### Queue Q2: Immediate Start
- Aktiver Implementierungsblock: `B07`
- Bereits abgeschlossene Merge-Slices in dieser Queue:
- `B04-a`
- `B04-b`
- `B06`
- `B05`
- Vorbereitete Folgebloecke:
- `B07`
- `B08`
@@ -0,0 +1,245 @@
# Node-Based Production Architecture
## Purpose
Define the target model for a reusable, node-based production system where workflow steps are backend-owned modules, the editor is schema-driven, and legacy rendering stays operational during migration.
## Problem Statement
The current workflow system already has meaningful extraction work:
- bridge/runtime services exist for setup, template resolution, material mapping, bbox resolution, publish, and notify
- graph execution can already orchestrate still, turntable, and blend export flows
- the editor already consumes backend node definitions
What is still missing is a clean production model.
Today, three different concerns are still partially collapsed into each other:
1. `OutputType` as user-facing commercial/render choice
2. workflow graph as orchestration definition
3. legacy/internal render settings as implicit execution contract
That makes it hard to:
- reuse a process step like CAD import as a true module
- expose all node settings coherently in the editor
- bind output types to workflows without fragile implicit assumptions
- preserve legacy behavior while enabling graph-native production
## Target Model
### 1. Production Module
A production module is the canonical backend capability unit.
Examples:
- `cad.resolve_step_path`
- `cad.extract_objects`
- `cad.export_glb`
- `cad.compute_bbox`
- `materials.resolve_map`
- `materials.auto_populate`
- `render.resolve_template`
- `render.blender_still`
- `render.blender_turntable`
- `output.publish_asset`
- `output.notify`
Rules:
- modules are backend-owned
- modules define typed input contract, output contract, defaults, and execution semantics
- modules are reusable from legacy code, graph runtime, shadow mode, and tests
- modules must not depend on editor-only UI metadata
### 2. Workflow Node
A workflow node is an orchestration wrapper around a production module.
It adds:
- node id
- graph connectivity
- per-instance parameter overrides
- editor UI metadata
- retry/failure policy
It must not redefine business behavior that belongs to the production module itself.
Implication:
- the node registry should evolve from "palette metadata + field definitions" to "module-backed node definitions"
- `step` should remain the stable runtime key, but internally map to a reusable module contract
### 3. Workflow Family
Workflows must be separated into runtime families, not inferred ad hoc:
- `cad_file`
- `order_line`
Family drives:
- valid entry context
- allowed node palette
- validation rules
- available output contracts
- preflight expectations
Mixed-family graphs may still exist temporarily for migration visibility, but must not be the target authoring model.
### 4. Output Type as Invocation Profile
`OutputType` should no longer be treated as a loose bag of renderer flags.
It should be the product-facing invocation profile for a workflow:
- commercial name and visibility
- compatible categories
- pricing tier binding
- workflow family
- linked workflow definition
- invocation-level parameter overrides
- output artifact contract
Examples of invocation-level overrides:
- resolution
- samples
- engine
- transparency
- animation timing
- material override
Examples of artifact contract:
- still image
- turntable video
- production `.blend`
- preview thumbnail
- future exported package types
This keeps the responsibility split clean:
- workflow definition answers: "what steps run and in what order?"
- output type answers: "what productized variant of that workflow do we sell and with which defaults/constraints?"
## Required Refactor Direction
### A. Formalize Node Contracts
Extend the node registry so each definition exposes:
- `family`
- `module_key`
- `input_contract`
- `output_contract`
- `param_schema`
- `artifact_roles_produced`
- `artifact_roles_consumed`
- `legacy_source`
Current definitions already cover labels, categories, defaults, and fields. They do not yet fully express machine-usable production contracts.
### B. Promote Runtime Services to Module Layer
The extracted bridge/runtime services are the right foundation. They now need a clear module boundary so both legacy and graph runtimes call the same backend operation layer.
Desired shape:
- legacy pipeline calls module layer directly
- graph runtime calls module layer directly or via async task adapters
- Celery task mapping becomes transport/adaptation, not the primary execution model
### C. Split Graph Authoring by Family
The editor should author against family-scoped graphs:
- CAD Intake graph
- Order Rendering graph
That includes:
- family-specific starter templates
- family-specific node palette groups
- validation that rejects wrong-family entry nodes early
- cleaner organization than a single mixed library
### D. Reframe Output Type Creation
Output type creation is currently too close to legacy render settings and too far from workflow invocation.
Create/edit flow should become:
1. choose family
2. choose or create workflow
3. choose artifact kind
4. set invocation overrides
5. bind pricing/category/material constraints
The current `workflow_definition_id` field is directionally correct, but too weak on its own because there is no explicit invocation contract or family validation around it yet.
## Compatibility Rules
### Legacy Safety
- legacy dispatch remains the fallback path
- existing output types without workflow linkage remain valid
- graph rollout must be opt-in per output type/workflow
### Migration Safety
- old output types may continue to store render settings in their current shape
- a compatibility adapter should map legacy render settings into invocation overrides
- workflow definitions must remain canonical JSON DAGs
## Recommended Implementation Sequence
### Phase A: Stabilize Broken Contracts
- align frontend/backend `OutputType` defaults and allowed values
- add backend validation for output-type family/workflow compatibility
- make output type creation/editing reflect current real backend constraints
### Phase B: Contract-First Registry
- add `family`, contracts, and module metadata to node definitions
- expose them over `/api/workflows/node-definitions`
- move editor grouping/validation to registry-owned family metadata
### Phase C: Invocation Profiles
- extend `OutputType` into a workflow invocation profile
- add explicit artifact kind and workflow family
- separate invocation overrides from raw render settings
### Phase D: Module Unification
- route legacy and graph execution through the same module layer
- keep Celery as transport where async work is needed
- reduce duplicate logic in tasks and runtime adapters
### Phase E: Full Parity Authoring
- ship family-specific starter workflows
- expose all module settings in editor
- support end-to-end preflight, dispatch, run inspection, and parity verification
## Immediate Code Implications
- `workflow_node_registry.py` is the correct extension point for module contracts
- `workflow_schema.py` will need stronger family- and contract-aware validation
- `OutputType` needs a clearer model than raw renderer/backend defaults plus optional workflow id
- editor UX should follow model cleanup, not lead it
## Decision
We should simplify and refactor before doing more isolated workflow-editor UX work.
The next implementation blocks should prioritize:
1. fixing the output-type/workflow contract
2. formalizing node/module contracts
3. only then expanding editor affordances on top of the cleaned model
+52
View File
@@ -0,0 +1,52 @@
# Template Input Audit
Stand: 12. April 2026
## Befund
Die Transportkette fuer `workflow_input_schema` und `template_inputs` funktioniert inzwischen end-to-end im Graph-Workflow. Der aktuelle Engpass liegt in den live hinterlegten `.blend`-Templates selbst:
- Alle produktiven Render-Templates haben aktuell `workflow_input_schema = []`.
- Die live hochgeladenen `.blend`-Dateien enthalten keine erkennbaren Template-Input-Marker auf Collections, Objekten oder Worlds.
- Damit gibt es derzeit keine realen, template-spezifischen Dropdown-/Options-Felder, die wir ehrlich in `resolve_template` exponieren koennen.
## Gepruefte Live-Templates
- `Blender_Studio_Schadowcatcher_Anim_RotOBJ`
- `Default`
- `Blender_Studio_Schadowcatcher_Anim`
- `BlenderStudio_Shadowcatcher`
- `BlenderStudio`
## Beobachtete Blend-Struktur
- `BlenderStudio`: `Collection`, `Export`, `Lighting`, `World`
- `BlenderStudio_Shadowcatcher`: `Collection`, `Export`, `Lighting`, `Shadowcatcher`, `World`
- `Blender_Studio_Schadowcatcher_Anim`: gleiche Struktur wie Shadowcatcher-Template
- `Blender_Studio_Schadowcatcher_Anim_RotOBJ`: gleiche Struktur wie Shadowcatcher-Template
- `Default`: nur `Export`, keine World, keine Marker
## Konsequenz
Der naechste saubere Schritt ist nicht ein blinder Schema-Backfill, sondern Template-Authoring:
1. Marker oder Scene-Property-gesteuerte Varianten in den `.blend`-Dateien anlegen.
2. Daraus eine echte `workflow_input_schema` ableiten.
3. Danach die Felder im Admin pflegen oder per Script backfillen.
## Tooling
Fuer diese Authoring-Arbeit gibt es jetzt ein reproduzierbares Audit-Script:
```bash
python3 scripts/audit_render_templates.py --json
python3 scripts/audit_render_templates.py --write-markdown docs/workflows/TEMPLATE_INPUT_AUDIT.generated.md
```
Das Script:
- loggt sich gegen die lokale API ein,
- kopiert die live verwendeten `.blend`-Dateien aus dem Backend-Container,
- inspiziert sie mit dem Host-Blender,
- erkennt Marker gemaess der HartOMat-Konventionen,
- und erzeugt daraus ggf. Schema-Vorschlaege.
+25 -7
View File
@@ -39,23 +39,29 @@ Parallel execution ownership and stage gates are defined in [`docs/workflows/WOR
- [ ] Editor saves nodes and edges - [ ] Editor saves nodes and edges
- [ ] Editor roundtrip preserves workflow configs - [ ] Editor roundtrip preserves workflow configs
- [ ] All node settings are editable - [ ] All node settings are editable
- [ ] Validate, dry-run, and dispatch are available - [x] Validate, dry-run, and dispatch are available
- [ ] Runs are visible with node-level status and logs - [x] Runs are visible with node-level status and logs
- [ ] Editor authoring follows family-safe module contracts instead of ad hoc node metadata - [ ] Editor authoring follows family-safe module contracts instead of ad hoc node metadata
- Progress: The workflow canvas header has been compressed into a single canvas-adjacent control strip, preserving right-click node insertion, auto-align, edge deletion, preflight, dispatch, and save actions while reducing top-of-page bloat.
- Progress: The canonical still-path bridge nodes now expose editor-visible, runtime-backed settings for template overrides, material resolution, auto-population behavior, GLB source preference, output-save artifact expectations, and notify arming. The remaining Phase 5 work is authoring hierarchy and end-to-end editor verification, not hidden backend-only params.
### Phase 7 ### Phase 7
- [x] Output-type create defaults match current backend constraints - [x] Output-type create defaults match current backend constraints
- [ ] Output types model workflow invocation profiles - [x] Output types model workflow invocation profiles
- [ ] Output types validate against workflow family and artifact contract - [x] Output types validate against workflow family and artifact contract
- [ ] Admin create/edit flow is workflow-first instead of renderer-first - [x] Admin create/edit flow is workflow-first instead of renderer-first
- Progress: The admin output-type form now opens with a dedicated workflow-contract section, keeps invocation-profile inputs ahead of renderer compatibility knobs, and moves catalog/business fields into a separate closing section so legacy Blender details no longer dominate the primary authoring flow.
- Progress: Output-type contract helpers now expose family-safe format lists, `blend` is treated as a first-class `blend_asset` contract in both frontend and backend, and the admin form no longer steers users into obviously invalid `cad_file`/video or `order_line`/model-export combinations.
- Progress: API responses now serialize the invocation profile through the typed schema, and create/edit validation rejects mixed-family workflows plus workflow/artifact mismatches before dispatch time.
### Phase 6 ### Phase 6
- [x] Shadow mode parity execution dispatches real graph observer runs alongside authoritative legacy dispatch - [x] Shadow mode parity execution dispatches real graph observer runs alongside authoritative legacy dispatch
- Progress: Workflow runs now expose a comparison endpoint that resolves authoritative legacy outputs and matching shadow artifacts, including file hashes, image dimensions, and mean pixel delta for parity inspection. - Progress: Workflow runs now expose a comparison endpoint that resolves authoritative legacy outputs and matching shadow artifacts, including file hashes, image dimensions, and mean pixel delta for parity inspection.
- Progress: `scripts/test_render_pipeline.py --workflow-still-smoke --execution-mode shadow` now provisions the canonical still smoke contract, runs preflight, dispatches via the real order/output-type workflow linkage, resolves the resulting workflow run, and prints the shadow comparison verdict. - Progress: `scripts/test_render_pipeline.py --workflow-still-smoke --execution-mode shadow` now provisions the canonical still smoke contract, runs preflight, dispatches via the real order/output-type workflow linkage, resolves the resulting workflow run, and prints the shadow comparison verdict.
- [ ] Golden cases pass against legacy outputs - [x] Golden cases pass against legacy outputs
- Progress: On April 8, 2026, the live `--workflow-golden-suite` passed end to end for `still_legacy`, `still_graph`, `still_shadow`, `turntable_graph`, and `blend_graph`. The blend export contract now completes the order line, persists the primary `.blend` result, and links the resulting `blend_production` media asset back to the workflow run.
- [ ] Rollout can be enabled per workflow or output type - [ ] Rollout can be enabled per workflow or output type
- [ ] Rollback to legacy is immediate - [ ] Rollback to legacy is immediate
@@ -100,12 +106,14 @@ Parallel execution ownership and stage gates are defined in [`docs/workflows/WOR
- Invalid graphs are blocked before dispatch. - Invalid graphs are blocked before dispatch.
- All node settings needed for parity are present in the editor. - All node settings needed for parity are present in the editor.
- Family-specific authoring prevents invalid `cad_file`/`order_line` graph composition. - Family-specific authoring prevents invalid `cad_file`/`order_line` graph composition.
- Progress: Backend-owned still bridge modules now declare the settings required for parity; the next gate is browser-level confirmation that the inspector presents them cleanly enough for real authoring.
### QG-7: Invocation Gate ### QG-7: Invocation Gate
- Output type creation and editing use valid backend defaults. - Output type creation and editing use valid backend defaults.
- Output types bind to workflows through an explicit invocation contract. - Output types bind to workflows through an explicit invocation contract.
- Legacy output types remain renderable during migration. - Legacy output types remain renderable during migration.
- Progress: This gate is functionally green at the API layer. Remaining rollout work is operational adoption, not missing contract primitives.
### QG-6: Rollout Gate ### QG-6: Rollout Gate
@@ -116,11 +124,21 @@ Parallel execution ownership and stage gates are defined in [`docs/workflows/WOR
- `python scripts/test_render_pipeline.py --workflow-still-smoke --execution-mode legacy` - `python scripts/test_render_pipeline.py --workflow-still-smoke --execution-mode legacy`
- `python scripts/test_render_pipeline.py --workflow-still-smoke --execution-mode graph` - `python scripts/test_render_pipeline.py --workflow-still-smoke --execution-mode graph`
- `python scripts/test_render_pipeline.py --workflow-still-smoke --execution-mode shadow` - `python scripts/test_render_pipeline.py --workflow-still-smoke --execution-mode shadow`
- Sequential low-RAM gate wrapper:
- `./scripts/workflow_sequential_gates.sh`
- `./scripts/workflow_sequential_gates.sh --with-cad-parity`
- `./scripts/workflow_sequential_gates.sh --with-live-shadow`
- `./scripts/workflow_sequential_gates.sh --with-cad-parity --with-live-shadow --with-golden`
- CAD/Viewer parity smoke for repeated-instance products:
- `python3 scripts/compare_live_cad_parity.py --cad-id 7c214057-9982-4d6e-aa87-43bfabfdb709`
- Progress: The repeated-instance CAD regression now has a dedicated live gate. The current reference case passes with `146` manifest parts, `146` mesh nodes, `146` unique `partKey`s, and no missing or duplicate assignments.
- Rollout approval rule for the canonical still workflow: - Rollout approval rule for the canonical still workflow:
- `shadow` must finish with a successful order line and a comparison verdict of `pass` - `shadow` must finish with a successful order line and a comparison verdict of `pass`
- `warn` or `fail` means legacy remains authoritative - `warn` or `fail` means legacy remains authoritative
- `graph` may only be enabled on real output types after the shadow command passes cleanly - `graph` may only be enabled on real output types after the shadow command passes cleanly
- Progress: the canonical still smoke flow now passes live in `legacy` and `graph`; `shadow` stabilizes after a short observer-output lag and currently reports `warn` because the observer image differs slightly, so legacy remains authoritative for rollout decisions. - Progress: the canonical still smoke flow now passes live in `legacy`, `graph`, and `shadow`. Shadow observer output may arrive slightly later than the authoritative legacy file, but the rollout gate now treats proven de-minimis Blender drift as `pass` instead of a rollout-blocking `warn`.
- Progress: the live smoke and golden harnesses now provision explicit `workflow_rollout_mode` values when linking non-legacy output types, so `graph` and `shadow` exercises no longer depend on whatever rollout state happened to be left behind in the database.
- Progress: the broader golden suite is green in live graph execution, and the remaining rollout work is operational enablement per workflow/output type rather than still-shadow image drift.
## Definition of Done ## Definition of Done
@@ -1,5 +1,7 @@
# Workflow Implementation Backlog # Workflow Implementation Backlog
Execution orchestration, ownership split, and merge order are tracked in [`docs/workflows/WORKERS.md`](/home/hartmut/Documents/Copilot/schaefflerautomat/docs/workflows/WORKERS.md).
## Epic 1: Canonical Workflow Model ## Epic 1: Canonical Workflow Model
### Tickets ### Tickets
@@ -25,6 +27,7 @@
- `E2-T3` Add `GET /api/workflows/node-definitions`. - `E2-T3` Add `GET /api/workflows/node-definitions`.
- `E2-T4` Provide schema-driven defaults and editor field groups. - `E2-T4` Provide schema-driven defaults and editor field groups.
- `E2-T5` Add composite bridge nodes for safe migration. - `E2-T5` Add composite bridge nodes for safe migration.
- `E2-T6` Extend node definitions with family, module key, input contract, output contract, and artifact roles.
### Required Node Coverage ### Required Node Coverage
@@ -76,6 +79,11 @@
- `E4-T4` Support retry and failure policies. `completed` - `E4-T4` Support retry and failure policies. `completed`
- `E4-T5` Add execution mode switch: `legacy`, `graph`, `shadow`. `completed` - `E4-T5` Add execution mode switch: `legacy`, `graph`, `shadow`. `completed`
- `E4-T6` Add hard fallback to legacy dispatch on graph failure. `completed` - `E4-T6` Add hard fallback to legacy dispatch on graph failure. `completed`
- `E4-T7` Make `output_save` graph-authoritative for still renders by disabling render-task self-publish whenever a downstream `output_save` node is present. `completed`
- `E4-T8` Persist authoritative still output metadata back into `WorkflowNodeResult` rows and keep shadow mode non-authoritative. `completed`
- `E4-T9` Extend runtime, dispatch, and task tests for graph-authoritative still persistence and legacy-safe notify handoff. `completed`
- `E4-T10` Extend graph-authoritative `output_save` semantics to `export_blend`, including asset persistence and node-result updates without mutating the primary order-line render output. `completed`
- `E4-T11` Extend graph-authoritative `output_save` semantics to `blender_turntable`, including graph/legacy-safe task argument handling, authoritative persistence, and node-result updates for video outputs. `completed`
## Epic 5: Editor Parity ## Epic 5: Editor Parity
@@ -87,6 +95,20 @@
- `E5-T4` Add graph validation in the editor. - `E5-T4` Add graph validation in the editor.
- `E5-T5` Add dry-run and dispatch from the editor. - `E5-T5` Add dry-run and dispatch from the editor.
- `E5-T6` Add workflow run inspection UI. - `E5-T6` Add workflow run inspection UI.
- `E5-T7` Reorganize authoring around family-specific starter graphs and family-safe palettes.
- Progress: The editor already supports right-click node insertion, searchable family-aware palettes, auto-align, edge deletion, dry-run/dispatch, and run inspection. The current remaining parity slice is authoring polish and validation around the canonical non-legacy still graph, not missing basic editor mechanics.
## Epic 7: Output-Type Invocation Profiles
### Tickets
- `E7-T1` Align frontend/backend defaults and allowed values for output-type creation. `completed`
- `E7-T2` Define explicit workflow family on output types. `completed`
- `E7-T3` Separate invocation overrides from legacy raw render settings. `completed`
- `E7-T4` Add artifact-kind contract to output types. `completed`
- `E7-T5` Validate output-type family/workflow compatibility in backend APIs. `completed`
- `E7-T6` Redesign admin create/edit flow around workflow invocation instead of legacy renderer flags. `completed`
- Progress: Output types now persist an explicit workflow family, artifact kind, and invocation override set; backend APIs reject family/artifact mismatches against linked workflows; serializer output exposes a typed invocation profile instead of leaking raw dict payloads.
## Epic 6: Rollout and Quality ## Epic 6: Rollout and Quality
@@ -94,6 +116,8 @@
- `E6-T1` Add shadow mode parity execution. `completed` - `E6-T1` Add shadow mode parity execution. `completed`
- `E6-T2` Build output comparison tooling. `completed` - `E6-T2` Build output comparison tooling. `completed`
- `E6-T3` Define golden test cases. - `E6-T3` Define golden test cases. `completed`
- `E6-T3a` Harden the golden-suite harness against transient backend disconnects and 502/503/504 responses. `completed`
- `E6-T3b` Close the primary `.blend` graph-authoritative persistence gap so graph blend exports complete the order, persist the order-line output, and publish a `blend_production` media asset. `completed`
- `E6-T4` Roll out per workflow or output type. - `E6-T4` Roll out per workflow or output type.
- `E6-T5` Keep legacy fallback after rollout. - `E6-T5` Keep legacy fallback after rollout.
+21
View File
@@ -9,6 +9,12 @@ Bring `/workflows` to full production parity with the existing legacy render pip
- Phase 1 completed on canonical config storage, preset migration, and legacy-safe runtime extraction. - Phase 1 completed on canonical config storage, preset migration, and legacy-safe runtime extraction.
- Phase 2 completed on backend node registry, node definitions API, and schema-driven editor palette/settings. - Phase 2 completed on backend node registry, node definitions API, and schema-driven editor palette/settings.
- Phase 3 completed: `order_line_setup`, `resolve_template`, `material_map_resolve`, `auto_populate_materials`, `glb_bbox`, `output_save`, and `notify` are extracted behind the legacy task boundary, validated with targeted backend tests, and covered by workflow executor dispatch tests. - Phase 3 completed: `order_line_setup`, `resolve_template`, `material_map_resolve`, `auto_populate_materials`, `glb_bbox`, `output_save`, and `notify` are extracted behind the legacy task boundary, validated with targeted backend tests, and covered by workflow executor dispatch tests.
- Phase 4 is partially completed: graph runtime dispatch now treats downstream `output_save` as the authoritative persistence boundary for still renders, turntable/video renders, and `.blend` exports, updates node results after persistence, and keeps shadow executions non-authoritative.
- The canonical still bridge path now exposes runtime-backed node settings for template overrides, material resolution, auto-population rules, GLB source selection, output-save artifact filtering, and notify arming, so `/workflows` can author the real still-render contract instead of relying on hidden legacy defaults.
- Output types now behave as explicit invocation profiles in both API and admin UI: workflow family, artifact kind, and invocation overrides are first-class, and linked workflow compatibility is enforced before dispatch.
- Canonical reference workflows now need to be managed as at least two families, not one mixed graph:
- `CAD Intake`
- `Order Rendering`
## Non-Negotiables ## Non-Negotiables
@@ -46,6 +52,16 @@ Notes:
- `ui` is editor-only metadata and must not change runtime semantics. - `ui` is editor-only metadata and must not change runtime semantics.
- `edges` are mandatory for graph persistence and validation. - `edges` are mandatory for graph persistence and validation.
## Architecture Direction
- Workflow nodes are orchestration wrappers, not the canonical implementation unit.
- The canonical implementation unit is a backend-owned production module with typed inputs, outputs, defaults, and execution semantics.
- Workflow definitions answer "what runs"; output types answer "which productized invocation profile of that workflow is offered".
- Workflow authoring remains split by family:
- `cad_file`
- `order_line`
- The detailed target model is captured in [`docs/workflows/NODE_BASED_PRODUCTION_ARCHITECTURE.md`](/home/hartmut/Documents/Copilot/schaefflerautomat/docs/workflows/NODE_BASED_PRODUCTION_ARCHITECTURE.md).
## Phases ## Phases
### Phase 1: Canonical Model and Migration Base ### Phase 1: Canonical Model and Migration Base
@@ -81,12 +97,17 @@ Notes:
- Support node outputs and artifact handoff across edges. - Support node outputs and artifact handoff across edges.
- Keep `legacy`, `graph`, and `shadow` execution modes. - Keep `legacy`, `graph`, and `shadow` execution modes.
- Current slice: graph dispatch executes extracted bridge nodes for order-line setup, template/material resolution, auto-material population, and bounding-box resolution before queueing render/export tasks. - Current slice: graph dispatch executes extracted bridge nodes for order-line setup, template/material resolution, auto-material population, and bounding-box resolution before queueing render/export tasks.
- Current slice completed: still-render graphs, turntable/video graphs, and `.blend` export graphs with downstream `output_save` now disable task-level self-publish, persist authoritative output/media metadata through shared runtime services, and write the result back into `WorkflowNodeResult`.
- Next Phase 4 slice: extend the same authoritative `output_save` contract to any remaining legacy export variants and close the remaining notify/editor parity gaps.
### Phase 5: Workflow Editor Parity ### Phase 5: Workflow Editor Parity
- Persist and load `nodes`, `edges`, `step`, `params`, and `ui`. - Persist and load `nodes`, `edges`, `step`, `params`, and `ui`.
- Render all node settings dynamically from backend schemas. - Render all node settings dynamically from backend schemas.
- Add validation, dry-run, dispatch, and run inspection. - Add validation, dry-run, dispatch, and run inspection.
- Organize workflows and node palette by family so the editor reflects the runtime split between `cad_file` and `order_line` contexts.
- Current state: right-click insertion, searchable family-aware palettes, auto-align, edge deletion, dry-run, dispatch, and run inspection are already in place; the remaining work is authoring clarity, node organization, and browser-verified end-to-end usability for the non-legacy still graph.
- Do not add further editor-only UX surface before node/module contracts and output-type invocation profiles are stabilized.
### Phase 6: Shadow Mode and Rollout ### Phase 6: Shadow Mode and Rollout
+82
View File
@@ -0,0 +1,82 @@
# Template Inputs For Graph Workflows
`resolve_template` can now expose template-specific inputs into the graph workflow without breaking legacy rendering.
## Where the inputs come from
Render templates can define a `workflow_input_schema` JSON array in Admin.
Example:
```json
[
{
"key": "studio_variant",
"label": "Studio Variant",
"type": "select",
"section": "Template Inputs",
"default": "default",
"options": [
{ "value": "default", "label": "Default" },
{ "value": "warm", "label": "Warm" }
]
},
{
"key": "camera_profile",
"label": "Camera Profile",
"type": "text",
"section": "Template Inputs",
"default": "macro"
}
]
```
These fields appear in the workflow editor on the `resolve_template` node after a template override is selected.
## Runtime behavior
At runtime the graph path resolves these values into `template_inputs` and forwards them through:
1. `resolve_template`
2. workflow runtime invocation
3. still / turntable / cinematic Blender services
4. Blender worker CLI via `--template-inputs`
5. template scene setup after opening the `.blend`
Legacy workflows continue to work if no template input schema is defined.
## Blender-side conventions
Every resolved template input is always written onto the active scene as:
- `template_input__<key>`
- `hartomat_template_input__<key>`
Example:
- `template_input__studio_variant = "warm"`
- `hartomat_template_input__camera_profile = "macro"`
Templates can optionally react to these values using markers on collections, objects, or worlds.
Supported marker styles:
- custom property value: `template_input=studio_variant=warm`
- object or collection name: `template-input:studio_variant=warm`
- object or collection name: `ti::studio_variant::warm`
If a marker matches the resolved input value, the target is enabled. Non-matching variants are hidden.
## Practical authoring pattern
For a lighting setup with two variants:
1. Put warm lights into a collection named `template-input:studio_variant=warm`
2. Put cool lights into a collection named `template-input:studio_variant=cool`
3. Define `studio_variant` in `workflow_input_schema`
4. Select the template in `resolve_template`
5. Pick the wanted variant in the node inspector
## Important limitation
The graph runtime now transports template inputs end-to-end, but existing `.blend` files only change visually if they use one of the conventions above or read the scene custom properties themselves.
+119 -107
View File
@@ -1,30 +1,40 @@
import { Suspense, lazy } from 'react'
import { BrowserRouter, Routes, Route, Navigate, useLocation } from 'react-router-dom' import { BrowserRouter, Routes, Route, Navigate, useLocation } from 'react-router-dom'
import { useAuthStore, isPrivileged as checkIsPrivileged } from './store/auth' import { useAuthStore, isPrivileged as checkIsPrivileged } from './store/auth'
import { WebSocketProvider } from './contexts/WebSocketContext' import { WebSocketProvider } from './contexts/WebSocketContext'
import Layout from './components/layout/Layout' import Layout from './components/layout/Layout'
import LoginPage from './pages/Login'
import NotFoundPage from './pages/NotFound' const LoginPage = lazy(() => import('./pages/Login'))
import DashboardPage from './pages/Dashboard' const NotFoundPage = lazy(() => import('./pages/NotFound'))
import OrdersPage from './pages/Orders' const DashboardPage = lazy(() => import('./pages/Dashboard'))
import OrderDetailPage from './pages/OrderDetail' const OrdersPage = lazy(() => import('./pages/Orders'))
import NewOrderPage from './pages/NewOrder' const OrderDetailPage = lazy(() => import('./pages/OrderDetail'))
import UploadPage from './pages/Upload' const NewOrderPage = lazy(() => import('./pages/NewOrder'))
import AdminPage from './pages/Admin' const UploadPage = lazy(() => import('./pages/Upload'))
import CadPreviewPage from './pages/CadPreview' const AdminPage = lazy(() => import('./pages/Admin'))
import MaterialsPage from './pages/Materials' const CadPreviewPage = lazy(() => import('./pages/CadPreview'))
import WorkerActivityPage from './pages/WorkerActivity' const MaterialsPage = lazy(() => import('./pages/Materials'))
import ProductLibraryPage from './pages/ProductLibrary' const WorkerActivityPage = lazy(() => import('./pages/WorkerActivity'))
import ProductDetailPage from './pages/ProductDetail' const ProductLibraryPage = lazy(() => import('./pages/ProductLibrary'))
import NewProductOrderPage from './pages/NewProductOrder' const ProductDetailPage = lazy(() => import('./pages/ProductDetail'))
import NotificationsPage from './pages/Notifications' const NewProductOrderPage = lazy(() => import('./pages/NewProductOrder'))
import NotificationSettingsPage from './pages/NotificationSettings' const NotificationsPage = lazy(() => import('./pages/Notifications'))
import PreferencesPage from './pages/Preferences' const NotificationSettingsPage = lazy(() => import('./pages/NotificationSettings'))
import TenantsPage from './pages/Tenants' const PreferencesPage = lazy(() => import('./pages/Preferences'))
import WorkflowEditorPage from './pages/WorkflowEditor' const TenantsPage = lazy(() => import('./pages/Tenants'))
import MediaBrowserPage from './pages/MediaBrowser' const WorkflowEditorPage = lazy(() => import('./pages/WorkflowEditor'))
import BillingPage from './pages/Billing' const MediaBrowserPage = lazy(() => import('./pages/MediaBrowser'))
import WorkerManagementPage from './pages/WorkerManagement' const BillingPage = lazy(() => import('./pages/Billing'))
import AssetLibraryPage from './pages/AssetLibrary' const WorkerManagementPage = lazy(() => import('./pages/WorkerManagement'))
const AssetLibraryPage = lazy(() => import('./pages/AssetLibrary'))
function RouteFallback() {
return (
<div className="flex min-h-[40vh] items-center justify-center text-sm text-content-muted">
Loading...
</div>
)
}
function ProtectedRoute({ children }: { children: React.ReactNode }) { function ProtectedRoute({ children }: { children: React.ReactNode }) {
const token = useAuthStore((s) => s.token) const token = useAuthStore((s) => s.token)
@@ -42,91 +52,93 @@ function AdminRoute({ children }: { children: React.ReactNode }) {
export default function App() { export default function App() {
return ( return (
<BrowserRouter> <BrowserRouter future={{ v7_startTransition: true, v7_relativeSplatPath: true }}>
<WebSocketProvider> <WebSocketProvider>
<Routes> <Suspense fallback={<RouteFallback />}>
<Route path="/login" element={<LoginPage />} /> <Routes>
<Route <Route path="/login" element={<LoginPage />} />
path="/" <Route
element={ path="/"
<ProtectedRoute> element={
<Layout /> <ProtectedRoute>
</ProtectedRoute> <Layout />
} </ProtectedRoute>
> }
<Route index element={<DashboardPage />} /> >
<Route path="orders" element={<OrdersPage />} /> <Route index element={<DashboardPage />} />
<Route path="orders/new" element={<NewOrderPage />} /> <Route path="orders" element={<OrdersPage />} />
<Route path="orders/new/product" element={<NewProductOrderPage />} /> <Route path="orders/new" element={<NewOrderPage />} />
<Route path="orders/:id" element={<OrderDetailPage />} /> <Route path="orders/new/product" element={<NewProductOrderPage />} />
<Route path="upload" element={<UploadPage />} /> <Route path="orders/:id" element={<OrderDetailPage />} />
<Route <Route path="upload" element={<UploadPage />} />
path="admin" <Route
element={ path="admin"
<AdminRoute> element={
<AdminPage /> <AdminRoute>
</AdminRoute> <AdminPage />
} </AdminRoute>
/> }
<Route />
path="tenants" <Route
element={ path="tenants"
<AdminRoute> element={
<TenantsPage /> <AdminRoute>
</AdminRoute> <TenantsPage />
} </AdminRoute>
/> }
<Route />
path="workflows" <Route
element={ path="workflows"
<AdminRoute> element={
<WorkflowEditorPage /> <AdminRoute>
</AdminRoute> <WorkflowEditorPage />
} </AdminRoute>
/> }
<Route path="materials" element={<MaterialsPage />} /> />
<Route path="activity" element={<WorkerActivityPage />} /> <Route path="materials" element={<MaterialsPage />} />
<Route path="products" element={<ProductLibraryPage />} /> <Route path="activity" element={<WorkerActivityPage />} />
<Route path="products/:id" element={<ProductDetailPage />} /> <Route path="products" element={<ProductLibraryPage />} />
<Route path="notifications" element={<NotificationsPage />} /> <Route path="products/:id" element={<ProductDetailPage />} />
<Route path="notification-settings" element={<NotificationSettingsPage />} /> <Route path="notifications" element={<NotificationsPage />} />
<Route path="preferences" element={<PreferencesPage />} /> <Route path="notification-settings" element={<NotificationSettingsPage />} />
<Route path="cad/:id" element={<CadPreviewPage />} /> <Route path="preferences" element={<PreferencesPage />} />
<Route <Route path="cad/:id" element={<CadPreviewPage />} />
path="media" <Route
element={ path="media"
<AdminRoute> element={
<MediaBrowserPage /> <AdminRoute>
</AdminRoute> <MediaBrowserPage />
} </AdminRoute>
/> }
<Route />
path="billing" <Route
element={ path="billing"
<AdminRoute> element={
<BillingPage /> <AdminRoute>
</AdminRoute> <BillingPage />
} </AdminRoute>
/> }
<Route />
path="workers" <Route
element={ path="workers"
<AdminRoute> element={
<WorkerManagementPage /> <AdminRoute>
</AdminRoute> <WorkerManagementPage />
} </AdminRoute>
/> }
<Route />
path="asset-libraries" <Route
element={ path="asset-libraries"
<AdminRoute> element={
<AssetLibraryPage /> <AdminRoute>
</AdminRoute> <AssetLibraryPage />
} </AdminRoute>
/> }
</Route> />
<Route path="*" element={<NotFoundPage />} /> </Route>
</Routes> <Route path="*" element={<NotFoundPage />} />
</Routes>
</Suspense>
</WebSocketProvider> </WebSocketProvider>
</BrowserRouter> </BrowserRouter>
) )
@@ -0,0 +1,253 @@
import { describe, expect, test } from 'vitest'
import {
getCachedOutputTypeContractCatalog,
getCompatibleWorkflowsForOutputTypeContract,
getOutputTypeInvocationOverrides,
getOutputTypeWorkflowContractIssues,
getDefaultOutputFormatForArtifactKind,
inferArtifactKind,
listAllowedInvocationOverrideKeysForArtifactKind,
listAllowedOutputFormatsForFamily,
type OutputType,
} from '../../api/outputTypes'
// Pins the frontend's view of the backend output-type contract catalog:
// which output formats each workflow family accepts, which artifact kind a
// (family, format, animation) triple maps to, and which invocation-override
// keys each artifact kind exposes. Ordering of the expected arrays matters —
// the helpers are expected to preserve catalog order.
describe('output type contract helpers', () => {
  test('lists family-safe output formats', () => {
    const contractCatalog = getCachedOutputTypeContractCatalog()
    // cad_file family: image + 3D-export formats, no video.
    expect(listAllowedOutputFormatsForFamily('cad_file', contractCatalog)).toEqual([
      'png',
      'jpg',
      'jpeg',
      'webp',
      'gltf',
      'glb',
      'stl',
      'obj',
      'usd',
      'usdz',
    ])
    // order_line family: image + video + blend, no 3D exports.
    expect(listAllowedOutputFormatsForFamily('order_line', contractCatalog)).toEqual([
      'png',
      'jpg',
      'jpeg',
      'webp',
      'mp4',
      'webm',
      'mov',
      'blend',
    ])
  })
  test('infers artifact kinds for family-safe formats', () => {
    // Third argument is isAnimation; mp4 + animation => turntable video.
    expect(inferArtifactKind('order_line', 'blend', false)).toBe('blend_asset')
    expect(inferArtifactKind('order_line', 'mp4', true)).toBe('turntable_video')
    expect(inferArtifactKind('cad_file', 'gltf', false)).toBe('model_export')
    expect(inferArtifactKind('cad_file', 'png', false)).toBe('thumbnail_image')
  })
  test('returns defaults that match artifact contracts', () => {
    const contractCatalog = getCachedOutputTypeContractCatalog()
    // Each artifact kind's default format must be drawn from its own contract.
    expect(getDefaultOutputFormatForArtifactKind('still_image', contractCatalog)).toBe('png')
    expect(getDefaultOutputFormatForArtifactKind('thumbnail_image', contractCatalog)).toBe('png')
    expect(getDefaultOutputFormatForArtifactKind('turntable_video', contractCatalog)).toBe('mp4')
    expect(getDefaultOutputFormatForArtifactKind('model_export', contractCatalog)).toBe('gltf')
    expect(getDefaultOutputFormatForArtifactKind('blend_asset', contractCatalog)).toBe('blend')
  })
  test('exposes invocation override keys by artifact contract', () => {
    const contractCatalog = getCachedOutputTypeContractCatalog()
    // Turntable video exposes render + animation knobs; blend assets expose none.
    expect(listAllowedInvocationOverrideKeysForArtifactKind('turntable_video', contractCatalog)).toEqual([
      'width',
      'height',
      'engine',
      'samples',
      'bg_color',
      'noise_threshold',
      'denoiser',
      'denoising_input_passes',
      'denoising_prefilter',
      'denoising_quality',
      'denoising_use_gpu',
      'frame_count',
      'fps',
      'turntable_axis',
    ])
    expect(listAllowedInvocationOverrideKeysForArtifactKind('blend_asset', contractCatalog)).toEqual([])
  })
  test('respects backend-authored contract catalog when provided', () => {
    // A catalog passed in explicitly (e.g. fetched from the backend) must win
    // over the cached/bundled defaults.
    const contractCatalog = {
      ...getCachedOutputTypeContractCatalog(),
      allowed_output_formats_by_family: {
        order_line: ['png', 'heic'],
        cad_file: ['jpg'],
      },
      default_output_format_by_artifact_kind: {
        ...getCachedOutputTypeContractCatalog().default_output_format_by_artifact_kind,
        still_image: 'heic',
      },
    }
    expect(listAllowedOutputFormatsForFamily('order_line', contractCatalog)).toEqual(['png', 'heic'])
    expect(getDefaultOutputFormatForArtifactKind('still_image', contractCatalog)).toBe('heic')
  })
  test('exposes parameter ownership boundaries in the contract catalog', () => {
    const contractCatalog = getCachedOutputTypeContractCatalog()
    // Which surface owns which parameter: output-type profile vs template
    // runtime vs individual workflow node steps.
    expect(contractCatalog.parameter_ownership.output_type_profile_keys).toEqual([
      'transparent_bg',
      'cycles_device',
      'material_override',
    ])
    expect(contractCatalog.parameter_ownership.template_runtime_keys).toEqual([
      'target_collection',
      'lighting_only',
      'shadow_catcher',
      'camera_orbit',
      'template_inputs',
    ])
    expect(contractCatalog.parameter_ownership.workflow_node_keys_by_step.resolve_template).toContain('target_collection')
    expect(contractCatalog.parameter_ownership.workflow_node_keys_by_step.blender_still).toContain('material_override')
    expect(contractCatalog.parameter_ownership.workflow_node_keys_by_step.blender_turntable).toContain('camera_orbit')
  })
  test('prefers invocation_profile overrides over legacy render settings', () => {
    // Full OutputType payload carrying both a legacy render_settings block and
    // the newer invocation_profile; the profile's overrides must win.
    const outputType = {
      id: 'ot-1',
      name: 'Still',
      description: null,
      renderer: 'blender',
      render_settings: { width: 4096, frame_count: 180 },
      invocation_overrides: { width: 2048, frame_count: 120 },
      output_format: 'png',
      sort_order: 0,
      compatible_categories: [],
      render_backend: 'celery',
      is_animation: false,
      transparent_bg: false,
      workflow_family: 'order_line',
      artifact_kind: 'still_image',
      cycles_device: null,
      pricing_tier_id: null,
      pricing_tier_name: null,
      price_per_item: null,
      workflow_definition_id: null,
      workflow_rollout_mode: 'legacy_only',
      workflow_name: null,
      material_override: null,
      invocation_profile: {
        renderer: 'blender',
        render_backend: 'celery',
        workflow_family: 'order_line',
        artifact_kind: 'still_image',
        output_format: 'png',
        is_animation: false,
        workflow_definition_id: null,
        workflow_rollout_mode: 'legacy_only',
        transparent_bg: false,
        cycles_device: null,
        material_override: null,
        allowed_override_keys: ['width', 'height', 'engine', 'samples', 'bg_color', 'noise_threshold', 'denoiser', 'denoising_input_passes', 'denoising_prefilter', 'denoising_quality', 'denoising_use_gpu'],
        invocation_overrides: { width: 2048 },
      },
      is_active: true,
      created_at: '',
      updated_at: '',
    } satisfies OutputType
    // Only the profile's { width: 2048 } — not the top-level legacy values.
    expect(getOutputTypeInvocationOverrides(outputType)).toEqual({ width: 2048 })
  })
  test('flags rollout modes without a linked workflow', () => {
    // 'graph' rollout with an empty workflow id is a hard error.
    expect(getOutputTypeWorkflowContractIssues({
      workflowFamily: 'order_line',
      artifactKind: 'still_image',
      outputFormat: 'png',
      isAnimation: false,
      workflowDefinitionId: '',
      workflowRolloutMode: 'graph',
      workflows: [],
    })).toEqual([
      expect.objectContaining({
        code: 'rollout_requires_workflow',
        severity: 'error',
      }),
    ])
  })
  test('flags workflow family and artifact mismatches', () => {
    // Linked workflow belongs to a different family AND does not support the
    // requested artifact kind — both issues must be reported.
    const issues = getOutputTypeWorkflowContractIssues({
      workflowFamily: 'order_line',
      artifactKind: 'still_image',
      outputFormat: 'png',
      isAnimation: false,
      workflowDefinitionId: 'wf-cad',
      workflowRolloutMode: 'shadow',
      workflows: [
        {
          id: 'wf-cad',
          name: 'CAD Intake',
          family: 'cad_file',
          supported_artifact_kinds: ['thumbnail_image'],
        },
      ],
    })
    expect(issues).toEqual([
      expect.objectContaining({ code: 'workflow_family_mismatch', severity: 'error' }),
      expect.objectContaining({ code: 'workflow_artifact_mismatch', severity: 'error' }),
    ])
  })
  test('flags output formats that are incompatible with the workflow family', () => {
    // gltf is a cad_file format; requesting it for order_line is an error even
    // in legacy_only mode with no linked workflow.
    expect(getOutputTypeWorkflowContractIssues({
      workflowFamily: 'order_line',
      artifactKind: 'custom',
      outputFormat: 'gltf',
      isAnimation: false,
      workflowDefinitionId: '',
      workflowRolloutMode: 'legacy_only',
      workflows: [],
    })).toEqual([
      expect.objectContaining({
        code: 'format_family_mismatch',
        severity: 'error',
      }),
    ])
  })
  test('returns only workflows that satisfy family and artifact contract', () => {
    // wf-2 fails on family, wf-3 fails despite supporting the artifact kind
    // ('mixed' family is not compatible here) — only wf-1 survives.
    expect(getCompatibleWorkflowsForOutputTypeContract(
      [
        {
          id: 'wf-1',
          name: 'Still Graph',
          family: 'order_line',
          supported_artifact_kinds: ['still_image', 'turntable_video'],
        },
        {
          id: 'wf-2',
          name: 'CAD Intake',
          family: 'cad_file',
          supported_artifact_kinds: ['thumbnail_image'],
        },
        {
          id: 'wf-3',
          name: 'Mixed Graph',
          family: 'mixed',
          supported_artifact_kinds: ['still_image'],
        },
      ],
      'order_line',
      'still_image',
    )).toEqual([
      expect.objectContaining({ id: 'wf-1' }),
    ])
  })
})
+118 -3
View File
@@ -1,6 +1,18 @@
import { describe, expect, test } from 'vitest' import { describe, expect, test, vi } from 'vitest'
import { createPresetWorkflowConfig, createStarterWorkflowConfig, normalizeWorkflowConfig } from '../../api/workflows' import {
createPresetWorkflowConfig,
createStarterWorkflowConfig,
normalizeWorkflowConfig,
getWorkflows,
} from '../../api/workflows'
import api from '../../api/client'
vi.mock('../../api/client', () => ({
default: {
get: vi.fn(),
},
}))
describe('workflow preset config builders', () => { describe('workflow preset config builders', () => {
test('builds a non-legacy still graph preset', () => { test('builds a non-legacy still graph preset', () => {
@@ -24,7 +36,7 @@ describe('workflow preset config builders', () => {
'notify', 'notify',
]) ])
expect(config.nodes.find(node => node.step === 'blender_still')?.params).toMatchObject({ expect(config.nodes.find(node => node.step === 'blender_still')?.params).toMatchObject({
use_custom_render_settings: true, use_custom_render_settings: false,
render_engine: 'cycles', render_engine: 'cycles',
samples: 128, samples: 128,
width: 1600, width: 1600,
@@ -62,4 +74,107 @@ describe('workflow preset config builders', () => {
expect(config.ui?.family).toBe('order_line') expect(config.ui?.family).toBe('order_line')
expect(config.ui?.execution_mode).toBe('shadow') expect(config.ui?.execution_mode).toBe('shadow')
}) })
  // normalizeWorkflowConfig must treat a named blueprint as authoritative:
  // whatever nodes/edges the stored config carries, the canonical blueprint
  // graph is rebuilt from scratch.
  test('rebuilds canonical reference blueprints during normalization', () => {
    const config = normalizeWorkflowConfig({
      version: 1,
      ui: {
        preset: 'custom',
        execution_mode: 'legacy',
        blueprint: 'cad_intake',
      },
      nodes: [
        // Deliberately stale/partial node list; should be discarded.
        { id: 'resolve_step', step: 'resolve_step_path', params: {} },
      ],
      edges: [],
    })
    expect(config.ui?.blueprint).toBe('cad_intake')
    // Family is derived from the blueprint, not from the stored ui payload.
    expect(config.ui?.family).toBe('cad_file')
    // Canonical cad_intake pipeline; 'thumbnail_save' appears twice because
    // both the blender and the threejs render paths persist a thumbnail.
    expect(config.nodes.map(node => node.step)).toEqual([
      'resolve_step_path',
      'occ_object_extract',
      'occ_glb_export',
      'glb_bbox',
      'stl_cache_generate',
      'blender_render',
      'threejs_render',
      'thumbnail_save',
      'thumbnail_save',
    ])
    expect(config.edges).toEqual(
      expect.arrayContaining([
        { from: 'export_glb', to: 'bbox' },
        { from: 'bbox', to: 'threejs_thumb' },
      ]),
    )
  })
  // Starter blueprints behave the same way, even from a fully empty graph.
  test('rebuilds canonical starter blueprints during normalization', () => {
    const config = normalizeWorkflowConfig({
      version: 1,
      ui: {
        preset: 'custom',
        execution_mode: 'legacy',
        blueprint: 'starter_order_rendering',
      },
      nodes: [],
      edges: [],
    })
    expect(config.ui?.blueprint).toBe('starter_order_rendering')
    expect(config.ui?.family).toBe('order_line')
    expect(config.nodes.map(node => node.step)).toEqual(['order_line_setup'])
  })
  // getWorkflows must pass the backend rollout_summary through unmangled
  // (counts, modes, nested run records, and the rollout-gate verdict).
  test('normalizes workflow rollout summary from the API payload', async () => {
    // One-shot mock: the next api.get resolves with this payload only.
    vi.mocked(api.get).mockResolvedValueOnce({
      data: [
        {
          id: 'wf-1',
          name: 'Still Graph',
          output_type_id: null,
          config: createPresetWorkflowConfig('still_graph'),
          family: 'order_line',
          supported_artifact_kinds: ['still_image'],
          rollout_summary: {
            linked_output_type_count: 2,
            active_output_type_count: 1,
            linked_output_type_names: ['Still Render', 'Still Render Shadow'],
            rollout_modes: ['shadow'],
            has_blocking_contracts: false,
            blocking_reasons: [],
            latest_run: {
              workflow_run_id: 'run-1',
              execution_mode: 'graph',
              status: 'completed',
              created_at: '2026-04-11T10:00:00Z',
              completed_at: '2026-04-11T10:01:00Z',
            },
            latest_shadow_run: {
              workflow_run_id: 'run-shadow-1',
              execution_mode: 'shadow',
              status: 'completed',
              created_at: '2026-04-11T09:00:00Z',
              completed_at: '2026-04-11T09:01:00Z',
            },
            latest_rollout_gate_verdict: 'pass',
            latest_rollout_ready: true,
            latest_rollout_status: 'ready_for_rollout',
            latest_rollout_reasons: ['Observer output matches the authoritative legacy output byte-for-byte.'],
          },
          is_active: true,
          created_at: '2026-04-11T08:00:00Z',
        },
      ],
    })
    const [workflow] = await getWorkflows()
    expect(workflow.rollout_summary.linked_output_type_count).toBe(2)
    expect(workflow.rollout_summary.rollout_modes).toEqual(['shadow'])
    expect(workflow.rollout_summary.latest_shadow_run?.execution_mode).toBe('shadow')
    expect(workflow.rollout_summary.latest_rollout_gate_verdict).toBe('pass')
    expect(workflow.rollout_summary.latest_rollout_ready).toBe(true)
  })
}) })
@@ -0,0 +1,307 @@
import { useState } from 'react'
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
import { render, screen, waitFor, within } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import { beforeEach, describe, expect, test, vi } from 'vitest'
import type { RenderTemplate } from '../../api/renderTemplates'
import type { WorkflowNodeDefinition, WorkflowParams } from '../../api/workflows'
import { WorkflowNodeInspector } from '../../components/workflows/WorkflowNodeInspector'
// Spy handle for the renderTemplates API; the vi.mock factory below closes
// over it so each test can program responses via mockResolvedValue.
const listRenderTemplates = vi.fn<() => Promise<RenderTemplate[]>>()
// NOTE: vi.mock is hoisted above imports by vitest; the indirection through
// an arrow function keeps the factory from capturing an undefined binding.
vi.mock('../../api/renderTemplates', () => ({
  listRenderTemplates: () => listRenderTemplates(),
}))
// Node definition for a resolve_template step with a single static
// 'Template Override' text field — the case where the inspector augments the
// form with template-defined dynamic inputs.
const resolveTemplateDefinition: WorkflowNodeDefinition = {
  step: 'resolve_template',
  label: 'Resolve Template',
  family: 'order_line',
  module_key: 'rendering.resolve_template',
  category: 'processing',
  description: 'Resolve a template for order-line rendering.',
  node_type: 'processNode',
  icon: 'layers',
  defaults: {},
  fields: [
    {
      key: 'template_id_override',
      label: 'Template Override',
      type: 'text',
      description: 'Manual template override.',
      section: 'General',
      default: '',
      min: null,
      max: null,
      step: null,
      unit: null,
      options: [],
      allow_blank: true,
      max_length: null,
      text_format: 'uuid',
    },
  ],
  execution_kind: 'bridge',
  legacy_compatible: true,
  input_contract: { context: 'order_line', requires: ['order_line_context'] },
  output_contract: { context: 'order_line', provides: ['render_template'] },
  artifact_roles_consumed: ['order_line_context'],
  artifact_roles_produced: ['render_template'],
  legacy_source: 'legacy.resolve_template',
}
// Node definition with no editable fields at all — the purely
// connection-driven case the inspector must explain instead of editing.
const notifyDefinition: WorkflowNodeDefinition = {
  step: 'notify',
  label: 'Notify',
  family: 'order_line',
  module_key: 'notifications.emit',
  category: 'output',
  description: 'Emit the workflow result.',
  node_type: 'outputNode',
  icon: 'bell',
  defaults: {},
  fields: [],
  execution_kind: 'bridge',
  legacy_compatible: true,
  input_contract: { context: 'order_line', requires: ['workflow_result'] },
  output_contract: { context: 'order_line', provides: ['notification_event'] },
  artifact_roles_consumed: ['workflow_result'],
  artifact_roles_produced: ['notification_event'],
  legacy_source: 'legacy.notify',
}
/**
 * Builds a fully-populated RenderTemplate fixture. Every field carries a
 * stable default so tests only spell out what they care about; any key in
 * `overrides` replaces the corresponding default.
 */
function createRenderTemplate(overrides: Partial<RenderTemplate> = {}): RenderTemplate {
  const defaults: RenderTemplate = {
    id: '0d87b85f-c454-4d61-a124-d5b59e6a43a2',
    name: 'Bearing Studio',
    category_key: 'bearings',
    output_type_id: null,
    output_type_name: null,
    output_type_ids: [],
    output_type_names: ['Still'],
    blend_file_path: '/templates/bearing.blend',
    original_filename: 'bearing.blend',
    target_collection: 'Product',
    material_replace_enabled: true,
    lighting_only: false,
    shadow_catcher_enabled: false,
    camera_orbit: true,
    workflow_input_schema: [],
    is_active: true,
    created_at: '2026-04-11T00:00:00Z',
    updated_at: '2026-04-11T00:00:00Z',
  }
  return Object.assign(defaults, overrides)
}
// Mounts WorkflowNodeInspector for the resolve_template fixture inside a
// fresh QueryClient (retries disabled so failed queries surface immediately).
// With { stateful: true } the inspector is wrapped in a component that feeds
// onChange results back into props, so controlled-input interactions behave
// as they would in the real editor.
function renderInspector(
  params: WorkflowParams,
  onChange = vi.fn(),
  options: { stateful?: boolean } = {},
) {
  const queryClient = new QueryClient({
    defaultOptions: {
      queries: {
        retry: false,
      },
    },
  })
  // Controlled wrapper: re-renders with whatever params onChange produced,
  // while still recording every change on the spy for assertions.
  function StatefulInspector() {
    const [currentParams, setCurrentParams] = useState(params)
    return (
      <WorkflowNodeInspector
        params={currentParams}
        onChange={nextParams => {
          setCurrentParams(nextParams)
          onChange(nextParams)
        }}
        nodeDefinition={resolveTemplateDefinition}
        step="resolve_template"
        nodeDefinitions={[resolveTemplateDefinition]}
        graphFamily="order_line"
        onStepChange={vi.fn()}
      />
    )
  }
  render(
    <QueryClientProvider client={queryClient}>
      {options.stateful ? (
        <StatefulInspector />
      ) : (
        <WorkflowNodeInspector
          params={params}
          onChange={onChange}
          nodeDefinition={resolveTemplateDefinition}
          step="resolve_template"
          nodeDefinitions={[resolveTemplateDefinition]}
          graphFamily="order_line"
          onStepChange={vi.fn()}
        />
      )}
    </QueryClientProvider>,
  )
  return { onChange, queryClient }
}
describe('WorkflowNodeInspector', () => {
  beforeEach(() => {
    // Fresh spy per test; each test programs its own template list.
    listRenderTemplates.mockReset()
  })
  test('renders template-defined workflow input fields for resolve_template nodes', async () => {
    // Template exposes one dynamic select input ('Studio Variant').
    listRenderTemplates.mockResolvedValue([
      createRenderTemplate({
        workflow_input_schema: [
          {
            key: 'studio_variant',
            label: 'Studio Variant',
            type: 'select',
            section: 'Template Inputs',
            description: 'Choose the blend lighting preset.',
            default: 'default',
            min: null,
            max: null,
            step: null,
            unit: null,
            options: [
              { value: 'default', label: 'Default' },
              { value: 'warm', label: 'Warm' },
            ],
            allow_blank: false,
          },
        ],
      }),
    ])
    // Params carry both the override and a template_input__* value; the
    // inspector should surface both as labelled form fields.
    renderInspector({
      template_id_override: '0d87b85f-c454-4d61-a124-d5b59e6a43a2',
      template_input__studio_variant: 'warm',
    })
    expect(await screen.findByLabelText('Template Override')).toHaveValue(
      '0d87b85f-c454-4d61-a124-d5b59e6a43a2',
    )
    expect(await screen.findByLabelText('Studio Variant')).toHaveValue('warm')
    expect(screen.getByText('Bearing Studio')).toBeInTheDocument()
  })
  test('clears dynamic template inputs when template override is removed', async () => {
    listRenderTemplates.mockResolvedValue([
      createRenderTemplate({
        workflow_input_schema: [
          {
            key: 'studio_variant',
            label: 'Studio Variant',
            type: 'select',
            section: 'Template Inputs',
            description: 'Choose the blend lighting preset.',
            default: 'default',
            min: null,
            max: null,
            step: null,
            unit: null,
            options: [
              { value: 'default', label: 'Default' },
              { value: 'warm', label: 'Warm' },
            ],
            allow_blank: false,
          },
        ],
      }),
    ])
    const user = userEvent.setup()
    const { onChange } = renderInspector({
      template_id_override: '0d87b85f-c454-4d61-a124-d5b59e6a43a2',
      template_input__studio_variant: 'warm',
      template_input__camera_profile: 'macro',
    })
    const templateOverride = await screen.findByLabelText('Template Override')
    // Deselecting the template must drop ALL template_input__* params,
    // including ones not present in the current template's schema.
    await user.selectOptions(templateOverride, '')
    await waitFor(() => {
      expect(onChange).toHaveBeenCalledWith({})
    })
  })
  test('explains connection-driven nodes when no editor fields are available', async () => {
    listRenderTemplates.mockResolvedValue([])
    const queryClient = new QueryClient({
      defaultOptions: {
        queries: {
          retry: false,
        },
      },
    })
    // notifyDefinition has zero fields — rendered directly (not via the
    // resolve_template-bound renderInspector helper).
    render(
      <QueryClientProvider client={queryClient}>
        <WorkflowNodeInspector
          params={{}}
          onChange={vi.fn()}
          nodeDefinition={notifyDefinition}
          step="notify"
          nodeDefinitions={[notifyDefinition]}
          graphFamily="order_line"
          onStepChange={vi.fn()}
        />
      </QueryClientProvider>,
    )
    expect(screen.getByText('This node has no editor settings.')).toBeInTheDocument()
    expect(screen.getByText(/each required upstream input gets its own socket/i)).toBeInTheDocument()
    expect(screen.getByText(/0 local variables by design/i)).toBeInTheDocument()
    expect(screen.getByText('Socket 1')).toBeInTheDocument()
    expect(
      screen.getAllByText('Any of: Rendered Image / Rendered Frames / Rendered Video / Workflow Result / Blend Asset').length,
    ).toBeGreaterThan(0)
  })
  test('summarizes wired inputs and inspector variables separately', async () => {
    listRenderTemplates.mockResolvedValue([
      createRenderTemplate({
        workflow_input_schema: [
          {
            key: 'studio_variant',
            label: 'Studio Variant',
            type: 'select',
            section: 'Template Inputs',
            description: 'Choose the blend lighting preset.',
            default: 'default',
            min: null,
            max: null,
            step: null,
            unit: null,
            options: [
              { value: 'default', label: 'Default' },
              { value: 'warm', label: 'Warm' },
            ],
            allow_blank: false,
          },
        ],
      }),
    ])
    const user = userEvent.setup()
    // Stateful mount: selecting a template re-renders with the new params.
    renderInspector({}, vi.fn(), { stateful: true })
    const templateOverride = await screen.findByLabelText('Template Override')
    // Wait for the async template list before the option can be selected.
    await waitFor(() => {
      expect(within(templateOverride).getByRole('option', { name: /bearing studio/i })).toBeInTheDocument()
    })
    await user.selectOptions(templateOverride, '0d87b85f-c454-4d61-a124-d5b59e6a43a2')
    // One wired canvas socket vs two inspector-local variables (the static
    // override field plus the template-driven select).
    expect(await screen.findByText(/1 canvas socket is required/i)).toBeInTheDocument()
    expect(screen.getByText('Socket 1')).toBeInTheDocument()
    expect(await screen.findByText(/2 local variables are edited in the inspector/i)).toBeInTheDocument()
    expect(screen.getByText(/Static: Template Override/i)).toBeInTheDocument()
    expect(screen.getByText(/Template-driven: Studio Variant/i)).toBeInTheDocument()
  })
})
@@ -1,7 +1,7 @@
import { describe, expect, test } from 'vitest' import { describe, expect, test } from 'vitest'
import * as THREE from 'three' import * as THREE from 'three'
import { buildScenePartRegistry, convertSceneManifestMaterials, remapToPartKeys, resolveObjectPartKey } from '../../components/cad/cadUtils' import { alignSceneManifestToLogicalPartKeys, buildEffectiveViewerMaterials, buildScenePartRegistry, convertSceneManifestMaterials, mergeViewerMaterialSources, remapToPartKeys, resolveObjectPartKey } from '../../components/cad/cadUtils'
describe('cadUtils scene manifest conversion', () => { describe('cadUtils scene manifest conversion', () => {
test('uses scene manifest part keys as authoritative viewer material map', () => { test('uses scene manifest part keys as authoritative viewer material map', () => {
@@ -31,6 +31,55 @@ describe('cadUtils scene manifest conversion', () => {
}, },
}) })
}) })
test('adds viewer logical keys when manifest stores repeated leaf instances under deduplicated part keys', () => {
const materials = alignSceneManifestToLogicalPartKeys(
convertSceneManifestMaterials([
{
part_key: 'kero_z_575693_qp_drh_isb_1_1',
effective_material: 'HARTOMAT_010101_Steel-Bare',
},
{
part_key: 'kero_z_575693_qp_drh_isb_1_1_2',
effective_material: 'HARTOMAT_010101_Steel-Bare',
},
]),
new Set(['kero_z_575693_qp_drh_isb_1']),
)
expect(materials.kero_z_575693_qp_drh_isb_1).toEqual({
type: 'library',
value: 'HARTOMAT_010101_Steel-Bare',
})
})
test('backfills helper and af-instance logical keys through legacy fuzzy lookup when manifest keys differ', () => {
const materials = alignSceneManifestToLogicalPartKeys(
convertSceneManifestMaterials([
{
part_key: 'kero_z_575693_qp_drh_isb',
effective_material: 'HARTOMAT_010101_Steel-Bare',
},
{
part_key: 'kero_z_575693_qp_drh_isb_1_1',
effective_material: 'HARTOMAT_010101_Steel-Bare',
},
]),
new Set([
'kero_z_575693_qp_drh_isb_1',
'kero_z_575693_qp_drh_isb_1_af0',
]),
)
expect(materials.kero_z_575693_qp_drh_isb_1).toEqual({
type: 'library',
value: 'HARTOMAT_010101_Steel-Bare',
})
expect(materials.kero_z_575693_qp_drh_isb_1_af0).toEqual({
type: 'library',
value: 'HARTOMAT_010101_Steel-Bare',
})
})
}) })
describe('cadUtils legacy fallback remapping', () => { describe('cadUtils legacy fallback remapping', () => {
@@ -113,8 +162,82 @@ describe('cadUtils legacy fallback remapping', () => {
}) })
}) })
describe('cadUtils viewer material source merge', () => {
test('keeps fallback assignments authoritative while filling manifest-only gaps', () => {
const merged = mergeViewerMaterialSources(
{
rwdr_b_f_802044_tr4_h122bk: {
type: 'library',
value: 'HARTOMAT_010101_Steel-Bare',
},
usd_only_part: {
type: 'library',
value: 'HARTOMAT_050101_Elastomer-Black',
},
},
{
rwdr_b_f_802044_tr4_h122bk: {
type: 'library',
value: 'Steel--Stahl',
},
},
)
expect(merged).toEqual({
rwdr_b_f_802044_tr4_h122bk: {
type: 'library',
value: 'Steel--Stahl',
},
usd_only_part: {
type: 'library',
value: 'HARTOMAT_050101_Elastomer-Black',
},
})
})
test('applies manual overrides last on top of fallback-authoritative merged sources', () => {
const merged = buildEffectiveViewerMaterials(
{
rwdr_b_f_802044_tr4_h122bk: {
type: 'library',
value: 'HARTOMAT_010101_Steel-Bare',
},
usd_only_part: {
type: 'library',
value: 'HARTOMAT_050101_Elastomer-Black',
},
},
{
rwdr_b_f_802044_tr4_h122bk: {
type: 'library',
value: 'Steel--Stahl',
},
},
{
rwdr_b_f_802044_tr4_h122bk: '#123456',
manual_only_part: 'HARTOMAT_070707_Test-Material',
},
)
expect(merged).toEqual({
rwdr_b_f_802044_tr4_h122bk: {
type: 'hex',
value: '#123456',
},
usd_only_part: {
type: 'library',
value: 'HARTOMAT_050101_Elastomer-Black',
},
manual_only_part: {
type: 'library',
value: 'HARTOMAT_070707_Test-Material',
},
})
})
})
describe('cadUtils scene graph part-key registry', () => { describe('cadUtils scene graph part-key registry', () => {
test('inherits instance part keys from ancestor nodes and keeps logical keys from scene metadata', () => { test('inherits instance part keys from ancestor nodes while excluding helper-only logical keys from renderable counts', () => {
const scene = new THREE.Group() const scene = new THREE.Group()
const instanceGroup = new THREE.Group() const instanceGroup = new THREE.Group()
@@ -139,14 +262,10 @@ describe('cadUtils scene graph part-key registry', () => {
expect(meshRegistry).toHaveLength(1) expect(meshRegistry).toHaveLength(1)
expect(meshRegistry[0].partKey).toBe('kero_z_575693_qp_drh_isb_1') expect(meshRegistry[0].partKey).toBe('kero_z_575693_qp_drh_isb_1')
expect(resolveObjectPartKey(mesh, {})).toBe('kero_z_575693_qp_drh_isb_1') expect(resolveObjectPartKey(mesh, {})).toBe('kero_z_575693_qp_drh_isb_1')
expect(logicalPartKeys).toEqual(new Set([ expect(logicalPartKeys).toEqual(new Set(['kero_z_575693_qp_drh_isb_1']))
'kero_z_575693_qp_drh_isb_1',
'rwdr_skel_f_802044_tr4_h122bk',
'f_802044_tr4_h122bk_04',
]))
}) })
test('prefers sibling semantic instance nodes over mesh-local exporter keys when transforms match', () => { test('prefers explicit mesh-local exporter keys over sibling semantic instance nodes when transforms match', () => {
const scene = new THREE.Group() const scene = new THREE.Group()
const assembly = new THREE.Group() const assembly = new THREE.Group()
@@ -169,12 +288,12 @@ describe('cadUtils scene graph part-key registry', () => {
const { meshRegistry } = buildScenePartRegistry(scene, {}) const { meshRegistry } = buildScenePartRegistry(scene, {})
expect(meshRegistry).toHaveLength(1) expect(meshRegistry).toHaveLength(1)
expect(meshRegistry[0].partKey).toBe('kero_z_575693_qp_drh_isb_1') expect(meshRegistry[0].partKey).toBe('kero_z_575693_qp_drh_isb_1_1')
expect(mesh.userData.partKey).toBe('kero_z_575693_qp_drh_isb_1') expect(mesh.userData.partKey).toBe('kero_z_575693_qp_drh_isb_1_1')
expect(resolveObjectPartKey(mesh, {})).toBe('kero_z_575693_qp_drh_isb_1') expect(resolveObjectPartKey(mesh, {})).toBe('kero_z_575693_qp_drh_isb_1_1')
}) })
test('prefers sibling semantic instance nodes even when transforms do not match', () => { test('falls back to sibling semantic instance nodes when explicit mesh keys are absent, even when transforms do not match', () => {
const scene = new THREE.Group() const scene = new THREE.Group()
const assembly = new THREE.Group() const assembly = new THREE.Group()
@@ -185,7 +304,6 @@ describe('cadUtils scene graph part-key registry', () => {
const mesh = new THREE.Mesh(new THREE.BufferGeometry(), new THREE.MeshStandardMaterial()) const mesh = new THREE.Mesh(new THREE.BufferGeometry(), new THREE.MeshStandardMaterial())
mesh.name = 'KERO_Z-575693-QP-DRH_ISB_1_1' mesh.name = 'KERO_Z-575693-QP-DRH_ISB_1_1'
mesh.userData.partKey = 'kero_z_575693_qp_drh_isb_1_1'
mesh.position.set(0.2422435981345, 0.06134441033723, 0.2109037401181) mesh.position.set(0.2422435981345, 0.06134441033723, 0.2109037401181)
assembly.add(semanticSibling) assembly.add(semanticSibling)
@@ -199,4 +317,19 @@ describe('cadUtils scene graph part-key registry', () => {
expect(mesh.userData.partKey).toBe('kero_z_575693_qp_drh_isb_1') expect(mesh.userData.partKey).toBe('kero_z_575693_qp_drh_isb_1')
expect(resolveObjectPartKey(mesh, {})).toBe('kero_z_575693_qp_drh_isb_1') expect(resolveObjectPartKey(mesh, {})).toBe('kero_z_575693_qp_drh_isb_1')
}) })
test('does not synthesize pseudo part keys from normalized mesh names when no authoritative mapping exists', () => {
const scene = new THREE.Group()
const mesh = new THREE.Mesh(new THREE.BufferGeometry(), new THREE.MeshStandardMaterial())
mesh.name = 'RWDR_B_F-802044_TR4_H122BK_AF0'
scene.add(mesh)
const { meshRegistry, logicalPartKeys, unresolvedMeshNames } = buildScenePartRegistry(scene, {})
expect(resolveObjectPartKey(mesh, {})).toBe('')
expect(mesh.userData.partKey).toBeUndefined()
expect(meshRegistry).toHaveLength(0)
expect(logicalPartKeys).toEqual(new Set())
expect(unresolvedMeshNames).toEqual(new Set(['RWDR_B_F-802044_TR4_H122BK']))
})
}) })
@@ -0,0 +1,49 @@
import { describe, expect, test } from 'vitest'
import { getOutputTypeRolloutPresentation } from '../../components/admin/outputTypeRolloutPresentation'
// Maps (workflow link, rollout mode, blocking issues) to the badge/status/
// summary copy shown on the output-type admin rows.
describe('outputTypeRolloutPresentation', () => {
  test('describes unlinked output types as fully legacy', () => {
    const presentation = getOutputTypeRolloutPresentation({
      hasWorkflowLink: false,
      workflowRolloutMode: 'legacy_only',
    })
    expect(presentation).toMatchObject({
      badgeLabel: 'Legacy Only',
      statusLabel: 'Production: Legacy',
      rowSummary: 'No linked graph workflow.',
    })
  })
  test('describes shadow rollout as legacy-authoritative observer mode', () => {
    const presentation = getOutputTypeRolloutPresentation({
      hasWorkflowLink: true,
      workflowRolloutMode: 'shadow',
    })
    // Shadow mode still reports legacy as the production path.
    expect(presentation).toMatchObject({
      badgeLabel: 'Shadow',
      statusLabel: 'Production: Legacy',
      rowSummary: 'Graph observes only; legacy remains authoritative.',
    })
  })
  test('describes graph rollout as production-graph with legacy fallback', () => {
    const presentation = getOutputTypeRolloutPresentation({
      hasWorkflowLink: true,
      workflowRolloutMode: 'graph',
    })
    expect(presentation).toMatchObject({
      badgeLabel: 'Graph Authoritative',
      statusLabel: 'Production: Graph',
      rowSummary: 'Graph drives production with legacy fallback armed.',
    })
  })
  test('elevates blocking issues above rollout mode', () => {
    // Blocking contract issues override even a graph-authoritative mode.
    const presentation = getOutputTypeRolloutPresentation({
      hasWorkflowLink: true,
      workflowRolloutMode: 'graph',
      hasBlockingIssues: true,
    })
    expect(presentation).toMatchObject({
      badgeLabel: 'Contract Blocked',
      statusLabel: 'Do Not Promote',
    })
  })
})
@@ -0,0 +1,50 @@
import { describe, expect, test, vi } from 'vitest'
import {
bindWorkflowAuthoringInsertActions,
type WorkflowAuthoringActions,
} from '../../components/workflows/workflowAuthoringActions'
describe('bindWorkflowAuthoringInsertActions', () => {
  test('binds preferred canvas position into insert actions', () => {
    // Spies live on the actions object so assertions read off one place.
    const actions = {
      insertNode: vi.fn(),
      insertModule: vi.fn(),
      insertReferencePath: vi.fn(),
    } satisfies WorkflowAuthoringActions
    const preferredPosition = { x: 120, y: 240 }
    const bindings = bindWorkflowAuthoringInsertActions(actions, {
      preferredPosition,
    })
    bindings.onSelectStep?.('blender_still')
    bindings.onInsertModule?.('still_render_core')
    bindings.onInsertReferencePath?.('still_render_reference')
    // Every bound handler forwards the same preferred canvas position.
    expect(actions.insertNode).toHaveBeenCalledWith('blender_still', { x: 120, y: 240 })
    expect(actions.insertModule).toHaveBeenCalledWith('still_render_core', { x: 120, y: 240 })
    expect(actions.insertReferencePath).toHaveBeenCalledWith('still_render_reference', { x: 120, y: 240 })
  })
  test('runs the after-insert callback after successful bound actions', () => {
    const afterInsert = vi.fn()
    const insertNode = vi.fn()
    const bindings = bindWorkflowAuthoringInsertActions(
      { insertNode },
      { onAfterInsert: afterInsert },
    )
    bindings.onSelectStep?.('resolve_template')
    // No preferred position configured, so the handler passes undefined.
    expect(insertNode).toHaveBeenCalledWith('resolve_template', undefined)
    expect(afterInsert).toHaveBeenCalledOnce()
  })
})
@@ -0,0 +1,227 @@
import { describe, expect, test } from 'vitest'
import type { WorkflowNodeDefinition } from '../../api/workflows'
import { getWorkflowAuthoringPlan } from '../../components/workflows/workflowAuthoringGuidance'
import {
getWorkflowAuthoringSurfaceModel,
resolveWorkflowAuthoringSection,
} from '../../components/workflows/workflowAuthoringSurface'
/**
 * Builds an order-line node definition, filling in the five fields that are
 * identical for every node in this fixture catalogue. Each entry below spells
 * out only what distinguishes it: identity, category/appearance, execution
 * kind, and its input/output contract.
 */
function orderLineNode(
  node: Omit<
    WorkflowNodeDefinition,
    'family' | 'defaults' | 'fields' | 'artifact_roles_produced' | 'artifact_roles_consumed'
  >,
): WorkflowNodeDefinition {
  return {
    family: 'order_line',
    defaults: {},
    fields: [],
    artifact_roles_produced: [],
    artifact_roles_consumed: [],
    ...node,
  }
}

// Fixture catalogue covering the full order-line pipeline: setup → template /
// materials → geometry → render → save → notify, mixing bridge (legacy-backed)
// and native steps.
const definitions: WorkflowNodeDefinition[] = [
  orderLineNode({
    step: 'order_line_setup',
    label: 'Order Line Setup',
    module_key: 'order_line.prepare_render_context',
    category: 'input',
    description: 'Prepare order line.',
    node_type: 'inputNode',
    icon: 'refresh-cw',
    execution_kind: 'bridge',
    legacy_compatible: true,
    input_contract: { context: 'order_line' },
    output_contract: { context: 'order_line', provides: ['order_line'] },
    legacy_source: 'legacy.order_line_setup',
  }),
  orderLineNode({
    step: 'resolve_template',
    label: 'Resolve Template',
    module_key: 'rendering.resolve_template',
    category: 'processing',
    description: 'Resolve template.',
    node_type: 'processNode',
    icon: 'layers',
    execution_kind: 'bridge',
    legacy_compatible: true,
    input_contract: { context: 'order_line', requires: ['order_line'] },
    output_contract: { context: 'order_line', provides: ['render_template'] },
    legacy_source: 'legacy.resolve_template',
  }),
  orderLineNode({
    step: 'auto_populate_materials',
    label: 'Auto Populate Materials',
    module_key: 'materials.auto_populate',
    category: 'processing',
    description: 'Populate materials.',
    node_type: 'processNode',
    icon: 'layers',
    execution_kind: 'bridge',
    legacy_compatible: true,
    input_contract: { context: 'order_line', requires: ['order_line'] },
    output_contract: { context: 'order_line', provides: ['cad_materials'] },
    legacy_source: 'legacy.auto_populate_materials',
  }),
  orderLineNode({
    step: 'glb_bbox',
    label: 'Compute Bounding Box',
    module_key: 'geometry.compute_bbox',
    category: 'processing',
    description: 'Compute bbox.',
    node_type: 'processNode',
    icon: 'box',
    execution_kind: 'native',
    legacy_compatible: false,
    input_contract: { context: 'order_line', requires: ['order_line'] },
    output_contract: { context: 'order_line', provides: ['bbox'] },
    legacy_source: null,
  }),
  orderLineNode({
    step: 'material_map_resolve',
    label: 'Resolve Material Map',
    module_key: 'materials.resolve_map',
    category: 'processing',
    description: 'Resolve material map.',
    node_type: 'processNode',
    icon: 'layers',
    execution_kind: 'native',
    legacy_compatible: false,
    input_contract: { context: 'order_line', requires: ['order_line'] },
    output_contract: { context: 'order_line', provides: ['material_map'] },
    legacy_source: null,
  }),
  orderLineNode({
    step: 'blender_still',
    label: 'Blender Still',
    module_key: 'render.production.still',
    category: 'rendering',
    description: 'Render still.',
    node_type: 'renderNode',
    icon: 'camera',
    execution_kind: 'native',
    legacy_compatible: false,
    input_contract: { context: 'order_line', requires: ['order_line'] },
    output_contract: { context: 'order_line', provides: ['render_image'] },
    legacy_source: null,
  }),
  orderLineNode({
    step: 'output_save',
    label: 'Save Output',
    module_key: 'media.save_output',
    category: 'output',
    description: 'Persist output.',
    node_type: 'outputNode',
    icon: 'download',
    execution_kind: 'bridge',
    legacy_compatible: true,
    input_contract: { context: 'order_line', requires: ['render_image'] },
    output_contract: { context: 'order_line', provides: ['saved_output'] },
    legacy_source: 'legacy.output_save',
  }),
  orderLineNode({
    step: 'notify',
    label: 'Notify Result',
    module_key: 'notifications.emit',
    category: 'output',
    description: 'Notify completion.',
    node_type: 'outputNode',
    icon: 'bell',
    execution_kind: 'bridge',
    legacy_compatible: true,
    input_contract: { context: 'order_line', requires: ['saved_output'] },
    output_contract: { context: 'order_line', provides: ['notification'] },
    legacy_source: 'legacy.notify',
  }),
]
// The authoring plan and the authoring surface model are derived from the
// same node catalogue; these tests pin bundle membership, stage ordering,
// gap-fill suggestions, and section fallback behaviour.
describe('workflow authoring guidance', () => {
  test('derives a single shared order-line authoring plan', () => {
    // Only blender_still is on the canvas; the plan must report partial
    // progress and suggest the missing upstream steps.
    const plan = getWorkflowAuthoringPlan(definitions, 'order_line', ['blender_still'])
    expect(plan.referenceBundles).toHaveLength(1)
    expect(plan.moduleBundles).toHaveLength(2)
    expect(plan.referenceBundles[0]?.presentCount).toBe(1)
    expect(plan.moduleBundles.find(bundle => bundle.id === 'still_render_core')?.presentCount).toBe(1)
    expect(plan.stageProgress.map(stage => stage.id)).toEqual([
      'still_render_reference',
      'still_render_core',
      'output_publish_notify',
      'order_line_setup',
    ])
    // Suggested gap-fill steps, in pipeline order.
    expect(plan.gapFillDefinitions.map(definition => definition.step)).toEqual([
      'order_line_setup',
      'resolve_template',
      'auto_populate_materials',
    ])
  })
  test('hides starter-only guidance for mixed graphs', () => {
    // Mixed-family graphs get the generic flow but no starter/stage content.
    const plan = getWorkflowAuthoringPlan(definitions, 'mixed', ['blender_still'])
    expect(plan.title).toBe('Guided Authoring')
    expect(plan.starterItems).toEqual([])
    expect(plan.stageProgress).toEqual([])
    expect(plan.authoringFlow[2]?.title).toBe('Starter Path')
  })
  test('derives one shared authoring surface model from the same plan', () => {
    const surface = getWorkflowAuthoringSurfaceModel({
      definitions,
      graphFamily: 'order_line',
      activeSteps: ['order_line_setup'],
    })
    expect(surface.defaultSection).toBe('overview')
    expect(surface.sections.map(section => section.key)).toEqual([
      'overview',
      'paths',
      'modules',
      'starter',
      'nodes',
    ])
    // The surface embeds the plan rather than recomputing its own bundles.
    expect(surface.plan.referenceBundles[0]?.id).toBe('still_render_reference')
    expect(surface.plan.moduleBundles.map(bundle => bundle.id)).toEqual([
      'still_render_core',
      'output_publish_notify',
    ])
  })
  test('falls back to a valid section when the requested section is unavailable', () => {
    // Mixed graphs only expose 'nodes', so both a missing and a valid
    // request resolve to 'nodes'.
    const surface = getWorkflowAuthoringSurfaceModel({
      definitions,
      graphFamily: 'mixed',
      activeSteps: ['order_line_setup'],
    })
    expect(resolveWorkflowAuthoringSection('overview', surface.sections, surface.defaultSection)).toBe('nodes')
    expect(resolveWorkflowAuthoringSection('nodes', surface.sections, surface.defaultSection)).toBe('nodes')
  })
})
@@ -1,4 +1,4 @@
import { render, screen } from '@testing-library/react' import { render, screen, within } from '@testing-library/react'
import userEvent from '@testing-library/user-event' import userEvent from '@testing-library/user-event'
import { describe, expect, test, vi } from 'vitest' import { describe, expect, test, vi } from 'vitest'
@@ -14,6 +14,11 @@ import { WorkflowCanvasToolbar } from '../../components/workflows/WorkflowCanvas
import { WorkflowNodeContractCard } from '../../components/workflows/WorkflowNodeContractCard' import { WorkflowNodeContractCard } from '../../components/workflows/WorkflowNodeContractCard'
import { WorkflowPreflightPanel } from '../../components/workflows/WorkflowPreflightPanel' import { WorkflowPreflightPanel } from '../../components/workflows/WorkflowPreflightPanel'
import { WorkflowRunsPanel } from '../../components/workflows/WorkflowRunsPanel' import { WorkflowRunsPanel } from '../../components/workflows/WorkflowRunsPanel'
import {
bindWorkflowAuthoringInsertActions,
getWorkflowAuthoringEntryAction,
} from '../../components/workflows/workflowAuthoringActions'
import { getWorkflowAuthoringSurfaceModel } from '../../components/workflows/workflowAuthoringSurface'
const nodeDefinitions: WorkflowNodeDefinition[] = [ const nodeDefinitions: WorkflowNodeDefinition[] = [
{ {
@@ -35,6 +40,177 @@ const nodeDefinitions: WorkflowNodeDefinition[] = [
artifact_roles_consumed: [], artifact_roles_consumed: [],
legacy_source: 'legacy.resolve_step_path', legacy_source: 'legacy.resolve_step_path',
}, },
{
step: 'occ_object_extract',
label: 'Extract OCC Objects',
family: 'cad_file',
module_key: 'cad.intake',
category: 'processing',
description: 'Extract assembly objects from CAD input.',
node_type: 'processNode',
icon: 'boxes',
defaults: {},
fields: [],
execution_kind: 'native',
legacy_compatible: false,
input_contract: { context: 'cad_file', requires: ['step_path'] },
output_contract: { context: 'cad_file', provides: ['cad_objects'] },
artifact_roles_produced: [],
artifact_roles_consumed: ['step_file'],
legacy_source: null,
},
{
step: 'occ_glb_export',
label: 'Export GLB',
family: 'cad_file',
module_key: 'cad.intake',
category: 'processing',
description: 'Export preview GLB from CAD geometry.',
node_type: 'processNode',
icon: 'box',
defaults: {},
fields: [],
execution_kind: 'native',
legacy_compatible: false,
input_contract: { context: 'cad_file', requires: ['cad_objects'] },
output_contract: { context: 'cad_file', provides: ['cad_preview'] },
artifact_roles_produced: ['cad_preview'],
artifact_roles_consumed: [],
legacy_source: null,
},
{
step: 'stl_cache_generate',
label: 'Generate STL Cache',
family: 'cad_file',
module_key: 'cad.intake',
category: 'processing',
description: 'Build STL cache for downstream consumers.',
node_type: 'processNode',
icon: 'database',
defaults: {},
fields: [],
execution_kind: 'native',
legacy_compatible: false,
input_contract: { context: 'cad_file', requires: ['cad_objects'] },
output_contract: { context: 'cad_file', provides: ['stl_cache'] },
artifact_roles_produced: ['stl_cache'],
artifact_roles_consumed: [],
legacy_source: null,
},
{
step: 'thumbnail_save',
label: 'Publish Thumbnail',
family: 'cad_file',
module_key: 'cad.intake',
category: 'output',
description: 'Persist preview thumbnail output.',
node_type: 'outputNode',
icon: 'image',
defaults: {},
fields: [],
execution_kind: 'bridge',
legacy_compatible: true,
input_contract: { context: 'cad_file', requires: ['cad_preview'] },
output_contract: { context: 'cad_file', provides: ['thumbnail'] },
artifact_roles_produced: ['thumbnail'],
artifact_roles_consumed: ['cad_preview'],
legacy_source: 'legacy.thumbnail_save',
},
{
step: 'order_line_setup',
label: 'Order Line Setup',
family: 'order_line',
module_key: 'order_line.prepare_render_context',
category: 'input',
description: 'Prepare order-line render context.',
node_type: 'inputNode',
icon: 'refresh-cw',
defaults: {},
fields: [],
execution_kind: 'bridge',
legacy_compatible: true,
input_contract: { context: 'order_line' },
output_contract: { context: 'order_line', provides: ['order_line'] },
artifact_roles_produced: [],
artifact_roles_consumed: [],
legacy_source: 'legacy.order_line_setup',
},
{
step: 'resolve_template',
label: 'Resolve Template',
family: 'order_line',
module_key: 'rendering.resolve_template',
category: 'processing',
description: 'Resolve render template.',
node_type: 'processNode',
icon: 'layers',
defaults: {},
fields: [],
execution_kind: 'bridge',
legacy_compatible: true,
input_contract: { context: 'order_line', requires: ['order_line'] },
output_contract: { context: 'order_line', provides: ['render_template'] },
artifact_roles_produced: [],
artifact_roles_consumed: [],
legacy_source: 'legacy.resolve_template',
},
{
step: 'auto_populate_materials',
label: 'Auto Populate Materials',
family: 'order_line',
module_key: 'materials.auto_populate',
category: 'processing',
description: 'Populate materials automatically.',
node_type: 'processNode',
icon: 'layers',
defaults: {},
fields: [],
execution_kind: 'bridge',
legacy_compatible: true,
input_contract: { context: 'order_line', requires: ['order_line'] },
output_contract: { context: 'order_line', provides: ['cad_materials'] },
artifact_roles_produced: [],
artifact_roles_consumed: [],
legacy_source: 'legacy.auto_populate_materials',
},
{
step: 'glb_bbox',
label: 'Compute Bounding Box',
family: 'shared',
module_key: 'geometry.compute_bbox',
category: 'processing',
description: 'Compute GLB bounding box.',
node_type: 'processNode',
icon: 'layers',
defaults: {},
fields: [],
execution_kind: 'native',
legacy_compatible: false,
input_contract: { requires: ['cad_preview'] },
output_contract: { provides: ['bbox'] },
artifact_roles_produced: [],
artifact_roles_consumed: [],
legacy_source: null,
},
{
step: 'material_map_resolve',
label: 'Resolve Material Map',
family: 'order_line',
module_key: 'materials.resolve_map',
category: 'processing',
description: 'Resolve material mapping.',
node_type: 'processNode',
icon: 'layers',
defaults: {},
fields: [],
execution_kind: 'native',
legacy_compatible: false,
input_contract: { context: 'order_line', requires: ['order_line'] },
output_contract: { context: 'order_line', provides: ['material_map'] },
artifact_roles_produced: [],
artifact_roles_consumed: [],
legacy_source: null,
},
{ {
step: 'blender_still', step: 'blender_still',
label: 'Blender Still', label: 'Blender Still',
@@ -54,6 +230,44 @@ const nodeDefinitions: WorkflowNodeDefinition[] = [
artifact_roles_consumed: ['cad_preview'], artifact_roles_consumed: ['cad_preview'],
legacy_source: null, legacy_source: null,
}, },
{
step: 'output_save',
label: 'Save Output',
family: 'order_line',
module_key: 'media.save_output',
category: 'output',
description: 'Save rendered output.',
node_type: 'outputNode',
icon: 'download',
defaults: {},
fields: [],
execution_kind: 'bridge',
legacy_compatible: true,
input_contract: { context: 'order_line', requires: ['rendered_image'] },
output_contract: { context: 'order_line', provides: ['saved_output'] },
artifact_roles_produced: [],
artifact_roles_consumed: [],
legacy_source: 'legacy.output_save',
},
{
step: 'notify',
label: 'Notify Result',
family: 'order_line',
module_key: 'notifications.emit',
category: 'output',
description: 'Emit completion notification.',
node_type: 'outputNode',
icon: 'bell',
defaults: {},
fields: [],
execution_kind: 'bridge',
legacy_compatible: true,
input_contract: { context: 'order_line', requires: ['saved_output'] },
output_contract: { context: 'order_line', provides: ['notification'] },
artifact_roles_produced: [],
artifact_roles_consumed: [],
legacy_source: 'legacy.notify',
},
] ]
const shadowRun: WorkflowRun = { const shadowRun: WorkflowRun = {
@@ -72,7 +286,10 @@ const shadowRun: WorkflowRun = {
id: 'node-result-1', id: 'node-result-1',
node_name: 'Blender Still', node_name: 'Blender Still',
status: 'completed', status: 'completed',
output: null, output: {
image_path: '/tmp/render.png',
artifact_role: 'png_output',
},
log: 'Rendered successfully.', log: 'Rendered successfully.',
duration_s: 2.3, duration_s: 2.3,
created_at: '2026-04-08T10:02:00Z', created_at: '2026-04-08T10:02:00Z',
@@ -87,6 +304,16 @@ const shadowComparison: WorkflowRunComparison = {
execution_mode: 'shadow', execution_mode: 'shadow',
status: 'matched', status: 'matched',
summary: 'Observer output matches authoritative output.', summary: 'Observer output matches authoritative output.',
rollout_gate_verdict: 'pass',
workflow_rollout_ready: true,
workflow_rollout_status: 'ready_for_rollout',
rollout_reasons: [
'Observer output matches the authoritative legacy output byte-for-byte.',
],
rollout_thresholds: {
pass_max_mean_pixel_delta: 0.000001,
warn_max_mean_pixel_delta: 0.02,
},
authoritative_output: { authoritative_output: {
path: null, path: null,
storage_key: null, storage_key: null,
@@ -122,7 +349,7 @@ const preflightResponse: WorkflowPreflightResponse = {
summary: 'Graph requires one missing upstream artifact.', summary: 'Graph requires one missing upstream artifact.',
resolved_order_line_id: 'ol-1', resolved_order_line_id: 'ol-1',
resolved_cad_file_id: null, resolved_cad_file_id: null,
unsupported_node_ids: [], unsupported_node_ids: ['node-legacy-1'],
issues: [ issues: [
{ {
severity: 'warning', severity: 'warning',
@@ -168,6 +395,7 @@ describe('WorkflowNodeContractCard', () => {
inputContextLabel="Order Rendering" inputContextLabel="Order Rendering"
outputContextLabel="Order Rendering" outputContextLabel="Order Rendering"
requiredInputs={['order_line', 'render_template']} requiredInputs={['order_line', 'render_template']}
requiredAnyInputs={[['rendered_image', 'rendered_frames']]}
consumedArtifacts={['cad_preview']} consumedArtifacts={['cad_preview']}
providedOutputs={['render_image']} providedOutputs={['render_image']}
producedArtifacts={['png_output']} producedArtifacts={['png_output']}
@@ -180,7 +408,8 @@ describe('WorkflowNodeContractCard', () => {
expect(screen.getByText('legacy.still_render')).toBeInTheDocument() expect(screen.getByText('legacy.still_render')).toBeInTheDocument()
expect(screen.getByText('Order Line')).toBeInTheDocument() expect(screen.getByText('Order Line')).toBeInTheDocument()
expect(screen.getByText('Render Template')).toBeInTheDocument() expect(screen.getByText('Render Template')).toBeInTheDocument()
expect(screen.getByText('Cad Preview')).toBeInTheDocument() expect(screen.getByText('Any of: Rendered Image / Rendered Frames')).toBeInTheDocument()
expect(screen.getByText('CAD Preview')).toBeInTheDocument()
expect(screen.getByText('Render Image')).toBeInTheDocument() expect(screen.getByText('Render Image')).toBeInTheDocument()
expect(screen.getByText('Png Output')).toBeInTheDocument() expect(screen.getByText('Png Output')).toBeInTheDocument()
}) })
@@ -197,19 +426,58 @@ describe('WorkflowCanvasToolbar', () => {
const onPreflight = vi.fn() const onPreflight = vi.fn()
const onDispatch = vi.fn() const onDispatch = vi.fn()
const onSave = vi.fn() const onSave = vi.fn()
const onRollbackOutputType = vi.fn()
render( render(
<WorkflowCanvasToolbar <WorkflowCanvasToolbar
workflowName="Still Image - Graph" workflowName="Still Image - Graph"
blueprintLabel="Still Graph" blueprintLabel="Still Graph"
blueprintDescription="Reference graph for the non-legacy still render path." blueprintDescription="Reference graph for the non-legacy still render path."
authoringFamilyLabel="Order Rendering"
authoringFamilyClassName="bg-emerald-100 text-emerald-700"
graphFamilyLabel="Order Rendering" graphFamilyLabel="Order Rendering"
graphFamilyClassName="bg-emerald-100 text-emerald-700" graphFamilyClassName="bg-emerald-100 text-emerald-700"
executionMode="graph" executionMode="graph"
executionModeLabel="Graph" executionModeLabel="Graph"
executionModeClassName="bg-green-100 text-green-700" executionModeClassName="bg-green-100 text-green-700"
executionModeHint="Production dispatch uses graph runtime with fallback." executionModeHint="Production dispatch uses graph runtime with fallback."
dispatchContextId="" rolloutBadgeLabel="Shadow"
rolloutBadgeClassName="bg-sky-100 text-sky-700"
rolloutStatusLabel="Legacy Authoritative"
rolloutStatusClassName="bg-sky-100 text-sky-700"
rolloutSummary="Latest shadow verdict: pass."
linkedOutputTypeCount={2}
linkedOutputTypes={[
{
id: 'ot-1',
name: 'Shadow Still Output',
is_active: true,
artifact_kind: 'still_image',
workflow_rollout_mode: 'shadow',
},
{
id: 'ot-2',
name: 'Legacy Archive Output',
is_active: false,
artifact_kind: 'blend_asset',
workflow_rollout_mode: 'legacy_only',
},
]}
dispatchContextKind="order_line"
dispatchContextLabel="Order Line"
dispatchContextId="line-1"
dispatchContextSummary="Product A · Still"
dispatchContextMeta="ORD-1001 · pending"
orderLineContextGroups={[
{
orderId: 'order-1',
orderLabel: 'ORD-1001',
options: [
{ value: 'line-1', label: 'Product A · Still', meta: 'ORD-1001 · pending' },
{ value: 'line-2', label: 'Product B · Still', meta: 'ORD-1001 · completed' },
],
},
]}
executionModes={[ executionModes={[
{ value: 'legacy', label: 'Legacy' }, { value: 'legacy', label: 'Legacy' },
{ value: 'graph', label: 'Graph' }, { value: 'graph', label: 'Graph' },
@@ -217,39 +485,66 @@ describe('WorkflowCanvasToolbar', () => {
]} ]}
selectedEdgeCount={2} selectedEdgeCount={2}
canAutoLayout canAutoLayout
canPreflight
canDispatch
hasValidationErrors={false} hasValidationErrors={false}
isPreflightPending={false} isPreflightPending={false}
isDispatchPending={false} isDispatchPending={false}
isContextOptionsLoading={false}
isSaving={false} isSaving={false}
rollbackPendingOutputTypeId={null}
preflightState="ready"
authoringActions={{ openNodeMenu: onOpenNodeMenu }}
authoringEntryAction={{
label: 'Author',
title: 'Open guided workflow authoring browser',
helper: 'Open reference paths, production modules, starter steps, and raw nodes.',
icon: () => null,
}}
onDispatchContextIdChange={onDispatchContextIdChange} onDispatchContextIdChange={onDispatchContextIdChange}
onExecutionModeChange={onExecutionModeChange} onExecutionModeChange={onExecutionModeChange}
onOpenNodeMenu={onOpenNodeMenu}
onAutoLayout={onAutoLayout} onAutoLayout={onAutoLayout}
onDeleteSelectedEdges={onDeleteSelectedEdges} onDeleteSelectedEdges={onDeleteSelectedEdges}
onPreflight={onPreflight} onPreflight={onPreflight}
onDispatch={onDispatch} onDispatch={onDispatch}
onSave={onSave} onSave={onSave}
onRollbackOutputType={onRollbackOutputType}
/>, />,
) )
expect(screen.getByText('Workflow Canvas')).toBeInTheDocument() expect(screen.getByText('Workflow Canvas')).toBeInTheDocument()
expect(screen.getByText('Still Image - Graph')).toBeInTheDocument() expect(screen.getByText('Still Image - Graph')).toBeInTheDocument()
expect(screen.getByText('Still Graph')).toBeInTheDocument() expect(screen.getByText('Still Graph')).toBeInTheDocument()
expect(screen.getAllByText('Shadow').length).toBeGreaterThan(0)
expect(screen.getByText('Legacy Authoritative')).toBeInTheDocument()
expect(screen.getByText(/2 linked output types/i)).toBeInTheDocument()
expect(screen.getByText('Rollout Controls')).toBeInTheDocument()
expect(screen.getByText('Shadow Still Output')).toBeInTheDocument()
expect(screen.getByText('Legacy Archive Output')).toBeInTheDocument()
expect(screen.getAllByText('Order Line').length).toBeGreaterThan(0)
expect(screen.getAllByText('Product A · Still').length).toBeGreaterThan(0)
expect(screen.getByText('Right-click to add')).toBeInTheDocument() expect(screen.getByText('Right-click to add')).toBeInTheDocument()
expect(screen.getByText('Preflight ready')).toBeInTheDocument()
expect(screen.getByRole('button', { name: 'Delete (2)' })).toBeEnabled() expect(screen.getByRole('button', { name: 'Delete (2)' })).toBeEnabled()
const rollbackButtons = screen.getAllByRole('button', { name: 'Set Legacy' })
expect(rollbackButtons.length).toBe(2)
expect(rollbackButtons[0]?.getAttribute('title')).toContain('legacy_only')
expect(rollbackButtons[1]).toBeDisabled()
await user.click(screen.getByRole('button', { name: 'Node' })) await user.click(screen.getByRole('button', { name: 'Author' }))
await user.click(screen.getByRole('button', { name: 'Align' })) await user.click(screen.getByRole('button', { name: 'Align' }))
await user.click(screen.getByRole('button', { name: 'Delete (2)' })) await user.click(screen.getByRole('button', { name: 'Delete (2)' }))
await user.click(rollbackButtons[0] as HTMLElement)
await user.click(screen.getByRole('button', { name: 'Dry Run' })) await user.click(screen.getByRole('button', { name: 'Dry Run' }))
await user.click(screen.getByRole('button', { name: 'Run' })) await user.click(screen.getByRole('button', { name: 'Run' }))
await user.click(screen.getByRole('button', { name: 'Save' })) await user.click(screen.getByRole('button', { name: 'Save' }))
await user.type(screen.getByPlaceholderText('context id'), 'order-123') await user.selectOptions(screen.getByRole('combobox', { name: 'Order line context' }), 'line-2')
await user.selectOptions(screen.getByRole('combobox'), 'shadow') await user.selectOptions(screen.getByRole('combobox', { name: 'Mode' }), 'shadow')
expect(onOpenNodeMenu).toHaveBeenCalledOnce() expect(onOpenNodeMenu).toHaveBeenCalledOnce()
expect(onAutoLayout).toHaveBeenCalledOnce() expect(onAutoLayout).toHaveBeenCalledOnce()
expect(onDeleteSelectedEdges).toHaveBeenCalledOnce() expect(onDeleteSelectedEdges).toHaveBeenCalledOnce()
expect(onRollbackOutputType).toHaveBeenCalledWith('ot-1')
expect(onPreflight).toHaveBeenCalledOnce() expect(onPreflight).toHaveBeenCalledOnce()
expect(onDispatch).toHaveBeenCalledOnce() expect(onDispatch).toHaveBeenCalledOnce()
expect(onSave).toHaveBeenCalledOnce() expect(onSave).toHaveBeenCalledOnce()
@@ -263,28 +558,54 @@ describe('WorkflowCanvasToolbar', () => {
workflowName="CAD Intake" workflowName="CAD Intake"
blueprintLabel={null} blueprintLabel={null}
blueprintDescription={null} blueprintDescription={null}
authoringFamilyLabel="CAD Intake"
authoringFamilyClassName="bg-sky-100 text-sky-700"
graphFamilyLabel="CAD Intake" graphFamilyLabel="CAD Intake"
graphFamilyClassName="bg-sky-100 text-sky-700" graphFamilyClassName="bg-sky-100 text-sky-700"
executionMode="legacy" executionMode="legacy"
executionModeLabel="Legacy" executionModeLabel="Legacy"
executionModeClassName="bg-slate-100 text-slate-700" executionModeClassName="bg-slate-100 text-slate-700"
executionModeHint="Legacy dispatcher stays authoritative." executionModeHint="Legacy dispatcher stays authoritative."
rolloutBadgeLabel="Unlinked"
rolloutBadgeClassName="bg-surface-muted text-content-muted"
rolloutStatusLabel="Legacy Only"
rolloutStatusClassName="bg-slate-100 text-slate-700"
rolloutSummary="No output types are linked to this workflow yet."
linkedOutputTypeCount={0}
linkedOutputTypes={[]}
dispatchContextKind="cad_file"
dispatchContextLabel="CAD File"
dispatchContextId="" dispatchContextId=""
dispatchContextSummary={null}
dispatchContextMeta={null}
orderLineContextGroups={[]}
executionModes={[{ value: 'legacy', label: 'Legacy' }]} executionModes={[{ value: 'legacy', label: 'Legacy' }]}
selectedEdgeCount={0} selectedEdgeCount={0}
canAutoLayout={false} canAutoLayout={false}
canPreflight={false}
canDispatch={false}
hasValidationErrors hasValidationErrors
isPreflightPending={false} isPreflightPending={false}
isDispatchPending={false} isDispatchPending={false}
isContextOptionsLoading={false}
isSaving={false} isSaving={false}
rollbackPendingOutputTypeId={null}
preflightState="required"
authoringActions={{ openNodeMenu: vi.fn() }}
authoringEntryAction={{
label: 'Node',
title: 'Open raw node browser',
helper: 'Open the searchable node catalog directly on the canvas.',
icon: () => null,
}}
onDispatchContextIdChange={vi.fn()} onDispatchContextIdChange={vi.fn()}
onExecutionModeChange={vi.fn()} onExecutionModeChange={vi.fn()}
onOpenNodeMenu={vi.fn()}
onAutoLayout={vi.fn()} onAutoLayout={vi.fn()}
onDeleteSelectedEdges={vi.fn()} onDeleteSelectedEdges={vi.fn()}
onPreflight={vi.fn()} onPreflight={vi.fn()}
onDispatch={vi.fn()} onDispatch={vi.fn()}
onSave={vi.fn()} onSave={vi.fn()}
onRollbackOutputType={vi.fn()}
/>, />,
) )
@@ -305,7 +626,8 @@ describe('NodeCommandMenu', () => {
<NodeCommandMenu <NodeCommandMenu
definitions={nodeDefinitions} definitions={nodeDefinitions}
graphFamily="mixed" graphFamily="mixed"
onSelectStep={onSelectStep} activeSteps={[]}
actions={{ insertNode: step => onSelectStep(step) }}
onClose={vi.fn()} onClose={vi.fn()}
renderIcon={iconName => <span>{iconName}</span>} renderIcon={iconName => <span>{iconName}</span>}
/>, />,
@@ -315,22 +637,187 @@ describe('NodeCommandMenu', () => {
await user.type(screen.getByPlaceholderText('Search nodes'), 'blender{enter}') await user.type(screen.getByPlaceholderText('Search nodes'), 'blender{enter}')
expect(onSelectStep).toHaveBeenCalledWith('blender_still') expect(onSelectStep).toHaveBeenCalledWith('blender_still')
expect(screen.getByRole('button', { name: 'All Categories' })).toBeInTheDocument()
expect(screen.getByText('Quick Insert')).toBeInTheDocument()
expect(screen.getByText('Graph Nodes')).toBeInTheDocument() expect(screen.getByText('Graph Nodes')).toBeInTheDocument()
}) })
test('supports module insertion directly from the canvas authoring menu', async () => {
const user = userEvent.setup()
const onInsertReferencePath = vi.fn()
const onInsertModule = vi.fn()
const onSelectStep = vi.fn()
render(
<NodeCommandMenu
definitions={nodeDefinitions}
graphFamily="order_line"
activeSteps={['order_line_setup']}
actions={{
insertNode: step => onSelectStep(step),
insertReferencePath: bundleId => onInsertReferencePath(bundleId),
insertModule: bundleId => onInsertModule(bundleId),
}}
onClose={vi.fn()}
renderIcon={iconName => <span>{iconName}</span>}
/>,
)
expect(screen.getByRole('button', { name: 'Overview' })).toBeInTheDocument()
expect(screen.getByRole('button', { name: 'Paths' })).toBeInTheDocument()
expect(screen.getByRole('button', { name: 'Modules' })).toBeInTheDocument()
expect(screen.getByRole('button', { name: 'Starter' })).toBeInTheDocument()
expect(screen.getByRole('button', { name: 'Nodes' })).toBeInTheDocument()
expect(screen.getByText('Recommended Path')).toBeInTheDocument()
expect(screen.getByRole('button', { name: 'Insert Still Reference' })).toBeInTheDocument()
await user.click(screen.getByRole('button', { name: 'Insert Still Reference' }))
expect(onInsertReferencePath).toHaveBeenCalledWith('still_render_reference')
await user.click(screen.getByRole('button', { name: 'Paths' }))
expect(screen.getByText('Reference Paths')).toBeInTheDocument()
expect(screen.getByRole('button', { name: 'Insert Still Render Reference' })).toBeInTheDocument()
await user.click(screen.getByRole('button', { name: 'Insert Still Render Reference' }))
expect(onInsertReferencePath).toHaveBeenNthCalledWith(2, 'still_render_reference')
await user.click(screen.getByRole('button', { name: 'Modules' }))
expect(screen.getByText('Production Modules')).toBeInTheDocument()
await user.click(screen.getByRole('button', { name: 'Insert Still Render Core' }))
expect(onInsertModule).toHaveBeenCalledWith('still_render_core')
})
}) })
describe('NodeDefinitionsPanel', () => { describe('NodeDefinitionsPanel', () => {
test('groups nodes by runtime bucket and module in the utility rail library view', () => { test('organizes library authoring into overview and focused browser sections', async () => {
const user = userEvent.setup()
render(<NodeDefinitionsPanel definitions={nodeDefinitions} graphFamily="mixed" />) render(<NodeDefinitionsPanel definitions={nodeDefinitions} graphFamily="mixed" />)
expect(screen.getByText('Node Library')).toBeInTheDocument() expect(screen.getByText('Node Library')).toBeInTheDocument()
expect(screen.getByText('Authoring Browser')).toBeInTheDocument()
expect(screen.getByText('Authoring Flow')).toBeInTheDocument()
expect(screen.getByRole('button', { name: 'Paths' })).toBeInTheDocument()
expect(screen.getByRole('button', { name: 'Modules' })).toBeInTheDocument()
expect(screen.getByRole('button', { name: 'Nodes' })).toBeInTheDocument()
await user.click(screen.getByRole('button', { name: 'Paths' }))
expect(screen.getByText('Reference Paths')).toBeInTheDocument()
expect(screen.getByText('Still Render Reference')).toBeInTheDocument()
await user.click(screen.getByRole('button', { name: 'Modules' }))
expect(screen.getByText('Production Modules')).toBeInTheDocument()
expect(screen.getByText('Still Render Core')).toBeInTheDocument()
await user.click(screen.getByRole('button', { name: 'Nodes' }))
expect(screen.getAllByText('Raw Node Catalog').length).toBeGreaterThan(0)
expect(screen.getByText('Quick Insert')).toBeInTheDocument()
expect(screen.getByText('Runtime')).toBeInTheDocument()
expect(screen.getByText('Family')).toBeInTheDocument()
expect(screen.getByText('Category')).toBeInTheDocument()
expect(screen.getByPlaceholderText('Search modules')).toBeInTheDocument()
expect(screen.getAllByText('CAD Intake').length).toBeGreaterThan(0) expect(screen.getAllByText('CAD Intake').length).toBeGreaterThan(0)
expect(screen.getAllByText('Order Rendering').length).toBeGreaterThan(0) expect(screen.getAllByText('Order Rendering').length).toBeGreaterThan(0)
expect(screen.getByText('Legacy Nodes')).toBeInTheDocument() expect(screen.getByText('Legacy Nodes')).toBeInTheDocument()
expect(screen.getByText('Graph Nodes')).toBeInTheDocument() expect(screen.getByText('Graph Nodes')).toBeInTheDocument()
expect(screen.getByText('Blender Still')).toBeInTheDocument() expect(screen.getAllByText('Blender Still').length).toBeGreaterThan(0)
expect(screen.getAllByText('Graph').length).toBeGreaterThan(0) expect(screen.getAllByText('Graph').length).toBeGreaterThan(0)
expect(screen.getByRole('button', { name: 'All Modules' })).toBeInTheDocument() expect(screen.getByRole('button', { name: 'All Modules' })).toBeInTheDocument()
expect(screen.getAllByText('Cad').length).toBeGreaterThan(0)
expect(screen.getAllByText('Notifications').length).toBeGreaterThan(0)
})
test('shows starter-path progress for still-render authoring flows', async () => {
const user = userEvent.setup()
const onInsertReferencePath = vi.fn()
const onInsertModule = vi.fn()
const onSelectStep = vi.fn()
render(
<NodeDefinitionsPanel
definitions={nodeDefinitions}
graphFamily="order_line"
activeSteps={['blender_still']}
actions={{
insertReferencePath: bundleId => onInsertReferencePath(bundleId),
insertModule: bundleId => onInsertModule(bundleId),
insertNode: step => onSelectStep(step),
}}
/>,
)
const stageStatusHeading = screen.getByText('Stage Status')
const recommendedPathHeading = screen.getByText('Recommended Path')
expect(stageStatusHeading).toBeInTheDocument()
expect(recommendedPathHeading).toBeInTheDocument()
expect(
stageStatusHeading.compareDocumentPosition(recommendedPathHeading) &
Node.DOCUMENT_POSITION_FOLLOWING,
).toBeTruthy()
expect(screen.getByText('Still Render Reference')).toBeInTheDocument()
expect(screen.getByRole('button', { name: 'Reapply Still Reference' })).toBeInTheDocument()
expect(screen.getAllByRole('button', { name: 'Insert Publish' }).length).toBeGreaterThan(0)
expect(screen.getAllByRole('button', { name: 'Add Order Line Setup' }).length).toBeGreaterThan(0)
await user.click(screen.getByRole('button', { name: 'Reapply Still Reference' }))
await user.click(screen.getAllByRole('button', { name: 'Insert Publish' })[0] as HTMLElement)
await user.click(screen.getAllByRole('button', { name: 'Add Order Line Setup' })[0] as HTMLElement)
expect(onInsertReferencePath).toHaveBeenCalledWith('still_render_reference')
expect(onInsertModule).toHaveBeenCalledWith('output_publish_notify')
expect(onSelectStep).toHaveBeenCalledWith('order_line_setup')
await user.click(screen.getByRole('button', { name: 'Starter' }))
expect(screen.getByText('Starter Path')).toBeInTheDocument()
expect(screen.getAllByText('Still-render assembly').length).toBeGreaterThan(0)
expect(screen.getAllByText('1/8 present').length).toBeGreaterThan(0)
expect(screen.getAllByText('Present').length).toBeGreaterThan(0)
})
test('gives CAD authoring the same guided reference-path flow without duplicate intake stages', async () => {
const user = userEvent.setup()
const onInsertReferencePath = vi.fn()
const onInsertModule = vi.fn()
const onSelectStep = vi.fn()
render(
<NodeDefinitionsPanel
definitions={nodeDefinitions}
graphFamily="cad_file"
activeSteps={['resolve_step_path']}
actions={{
insertReferencePath: bundleId => onInsertReferencePath(bundleId),
insertModule: bundleId => onInsertModule(bundleId),
insertNode: step => onSelectStep(step),
}}
/>,
)
expect(screen.getByText('Stage Status')).toBeInTheDocument()
expect(screen.getByText('Start with the CAD intake assembly')).toBeInTheDocument()
expect(screen.getByText('CAD Intake Reference')).toBeInTheDocument()
expect(screen.getByRole('button', { name: 'Reapply CAD Intake Reference' })).toBeInTheDocument()
expect(screen.queryByRole('button', { name: 'Complete CAD Intake' })).not.toBeInTheDocument()
expect(screen.getAllByRole('button', { name: 'Add Extract OCC Objects' }).length).toBeGreaterThan(0)
await user.click(screen.getByRole('button', { name: 'Reapply CAD Intake Reference' }))
await user.click(screen.getAllByRole('button', { name: 'Add Extract OCC Objects' })[0] as HTMLElement)
expect(onInsertReferencePath).toHaveBeenCalledWith('cad_intake_reference')
expect(onInsertModule).not.toHaveBeenCalled()
expect(onSelectStep).toHaveBeenCalledWith('occ_object_extract')
await user.click(screen.getByRole('button', { name: 'Paths' }))
expect(screen.getByText('Reference Paths')).toBeInTheDocument()
expect(screen.getByRole('button', { name: 'Insert CAD Intake Reference' })).toBeInTheDocument()
await user.click(screen.getByRole('button', { name: 'Starter' }))
expect(screen.getByText('Starter Path')).toBeInTheDocument()
expect(screen.getAllByText('CAD intake assembly').length).toBeGreaterThan(0)
expect(screen.getAllByText('1/5 present').length).toBeGreaterThan(0)
}) })
test('supports direct node insertion from the library sidebar', async () => { test('supports direct node insertion from the library sidebar', async () => {
@@ -341,15 +828,89 @@ describe('NodeDefinitionsPanel', () => {
<NodeDefinitionsPanel <NodeDefinitionsPanel
definitions={nodeDefinitions} definitions={nodeDefinitions}
graphFamily="mixed" graphFamily="mixed"
onSelectStep={onSelectStep} actions={{ insertNode: step => onSelectStep(step) }}
renderIcon={iconName => <span>{iconName}</span>} renderIcon={iconName => <span>{iconName}</span>}
/>, />,
) )
await user.click(screen.getAllByRole('button', { name: 'Insert' })[1]) await user.click(screen.getByRole('button', { name: 'Nodes' }))
const blenderCard = screen.getAllByText('Blender Still')[0]?.closest('div.rounded-lg')
expect(blenderCard).not.toBeNull()
await user.click(within(blenderCard as HTMLElement).getByRole('button', { name: 'Insert Blender Still' }))
expect(onSelectStep).toHaveBeenCalledWith('blender_still') expect(onSelectStep).toHaveBeenCalledWith('blender_still')
}) })
// Module bundles must be insertable straight from the sidebar's Modules tab.
test('supports direct workflow-module insertion from the library sidebar', async () => {
  const user = userEvent.setup()
  const moduleSpy = vi.fn()
  render(
    <NodeDefinitionsPanel
      definitions={nodeDefinitions}
      graphFamily="order_line"
      activeSteps={[]}
      actions={{ insertModule: bundleId => moduleSpy(bundleId) }}
    />,
  )
  for (const buttonName of ['Modules', 'Insert Still Render Core']) {
    await user.click(screen.getByRole('button', { name: buttonName }))
  }
  expect(moduleSpy).toHaveBeenCalledWith('still_render_core')
})
})
describe('workflowAuthoringActions', () => {
// Every bound insert action forwards the preferred position and triggers the
// shared after-insert callback exactly once per invocation.
test('binds preferred position and after-insert callback once for every insert action', () => {
  const spies = {
    insertNode: vi.fn(),
    insertModule: vi.fn(),
    insertReferencePath: vi.fn(),
  }
  const onAfterInsert = vi.fn()
  const position = { x: 240, y: 180 }
  const bindings = bindWorkflowAuthoringInsertActions(spies, {
    preferredPosition: position,
    onAfterInsert,
  })
  bindings.onSelectStep?.('blender_still')
  bindings.onInsertModule?.('still_render_core')
  bindings.onInsertReferencePath?.('still_render_reference')
  expect(spies.insertNode).toHaveBeenCalledWith('blender_still', position)
  expect(spies.insertModule).toHaveBeenCalledWith('still_render_core', position)
  expect(spies.insertReferencePath).toHaveBeenCalledWith('still_render_reference', position)
  expect(onAfterInsert).toHaveBeenCalledTimes(3)
})
// The entry action's label/title should track whether the surface model is
// guided (order_line) or the raw node browser (mixed).
test('derives the primary authoring entry from the shared surface model', () => {
  const entryFor = (graphFamily: 'order_line' | 'mixed') =>
    getWorkflowAuthoringEntryAction(
      getWorkflowAuthoringSurfaceModel({
        definitions: nodeDefinitions,
        graphFamily,
        activeSteps: [],
      }),
    )
  const guidedEntry = entryFor('order_line')
  const rawEntry = entryFor('mixed')
  expect(guidedEntry.label).toBe('Author')
  expect(guidedEntry.title).toContain('guided workflow authoring')
  expect(rawEntry.label).toBe('Node')
  expect(rawEntry.title).toContain('raw node browser')
})
}) })
describe('WorkflowRunsPanel', () => { describe('WorkflowRunsPanel', () => {
@@ -370,6 +931,15 @@ describe('WorkflowRunsPanel', () => {
expect(screen.getByText('Workflow Runs')).toBeInTheDocument() expect(screen.getByText('Workflow Runs')).toBeInTheDocument()
expect(screen.getByText('Shadow Comparison')).toBeInTheDocument() expect(screen.getByText('Shadow Comparison')).toBeInTheDocument()
expect(screen.getByText('Observer output matches authoritative output.')).toBeInTheDocument() expect(screen.getByText('Observer output matches authoritative output.')).toBeInTheDocument()
expect(screen.getByText('Execution Mode')).toBeInTheDocument()
expect(screen.getByText('Celery Task')).toBeInTheDocument()
expect(screen.getByText('Duration: 2.3 s')).toBeInTheDocument()
expect(screen.getByText('Rollout Gate: pass')).toBeInTheDocument()
expect(screen.getByText('Ready For Rollout')).toBeInTheDocument()
expect(screen.getByText('Operator Decision')).toBeInTheDocument()
expect(screen.getByText('Observer output matches the authoritative legacy output byte-for-byte.')).toBeInTheDocument()
expect(screen.getByText('Exact Match: yes')).toBeInTheDocument()
expect(screen.getByText('Dimensions: match')).toBeInTheDocument()
await user.click(screen.getByRole('button', { name: /run-shad/i })) await user.click(screen.getByRole('button', { name: /run-shad/i }))
@@ -384,6 +954,12 @@ describe('WorkflowPreflightPanel', () => {
expect(screen.getByText('Graph Preflight')).toBeInTheDocument() expect(screen.getByText('Graph Preflight')).toBeInTheDocument()
expect(screen.getByText('Graph requires one missing upstream artifact.')).toBeInTheDocument() expect(screen.getByText('Graph requires one missing upstream artifact.')).toBeInTheDocument()
expect(screen.getByText('Missing cad_preview artifact.')).toBeInTheDocument() expect(screen.getByText('Missing cad_preview artifact.')).toBeInTheDocument()
expect(screen.getByText('Mode: graph')).toBeInTheDocument()
expect(screen.getByText('Unsupported Node IDs')).toBeInTheDocument()
expect(screen.getByText('node-legacy-1')).toBeInTheDocument()
expect(screen.getByText('Code: missing-artifact')).toBeInTheDocument()
expect(screen.getByText('Runtime: native')).toBeInTheDocument()
expect(screen.getByText('Supported: yes')).toBeInTheDocument()
expect(screen.getByText('cad_preview must be produced upstream.')).toBeInTheDocument() expect(screen.getByText('cad_preview must be produced upstream.')).toBeInTheDocument()
expect(screen.getByText('blocked')).toBeInTheDocument() expect(screen.getByText('blocked')).toBeInTheDocument()
}) })
@@ -2,7 +2,16 @@ import type { Edge, Node } from '@xyflow/react'
import { describe, expect, test } from 'vitest' import { describe, expect, test } from 'vitest'
import type { WorkflowNodeDefinition } from '../../api/workflows' import type { WorkflowNodeDefinition } from '../../api/workflows'
import { resolveParamsForStepChange, validateWorkflowDraft } from '../../components/workflows/workflowGraphDraft' import {
buildWorkflowCanvasNodeData,
graphNeedsAutoLayout,
resolveParamsForStepChange,
resolveNodeCollisions,
validateWorkflowDraft,
WORKFLOW_NODE_MIN_HEIGHT,
WORKFLOW_NODE_VERTICAL_GAP,
workflowToGraph,
} from '../../components/workflows/workflowGraphDraft'
function createNode(id: string, step: string, label = step): Node { function createNode(id: string, step: string, label = step): Node {
return { return {
@@ -17,6 +26,13 @@ function createNode(id: string, step: string, label = step): Node {
} as Node } as Node
} }
// Build a test node pinned to an explicit canvas position; all other fields
// come from the shared createNode helper.
function createPositionedNode(id: string, step: string, x: number, y: number, label = step): Node {
  const node = createNode(id, step, label)
  return { ...node, position: { x, y } }
}
function createEdge(source: string, target: string): Edge { function createEdge(source: string, target: string): Edge {
return { return {
id: `${source}-${target}`, id: `${source}-${target}`,
@@ -105,7 +121,7 @@ const definitions: Record<string, WorkflowNodeDefinition> = {
glb_bbox: { glb_bbox: {
step: 'glb_bbox', step: 'glb_bbox',
label: 'Compute Bounding Box', label: 'Compute Bounding Box',
family: 'order_line', family: 'shared',
module_key: 'geometry.compute_bbox', module_key: 'geometry.compute_bbox',
category: 'processing', category: 'processing',
description: 'Compute bbox.', description: 'Compute bbox.',
@@ -115,8 +131,8 @@ const definitions: Record<string, WorkflowNodeDefinition> = {
fields: [], fields: [],
execution_kind: 'bridge', execution_kind: 'bridge',
legacy_compatible: true, legacy_compatible: true,
input_contract: { context: 'order_line', requires: ['glb_preview'] }, input_contract: { requires: ['glb_preview'] },
output_contract: { context: 'order_line', provides: ['bbox'] }, output_contract: { provides: ['bbox'] },
artifact_roles_consumed: ['glb_preview'], artifact_roles_consumed: ['glb_preview'],
artifact_roles_produced: ['bbox'], artifact_roles_produced: ['bbox'],
legacy_source: 'legacy.glb_bbox', legacy_source: 'legacy.glb_bbox',
@@ -160,7 +176,7 @@ const definitions: Record<string, WorkflowNodeDefinition> = {
}, },
], ],
execution_kind: 'native', execution_kind: 'native',
legacy_compatible: false, legacy_compatible: true,
input_contract: { input_contract: {
context: 'order_line', context: 'order_line',
requires: ['order_line_context', 'render_template', 'material_assignments', 'bbox'], requires: ['order_line_context', 'render_template', 'material_assignments', 'bbox'],
@@ -208,6 +224,25 @@ const definitions: Record<string, WorkflowNodeDefinition> = {
artifact_roles_produced: ['notification_event'], artifact_roles_produced: ['notification_event'],
legacy_source: 'legacy.notify', legacy_source: 'legacy.notify',
}, },
export_blend: {
step: 'export_blend',
label: 'Export Blend',
family: 'order_line',
module_key: 'rendering.export_blend',
category: 'output',
description: 'Export blend asset.',
node_type: 'outputNode',
icon: 'download',
defaults: {},
fields: [],
execution_kind: 'bridge',
legacy_compatible: true,
input_contract: { context: 'order_line', requires: ['render_template'] },
output_contract: { context: 'order_line', provides: ['blend_asset'] },
artifact_roles_consumed: ['render_template'],
artifact_roles_produced: ['blend_asset'],
legacy_source: 'legacy.export_blend',
},
resolve_step_path: { resolve_step_path: {
step: 'resolve_step_path', step: 'resolve_step_path',
label: 'Resolve STEP Path', label: 'Resolve STEP Path',
@@ -299,6 +334,46 @@ describe('validateWorkflowDraft', () => {
expect(result.errors).toEqual([]) expect(result.errors).toEqual([])
}) })
// A bridge-backed still-render chain validates without an explicit material
// map resolver when legacy compatibility is enabled (final `true` argument).
test('accepts legacy-compatible still render chains without an explicit material map resolver', () => {
  const chainNodes = [
    createNode('setup', 'order_line_setup', 'Order Line Setup'),
    createNode('template', 'resolve_template', 'Resolve Template'),
    createNode('bbox', 'glb_bbox', 'Compute Bounding Box'),
    createNode('render', 'blender_still', 'Render Still'),
    createNode('save', 'output_save', 'Save Output'),
  ]
  const links: Array<[string, string]> = [
    ['setup', 'template'],
    ['setup', 'bbox'],
    ['setup', 'render'],
    ['template', 'render'],
    ['bbox', 'render'],
    ['render', 'save'],
  ]
  const chainEdges = links.map(([source, target]) => createEdge(source, target))
  const { errors } = validateWorkflowDraft(chainNodes, chainEdges, definitions, true)
  expect(errors).toEqual([])
})
// Blend-export output should satisfy the notify node's input requirements.
test('accepts notify nodes fed by blend-export outputs', () => {
  const draftNodes = [
    createNode('setup', 'order_line_setup', 'Order Line Setup'),
    createNode('template', 'resolve_template', 'Resolve Template'),
    createNode('blend', 'export_blend', 'Export Blend'),
    createNode('notify', 'notify', 'Notify'),
  ]
  const links: Array<[string, string]> = [
    ['setup', 'template'],
    ['template', 'blend'],
    ['blend', 'notify'],
  ]
  const draftEdges = links.map(([source, target]) => createEdge(source, target))
  const { errors } = validateWorkflowDraft(draftNodes, draftEdges, definitions, true)
  expect(errors).toEqual([])
})
test('blocks mixed CAD-file and order-line graphs', () => { test('blocks mixed CAD-file and order-line graphs', () => {
const result = validateWorkflowDraft( const result = validateWorkflowDraft(
[createNode('cad', 'resolve_step_path', 'Resolve STEP Path'), createNode('render', 'blender_still', 'Render Still')], [createNode('cad', 'resolve_step_path', 'Resolve STEP Path'), createNode('render', 'blender_still', 'Render Still')],
@@ -309,6 +384,92 @@ describe('validateWorkflowDraft', () => {
expect(result.errors).toContain('Workflow mixes CAD-file and order-line nodes. Split them into separate workflows.') expect(result.errors).toContain('Workflow mixes CAD-file and order-line nodes. Split them into separate workflows.')
}) })
// End-to-end CAD intake validation: resolve -> export GLB -> bbox -> Three.js
// render -> save. The shared-family `glb_bbox` definition (family: 'shared' in
// this file's fixtures) must be usable inside a cad_file graph.
test('accepts a CAD intake graph that feeds shared bbox into threejs thumbnail render', () => {
  // Renders a thumbnail from both the GLB preview and the bbox artifact.
  const threejsDefinition: WorkflowNodeDefinition = {
    step: 'threejs_render',
    label: 'Render Thumbnail',
    family: 'cad_file',
    module_key: 'render.thumbnail.threejs',
    category: 'rendering',
    description: 'Render a thumbnail from the GLB preview.',
    node_type: 'renderNode',
    icon: 'camera',
    defaults: {},
    fields: [],
    execution_kind: 'bridge',
    legacy_compatible: true,
    input_contract: { context: 'cad_file', requires: ['glb_preview', 'bbox'] },
    output_contract: { context: 'cad_file', provides: ['rendered_image'] },
    artifact_roles_consumed: ['glb_preview', 'bbox'],
    artifact_roles_produced: ['rendered_image'],
    legacy_source: 'legacy.threejs_render',
  }
  // Terminal output node: persists the rendered image.
  const thumbnailSaveDefinition: WorkflowNodeDefinition = {
    step: 'thumbnail_save',
    label: 'Save Thumbnail',
    family: 'cad_file',
    module_key: 'media.save_thumbnail',
    category: 'output',
    description: 'Persist the thumbnail.',
    node_type: 'outputNode',
    icon: 'download',
    defaults: {},
    fields: [],
    execution_kind: 'bridge',
    legacy_compatible: true,
    input_contract: { context: 'cad_file', requires: ['rendered_image'] },
    output_contract: { context: 'cad_file', provides: ['cad_thumbnail_media'] },
    artifact_roles_consumed: ['rendered_image'],
    artifact_roles_produced: ['cad_thumbnail_media'],
    legacy_source: 'legacy.thumbnail_save',
  }
  // Bridges the resolved STEP path into the GLB preview consumed downstream.
  const occGlbDefinition: WorkflowNodeDefinition = {
    step: 'occ_glb_export',
    label: 'Export GLB',
    family: 'cad_file',
    module_key: 'cad.export_glb',
    category: 'processing',
    description: 'Export a GLB preview.',
    node_type: 'processNode',
    icon: 'layers',
    defaults: {},
    fields: [],
    execution_kind: 'bridge',
    legacy_compatible: true,
    input_contract: { context: 'cad_file', requires: ['step_path'] },
    output_contract: { context: 'cad_file', provides: ['glb_preview'] },
    artifact_roles_consumed: ['step_path'],
    artifact_roles_produced: ['glb_preview'],
    legacy_source: 'legacy.occ_glb_export',
  }
  // Both the GLB export and the bbox feed the renderer; legacy mode enabled.
  const result = validateWorkflowDraft(
    [
      createNode('resolve', 'resolve_step_path', 'Resolve STEP Path'),
      createNode('glb', 'occ_glb_export', 'Export GLB'),
      createNode('bbox', 'glb_bbox', 'Compute Bounding Box'),
      createNode('thumb', 'threejs_render', 'Render Thumbnail'),
      createNode('save', 'thumbnail_save', 'Save Thumbnail'),
    ],
    [
      createEdge('resolve', 'glb'),
      createEdge('glb', 'bbox'),
      createEdge('glb', 'thumb'),
      createEdge('bbox', 'thumb'),
      createEdge('thumb', 'save'),
    ],
    {
      ...definitions,
      occ_glb_export: occGlbDefinition,
      threejs_render: threejsDefinition,
      thumbnail_save: thumbnailSaveDefinition,
    },
    true,
  )
  expect(result.errors).toEqual([])
})
}) })
describe('resolveParamsForStepChange', () => { describe('resolveParamsForStepChange', () => {
@@ -325,4 +486,103 @@ describe('resolveParamsForStepChange', () => {
width: 1024, width: 1024,
}) })
}) })
// resolve_template keeps dynamic template_input__* overrides on a step change
// while dropping the template id override and any unrelated stale keys.
test('preserves dynamic template input overrides for resolve_template nodes', () => {
  const resolved = resolveParamsForStepChange(definitions.resolve_template, {
    template_id_override: 'd7d7a1bb-2f14-4d83-99d1-7d7e36eb05d9',
    template_input__studio_variant: 'warm',
    template_input__camera_profile: 'macro',
    stale_key: 'drop-me',
  })
  expect(resolved).toEqual({
    template_input__studio_variant: 'warm',
    template_input__camera_profile: 'macro',
  })
})
})
describe('resolveNodeCollisions', () => {
  // Anchored nodes stay put; colliding neighbours get pushed clear of them.
  test('pushes overlapping nodes away from a settled anchor without moving the anchor', () => {
    const layout = resolveNodeCollisions(
      [
        createPositionedNode('anchor', 'order_line_setup', 56, 48, 'Anchor'),
        createPositionedNode('overlap', 'resolve_template', 88, 76, 'Overlap'),
      ],
      ['anchor'],
    )
    const anchor = layout.find(node => node.id === 'anchor')
    const pushed = layout.find(node => node.id === 'overlap')
    expect(anchor?.position).toEqual({ x: 56, y: 48 })
    expect(graphNeedsAutoLayout(layout)).toBe(false)
    // The displaced node must clear the anchor by a full node height plus gap.
    const minClearY = 48 + WORKFLOW_NODE_MIN_HEIGHT + WORKFLOW_NODE_VERTICAL_GAP
    expect(pushed?.position.y).toBeGreaterThanOrEqual(minClearY)
  })

  // A push that lands on yet another node must cascade until nothing overlaps.
  test('cascades pushed nodes so stacked collisions are fully resolved', () => {
    const layout = resolveNodeCollisions(
      [
        createPositionedNode('anchor', 'order_line_setup', 56, 48, 'Anchor'),
        createPositionedNode('middle', 'resolve_template', 56, 48, 'Middle'),
        createPositionedNode('tail', 'blender_still', 64, 56, 'Tail'),
      ],
      ['anchor'],
    )
    const middle = layout.find(node => node.id === 'middle')
    const tail = layout.find(node => node.id === 'tail')
    expect(middle).toBeTruthy()
    expect(tail).toBeTruthy()
    expect(graphNeedsAutoLayout(layout)).toBe(false)
    expect(middle?.position).not.toEqual({ x: 56, y: 48 })
    expect(tail?.position).not.toEqual({ x: 64, y: 56 })
  })
})
describe('workflowToGraph', () => {
// Port lists on the canvas node come straight from the definition's contract.
test('derives explicit input and output ports from the node contract', () => {
  const data = buildWorkflowCanvasNodeData('blender_still', {}, definitions.blender_still)
  const inputLabels = (data.inputPorts ?? []).map(port => port.label)
  const outputLabels = (data.outputPorts ?? []).map(port => port.label)
  expect(inputLabels).toEqual([
    'Order Line Context',
    'Render Template',
    'Material Assignments',
    'Bounding Box',
  ])
  expect(outputLabels).toEqual(['Rendered Image'])
  expect(data.editableFieldCount).toBe(2)
})
// Every upstream source should land on the render node's input handle that
// matches its output contract.
test('assigns semantic handle ids to edges based on matching contracts', () => {
  const graph = workflowToGraph(
    {
      version: 1,
      nodes: [
        { id: 'setup', step: 'order_line_setup', params: {} },
        { id: 'template', step: 'resolve_template', params: {} },
        { id: 'materials', step: 'material_map_resolve', params: {} },
        { id: 'bbox', step: 'glb_bbox', params: {} },
        { id: 'render', step: 'blender_still', params: {} },
      ],
      edges: [
        { from: 'setup', to: 'render' },
        { from: 'template', to: 'render' },
        { from: 'materials', to: 'render' },
        { from: 'bbox', to: 'render' },
      ],
    },
    definitions,
  )
  // Local helper: the target handle assigned to source -> render.
  const handleFor = (source: string) =>
    graph.edges.find(edge => edge.source === source && edge.target === 'render')?.targetHandle
  expect(handleFor('setup')).toBe('input:order_line_context')
  expect(handleFor('template')).toBe('input:render_template')
  expect(handleFor('materials')).toBe('input:material_assignments')
  expect(handleFor('bbox')).toBe('input:bbox')
})
}) })
@@ -0,0 +1,414 @@
import { describe, expect, test } from 'vitest'
import type { WorkflowNodeDefinition } from '../../api/workflows'
import { createWorkflowModuleBundleInsertion, getWorkflowModuleBundles } from '../../components/workflows/workflowModuleBundles'
import {
createWorkflowReferenceBundleInsertion,
getWorkflowReferenceBundles,
} from '../../components/workflows/workflowReferenceBundles'
// Order-line node-definition fixtures for the bundle tests below.
// Convention in this fixture set: bridge-backed steps are legacy_compatible
// and carry a legacy_source; native steps are not and have legacy_source null.
const definitions: WorkflowNodeDefinition[] = [
  // Entry point: establishes the order_line context.
  {
    step: 'order_line_setup',
    label: 'Order Line Setup',
    family: 'order_line',
    module_key: 'order_line.prepare_render_context',
    category: 'input',
    description: 'Prepare render context.',
    node_type: 'inputNode',
    icon: 'refresh-cw',
    defaults: {},
    fields: [],
    execution_kind: 'bridge',
    legacy_compatible: true,
    input_contract: { context: 'order_line' },
    output_contract: { context: 'order_line', provides: ['order_line'] },
    artifact_roles_produced: [],
    artifact_roles_consumed: [],
    legacy_source: 'legacy.order_line_setup',
  },
  // order_line -> render_template (bridge).
  {
    step: 'resolve_template',
    label: 'Resolve Template',
    family: 'order_line',
    module_key: 'rendering.resolve_template',
    category: 'processing',
    description: 'Resolve template.',
    node_type: 'processNode',
    icon: 'layers',
    defaults: {},
    fields: [],
    execution_kind: 'bridge',
    legacy_compatible: true,
    input_contract: { context: 'order_line', requires: ['order_line'] },
    output_contract: { context: 'order_line', provides: ['render_template'] },
    artifact_roles_produced: [],
    artifact_roles_consumed: [],
    legacy_source: 'legacy.resolve_template',
  },
  // order_line -> cad_materials (bridge).
  {
    step: 'auto_populate_materials',
    label: 'Auto Populate Materials',
    family: 'order_line',
    module_key: 'materials.auto_populate',
    category: 'processing',
    description: 'Populate materials.',
    node_type: 'processNode',
    icon: 'layers',
    defaults: {},
    fields: [],
    execution_kind: 'bridge',
    legacy_compatible: true,
    input_contract: { context: 'order_line', requires: ['order_line'] },
    output_contract: { context: 'order_line', provides: ['cad_materials'] },
    artifact_roles_produced: [],
    artifact_roles_consumed: [],
    legacy_source: 'legacy.auto_populate_materials',
  },
  // order_line -> bbox (native, not legacy compatible).
  {
    step: 'glb_bbox',
    label: 'Compute Bounding Box',
    family: 'order_line',
    module_key: 'geometry.compute_bbox',
    category: 'processing',
    description: 'Compute bbox.',
    node_type: 'processNode',
    icon: 'layers',
    defaults: {},
    fields: [],
    execution_kind: 'native',
    legacy_compatible: false,
    input_contract: { context: 'order_line', requires: ['order_line'] },
    output_contract: { context: 'order_line', provides: ['bbox'] },
    artifact_roles_produced: [],
    artifact_roles_consumed: [],
    legacy_source: null,
  },
  // order_line -> material_map (native, not legacy compatible).
  {
    step: 'material_map_resolve',
    label: 'Resolve Material Map',
    family: 'order_line',
    module_key: 'materials.resolve_map',
    category: 'processing',
    description: 'Resolve material map.',
    node_type: 'processNode',
    icon: 'layers',
    defaults: {},
    fields: [],
    execution_kind: 'native',
    legacy_compatible: false,
    input_contract: { context: 'order_line', requires: ['order_line'] },
    output_contract: { context: 'order_line', provides: ['material_map'] },
    artifact_roles_produced: [],
    artifact_roles_consumed: [],
    legacy_source: null,
  },
  // order_line -> rendered_image (native renderer).
  {
    step: 'blender_still',
    label: 'Blender Still',
    family: 'order_line',
    module_key: 'render.production.still',
    category: 'rendering',
    description: 'Render still image.',
    node_type: 'renderNode',
    icon: 'camera',
    defaults: {},
    fields: [],
    execution_kind: 'native',
    legacy_compatible: false,
    input_contract: { context: 'order_line', requires: ['order_line'] },
    output_contract: { context: 'order_line', provides: ['rendered_image'] },
    artifact_roles_produced: [],
    artifact_roles_consumed: [],
    legacy_source: null,
  },
  // rendered_image -> saved_output (bridge output node).
  {
    step: 'output_save',
    label: 'Save Output',
    family: 'order_line',
    module_key: 'media.save_output',
    category: 'output',
    description: 'Save output.',
    node_type: 'outputNode',
    icon: 'download',
    defaults: {},
    fields: [],
    execution_kind: 'bridge',
    legacy_compatible: true,
    input_contract: { context: 'order_line', requires: ['rendered_image'] },
    output_contract: { context: 'order_line', provides: ['saved_output'] },
    artifact_roles_produced: [],
    artifact_roles_consumed: [],
    legacy_source: 'legacy.output_save',
  },
  // saved_output -> notification (bridge output node).
  {
    step: 'notify',
    label: 'Notify Result',
    family: 'order_line',
    module_key: 'notifications.emit',
    category: 'output',
    description: 'Notify result.',
    node_type: 'outputNode',
    icon: 'bell',
    defaults: {},
    fields: [],
    execution_kind: 'bridge',
    legacy_compatible: true,
    input_contract: { context: 'order_line', requires: ['saved_output'] },
    output_contract: { context: 'order_line', provides: ['notification'] },
    artifact_roles_produced: [],
    artifact_roles_consumed: [],
    legacy_source: 'legacy.notify',
  },
]
// CAD-file node-definition fixtures for the cad_intake_reference bundle test.
// Chain: resolve_step_path -> occ_object_extract -> occ_glb_export, then the
// GLB preview fans out to STL cache and the two thumbnail renderers.
const cadDefinitions: WorkflowNodeDefinition[] = [
  // Entry point: establishes the cad_file context.
  {
    step: 'resolve_step_path',
    label: 'Resolve STEP Path',
    family: 'cad_file',
    module_key: 'cad.resolve_step_path',
    category: 'input',
    description: 'Resolve the STEP path.',
    node_type: 'inputNode',
    icon: 'file-code-2',
    defaults: {},
    fields: [],
    execution_kind: 'bridge',
    legacy_compatible: true,
    input_contract: { context: 'cad_file' },
    output_contract: { context: 'cad_file', provides: ['cad_file_record'] },
    artifact_roles_produced: [],
    artifact_roles_consumed: [],
    legacy_source: 'legacy.resolve_step_path',
  },
  // cad_file_record -> occ_scene (native).
  {
    step: 'occ_object_extract',
    label: 'Extract STEP Objects',
    family: 'cad_file',
    module_key: 'cad.extract_objects',
    category: 'processing',
    description: 'Extract objects.',
    node_type: 'processNode',
    icon: 'boxes',
    defaults: {},
    fields: [],
    execution_kind: 'native',
    legacy_compatible: false,
    input_contract: { context: 'cad_file', requires: ['cad_file_record'] },
    output_contract: { context: 'cad_file', provides: ['occ_scene'] },
    artifact_roles_produced: [],
    artifact_roles_consumed: [],
    legacy_source: null,
  },
  // occ_scene -> glb_preview (native).
  {
    step: 'occ_glb_export',
    label: 'Export GLB',
    family: 'cad_file',
    module_key: 'cad.export_glb',
    category: 'processing',
    description: 'Export GLB.',
    node_type: 'processNode',
    icon: 'package',
    defaults: {},
    fields: [],
    execution_kind: 'native',
    legacy_compatible: false,
    input_contract: { context: 'cad_file', requires: ['occ_scene'] },
    output_contract: { context: 'cad_file', provides: ['glb_preview'] },
    artifact_roles_produced: [],
    artifact_roles_consumed: [],
    legacy_source: null,
  },
  // glb_preview -> stl_cache (bridge).
  {
    step: 'stl_cache_generate',
    label: 'Generate STL Cache',
    family: 'cad_file',
    module_key: 'cad.generate_stl_cache',
    category: 'processing',
    description: 'Generate STL cache.',
    node_type: 'processNode',
    icon: 'database',
    defaults: {},
    fields: [],
    execution_kind: 'bridge',
    legacy_compatible: true,
    input_contract: { context: 'cad_file', requires: ['glb_preview'] },
    output_contract: { context: 'cad_file', provides: ['stl_cache'] },
    artifact_roles_produced: [],
    artifact_roles_consumed: [],
    legacy_source: 'legacy.stl_cache_generate',
  },
  // glb_preview -> rendered_image via Blender (bridge).
  {
    step: 'blender_render',
    label: 'Render Thumbnail (Blender)',
    family: 'cad_file',
    module_key: 'cad.thumbnail.blender',
    category: 'rendering',
    description: 'Render Blender thumbnail.',
    node_type: 'renderNode',
    icon: 'camera',
    defaults: {},
    fields: [],
    execution_kind: 'bridge',
    legacy_compatible: true,
    input_contract: { context: 'cad_file', requires: ['glb_preview'] },
    output_contract: { context: 'cad_file', provides: ['rendered_image'] },
    artifact_roles_produced: [],
    artifact_roles_consumed: [],
    legacy_source: 'legacy.blender_render',
  },
  // glb_preview -> rendered_image via Three.js (bridge).
  {
    step: 'threejs_render',
    label: 'Render Thumbnail (Three.js)',
    family: 'cad_file',
    module_key: 'cad.thumbnail.threejs',
    category: 'rendering',
    description: 'Render Three.js thumbnail.',
    node_type: 'renderNode',
    icon: 'box',
    defaults: {},
    fields: [],
    execution_kind: 'bridge',
    legacy_compatible: true,
    input_contract: { context: 'cad_file', requires: ['glb_preview'] },
    output_contract: { context: 'cad_file', provides: ['rendered_image'] },
    artifact_roles_produced: [],
    artifact_roles_consumed: [],
    legacy_source: 'legacy.threejs_render',
  },
  // rendered_image -> saved_thumbnail (bridge output node).
  {
    step: 'thumbnail_save',
    label: 'Save Thumbnail',
    family: 'cad_file',
    module_key: 'cad.thumbnail.save',
    category: 'output',
    description: 'Persist thumbnail.',
    node_type: 'outputNode',
    icon: 'download',
    defaults: {},
    fields: [],
    execution_kind: 'bridge',
    legacy_compatible: true,
    input_contract: { context: 'cad_file', requires: ['rendered_image'] },
    output_contract: { context: 'cad_file', provides: ['saved_thumbnail'] },
    artifact_roles_produced: [],
    artifact_roles_consumed: [],
    legacy_source: 'legacy.thumbnail_save',
  },
]
describe('workflowModuleBundles', () => {
// Only bundles whose required steps exist for the family should be listed.
test('exposes family-scoped bundles when required steps exist', () => {
  const bundleIds = getWorkflowModuleBundles(definitions, 'order_line').map(bundle => bundle.id)
  expect(bundleIds).toEqual(['still_render_core', 'output_publish_notify'])
})
// Inserting the still-render bundle yields a connected six-node graph whose
// first row starts at the preferred position.
test('creates a connected bundle insertion graph for still-render authoring', () => {
  const nodeDefinitionsByStep = Object.fromEntries(
    definitions.map(definition => [definition.step, definition]),
  )
  const insertion = createWorkflowModuleBundleInsertion({
    bundleId: 'still_render_core',
    graphFamily: 'order_line',
    nodeDefinitionsByStep,
    existingNodes: [],
    preferredPosition: { x: 200, y: 320 },
  })
  expect(insertion.ok).toBe(true)
  if (!insertion.ok) return
  expect(insertion.nodes).toHaveLength(6)
  expect(insertion.edges).toHaveLength(5)
  const [firstNode, secondNode] = insertion.nodes
  expect(firstNode.position).toEqual({ x: 200, y: 320 })
  expect(secondNode.position).toEqual({ x: 420, y: 320 })
  expect(firstNode.data).toMatchObject({ step: 'order_line_setup', label: 'Order Line Setup' })
  expect(insertion.nodes[5].data).toMatchObject({ step: 'blender_still', label: 'Blender Still' })
  expect(insertion.edges[0]).toMatchObject({
    source: firstNode.id,
    target: secondNode.id,
  })
})
// Reference paths only appear when the full authoring flow is available.
test('exposes full reference paths for complete non-legacy authoring flows', () => {
  const referenceIds = getWorkflowReferenceBundles(definitions, 'order_line').map(bundle => bundle.id)
  expect(referenceIds).toEqual(['still_render_reference'])
})
// The still-render reference bundle produces an 8-node / 10-edge graph with
// branches out of the render node (indices 5 -> 6 and 5 -> 7).
test('creates the canonical still-render reference graph with branched edges', () => {
  const insertion = createWorkflowReferenceBundleInsertion({
    bundleId: 'still_render_reference',
    graphFamily: 'order_line',
    nodeDefinitionsByStep: Object.fromEntries(definitions.map(definition => [definition.step, definition])),
    existingNodes: [],
    preferredPosition: { x: 200, y: 320 },
  })
  expect(insertion.ok).toBe(true)
  if (!insertion.ok) return
  expect(insertion.nodes).toHaveLength(8)
  expect(insertion.edges).toHaveLength(10)
  // First row lands at y=440 rather than the preferred y=320 — presumably the
  // blueprint offsets rows below the anchor; TODO confirm in the bundle layout.
  expect(insertion.nodes[0].position).toEqual({ x: 200, y: 440 })
  expect(insertion.nodes[1].position).toEqual({ x: 420, y: 440 })
  // The render node carries a bundle-specific label and param override.
  expect(insertion.nodes[5].data).toMatchObject({
    step: 'blender_still',
    label: 'Still Render',
    params: { use_custom_render_settings: true },
  })
  expect(insertion.edges).toEqual(
    expect.arrayContaining([
      expect.objectContaining({
        source: insertion.nodes[0].id,
        target: insertion.nodes[1].id,
      }),
      expect.objectContaining({
        source: insertion.nodes[5].id,
        target: insertion.nodes[6].id,
      }),
      expect.objectContaining({
        source: insertion.nodes[5].id,
        target: insertion.nodes[7].id,
      }),
    ]),
  )
})
// The CAD intake reference bundle produces an 8-node / 7-edge graph where the
// GLB export (index 2) fans out to both thumbnail renderers (indices 4 and 5).
test('creates the canonical CAD intake reference graph from the shared blueprint', () => {
  const insertion = createWorkflowReferenceBundleInsertion({
    bundleId: 'cad_intake_reference',
    graphFamily: 'cad_file',
    nodeDefinitionsByStep: Object.fromEntries(cadDefinitions.map(definition => [definition.step, definition])),
    existingNodes: [],
    preferredPosition: { x: 120, y: 240 },
  })
  expect(insertion.ok).toBe(true)
  if (!insertion.ok) return
  expect(insertion.nodes).toHaveLength(8)
  expect(insertion.edges).toHaveLength(7)
  // 'thumbnail_save' appears twice — presumably one save node per thumbnail
  // renderer (Blender and Three.js); confirm against the blueprint.
  expect(insertion.nodes.map(node => node.data.step)).toEqual([
    'resolve_step_path',
    'occ_object_extract',
    'occ_glb_export',
    'stl_cache_generate',
    'blender_render',
    'threejs_render',
    'thumbnail_save',
    'thumbnail_save',
  ])
  expect(insertion.edges).toEqual(
    expect.arrayContaining([
      expect.objectContaining({
        source: insertion.nodes[2].id,
        target: insertion.nodes[4].id,
      }),
      expect.objectContaining({
        source: insertion.nodes[2].id,
        target: insertion.nodes[5].id,
      }),
    ]),
  )
})
})
@@ -0,0 +1,158 @@
import { describe, expect, test } from 'vitest'
import type { WorkflowNodeDefinition } from '../../api/workflows'
import { buildWorkflowNodeCatalogModel } from '../../components/workflows/workflowNodeCatalog'
import { getDefinitionAuthoringStage } from '../../components/workflows/workflowNodeLibrary'
// Five definitions spanning both families (one cad_file, four order_line),
// used to exercise authoring-stage assignment and catalog grouping below.
const definitions: WorkflowNodeDefinition[] = [
  // cad_file entry point -> expected stage 'cad_intake'.
  {
    step: 'resolve_step_path',
    label: 'Resolve STEP Path',
    family: 'cad_file',
    module_key: 'cad.resolve_step_path',
    category: 'input',
    description: 'Resolve CAD path.',
    node_type: 'inputNode',
    icon: 'file',
    defaults: {},
    fields: [],
    execution_kind: 'bridge',
    legacy_compatible: true,
    input_contract: { context: 'cad_file' },
    output_contract: { context: 'cad_file', provides: ['cad_file'] },
    artifact_roles_produced: [],
    artifact_roles_consumed: [],
    legacy_source: 'legacy.resolve_step_path',
  },
  // order_line entry point -> expected stage 'scene_prep'.
  {
    step: 'order_line_setup',
    label: 'Order Line Setup',
    family: 'order_line',
    module_key: 'order_line.prepare_render_context',
    category: 'input',
    description: 'Prepare order line.',
    node_type: 'inputNode',
    icon: 'file',
    defaults: {},
    fields: [],
    execution_kind: 'bridge',
    legacy_compatible: true,
    input_contract: { context: 'order_line' },
    output_contract: { context: 'order_line', provides: ['order_line'] },
    artifact_roles_produced: [],
    artifact_roles_consumed: [],
    legacy_source: 'legacy.order_line_setup',
  },
  // materials.* namespace -> expected stage 'materials' (native runtime).
  {
    step: 'material_map_resolve',
    label: 'Resolve Material Map',
    family: 'order_line',
    module_key: 'materials.resolve_map',
    category: 'processing',
    description: 'Resolve materials.',
    node_type: 'processNode',
    icon: 'palette',
    defaults: {},
    fields: [],
    execution_kind: 'native',
    legacy_compatible: false,
    input_contract: { context: 'order_line', requires: ['order_line'] },
    output_contract: { context: 'order_line', provides: ['material_map'] },
    artifact_roles_produced: [],
    artifact_roles_consumed: [],
    legacy_source: null,
  },
  // render.* namespace -> expected stage 'render' (native runtime).
  {
    step: 'blender_still',
    label: 'Blender Still',
    family: 'order_line',
    module_key: 'render.production.still',
    category: 'rendering',
    description: 'Render image.',
    node_type: 'renderNode',
    icon: 'camera',
    defaults: {},
    fields: [],
    execution_kind: 'native',
    legacy_compatible: false,
    input_contract: { context: 'order_line', requires: ['order_line'] },
    output_contract: { context: 'order_line', provides: ['rendered_image'] },
    artifact_roles_produced: [],
    artifact_roles_consumed: [],
    legacy_source: null,
  },
  // output category -> expected stage 'publish' (bridge runtime).
  {
    step: 'output_save',
    label: 'Save Output',
    family: 'order_line',
    module_key: 'media.save_output',
    category: 'output',
    description: 'Save output.',
    node_type: 'outputNode',
    icon: 'download',
    defaults: {},
    fields: [],
    execution_kind: 'bridge',
    legacy_compatible: true,
    input_contract: { context: 'order_line', requires: ['rendered_image'] },
    output_contract: { context: 'order_line', provides: ['saved_output'] },
    artifact_roles_produced: [],
    artifact_roles_consumed: [],
    legacy_source: 'legacy.output_save',
  },
]
describe('workflow node organization', () => {
// Each fixture definition maps to exactly one authoring stage, in order.
test('assigns authoring stages from module namespaces and categories', () => {
  const stages = definitions.map(definition => getDefinitionAuthoringStage(definition))
  expect(stages).toEqual(['cad_intake', 'scene_prep', 'materials', 'render', 'publish'])
})
// Full catalog model: module filters, runtime buckets, stage sections, and
// family sections all derive from the five fixture definitions above.
test('builds stage and family catalog models with module and runtime counts', () => {
  const model = buildWorkflowNodeCatalogModel(definitions)
  expect(model.moduleFilters.map(module => module.label)).toEqual([
    'Cad',
    'Materials',
    'Media',
    'Order Line',
    'Render',
  ])
  // 3 bridge-backed fixtures count as 'legacy' and 2 native as 'graph' here —
  // NOTE(review): 'bridge' staying 0 suggests the model buckets bridge steps
  // under 'legacy'; confirm against buildWorkflowNodeCatalogModel.
  expect(model.runtimeCounts).toEqual({
    legacy: 3,
    bridge: 0,
    graph: 2,
  })
  const stageIds = model.stageSections.map(section => section.stage)
  expect(stageIds).toEqual(['cad_intake', 'scene_prep', 'materials', 'render', 'publish'])
  const renderSection = model.stageSections.find(section => section.stage === 'render')
  expect(renderSection?.modules[0]?.namespace).toBe('render')
  expect(renderSection?.runtimeCounts.graph).toBe(1)
  const publishSection = model.stageSections.find(section => section.stage === 'publish')
  expect(publishSection?.modules[0]?.runtimeCounts.legacy).toBe(1)
  // Family sections are keyed by definition family and group modules by
  // namespace (the module_key prefix before the first dot).
  expect(model.familySections.map(section => section.family)).toEqual(['cad_file', 'order_line'])
  const cadFamilySection = model.familySections.find(section => section.family === 'cad_file')
  expect(cadFamilySection?.modules.map(module => module.namespace)).toEqual(['cad'])
  expect(cadFamilySection?.modules[0]?.stageSections.map(section => section.stage)).toEqual(['cad_intake'])
  const orderFamilySection = model.familySections.find(section => section.family === 'order_line')
  expect(orderFamilySection?.modules.map(module => module.namespace)).toEqual([
    'materials',
    'media',
    'order_line',
    'render',
  ])
  expect(orderFamilySection?.runtimeCounts.graph).toBe(2)
  expect(orderFamilySection?.modules.find(module => module.namespace === 'materials')?.stageSections[0]?.stage).toBe(
    'materials',
  )
})
})
@@ -0,0 +1,37 @@
import { describe, expect, test } from 'vitest'
import type { WorkflowCanvasPort } from '../../components/workflows/workflowGraphDraft'
import {
getWorkflowNodePortBadgeLabel,
getWorkflowNodePortTitle,
} from '../../components/workflows/workflowNodePresentation'
describe('workflowNodePresentation', () => {
  // Fixture ports mirroring the shapes emitted by workflowGraphDraft for the canvas.
  const requiredPort: WorkflowCanvasPort = {
    id: 'input:material_assignments',
    label: 'Material Assignments',
    roles: ['material_assignments'],
    kind: 'required',
  }
  const alternativePort: WorkflowCanvasPort = {
    id: 'input-any:rendered_image|rendered_frames|rendered_video|workflow_result|blend_asset',
    label: 'Any of Rendered Image / Rendered Frames / Rendered Video / Workflow Result / Blend Asset',
    roles: ['rendered_image', 'rendered_frames', 'rendered_video', 'workflow_result', 'blend_asset'],
    kind: 'alternative',
  }

  test('renders explicit labels for direct required inputs', () => {
    // A single-role required port reuses its label for both the badge and the tooltip.
    expect(getWorkflowNodePortBadgeLabel(requiredPort)).toBe('Material Assignments')
    expect(getWorkflowNodePortTitle(requiredPort)).toBe('Material Assignments')
  })

  test('renders alternative sockets as explicit role choices', () => {
    // Alternative ports get a shortened badge and a verbose tooltip listing every accepted role.
    expect(getWorkflowNodePortBadgeLabel(alternativePort)).toBe(
      'Any: Image / Frames / Video / Workflow Result / Blend Asset',
    )
    expect(getWorkflowNodePortTitle(alternativePort)).toBe(
      'Accepts any of: Rendered Image / Rendered Frames / Rendered Video / Workflow Result / Blend Asset',
    )
  })
})

Some files were not shown because too many files have changed in this diff Show More