chore: snapshot workflow migration progress

This commit is contained in:
2026-04-12 11:49:04 +02:00
parent 0cd02513d5
commit 3e810c74a3
163 changed files with 31774 additions and 2753 deletions
+158 -10
View File
@@ -16,6 +16,8 @@ import logging
logger = logging.getLogger(__name__)
_WORKFLOW_ROLLOUT_MODES = {"legacy_only", "shadow", "graph"}
def _build_rollout_signal(
*,
@@ -39,6 +41,13 @@ def _build_rollout_signal(
}
def _normalize_workflow_rollout_mode(value: str | None) -> str:
    """Coerce a raw rollout-mode string to a known mode.

    Blank/None input and any value outside ``_WORKFLOW_ROLLOUT_MODES``
    fall back to the safe default ``"legacy_only"``.
    """
    candidate = (value or "legacy_only").strip().lower()
    if candidate not in _WORKFLOW_ROLLOUT_MODES:
        return "legacy_only"
    return candidate
def dispatch_render_with_workflow(order_line_id: str) -> dict:
"""Dispatch a render for the given order line.
@@ -54,12 +63,19 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
from app.config import settings
from app.domains.orders.models import OrderLine
from app.domains.rendering.models import OutputType, WorkflowDefinition
from app.domains.rendering.output_type_contracts import (
derive_supported_artifact_kinds_from_workflow_config,
)
from app.domains.rendering.workflow_config_utils import (
canonicalize_workflow_config,
extract_runtime_workflow,
get_workflow_execution_mode,
)
from app.domains.rendering.workflow_executor import prepare_workflow_context
from app.domains.rendering.workflow_executor import (
WorkflowTaskSubmissionError,
prepare_workflow_context,
submit_prepared_workflow_tasks,
)
from app.domains.rendering.workflow_graph_runtime import (
execute_graph_workflow,
find_unsupported_graph_nodes,
@@ -150,7 +166,41 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
)
return legacy_result
execution_mode = get_workflow_execution_mode(canonical_config, default="legacy")
supported_artifact_kinds = derive_supported_artifact_kinds_from_workflow_config(canonical_config)
output_type_artifact_kind = getattr(output_type, "artifact_kind", None)
if output_type_artifact_kind and output_type_artifact_kind not in supported_artifact_kinds:
supported = ", ".join(supported_artifact_kinds) if supported_artifact_kinds else "none"
logger.warning(
"order_line %s: workflow_definition_id %s is incompatible with output_type %s artifact_kind %s; "
"falling back to legacy dispatch",
order_line_id,
wf_def.id,
output_type.id,
output_type_artifact_kind,
)
legacy_result = _legacy_dispatch(order_line_id)
legacy_result.update(
_build_rollout_signal(
gate_status="workflow_contract_mismatch",
ready=False,
reasons=[
"Linked workflow does not produce the artifact kind required by the output type; legacy dispatch remains authoritative.",
f"Expected artifact kind: {output_type_artifact_kind}. Supported by workflow: [{supported}].",
],
workflow_def_id=wf_def.id,
output_type_id=output_type.id,
)
)
return legacy_result
configured_execution_mode = get_workflow_execution_mode(canonical_config, default="legacy")
workflow_rollout_mode = _normalize_workflow_rollout_mode(
getattr(output_type, "workflow_rollout_mode", None)
)
legacy_runtime_gate_status = "workflow_legacy_runtime"
legacy_runtime_reasons = [
"Workflow definition is active, but execution still uses the legacy runtime path."
]
def _prepare_graph_context(target_mode: str):
workflow_context = prepare_workflow_context(
@@ -175,7 +225,38 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
session.commit()
return run
if execution_mode == "graph":
if configured_execution_mode in {"graph", "shadow"} and workflow_rollout_mode == "legacy_only":
logger.info(
"order_line %s: workflow_definition_id %s is graph-capable but output_type %s is pinned to legacy_only rollout",
order_line_id,
wf_def.id,
output_type.id,
)
legacy_result = _legacy_dispatch(order_line_id)
legacy_result["workflow_rollout_mode"] = workflow_rollout_mode
legacy_result["configured_execution_mode"] = configured_execution_mode
legacy_result.update(
_build_rollout_signal(
gate_status="rollout_legacy_only",
ready=False,
reasons=[
"Output type rollout mode is pinned to legacy_only; legacy dispatch remains authoritative.",
f"Linked workflow stays attached in configured execution mode '{configured_execution_mode}' until rollout is promoted.",
],
workflow_def_id=wf_def.id,
output_type_id=output_type.id,
)
)
return legacy_result
if workflow_rollout_mode in {"graph", "shadow"} and configured_execution_mode not in {"graph", "shadow"}:
legacy_runtime_gate_status = "rollout_requires_graph_workflow"
legacy_runtime_reasons = [
f"Output type rollout mode '{workflow_rollout_mode}' requires a workflow configured for graph or shadow execution.",
f"Linked workflow is still configured for '{configured_execution_mode}', so legacy runtime remains authoritative.",
]
if configured_execution_mode in {"graph", "shadow"} and workflow_rollout_mode == "graph":
try:
workflow_context = _prepare_graph_context("graph")
except Exception as exc:
@@ -225,13 +306,44 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
return legacy_result
try:
dispatch_result = execute_graph_workflow(session, workflow_context)
dispatch_result = execute_graph_workflow(
session,
workflow_context,
dispatch_tasks=False,
)
session.commit()
submit_prepared_workflow_tasks(dispatch_result)
except Exception as exc:
session.rollback()
session.add(run)
mark_workflow_run_failed(run, str(exc))
session.commit()
if isinstance(exc, WorkflowTaskSubmissionError) and exc.submitted_task_ids:
logger.exception(
"order_line %s: graph workflow submission partially failed after %d task(s); "
"not falling back to legacy to avoid duplicate renders",
order_line_id,
len(exc.submitted_task_ids),
)
return {
"backend": "workflow_graph",
"execution_mode": "graph",
"workflow_run_id": str(run.id),
"workflow_rollout_mode": workflow_rollout_mode,
"configured_execution_mode": configured_execution_mode,
"submission_status": "partial_failure",
"submitted_task_ids": exc.submitted_task_ids,
**_build_rollout_signal(
gate_status="graph_submission_failed",
ready=False,
reasons=[
"Graph workflow task submission failed after some tasks were already queued.",
f"Submission error: {exc}.",
],
workflow_def_id=wf_def.id,
output_type_id=output_type.id,
),
}
logger.exception(
"order_line %s: graph workflow execution via definition %s failed, falling back to legacy dispatch",
order_line_id,
@@ -257,6 +369,8 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
"workflow_run_id": str(run.id),
"celery_task_id": dispatch_result.task_ids[0] if dispatch_result.task_ids else None,
"task_ids": dispatch_result.task_ids,
"workflow_rollout_mode": workflow_rollout_mode,
"configured_execution_mode": configured_execution_mode,
}
result.update(
_build_rollout_signal(
@@ -267,10 +381,10 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
workflow_def_id=wf_def.id,
output_type_id=output_type.id,
)
)
)
return result
if execution_mode == "shadow":
if configured_execution_mode in {"graph", "shadow"} and workflow_rollout_mode == "shadow":
legacy_result = _legacy_dispatch(order_line_id)
try:
@@ -330,13 +444,43 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
return legacy_result
try:
dispatch_result = execute_graph_workflow(session, workflow_context)
dispatch_result = execute_graph_workflow(
session,
workflow_context,
dispatch_tasks=False,
)
session.commit()
submit_prepared_workflow_tasks(dispatch_result)
except Exception as exc:
session.rollback()
session.add(run)
mark_workflow_run_failed(run, str(exc))
session.commit()
if isinstance(exc, WorkflowTaskSubmissionError) and exc.submitted_task_ids:
logger.exception(
"order_line %s: shadow workflow submission partially failed after %d task(s); "
"legacy dispatch remains authoritative",
order_line_id,
len(exc.submitted_task_ids),
)
legacy_result["execution_mode"] = "shadow"
legacy_result["shadow_status"] = "partial_failure"
legacy_result["shadow_error"] = str(exc)
legacy_result["shadow_workflow_run_id"] = str(run.id)
legacy_result["shadow_submitted_task_ids"] = exc.submitted_task_ids
legacy_result.update(
_build_rollout_signal(
gate_status="shadow_submission_failed",
ready=False,
reasons=[
"Shadow workflow task submission failed after some tasks were already queued.",
f"Submission error: {exc}.",
],
workflow_def_id=wf_def.id,
output_type_id=output_type.id,
)
)
return legacy_result
logger.exception(
"order_line %s: shadow workflow execution via definition %s failed; legacy dispatch remains authoritative",
order_line_id,
@@ -364,6 +508,8 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
legacy_result["shadow_status"] = "dispatched"
legacy_result["shadow_workflow_run_id"] = str(run.id)
legacy_result["shadow_task_ids"] = dispatch_result.task_ids
legacy_result["workflow_rollout_mode"] = workflow_rollout_mode
legacy_result["configured_execution_mode"] = configured_execution_mode
legacy_result.update(
_build_rollout_signal(
gate_status="pending_shadow_verdict",
@@ -375,7 +521,7 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
workflow_def_id=wf_def.id,
output_type_id=output_type.id,
)
)
)
return legacy_result
workflow_type, params = extract_runtime_workflow(canonical_config)
@@ -519,12 +665,14 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
"execution_mode": "legacy",
"workflow_run_id": str(run.id),
"celery_task_id": celery_task_id,
"workflow_rollout_mode": workflow_rollout_mode,
"configured_execution_mode": configured_execution_mode,
}
result.update(
_build_rollout_signal(
gate_status="workflow_legacy_runtime",
gate_status=legacy_runtime_gate_status,
ready=False,
reasons=["Workflow definition is active, but execution still uses the legacy runtime path."],
reasons=legacy_runtime_reasons,
workflow_def_id=wf_def.id,
output_type_id=output_type.id,
)
+30 -2
View File
@@ -1,5 +1,6 @@
import uuid
from datetime import datetime
from typing import Any
from sqlalchemy import String, DateTime, Boolean, Text, Integer, Float, ForeignKey, Table, Column
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.dialects.postgresql import UUID, JSONB
@@ -15,6 +16,17 @@ render_template_output_types = Table(
)
VALID_RENDER_BACKENDS = {"celery"}
OUTPUT_TYPE_WORKFLOW_FAMILIES = {"cad_file", "order_line"}
OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES = {"legacy_only", "shadow", "graph"}
OUTPUT_TYPE_ARTIFACT_KINDS = {
"still_image",
"turntable_video",
"model_export",
"thumbnail_image",
"blend_asset",
"package",
"custom",
}
class OutputType(Base):
@@ -23,14 +35,21 @@ class OutputType(Base):
id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
name: Mapped[str] = mapped_column(String(200), unique=True, nullable=False)
description: Mapped[str | None] = mapped_column(Text, nullable=True)
renderer: Mapped[str] = mapped_column(String(50), nullable=False, default="threejs")
renderer: Mapped[str] = mapped_column(String(50), nullable=False, default="blender")
render_settings: Mapped[dict] = mapped_column(JSONB, nullable=False, default=dict)
output_format: Mapped[str] = mapped_column(String(20), nullable=False, default="png")
sort_order: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
compatible_categories: Mapped[list] = mapped_column(JSONB, default=list, server_default="[]")
render_backend: Mapped[str] = mapped_column(String(20), nullable=False, default="auto", server_default="auto")
render_backend: Mapped[str] = mapped_column(String(20), nullable=False, default="celery", server_default="auto")
is_animation: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false")
transparent_bg: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false")
workflow_family: Mapped[str] = mapped_column(
String(20), nullable=False, default="order_line", server_default="order_line"
)
artifact_kind: Mapped[str] = mapped_column(
String(50), nullable=False, default="still_image", server_default="still_image"
)
invocation_overrides: Mapped[dict] = mapped_column(JSONB, nullable=False, default=dict, server_default="{}")
cycles_device: Mapped[str | None] = mapped_column(String(10), nullable=True, default=None)
pricing_tier_id: Mapped[int | None] = mapped_column(
Integer, ForeignKey("pricing_tiers.id", ondelete="SET NULL"), nullable=True, index=True
@@ -49,6 +68,9 @@ class OutputType(Base):
workflow_definition_id: Mapped[uuid.UUID | None] = mapped_column(
UUID(as_uuid=True), ForeignKey("workflow_definitions.id", ondelete="SET NULL"), nullable=True
)
workflow_rollout_mode: Mapped[str] = mapped_column(
String(20), nullable=False, default="legacy_only", server_default="legacy_only"
)
order_lines: Mapped[list["OrderLine"]] = relationship("OrderLine", back_populates="output_type")
pricing_tier: Mapped["PricingTier | None"] = relationship("PricingTier", back_populates="output_types")
@@ -70,6 +92,12 @@ class RenderTemplate(Base):
lighting_only: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false")
shadow_catcher_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false")
camera_orbit: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, server_default="true")
workflow_input_schema: Mapped[list[dict[str, Any]]] = mapped_column(
JSONB,
nullable=False,
default=list,
server_default="[]",
)
is_active: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, server_default="true")
tenant_id: Mapped[uuid.UUID | None] = mapped_column(
UUID(as_uuid=True), ForeignKey("tenants.id"), nullable=True, index=True
@@ -3,6 +3,12 @@ from __future__ import annotations
from collections.abc import Mapping
from typing import Any, Literal
from app.core.process_steps import StepName
from app.domains.rendering.models import (
OUTPUT_TYPE_ARTIFACT_KINDS,
OUTPUT_TYPE_WORKFLOW_FAMILIES,
OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES,
)
from app.domains.rendering.workflow_config_utils import canonicalize_workflow_config
from app.domains.rendering.workflow_node_registry import get_node_definition
@@ -22,6 +28,11 @@ OutputTypeArtifactKind = Literal[
_MODEL_EXPORT_FORMATS = {"gltf", "glb", "stl", "obj", "usd", "usdz"}
_VIDEO_FORMATS = {"mp4", "webm", "mov"}
_IMAGE_FORMATS = {"png", "jpg", "jpeg", "webp"}
_BLEND_FORMATS = {"blend"}
_OUTPUT_FORMATS_BY_FAMILY: dict[OutputTypeWorkflowFamily, set[str]] = {
"cad_file": {*_IMAGE_FORMATS, *_MODEL_EXPORT_FORMATS},
"order_line": {*_IMAGE_FORMATS, *_VIDEO_FORMATS, *_BLEND_FORMATS},
}
_ARTIFACT_KINDS_BY_FAMILY: dict[OutputTypeWorkflowFamily, set[OutputTypeArtifactKind]] = {
"cad_file": {"thumbnail_image", "model_export", "package", "custom"},
"order_line": {"still_image", "turntable_video", "blend_asset", "package", "custom"},
@@ -42,6 +53,83 @@ INVOCATION_OVERRIDE_KEYS = (
"denoising_quality",
"denoising_use_gpu",
)
# Override keys that configure a single still-frame render invocation.
_STATIC_RENDER_OVERRIDE_KEYS = (
    "width",
    "height",
    "engine",
    "samples",
    "bg_color",
    "noise_threshold",
    "denoiser",
    "denoising_input_passes",
    "denoising_prefilter",
    "denoising_quality",
    "denoising_use_gpu",
)
# Additional keys that only apply to animated (turntable) renders.
_ANIMATION_OVERRIDE_KEYS = (
    "frame_count",
    "fps",
    "turntable_axis",
)
# Accepted values for the 'turntable_axis' override.
_TURNABLE_AXES = {"world_x", "world_y", "world_z"}
# The *_DISPLAY_ORDER tuples fix presentation order in the contract catalog;
# membership is still filtered against the authoritative model-level sets.
_WORKFLOW_FAMILY_DISPLAY_ORDER: tuple[OutputTypeWorkflowFamily, ...] = ("order_line", "cad_file")
_WORKFLOW_ROLLOUT_DISPLAY_ORDER: tuple[str, ...] = ("legacy_only", "shadow", "graph")
_ARTIFACT_KIND_DISPLAY_ORDER: tuple[OutputTypeArtifactKind, ...] = (
    "still_image",
    "turntable_video",
    "model_export",
    "thumbnail_image",
    "blend_asset",
    "package",
    "custom",
)
_OUTPUT_FORMAT_DISPLAY_ORDER: tuple[str, ...] = (
    "png",
    "jpg",
    "jpeg",
    "webp",
    "mp4",
    "webm",
    "mov",
    "gltf",
    "glb",
    "stl",
    "obj",
    "usd",
    "usdz",
    "blend",
)
# Default output format suggested per artifact kind in the catalog.
_DEFAULT_OUTPUT_FORMAT_BY_ARTIFACT_KIND: dict[OutputTypeArtifactKind, str] = {
    "still_image": "png",
    "turntable_video": "mp4",
    "model_export": "gltf",
    "thumbnail_image": "png",
    "blend_asset": "blend",
    "package": "png",
    "custom": "png",
}
# Parameter-ownership buckets: which knobs live on the output-type profile
# versus the render template versus individual workflow nodes.
_OUTPUT_TYPE_PROFILE_KEYS: tuple[str, ...] = (
    "transparent_bg",
    "cycles_device",
    "material_override",
)
_TEMPLATE_RUNTIME_KEYS: tuple[str, ...] = (
    "target_collection",
    "lighting_only",
    "shadow_catcher",
    "camera_orbit",
    "template_inputs",
)
# Workflow steps whose node definitions expose user-editable parameters
# (consumed by build_output_type_contract_catalog).
_WORKFLOW_NODE_PARAMETER_OWNERSHIP_STEPS: tuple[StepName, ...] = (
    StepName.RESOLVE_TEMPLATE,
    StepName.BLENDER_STILL,
    StepName.BLENDER_TURNTABLE,
    StepName.EXPORT_BLEND,
)


class InvalidInvocationOverridesError(ValueError):
    """Raised when invocation overrides fail validation or normalization."""

    pass
def list_allowed_artifact_kinds_for_family(
@@ -55,6 +143,79 @@ def list_allowed_artifact_kinds_for_family(
return tuple(sorted(allowed))
def list_allowed_output_formats_for_family(workflow_family: str) -> tuple[str, ...]:
    """Return the sorted output formats permitted for a workflow family.

    Blank or unrecognized family names fall back to the ``order_line`` family.
    """
    family = (workflow_family or "order_line").strip().lower()
    lookup_key = "cad_file" if family == "cad_file" else "order_line"
    return tuple(sorted(_OUTPUT_FORMATS_BY_FAMILY[lookup_key]))
def build_output_type_contract_catalog() -> dict[str, Any]:
    """Assemble the static output-type contract catalog for API consumers.

    The catalog lists valid workflow families, rollout modes, artifact kinds,
    per-family allow-lists, per-artifact-kind override keys and default
    formats, plus the parameter-ownership split between output-type profile,
    template runtime, and workflow nodes.
    """
    # Each list is the display-order tuple filtered through the authoritative
    # model-level set, so values removed from the models never leak out.
    workflow_families = [
        family for family in _WORKFLOW_FAMILY_DISPLAY_ORDER if family in OUTPUT_TYPE_WORKFLOW_FAMILIES
    ]
    workflow_rollout_modes = [
        mode for mode in _WORKFLOW_ROLLOUT_DISPLAY_ORDER if mode in OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES
    ]
    artifact_kinds = [
        artifact_kind
        for artifact_kind in _ARTIFACT_KIND_DISPLAY_ORDER
        if artifact_kind in OUTPUT_TYPE_ARTIFACT_KINDS
    ]
    allowed_artifact_kinds_by_family = {
        family: [
            artifact_kind
            for artifact_kind in artifact_kinds
            if artifact_kind in list_allowed_artifact_kinds_for_family(family)
        ]
        for family in workflow_families
    }
    allowed_output_formats_by_family = {
        family: [
            output_format
            for output_format in _OUTPUT_FORMAT_DISPLAY_ORDER
            if output_format in list_allowed_output_formats_for_family(family)
        ]
        for family in workflow_families
    }
    # turntable_video is the only intrinsically animated kind, so it alone
    # gets the animation override keys here.
    allowed_invocation_override_keys_by_artifact_kind = {
        artifact_kind: list(
            list_allowed_invocation_override_keys_for_artifact_kind(
                artifact_kind,
                is_animation=artifact_kind == "turntable_video",
            )
        )
        for artifact_kind in artifact_kinds
    }
    default_output_format_by_artifact_kind = {
        artifact_kind: _DEFAULT_OUTPUT_FORMAT_BY_ARTIFACT_KIND[artifact_kind]
        for artifact_kind in artifact_kinds
    }
    # Steps without a registered node definition are silently omitted.
    workflow_node_keys_by_step = {
        step.value: [field.key for field in definition.fields]
        for step in _WORKFLOW_NODE_PARAMETER_OWNERSHIP_STEPS
        if (definition := get_node_definition(step.value)) is not None
    }
    return {
        "workflow_families": workflow_families,
        "workflow_rollout_modes": workflow_rollout_modes,
        "artifact_kinds": artifact_kinds,
        "allowed_artifact_kinds_by_family": allowed_artifact_kinds_by_family,
        "allowed_output_formats_by_family": allowed_output_formats_by_family,
        "allowed_invocation_override_keys_by_artifact_kind": allowed_invocation_override_keys_by_artifact_kind,
        "default_output_format_by_artifact_kind": default_output_format_by_artifact_kind,
        "parameter_ownership": {
            "output_type_profile_keys": list(_OUTPUT_TYPE_PROFILE_KEYS),
            "template_runtime_keys": list(_TEMPLATE_RUNTIME_KEYS),
            "workflow_node_keys_by_step": workflow_node_keys_by_step,
        },
    }
def infer_output_type_artifact_kind(
output_format: str | None,
is_animation: bool,
@@ -65,6 +226,8 @@ def infer_output_type_artifact_kind(
if is_animation or normalized_format in _VIDEO_FORMATS:
return "turntable_video"
if normalized_format in _BLEND_FORMATS:
return "blend_asset"
if normalized_format in _MODEL_EXPORT_FORMATS:
return "model_export"
if normalized_family == "cad_file" and normalized_format in _IMAGE_FORMATS:
@@ -91,6 +254,14 @@ def validate_output_type_contract(
f"'{workflow_family}'. Allowed: {allowed}"
)
allowed_output_formats = list_allowed_output_formats_for_family(normalized_family)
if normalized_format and normalized_format not in allowed_output_formats:
allowed = ", ".join(allowed_output_formats)
raise ValueError(
f"Output format '{output_format}' is not allowed for workflow_family "
f"'{workflow_family}'. Allowed: {allowed}"
)
if normalized_family == "cad_file" and is_animation:
raise ValueError("CAD-file workflows do not support animated output types")
@@ -114,6 +285,20 @@ def validate_output_type_contract(
f"({', '.join(sorted(_MODEL_EXPORT_FORMATS))})"
)
if normalized_artifact == "blend_asset":
if is_animation:
raise ValueError("Artifact kind 'blend_asset' does not support is_animation=true")
if normalized_format and normalized_format not in _BLEND_FORMATS:
raise ValueError(
"Artifact kind 'blend_asset' requires a blend output_format "
f"({', '.join(sorted(_BLEND_FORMATS))})"
)
if normalized_format in _BLEND_FORMATS and normalized_artifact != "blend_asset":
raise ValueError(
f"Output format '{output_format}' requires artifact kind 'blend_asset'"
)
def infer_workflow_family_from_config(config: dict) -> ResolvedWorkflowFamily | None:
normalized = canonicalize_workflow_config(config)
@@ -121,6 +306,7 @@ def infer_workflow_family_from_config(config: dict) -> ResolvedWorkflowFamily |
definition.family
for node in normalized.get("nodes", [])
if (definition := get_node_definition(node.get("step"))) is not None
if definition.family in {"cad_file", "order_line"}
}
if not families:
return None
@@ -129,14 +315,329 @@ def infer_workflow_family_from_config(config: dict) -> ResolvedWorkflowFamily |
return next(iter(families))
def derive_workflow_terminal_node_ids(config: dict[str, Any]) -> tuple[str, ...]:
    """Return the sorted ids of graph sinks (nodes with no outgoing edge)."""
    normalized = canonicalize_workflow_config(config)
    all_nodes = normalized.get("nodes", [])
    if not all_nodes:
        return ()
    known_ids: set[str] = set()
    for node in all_nodes:
        raw_id = node.get("id")
        if raw_id not in (None, ""):
            known_ids.add(str(raw_id))
    edge_sources: set[str] = set()
    for edge in normalized.get("edges", []):
        source = edge.get("from")
        if source not in (None, ""):
            edge_sources.add(str(source))
    # A terminal node never appears as the source of any edge.
    return tuple(sorted(known_ids - edge_sources))
def derive_supported_artifact_kinds_from_workflow_config(
    config: dict[str, Any],
) -> tuple[OutputTypeArtifactKind, ...]:
    """Derive the artifact kinds a workflow config can produce.

    Walks the graph's terminal (sink) nodes and maps each terminal step to
    the artifact kind it emits. An ``output_save`` terminal is resolved from
    its transitive upstream render step (still vs turntable); a terminal with
    both upstream kinds is ambiguous and contributes nothing. Returns ``()``
    for configs that cannot be canonicalized or contain no nodes.
    """
    try:
        normalized = canonicalize_workflow_config(config)
    except Exception:
        # Capability probes should not propagate validation errors; a
        # malformed config simply "supports nothing".
        return ()
    nodes = normalized.get("nodes", [])
    if not nodes:
        return ()
    nodes_by_id = {
        str(node.get("id")): node
        for node in nodes
        if node.get("id") not in (None, "")
    }
    incoming_by_target: dict[str, set[str]] = {}
    for edge in normalized.get("edges", []):
        source = edge.get("from")
        target = edge.get("to")
        if source in (None, "") or target in (None, ""):
            continue
        incoming_by_target.setdefault(str(target), set()).add(str(source))
    cache: dict[str, set[str]] = {}

    def _collect_upstream_steps(node_id: str, in_progress: frozenset[str] = frozenset()) -> set[str]:
        # Transitive set of step names at/above node_id, memoized per node.
        cached = cache.get(node_id)
        if cached is not None:
            return set(cached)
        if node_id in in_progress:
            # Cycle guard: a malformed config with cyclic edges must not
            # recurse forever; treat the back-edge as contributing nothing.
            return set()
        steps: set[str] = set()
        node = nodes_by_id.get(node_id)
        if node is not None and node.get("step"):
            steps.add(str(node["step"]))
        for upstream_id in incoming_by_target.get(node_id, set()):
            steps.update(_collect_upstream_steps(upstream_id, in_progress | {node_id}))
        cache[node_id] = set(steps)
        return steps

    def _derive_node_artifact_kinds(node_id: str) -> set[OutputTypeArtifactKind]:
        # Map a single terminal node to the artifact kind(s) it produces.
        node = nodes_by_id.get(node_id)
        if node is None:
            return set()
        step = str(node.get("step") or "")
        if step in {StepName.BLENDER_STILL.value}:
            return {"still_image"}
        if step in {StepName.BLENDER_TURNTABLE.value}:
            return {"turntable_video"}
        if step in {StepName.EXPORT_BLEND.value}:
            return {"blend_asset"}
        if step in {
            StepName.OCC_GLB_EXPORT.value,
            StepName.STL_CACHE_GENERATE.value,
        }:
            return {"model_export"}
        if step == StepName.THUMBNAIL_SAVE.value:
            return {"thumbnail_image"}
        if step != StepName.OUTPUT_SAVE.value:
            return set()
        # output_save inherits its kind from the upstream render step.
        upstream_steps = _collect_upstream_steps(node_id)
        has_still = StepName.BLENDER_STILL.value in upstream_steps
        has_turntable = StepName.BLENDER_TURNTABLE.value in upstream_steps
        if has_still and has_turntable:
            # Ambiguous terminal; refuse to guess a single kind.
            return set()
        if has_turntable:
            return {"turntable_video"}
        if has_still:
            return {"still_image"}
        return set()

    supported: set[OutputTypeArtifactKind] = set()
    for terminal_id in derive_workflow_terminal_node_ids(normalized):
        supported.update(_derive_node_artifact_kinds(terminal_id))
    return tuple(sorted(supported))
def workflow_supports_artifact_kind(
    config: dict[str, Any],
    artifact_kind: str,
) -> bool:
    """Return True if the workflow config can produce *artifact_kind*."""
    wanted = (artifact_kind or "").strip().lower()
    if not wanted:
        return False
    supported = derive_supported_artifact_kinds_from_workflow_config(config)
    return wanted in supported
def list_allowed_invocation_override_keys_for_artifact_kind(
    artifact_kind: str,
    *,
    is_animation: bool = False,
) -> tuple[str, ...]:
    """Return the invocation-override keys permitted for *artifact_kind*.

    Export-style kinds carry no render parameters; package/custom allow the
    full key set; unknown kinds fall back on the ``is_animation`` flag.
    """
    kind = (artifact_kind or "").strip().lower()
    if kind in {"still_image", "thumbnail_image"}:
        return _STATIC_RENDER_OVERRIDE_KEYS
    if kind == "turntable_video":
        return _STATIC_RENDER_OVERRIDE_KEYS + _ANIMATION_OVERRIDE_KEYS
    if kind in {"model_export", "blend_asset"}:
        # Exports are parameterized entirely by the workflow, not overrides.
        return ()
    if kind in {"package", "custom"}:
        return INVOCATION_OVERRIDE_KEYS
    base = _STATIC_RENDER_OVERRIDE_KEYS
    return base + _ANIMATION_OVERRIDE_KEYS if is_animation else base
def _normalize_positive_int_override(key: str, value: Any) -> int:
if isinstance(value, bool):
raise InvalidInvocationOverridesError(f"Invocation override '{key}' must be a positive integer")
try:
normalized = int(str(value).strip()) if isinstance(value, str) else int(value)
except (TypeError, ValueError) as exc:
raise InvalidInvocationOverridesError(
f"Invocation override '{key}' must be a positive integer"
) from exc
if normalized <= 0:
raise InvalidInvocationOverridesError(f"Invocation override '{key}' must be greater than zero")
return normalized
def _normalize_string_override(key: str, value: Any) -> str:
if not isinstance(value, str):
raise InvalidInvocationOverridesError(f"Invocation override '{key}' must be a string")
normalized = value.strip()
if not normalized:
raise InvalidInvocationOverridesError(f"Invocation override '{key}' must not be blank")
return normalized
def _normalize_noise_threshold_override(value: Any) -> str:
if isinstance(value, bool):
raise InvalidInvocationOverridesError("Invocation override 'noise_threshold' must be a string or number")
if isinstance(value, (int, float)):
return str(value)
if isinstance(value, str) and value.strip():
return value.strip()
raise InvalidInvocationOverridesError("Invocation override 'noise_threshold' must be a string or number")
def _normalize_gpu_toggle_override(value: Any) -> str:
if isinstance(value, bool):
return "1" if value else "0"
if isinstance(value, int) and value in {0, 1}:
return str(value)
if isinstance(value, str):
normalized = value.strip().lower()
if normalized in {"1", "true", "enabled", "yes"}:
return "1"
if normalized in {"0", "false", "disabled", "no"}:
return "0"
raise InvalidInvocationOverridesError(
"Invocation override 'denoising_use_gpu' must be one of: 1, 0, true, false"
)
def _normalize_invocation_override_value(key: str, value: Any) -> int | str:
    """Dispatch per-key normalization for a single invocation override value."""
    if key in {"width", "height", "samples", "frame_count", "fps"}:
        return _normalize_positive_int_override(key, value)
    if key == "noise_threshold":
        return _normalize_noise_threshold_override(value)
    if key == "denoising_use_gpu":
        return _normalize_gpu_toggle_override(value)
    if key == "turntable_axis":
        axis = _normalize_string_override(key, value).lower()
        if axis in _TURNABLE_AXES:
            return axis
        raise InvalidInvocationOverridesError(
            "Invocation override 'turntable_axis' must be one of: world_x, world_y, world_z"
        )
    # Everything else is a plain non-blank string.
    return _normalize_string_override(key, value)
def validate_and_normalize_invocation_overrides(
raw: Mapping[str, Any] | None,
*,
artifact_kind: str | None = None,
is_animation: bool = False,
reject_unknown_keys: bool = False,
) -> dict[str, Any]:
if raw is None:
return {}
if not isinstance(raw, Mapping):
raise InvalidInvocationOverridesError("invocation_overrides must be an object")
normalized: dict[str, Any] = {}
unknown_keys: list[str] = []
for key, value in raw.items():
key_name = str(key)
if key_name not in INVOCATION_OVERRIDE_KEYS:
if reject_unknown_keys:
unknown_keys.append(key_name)
continue
if value in (None, ""):
continue
normalized[key_name] = _normalize_invocation_override_value(key_name, value)
if unknown_keys:
supported = ", ".join(INVOCATION_OVERRIDE_KEYS)
raise InvalidInvocationOverridesError(
f"Unsupported invocation override keys: {', '.join(sorted(unknown_keys))}. Supported: {supported}"
)
if artifact_kind is not None:
allowed_keys = set(
list_allowed_invocation_override_keys_for_artifact_kind(
artifact_kind,
is_animation=is_animation,
)
)
disallowed = sorted(key for key in normalized if key not in allowed_keys)
if disallowed:
raise InvalidInvocationOverridesError(
f"Invocation overrides not allowed for artifact kind '{artifact_kind}': {', '.join(disallowed)}"
)
return normalized
def resolve_output_type_invocation_overrides(
    render_settings: Mapping[str, Any] | None,
    invocation_overrides: Mapping[str, Any] | None,
    *,
    artifact_kind: str,
    is_animation: bool = False,
) -> dict[str, Any]:
    """Merge render settings with explicit overrides, then filter the result
    down to the keys permitted for *artifact_kind*."""
    allowed = set(
        list_allowed_invocation_override_keys_for_artifact_kind(
            artifact_kind,
            is_animation=is_animation,
        )
    )
    combined = merge_output_type_invocation_overrides(render_settings, invocation_overrides)
    return {key: value for key, value in combined.items() if key in allowed}
def build_output_type_invocation_profile(
    *,
    renderer: str,
    render_backend: str,
    workflow_family: str,
    artifact_kind: str,
    output_format: str | None,
    is_animation: bool,
    workflow_definition_id: Any = None,
    workflow_rollout_mode: str = "legacy_only",
    transparent_bg: bool = False,
    cycles_device: str | None = None,
    material_override: str | None = None,
    render_settings: Mapping[str, Any] | None = None,
    invocation_overrides: Mapping[str, Any] | None = None,
) -> dict[str, Any]:
    """Build the fully-resolved invocation profile dict for an output type.

    The artifact kind is inferred from format/animation/family when the
    explicit value is falsy, and the override set is resolved against the
    effective kind's allow-list.
    """
    # Fall back to inference when no explicit artifact kind was supplied.
    effective_kind = artifact_kind or infer_output_type_artifact_kind(
        output_format,
        is_animation,
        workflow_family,
    )
    overrides = resolve_output_type_invocation_overrides(
        render_settings,
        invocation_overrides,
        artifact_kind=effective_kind,
        is_animation=is_animation,
    )
    allowed_keys = list(
        list_allowed_invocation_override_keys_for_artifact_kind(
            effective_kind,
            is_animation=is_animation,
        )
    )
    profile: dict[str, Any] = {
        "renderer": renderer,
        "render_backend": render_backend,
        "workflow_family": workflow_family,
        "artifact_kind": effective_kind,
        "output_format": (output_format or "").strip().lower(),
        "is_animation": bool(is_animation),
        "workflow_definition_id": workflow_definition_id,
        "workflow_rollout_mode": workflow_rollout_mode,
        "transparent_bg": bool(transparent_bg),
        "cycles_device": cycles_device,
        "material_override": material_override,
        "allowed_override_keys": allowed_keys,
        "invocation_overrides": overrides,
    }
    return profile
def normalize_invocation_overrides(raw: Mapping[str, Any] | None) -> dict[str, Any]:
if not isinstance(raw, Mapping):
return {}
normalized: dict[str, Any] = {}
for key in INVOCATION_OVERRIDE_KEYS:
value = raw.get(key)
if value not in (None, ""):
normalized[key] = value
if value in (None, ""):
continue
try:
normalized[key] = _normalize_invocation_override_value(key, value)
except InvalidInvocationOverridesError:
continue
return normalized
+143 -3
View File
@@ -1,22 +1,27 @@
import uuid
from datetime import datetime
from pydantic import BaseModel
from pydantic import BaseModel, Field
class OutputTypeCreate(BaseModel):
name: str
description: str | None = None
renderer: str = "threejs"
renderer: str = "blender"
render_settings: dict = {}
output_format: str = "png"
sort_order: int = 0
is_active: bool = True
compatible_categories: list[str] = []
render_backend: str = "auto"
render_backend: str = "celery"
is_animation: bool = False
transparent_bg: bool = False
pricing_tier_id: int | None = None
cycles_device: str | None = None
workflow_family: str = "order_line"
artifact_kind: str | None = None
invocation_overrides: dict = {}
workflow_definition_id: uuid.UUID | None = None
workflow_rollout_mode: str = "legacy_only"
material_override: str | None = None
@@ -32,12 +37,43 @@ class OutputTypePatch(BaseModel):
render_backend: str | None = None
is_animation: bool | None = None
transparent_bg: bool | None = None
workflow_family: str | None = None
artifact_kind: str | None = None
invocation_overrides: dict | None = None
pricing_tier_id: int | None = None
cycles_device: str | None = None
workflow_definition_id: uuid.UUID | None = None
workflow_rollout_mode: str | None = None
material_override: str | None = None
class OutputTypeInvocationProfileOut(BaseModel):
    """Read model describing how renders for an output type are invoked."""

    renderer: str
    render_backend: str
    workflow_family: str
    artifact_kind: str
    output_format: str
    is_animation: bool
    # Optional graph workflow wiring; defaults to legacy-only rollout.
    workflow_definition_id: uuid.UUID | None = None
    workflow_rollout_mode: str = "legacy_only"
    transparent_bg: bool
    cycles_device: str | None = None
    material_override: str | None = None
    # Override keys permitted for this artifact kind, plus the stored values.
    allowed_override_keys: list[str] = Field(default_factory=list)
    invocation_overrides: dict = Field(default_factory=dict)
class OutputTypeContractCatalogOut(BaseModel):
    """Catalog of allowed values and constraints for output-type configuration."""

    workflow_families: list[str] = Field(default_factory=list)
    workflow_rollout_modes: list[str] = Field(default_factory=list)
    artifact_kinds: list[str] = Field(default_factory=list)
    # Per-family / per-artifact-kind constraint maps used by the admin UI.
    allowed_artifact_kinds_by_family: dict[str, list[str]] = Field(default_factory=dict)
    allowed_output_formats_by_family: dict[str, list[str]] = Field(default_factory=dict)
    allowed_invocation_override_keys_by_artifact_kind: dict[str, list[str]] = Field(default_factory=dict)
    default_output_format_by_artifact_kind: dict[str, str] = Field(default_factory=dict)
    parameter_ownership: dict[str, dict | list[str]] = Field(default_factory=dict)
class OutputTypeOut(BaseModel):
id: uuid.UUID
name: str
@@ -50,13 +86,18 @@ class OutputTypeOut(BaseModel):
render_backend: str
is_animation: bool
transparent_bg: bool
workflow_family: str
artifact_kind: str
invocation_overrides: dict
cycles_device: str | None = None
pricing_tier_id: int | None = None
pricing_tier_name: str | None = None
price_per_item: float | None = None
workflow_definition_id: uuid.UUID | None = None
workflow_rollout_mode: str
workflow_name: str | None = None
material_override: str | None = None
invocation_profile: OutputTypeInvocationProfileOut | None = None
is_active: bool
created_at: datetime
updated_at: datetime
@@ -159,11 +200,28 @@ class WorkflowDefinitionOut(BaseModel):
name: str
output_type_id: uuid.UUID | None
config: dict
family: str | None = None
supported_artifact_kinds: list[str] = Field(default_factory=list)
rollout_summary: "WorkflowRolloutSummaryOut" = Field(
default_factory=lambda: WorkflowRolloutSummaryOut()
)
is_active: bool
created_at: datetime
model_config = {"from_attributes": True}
class WorkflowDraftPreflightRequest(BaseModel):
    """Request payload to preflight an unsaved workflow draft config."""

    context_id: str
    config: dict
    # Existing workflow the draft belongs to, when editing one.
    workflow_id: uuid.UUID | None = None
class WorkflowDraftDispatchRequest(BaseModel):
    """Request payload to dispatch an unsaved workflow draft config."""

    context_id: str
    config: dict
    # Existing workflow the draft belongs to, when editing one.
    workflow_id: uuid.UUID | None = None
class WorkflowNodeResultOut(BaseModel):
id: uuid.UUID
node_name: str
@@ -190,6 +248,38 @@ class WorkflowRunOut(BaseModel):
model_config = {"from_attributes": True}
class WorkflowRolloutLatestRunOut(BaseModel):
    """Summary of the most recent workflow run shown in rollout views."""

    workflow_run_id: uuid.UUID
    execution_mode: str
    status: str
    created_at: datetime
    # None when the run has not completed.
    completed_at: datetime | None = None
class WorkflowRolloutLinkedOutputTypeOut(BaseModel):
    """Output type linked to a workflow, as listed in rollout summaries."""

    id: uuid.UUID
    name: str
    is_active: bool
    artifact_kind: str
    workflow_rollout_mode: str
class WorkflowRolloutSummaryOut(BaseModel):
    """Aggregated rollout state for a workflow definition."""

    linked_output_type_count: int = 0
    active_output_type_count: int = 0
    linked_output_type_names: list[str] = Field(default_factory=list)
    linked_output_types: list[WorkflowRolloutLinkedOutputTypeOut] = Field(default_factory=list)
    rollout_modes: list[str] = Field(default_factory=list)
    # Contract blockers preventing promotion, with human-readable reasons.
    has_blocking_contracts: bool = False
    blocking_reasons: list[str] = Field(default_factory=list)
    latest_run: WorkflowRolloutLatestRunOut | None = None
    latest_shadow_run: WorkflowRolloutLatestRunOut | None = None
    # Most recent rollout-gate evaluation results, when available.
    latest_rollout_gate_verdict: str | None = None
    latest_rollout_ready: bool | None = None
    latest_rollout_status: str | None = None
    latest_rollout_reasons: list[str] = Field(default_factory=list)
class WorkflowComparisonArtifactOut(BaseModel):
path: str | None
storage_key: str | None
@@ -208,8 +298,58 @@ class WorkflowRunComparisonOut(BaseModel):
execution_mode: str
status: str
summary: str
rollout_gate_verdict: str
workflow_rollout_ready: bool
workflow_rollout_status: str
rollout_reasons: list[str] = []
rollout_thresholds: dict[str, float] = Field(default_factory=dict)
authoritative_output: WorkflowComparisonArtifactOut
observer_output: WorkflowComparisonArtifactOut
exact_match: bool | None
dimensions_match: bool | None
mean_pixel_delta: float | None
class WorkflowPreflightIssueOut(BaseModel):
    """A single issue reported by workflow preflight validation."""

    severity: str
    code: str
    message: str
    # Set when the issue is tied to a specific graph node / step.
    node_id: str | None = None
    step: str | None = None
class WorkflowPreflightNodeOut(BaseModel):
    """Per-node verdict produced by workflow preflight validation."""

    node_id: str
    step: str
    label: str | None = None
    execution_kind: str
    supported: bool
    status: str
    # Field(default_factory=list) matches the default-list style used by the
    # other schemas in this module (rollout / catalog models).
    issues: list[WorkflowPreflightIssueOut] = Field(default_factory=list)
class WorkflowPreflightOut(BaseModel):
    """Overall result of preflighting a workflow config against a context."""

    workflow_id: uuid.UUID | None = None
    context_id: str
    context_kind: str | None = None
    expected_context_kind: str
    execution_mode: str
    graph_dispatch_allowed: bool
    summary: str
    # Resolved context targets, when the context could be looked up.
    resolved_order_line_id: uuid.UUID | None = None
    resolved_cad_file_id: uuid.UUID | None = None
    # Field(default_factory=list) matches the default-list style used by the
    # other schemas in this module.
    unsupported_node_ids: list[str] = Field(default_factory=list)
    issues: list[WorkflowPreflightIssueOut] = Field(default_factory=list)
    nodes: list[WorkflowPreflightNodeOut] = Field(default_factory=list)
class WorkflowOrderLineContextOptionOut(BaseModel):
    """Selectable order-line option for choosing a workflow test context."""

    value: uuid.UUID
    label: str
    meta: str
class WorkflowOrderLineContextGroupOut(BaseModel):
    """Order-line context options grouped under their parent order."""

    order_id: uuid.UUID
    order_label: str
    # Field(default_factory=list) matches the default-list style used by the
    # other schemas in this module.
    options: list[WorkflowOrderLineContextOptionOut] = Field(default_factory=list)
File diff suppressed because it is too large Load Diff
@@ -0,0 +1,146 @@
from __future__ import annotations
import json
import re
from collections import defaultdict
from typing import Any, Iterable, Mapping
# Custom-property names checked (in order) for a combined marker payload,
# either JSON ({"key": ..., "value": ...}) or "key=value" text.
_MARKER_PROP_NAMES = (
    "hartomat_template_input",
    "hartomat.template_input",
    "template_input",
    "schaeffler_template_input",
)

# Property names that carry only the marker key.
_MARKER_KEY_PROP_NAMES = (
    "hartomat_template_input_key",
    "hartomat.template_input_key",
    "template_input_key",
    "schaeffler_template_input_key",
)

# Property names that carry only the marker value.
_MARKER_VALUE_PROP_NAMES = (
    "hartomat_template_input_value",
    "hartomat.template_input_value",
    "template_input_value",
    "schaeffler_template_input_value",
)

# Fallback: object-name patterns encoding key/value pairs, e.g.
# "template_input__kind__gold", "template-input:kind=gold", "ti::kind::gold".
_NAME_PATTERNS = (
    re.compile(r"template_input__(?P<key>[^_]+)__(?P<value>[^_]+)", re.IGNORECASE),
    re.compile(r"template-input:(?P<key>[^=]+)=(?P<value>.+)", re.IGNORECASE),
    re.compile(r"ti::(?P<key>[^:]+)::(?P<value>.+)", re.IGNORECASE),
)
def _normalize_marker_token(value: Any) -> str | None:
if value is None:
return None
if isinstance(value, bool):
return "true" if value else "false"
text = str(value).strip()
return text or None
def _parse_marker_text(text: str) -> tuple[str, str] | None:
    """Parse a marker payload of the form ``{"key": .., "value": ..}`` or ``key=value``.

    Returns the normalized ``(key, value)`` pair, or ``None`` when neither
    form yields both a key and a value.
    """
    candidate = text.strip()
    if not candidate:
        return None

    # JSON payload first; fall through to key=value on any parse failure.
    if candidate.startswith("{"):
        try:
            parsed = json.loads(candidate)
        except Exception:
            parsed = None
        if isinstance(parsed, dict):
            marker_key = _normalize_marker_token(parsed.get("key"))
            marker_value = _normalize_marker_token(parsed.get("value"))
            if marker_key and marker_value:
                return marker_key, marker_value

    if "=" in candidate:
        left, _, right = candidate.partition("=")
        marker_key = _normalize_marker_token(left)
        marker_value = _normalize_marker_token(right)
        if marker_key and marker_value:
            return marker_key, marker_value
    return None
def extract_template_input_marker(
    *,
    name: str | None = None,
    props: Mapping[str, Any] | None = None,
) -> tuple[str, str] | None:
    """Extract a ``(key, value)`` template-input marker from props or name.

    Resolution order: a combined marker property, then split key/value
    properties, then name-pattern fallbacks. Returns ``None`` when no
    marker can be found.
    """
    properties = props or {}

    # 1) A single property carrying the whole marker payload.
    for candidate_prop in _MARKER_PROP_NAMES:
        payload_text = _normalize_marker_token(properties.get(candidate_prop))
        if payload_text:
            parsed = _parse_marker_text(payload_text)
            if parsed is not None:
                return parsed

    # 2) Separate key / value properties (first truthy of each wins).
    marker_key = None
    for candidate_prop in _MARKER_KEY_PROP_NAMES:
        marker_key = _normalize_marker_token(properties.get(candidate_prop))
        if marker_key:
            break
    marker_value = None
    for candidate_prop in _MARKER_VALUE_PROP_NAMES:
        marker_value = _normalize_marker_token(properties.get(candidate_prop))
        if marker_value:
            break
    if marker_key and marker_value:
        return marker_key, marker_value

    # 3) Fall back to parsing the object name against known patterns.
    object_name = (name or "").strip()
    if object_name:
        for pattern in _NAME_PATTERNS:
            match = pattern.search(object_name)
            if match is None:
                continue
            key_token = _normalize_marker_token(match.group("key"))
            value_token = _normalize_marker_token(match.group("value"))
            if key_token and value_token:
                return key_token, value_token
    return None
def suggest_workflow_input_schema(
    markers: Iterable[tuple[str, str]],
) -> list[dict[str, Any]]:
    """Suggest a UI input schema from observed ``(key, value)`` markers.

    Values are grouped per key; a key whose only values are ``true``/``false``
    becomes a boolean field, everything else a select field with sorted options.
    """
    grouped: dict[str, set[str]] = defaultdict(set)
    for marker_key, marker_value in markers:
        clean_key = _normalize_marker_token(marker_key)
        clean_value = _normalize_marker_token(marker_value)
        if clean_key and clean_value:
            grouped[clean_key].add(clean_value)

    suggestions: list[dict[str, Any]] = []
    for clean_key in sorted(grouped):
        choices = sorted(grouped[clean_key])
        if not choices:
            continue
        field_label = clean_key.replace("_", " ").strip().title()
        is_boolean = len(choices) == 2 and set(choices) == {"false", "true"}
        entry: dict[str, Any] = {
            "key": clean_key,
            "label": field_label,
            "type": "boolean" if is_boolean else "select",
            "section": "Template Inputs",
            # Sorted order puts "false" first, so the boolean default is
            # True only when "true" happens to sort first (it never does).
            "default": (choices[0] == "true") if is_boolean else choices[0],
        }
        if not is_boolean:
            entry["options"] = [
                {"value": choice, "label": choice.replace("_", " ").title()}
                for choice in choices
            ]
        suggestions.append(entry)
    return suggestions
@@ -18,6 +18,7 @@ def dispatch_workflow(
params = params or {}
builders = {
"still": _build_still,
"still_graph": _build_still,
"turntable": _build_turntable,
"multi_angle": _build_multi_angle,
"still_with_exports": _build_still_with_exports,
@@ -17,7 +17,7 @@ from app.domains.orders.models import OrderLine
from app.domains.rendering.models import WorkflowRun
from app.domains.rendering.schemas import WorkflowComparisonArtifactOut, WorkflowRunComparisonOut
ROLLOUT_PASS_MAX_MEAN_PIXEL_DELTA = 0.0
ROLLOUT_PASS_MAX_MEAN_PIXEL_DELTA = 1e-6
ROLLOUT_WARN_MAX_MEAN_PIXEL_DELTA = 0.02
@@ -217,6 +217,7 @@ def _find_shadow_file(order_line: OrderLine, workflow_run: WorkflowRun) -> str |
upload_root = Path(settings.upload_dir)
candidate_roots.append(upload_root / "renders" / str(order_line.id))
candidate_roots.append(upload_root / "step_files" / "renders" / str(order_line.id))
candidate_roots.append(upload_root / "step_files" / "renders")
seen_roots: set[Path] = set()
@@ -258,6 +259,13 @@ async def build_workflow_run_comparison(
authoritative_output = _build_artifact(authoritative_path)
observer_output = _build_artifact(observer_path)
rollout_gate = evaluate_rollout_gate(
authoritative_output=authoritative_output,
observer_output=observer_output,
exact_match=None,
dimensions_match=None,
mean_pixel_delta=None,
)
if not authoritative_output.exists:
status = "missing_authoritative"
@@ -283,9 +291,9 @@ async def build_workflow_run_comparison(
if exact_match:
status = "matched"
summary = "Observer output matches the authoritative legacy output byte-for-byte."
elif mean_pixel_delta == 0.0 and dimensions_match:
elif mean_pixel_delta is not None and mean_pixel_delta <= ROLLOUT_PASS_MAX_MEAN_PIXEL_DELTA and dimensions_match:
status = "matched"
summary = "Observer output matches the authoritative legacy output visually, but file metadata differs."
summary = "Observer output matches the authoritative legacy output within the visual pass threshold."
else:
status = "different"
if dimensions_match is False:
@@ -294,6 +302,13 @@ async def build_workflow_run_comparison(
summary = "Observer output differs from the authoritative output."
else:
summary = "Observer output differs from the authoritative output and could not be pixel-compared."
rollout_gate = evaluate_rollout_gate(
authoritative_output=authoritative_output,
observer_output=observer_output,
exact_match=exact_match,
dimensions_match=dimensions_match,
mean_pixel_delta=mean_pixel_delta,
)
return WorkflowRunComparisonOut(
workflow_run_id=workflow_run.id,
@@ -302,6 +317,14 @@ async def build_workflow_run_comparison(
execution_mode=workflow_run.execution_mode,
status=status,
summary=summary,
rollout_gate_verdict=str(rollout_gate["verdict"]),
workflow_rollout_ready=bool(rollout_gate["workflow_rollout_ready"]),
workflow_rollout_status=str(rollout_gate["workflow_rollout_status"]),
rollout_reasons=[str(reason) for reason in rollout_gate["reasons"]],
rollout_thresholds={
str(key): float(value)
for key, value in dict(rollout_gate["thresholds"]).items()
},
authoritative_output=authoritative_output.to_schema(),
observer_output=observer_output.to_schema(),
exact_match=exact_match,
@@ -21,6 +21,10 @@ _PRESET_TYPES = {
_EXECUTION_MODES = {"legacy", "graph", "shadow"}
_WORKFLOW_BLUEPRINTS = {"cad_intake", "order_rendering", "still_graph_reference"}
_WORKFLOW_STARTERS = {"cad_file", "order_line"}
_WORKFLOW_STARTER_BLUEPRINTS = {
"starter_cad_intake": "cad_file",
"starter_order_rendering": "order_line",
}
_NODE_TYPE_TO_STEP: dict[str, str] = {
"inputNode": StepName.RESOLVE_STEP_PATH.value,
@@ -72,7 +76,7 @@ def _extract_render_params_from_nodes(nodes: list[dict[str, Any]], step: StepNam
def _build_order_line_still_graph_nodes(render_params: dict[str, Any]) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]:
graph_render_params = deepcopy(render_params)
graph_render_params.setdefault("use_custom_render_settings", True)
graph_render_params.setdefault("use_custom_render_settings", False)
nodes = [
_make_node("setup", StepName.ORDER_LINE_SETUP, 0, 160, label="Order Line Setup"),
@@ -222,6 +226,7 @@ def build_preset_workflow_config(
"ui": {
"preset": preset_type,
"execution_mode": "graph" if preset_type == "still_graph" else "legacy",
"family": "order_line",
},
}
@@ -235,6 +240,7 @@ def build_workflow_blueprint_config(blueprint: str) -> dict[str, Any]:
_make_node("resolve_step", StepName.RESOLVE_STEP_PATH, 0, 180, label="Resolve STEP Path"),
_make_node("extract_objects", StepName.OCC_OBJECT_EXTRACT, 220, 180, label="Extract STEP Objects"),
_make_node("export_glb", StepName.OCC_GLB_EXPORT, 440, 180, label="Export GLB"),
_make_node("bbox", StepName.GLB_BBOX, 660, 120, label="Compute Bounding Box"),
_make_node("stl_cache", StepName.STL_CACHE_GENERATE, 660, 300, label="Generate STL Cache"),
_make_node(
"blender_thumb",
@@ -260,9 +266,11 @@ def build_workflow_blueprint_config(blueprint: str) -> dict[str, Any]:
edges = [
{"from": "resolve_step", "to": "extract_objects"},
{"from": "extract_objects", "to": "export_glb"},
{"from": "export_glb", "to": "bbox"},
{"from": "export_glb", "to": "stl_cache"},
{"from": "export_glb", "to": "blender_thumb"},
{"from": "export_glb", "to": "threejs_thumb"},
{"from": "bbox", "to": "threejs_thumb"},
{"from": "blender_thumb", "to": "save_blender_thumb"},
{"from": "threejs_thumb", "to": "save_threejs_thumb"},
]
@@ -329,6 +337,7 @@ def build_workflow_blueprint_config(blueprint: str) -> dict[str, Any]:
"ui": {
"preset": "custom",
"execution_mode": "graph" if blueprint == "still_graph_reference" else "legacy",
"family": "cad_file" if blueprint == "cad_intake" else "order_line",
"blueprint": blueprint,
},
}
@@ -356,6 +365,7 @@ def build_starter_workflow_config(family: str = "order_line") -> dict[str, Any]:
"ui": {
"preset": "custom",
"execution_mode": "legacy",
"family": family,
"blueprint": blueprint,
},
}
@@ -385,6 +395,7 @@ def _build_legacy_custom_render_fallback_config(params: dict[str, Any] | None =
"ui": {
"preset": "custom",
"execution_mode": "legacy",
"family": "order_line",
"blueprint": "starter_order_rendering",
},
}
@@ -480,9 +491,16 @@ def canonicalize_workflow_config(raw: dict[str, Any]) -> dict[str, Any]:
canonical["ui"].update(merged_ui)
return canonical
if blueprint == "still_graph_reference":
if blueprint in _WORKFLOW_BLUEPRINTS:
merged_ui = dict(normalized["ui"])
canonical = build_workflow_blueprint_config("still_graph_reference")
canonical = build_workflow_blueprint_config(blueprint)
merged_ui.setdefault("execution_mode", canonical["ui"]["execution_mode"])
canonical["ui"].update(merged_ui)
return canonical
if blueprint in _WORKFLOW_STARTER_BLUEPRINTS:
merged_ui = dict(normalized["ui"])
canonical = build_starter_workflow_config(_WORKFLOW_STARTER_BLUEPRINTS[blueprint])
merged_ui.setdefault("execution_mode", canonical["ui"]["execution_mode"])
canonical["ui"].update(merged_ui)
return canonical
@@ -25,7 +25,7 @@ from collections import deque
from dataclasses import dataclass, field
from typing import Literal
from app.domains.rendering.workflow_schema import WorkflowConfig, WorkflowNode
from app.domains.rendering.workflow_schema import WorkflowConfig, WorkflowEdge, WorkflowNode
from app.core.process_steps import StepName
logger = logging.getLogger(__name__)
@@ -40,6 +40,17 @@ class WorkflowContext:
execution_mode: WorkflowExecutionMode
workflow_run_id: uuid.UUID | None = None
ordered_nodes: list[WorkflowNode] = field(default_factory=list)
edges: list[WorkflowEdge] = field(default_factory=list)
@dataclass(slots=True)
class WorkflowTaskDispatchSpec:
node_id: str
task_name: str
args: list[str]
kwargs: dict
task_id: str
queue: str | None = None
@dataclass(slots=True)
@@ -48,6 +59,38 @@ class WorkflowDispatchResult:
task_ids: list[str]
node_task_ids: dict[str, str]
skipped_node_ids: list[str]
task_specs: list[WorkflowTaskDispatchSpec] = field(default_factory=list)
class WorkflowTaskSubmissionError(RuntimeError):
def __init__(self, message: str, *, submitted_task_ids: list[str] | None = None) -> None:
super().__init__(message)
self.submitted_task_ids = list(submitted_task_ids or [])
def submit_prepared_workflow_tasks(dispatch_result: WorkflowDispatchResult) -> None:
    """Submit pre-built Celery tasks after DB state has been committed.

    Specs are sent in order with their pre-assigned task ids (and queue, when
    set). On the first broker failure a :class:`WorkflowTaskSubmissionError`
    is raised carrying the ids that were already submitted.
    """
    from app.tasks.celery_app import celery_app

    sent_task_ids: list[str] = []
    for spec in dispatch_result.task_specs:
        send_options: dict[str, str] = {"task_id": spec.task_id}
        if spec.queue:
            send_options["queue"] = spec.queue
        try:
            celery_app.send_task(
                spec.task_name,
                args=spec.args,
                kwargs=spec.kwargs,
                **send_options,
            )
        except Exception as exc:
            raise WorkflowTaskSubmissionError(
                f"Failed to submit workflow task for node '{spec.node_id}': {exc}",
                submitted_task_ids=sent_task_ids,
            ) from exc
        sent_task_ids.append(spec.task_id)
# ---------------------------------------------------------------------------
@@ -65,7 +108,7 @@ STEP_TASK_MAP: dict[StepName, str] = {
StepName.STL_CACHE_GENERATE: "app.tasks.step_tasks.process_step_file",
# ── Thumbnail generation ─────────────────────────────────────────────
StepName.BLENDER_RENDER: "app.tasks.step_tasks.render_step_thumbnail",
StepName.THUMBNAIL_SAVE: "app.tasks.step_tasks.render_step_thumbnail",
StepName.THUMBNAIL_SAVE: "app.tasks.step_tasks.render_graph_thumbnail",
# ── Order line stills & turntables ──────────────────────────────────
StepName.BLENDER_STILL: "app.domains.rendering.tasks.render_order_line_still_task",
StepName.BLENDER_TURNTABLE: "app.domains.rendering.tasks.render_turntable_task",
@@ -98,6 +141,7 @@ def prepare_workflow_context(
execution_mode=execution_mode,
workflow_run_id=workflow_run_id,
ordered_nodes=ordered_nodes,
edges=list(config.edges),
)
@@ -12,12 +12,19 @@ from sqlalchemy import select
from sqlalchemy.orm import Session, selectinload
from app.config import settings
from app.core.render_paths import build_order_line_export_path, build_order_line_step_render_path
from app.core.process_steps import StepName
from app.domains.products.models import CadFile
from app.domains.rendering.models import WorkflowNodeResult, WorkflowRun
from app.domains.rendering.workflow_executor import STEP_TASK_MAP, WorkflowContext, WorkflowDispatchResult
from app.domains.rendering.workflow_executor import (
STEP_TASK_MAP,
WorkflowContext,
WorkflowDispatchResult,
WorkflowTaskDispatchSpec,
)
from app.domains.rendering.workflow_node_registry import get_node_definition
from app.domains.rendering.workflow_runtime_services import (
_resolve_render_output_extension,
AutoPopulateMaterialsResult,
BBoxResolutionResult,
MaterialResolutionResult,
@@ -25,6 +32,7 @@ from app.domains.rendering.workflow_runtime_services import (
TemplateResolutionResult,
auto_populate_materials_for_cad,
build_order_line_render_invocation,
extract_template_input_overrides,
prepare_order_line_render_context,
resolve_cad_bbox,
resolve_order_line_material_map,
@@ -89,11 +97,13 @@ _STILL_TASK_KEYS = {
"material_override",
"render_engine",
"resolution",
"template_inputs",
}
_TURNTABLE_TASK_KEYS = {
"output_name",
"engine",
"render_engine",
"samples",
"smooth_angle",
"cycles_device",
@@ -119,6 +129,8 @@ _TURNTABLE_TASK_KEYS = {
"focal_length_mm",
"sensor_width_mm",
"material_override",
"template_inputs",
"duration_s",
}
_THUMBNAIL_TASK_KEYS = {
@@ -144,13 +156,62 @@ _AUTHORITATIVE_RENDER_SETTING_KEYS = {
"denoising_prefilter",
"denoising_quality",
"denoising_use_gpu",
"camera_orbit",
"focal_length_mm",
"sensor_width_mm",
"bg_color",
}
def _inspect_active_worker_queues(timeout: float = 1.0) -> set[str]:
    """Return the queue names currently consumed by live Celery workers.

    Best-effort: any broker/inspection error is logged at INFO level and an
    empty set is returned instead of raising.
    """
    from app.tasks.celery_app import celery_app

    try:
        inspector = celery_app.control.inspect(timeout=timeout)
        active_queues = inspector.active_queues() or {}
    except Exception as exc:
        logger.info("[WORKFLOW] Could not inspect active Celery queues: %s", exc)
        return set()

    discovered: set[str] = set()
    for worker_queues in active_queues.values():
        for queue_info in worker_queues or []:
            if not isinstance(queue_info, dict):
                continue
            raw_name = queue_info.get("name")
            if isinstance(raw_name, str) and raw_name.strip():
                discovered.add(raw_name.strip())
    return discovered
def _resolve_shadow_render_queue(
    *,
    workflow_context: WorkflowContext,
    node,
    active_queue_names: set[str],
) -> str | None:
    """Pick a dedicated queue for heavy shadow-mode render nodes, if usable.

    Returns ``None`` (default routing) for non-shadow runs, non-render steps,
    an unset/default queue preference, or when no live worker serves the
    preferred queue.
    """
    if workflow_context.execution_mode != "shadow":
        return None

    heavy_render_steps = {
        StepName.BLENDER_STILL,
        StepName.BLENDER_TURNTABLE,
        StepName.EXPORT_BLEND,
    }
    if node.step not in heavy_render_steps:
        return None

    preferred_queue = (settings.workflow_shadow_render_queue or "").strip()
    # "asset_pipeline" is the default route, so it needs no override.
    if not preferred_queue or preferred_queue == "asset_pipeline":
        return None
    if preferred_queue in active_queue_names:
        return preferred_queue

    logger.info(
        "[WORKFLOW] Preferred shadow render queue %s unavailable for node %s; using default routing",
        preferred_queue,
        node.id,
    )
    return None
def _filter_graph_render_overrides(step: StepName, params: dict[str, Any]) -> dict[str, Any]:
normalized = dict(params)
use_custom_render_settings = bool(normalized.pop("use_custom_render_settings", False))
@@ -186,6 +247,8 @@ def find_unsupported_graph_nodes(workflow_context: WorkflowContext) -> list[str]
def execute_graph_workflow(
session: Session,
workflow_context: WorkflowContext,
*,
dispatch_tasks: bool = True,
) -> WorkflowDispatchResult:
if workflow_context.workflow_run_id is None:
raise ValueError("workflow_context.workflow_run_id is required for graph execution")
@@ -201,6 +264,12 @@ def execute_graph_workflow(
task_ids: list[str] = []
node_task_ids: dict[str, str] = {}
skipped_node_ids: list[str] = []
task_specs: list[WorkflowTaskDispatchSpec] = []
active_queue_names = (
_inspect_active_worker_queues()
if workflow_context.execution_mode == "shadow"
else set()
)
for node in workflow_context.ordered_nodes:
node_result = node_results.get(node.id)
@@ -326,8 +395,6 @@ def execute_graph_workflow(
skipped_node_ids.append(node.id)
continue
from app.tasks.celery_app import celery_app
task_kwargs = _build_task_kwargs(
session=session,
workflow_context=workflow_context,
@@ -335,12 +402,42 @@ def execute_graph_workflow(
node=node,
)
result = celery_app.send_task(
task_name,
args=[workflow_context.context_id],
kwargs=task_kwargs,
target_queue = _resolve_shadow_render_queue(
workflow_context=workflow_context,
node=node,
active_queue_names=active_queue_names,
)
metadata["task_id"] = result.id
if dispatch_tasks:
from app.tasks.celery_app import celery_app
if target_queue:
result = celery_app.send_task(
task_name,
args=[workflow_context.context_id],
kwargs=task_kwargs,
queue=target_queue,
)
else:
result = celery_app.send_task(
task_name,
args=[workflow_context.context_id],
kwargs=task_kwargs,
)
task_id = result.id
else:
task_id = str(uuid.uuid4())
task_specs.append(
WorkflowTaskDispatchSpec(
node_id=node.id,
task_name=task_name,
args=[workflow_context.context_id],
kwargs=dict(task_kwargs),
task_id=task_id,
queue=target_queue,
)
)
metadata["task_id"] = task_id
metadata["task_queue"] = target_queue or "asset_pipeline"
if definition is not None:
metadata["execution_kind"] = definition.execution_kind
metadata["attempt_count"] = 1
@@ -360,15 +457,15 @@ def execute_graph_workflow(
node_result.duration_s = None
state.node_outputs[node.id] = dict(metadata)
session.flush()
task_ids.append(result.id)
node_task_ids[node.id] = result.id
task_ids.append(task_id)
node_task_ids[node.id] = task_id
logger.info(
"[WORKFLOW] Dispatched node %r (step=%s, mode=%s, run=%s) -> Celery task %s",
node.id,
node.step,
workflow_context.execution_mode,
workflow_context.workflow_run_id,
result.id,
task_id,
)
continue
@@ -397,6 +494,7 @@ def execute_graph_workflow(
task_ids=task_ids,
node_task_ids=node_task_ids,
skipped_node_ids=skipped_node_ids,
task_specs=task_specs,
)
@@ -466,8 +564,15 @@ def _serialize_template_result(result: TemplateResolutionResult) -> dict[str, An
"material_map_count": len(result.material_map or {}),
"use_materials": result.use_materials,
"override_material": result.override_material,
"target_collection": result.target_collection,
"lighting_only": result.lighting_only,
"shadow_catcher": result.shadow_catcher,
"camera_orbit": result.camera_orbit,
"category_key": result.category_key,
"output_type_id": result.output_type_id,
"workflow_input_schema": result.workflow_input_schema,
"template_inputs": result.template_inputs,
"template_input_count": len(result.template_inputs or {}),
}
@@ -597,13 +702,17 @@ def _predict_task_output_metadata(
order_line_id = str(state.setup.order_line.id)
if node.step == StepName.BLENDER_STILL:
output_dir = step_path.parent / "renders"
output_filename = f"line_{order_line_id}.png"
output_extension = _resolve_render_output_extension(state.setup.order_line)
if output_extension not in {"png", "jpg", "webp"}:
output_extension = "png"
output_filename = f"line_{order_line_id}.{output_extension}"
if output_name_suffix:
output_filename = f"line_{order_line_id}_{output_name_suffix}.png"
output_filename = f"line_{order_line_id}_{output_name_suffix}.{output_extension}"
return {
"artifact_role": "render_output",
"predicted_output_path": str(output_dir / output_filename),
"predicted_output_path": str(
build_order_line_step_render_path(step_path, order_line_id, output_filename)
),
"predicted_asset_type": "still",
"publish_asset_enabled": bool(task_kwargs.get("publish_asset_enabled", True)),
"graph_authoritative_output_enabled": bool(
@@ -618,9 +727,10 @@ def _predict_task_output_metadata(
output_filename = f"{step_path.stem}_production.blend"
if output_name_suffix:
output_filename = f"{step_path.stem}_production_{output_name_suffix}.blend"
predicted_output_path = str(build_order_line_export_path(order_line_id, output_filename))
return {
"artifact_role": "blend_export",
"predicted_output_path": str(step_path.parent / output_filename),
"predicted_output_path": predicted_output_path,
"predicted_asset_type": "blend_production",
"publish_asset_enabled": bool(task_kwargs.get("publish_asset_enabled", True)),
"graph_authoritative_output_enabled": bool(
@@ -641,7 +751,9 @@ def _predict_task_output_metadata(
if isinstance(output_dir, str) and output_dir.strip():
predicted_output_path = str(Path(output_dir) / f"{output_name}.mp4")
else:
predicted_output_path = str(step_path.parent / "renders" / f"{output_name}.mp4")
predicted_output_path = str(
build_order_line_step_render_path(step_path, order_line_id, f"{output_name}.mp4")
)
return {
"artifact_role": "turntable_output",
"predicted_output_path": predicted_output_path,
@@ -733,6 +845,30 @@ def _resolve_thumbnail_request(
return None
def _normalize_turntable_task_kwargs(task_kwargs: dict[str, Any]) -> dict[str, Any]:
normalized = dict(task_kwargs)
raw_duration = normalized.get("duration_s")
if raw_duration in (None, ""):
return normalized
try:
duration_s = float(raw_duration)
except (TypeError, ValueError):
return normalized
try:
fps = int(float(normalized.get("fps", 0)))
except (TypeError, ValueError):
return normalized
if duration_s <= 0 or fps <= 0:
return normalized
normalized["duration_s"] = duration_s
normalized["frame_count"] = max(1, int(round(duration_s * fps)))
return normalized
def _build_task_kwargs(
*,
session: Session,
@@ -751,6 +887,7 @@ def _build_task_kwargs(
template_context=state.template,
position_context=resolve_render_position_context(session, state.setup.order_line),
material_context=state.materials,
artifact_kind_override=_artifact_kind_override_for_step(node.step),
)
render_defaults = render_invocation.task_defaults()
@@ -774,6 +911,15 @@ def _build_task_kwargs(
}.items()
if key in _TURNTABLE_TASK_KEYS
}
task_kwargs = _normalize_turntable_task_kwargs(task_kwargs)
if state.setup is not None and state.setup.is_ready and state.setup.cad_file is not None:
task_kwargs["output_dir"] = str(
build_order_line_step_render_path(
state.setup.cad_file.stored_path,
str(state.setup.order_line.id),
"turntable.mp4",
).parent
)
elif node.step == StepName.THUMBNAIL_SAVE:
thumbnail_request = _resolve_thumbnail_request(workflow_context, state, node.id) or {}
task_kwargs = {
@@ -787,7 +933,7 @@ def _build_task_kwargs(
task_kwargs["workflow_run_id"] = str(workflow_context.workflow_run_id)
task_kwargs["workflow_node_id"] = node.id
if workflow_context.execution_mode == "graph" and node.step in {
if workflow_context.execution_mode in {"graph", "shadow"} and node.step in {
StepName.BLENDER_STILL,
StepName.EXPORT_BLEND,
StepName.BLENDER_TURNTABLE,
@@ -798,19 +944,23 @@ def _build_task_kwargs(
step=StepName.OUTPUT_SAVE,
direction="downstream",
)
connected_notify_node_ids = _connected_node_ids_by_step(
workflow_context,
node_id=node.id,
step=StepName.NOTIFY,
direction="downstream",
)
if connected_output_node_ids:
task_kwargs["publish_asset_enabled"] = False
task_kwargs["graph_authoritative_output_enabled"] = True
task_kwargs["graph_output_node_ids"] = connected_output_node_ids
if connected_notify_node_ids:
task_kwargs["emit_legacy_notifications"] = True
task_kwargs["graph_notify_node_ids"] = connected_notify_node_ids
if workflow_context.execution_mode == "graph":
task_kwargs["graph_authoritative_output_enabled"] = True
else:
task_kwargs["observer_output_enabled"] = True
if workflow_context.execution_mode == "graph":
connected_notify_node_ids = _connected_node_ids_by_step(
workflow_context,
node_id=node.id,
step=StepName.NOTIFY,
direction="downstream",
)
if connected_notify_node_ids:
task_kwargs["emit_legacy_notifications"] = True
task_kwargs["graph_notify_node_ids"] = connected_notify_node_ids
if workflow_context.execution_mode == "shadow":
task_kwargs["publish_asset_enabled"] = False
task_kwargs["emit_events"] = False
@@ -819,6 +969,16 @@ def _build_task_kwargs(
return task_kwargs
def _artifact_kind_override_for_step(step: StepName) -> str | None:
    """Map heavy render steps to the artifact kind their tasks produce.

    Returns ``None`` for steps that should not override the artifact kind.
    """
    step_to_kind = {
        StepName.BLENDER_TURNTABLE: "turntable_video",
        StepName.BLENDER_STILL: "still_image",
        StepName.EXPORT_BLEND: "blend_asset",
    }
    return step_to_kind.get(step)
def _execute_order_line_setup(
*,
session: Session,
@@ -857,12 +1017,25 @@ def _execute_resolve_template(
node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
del node
del workflow_context, node_params
del workflow_context
if state.setup is None or not state.setup.is_ready:
if state.setup is not None and state.setup.status == "skip":
return _serialize_setup_result(state.setup), "skipped", state.setup.reason
raise WorkflowGraphRuntimeError("resolve_template requires a ready order_line_setup result")
result = resolve_order_line_template_context(session, state.setup)
result = resolve_order_line_template_context(
session,
state.setup,
template_id_override=node_params.get("template_id_override"),
material_library_path_override=node_params.get("material_library_path"),
require_template=bool(node_params.get("require_template", False)),
disable_materials=bool(node_params.get("disable_materials", False)),
target_collection_override=node_params.get("target_collection"),
material_replace_mode=node_params.get("material_replace_mode"),
lighting_only_mode=node_params.get("lighting_only_mode"),
shadow_catcher_mode=node_params.get("shadow_catcher_mode"),
camera_orbit_mode=node_params.get("camera_orbit_mode"),
template_input_overrides=extract_template_input_overrides(node_params),
)
state.template = result
return _serialize_template_result(result), "completed", None
@@ -876,7 +1049,7 @@ def _execute_material_map_resolve(
node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
del node
del session, workflow_context, node_params
del session, workflow_context
if state.setup is None or not state.setup.is_ready:
if state.setup is not None and state.setup.status == "skip":
return _serialize_setup_result(state.setup), "skipped", state.setup.reason
@@ -895,6 +1068,8 @@ def _execute_material_map_resolve(
state.setup.materials_source,
material_library=material_library,
template=template,
material_override=node_params.get("material_override"),
disable_materials=bool(node_params.get("disable_materials", False)),
)
state.materials = result
return _serialize_material_result(result), "completed", None
@@ -909,26 +1084,45 @@ def _execute_auto_populate_materials(
node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
del node
del node_params
if state.setup is None or state.setup.cad_file is None:
if state.setup is not None and state.setup.status == "skip":
return _serialize_setup_result(state.setup), "skipped", state.setup.reason
raise WorkflowGraphRuntimeError("auto_populate_materials requires a resolved cad_file")
shadow_mode = workflow_context.execution_mode == "shadow"
persist_updates = bool(node_params.get("persist_updates", not shadow_mode))
if shadow_mode:
persist_updates = False
refresh_material_source = bool(node_params.get("refresh_material_source", True))
include_populated_products = bool(node_params.get("include_populated_products", False))
if shadow_mode:
result = auto_populate_materials_for_cad(
session,
str(state.setup.cad_file.id),
persist_updates=False,
include_populated_products=include_populated_products,
)
else:
result = auto_populate_materials_for_cad(session, str(state.setup.cad_file.id))
result = auto_populate_materials_for_cad(
session,
str(state.setup.cad_file.id),
persist_updates=persist_updates,
include_populated_products=include_populated_products,
)
state.auto_populate = result
if not shadow_mode and state.setup.order_line is not None and state.setup.order_line.product is not None:
if (
persist_updates
and refresh_material_source
and not shadow_mode
and state.setup.order_line is not None
and state.setup.order_line.product is not None
):
session.refresh(state.setup.order_line.product)
state.setup.materials_source = state.setup.order_line.product.cad_part_materials or []
payload = _serialize_auto_populate_result(result)
payload["shadow_mode"] = shadow_mode
payload["persist_updates"] = persist_updates
payload["refresh_material_source"] = refresh_material_source
payload["include_populated_products"] = include_populated_products
return payload, "completed", None
@@ -949,17 +1143,31 @@ def _execute_glb_bbox(
step_path = state.setup.cad_file.stored_path
glb_path = node_params.get("glb_path")
if glb_path is None and state.setup.glb_reuse_path is not None:
source_preference = str(node_params.get("source_preference") or "auto")
if glb_path is None and source_preference != "step_only" and state.setup.glb_reuse_path is not None:
glb_path = str(state.setup.glb_reuse_path)
elif glb_path is None:
elif glb_path is None and source_preference != "step_only":
step_file = Path(step_path)
fallback_glb = step_file.parent / f"{step_file.stem}_thumbnail.glb"
if fallback_glb.exists():
glb_path = str(fallback_glb)
if source_preference == "glb_only" and not glb_path:
payload = {
"bbox_data": None,
"has_bbox": False,
"source_kind": "none",
"step_path": step_path,
"glb_path": None,
"source_preference": source_preference,
}
return payload, "failed", "glb_only requested but no GLB artifact is available"
result = resolve_cad_bbox(step_path, glb_path=glb_path)
state.bbox = result
return _serialize_bbox_result(result), "completed", None
payload = _serialize_bbox_result(result)
payload["source_preference"] = source_preference
return payload, "completed", None
def _execute_resolve_step_path(
@@ -1069,7 +1277,7 @@ def _execute_output_save(
node,
node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
del session, node_params
del session
if state.setup is None or state.setup.order_line is None:
raise WorkflowGraphRuntimeError("output_save requires an order_line_setup result")
@@ -1085,19 +1293,42 @@ def _execute_output_save(
"shadow_mode": workflow_context.execution_mode == "shadow",
}
upstream_artifacts = _connected_upstream_artifacts(workflow_context, state, node.id)
expected_artifact_role = str(node_params.get("expected_artifact_role") or "").strip() or None
require_upstream_artifact = bool(node_params.get("require_upstream_artifact", False))
if expected_artifact_role is not None:
upstream_artifacts = [
artifact for artifact in upstream_artifacts if artifact.get("artifact_role") == expected_artifact_role
]
if workflow_context.execution_mode == "shadow":
payload["publication_mode"] = "shadow_observer_only"
elif any(artifact["publish_asset_enabled"] for artifact in upstream_artifacts):
payload["publication_mode"] = "deferred_to_render_task"
else:
payload["publication_mode"] = "awaiting_graph_authoritative_save"
payload["expected_artifact_role"] = expected_artifact_role
payload["require_upstream_artifact"] = require_upstream_artifact
if upstream_artifacts:
payload["artifact_count"] = len(upstream_artifacts)
payload["upstream_artifacts"] = upstream_artifacts
elif require_upstream_artifact:
payload["artifact_count"] = 0
return payload, "failed", "No upstream render artifact is connected to this output node"
if state.template is not None and state.template.template is not None:
payload["template_name"] = state.template.template.name
if state.materials is not None:
payload["material_map_count"] = len(state.materials.material_map or {})
deferred_handoff_node_ids = [
str(artifact.get("node_id"))
for artifact in upstream_artifacts
if artifact.get("task_id")
]
if deferred_handoff_node_ids:
payload["handoff_state"] = "armed"
payload["handoff_node_ids"] = deferred_handoff_node_ids
payload["handoff_node_count"] = len(deferred_handoff_node_ids)
return payload, "pending", None
return payload, "completed", None
@@ -1109,7 +1340,7 @@ def _execute_notify(
node,
node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
del session, node_params
del session
if state.setup is None or state.setup.order_line is None:
raise WorkflowGraphRuntimeError("notify requires an order_line_setup result")
@@ -1121,8 +1352,10 @@ def _execute_notify(
payload: dict[str, Any] = {
"order_line_id": str(state.setup.order_line.id),
"shadow_mode": workflow_context.execution_mode == "shadow",
"channel": "audit_log",
"channel": str(node_params.get("channel") or "audit_log"),
}
require_armed_render = bool(node_params.get("require_armed_render", False))
payload["require_armed_render"] = require_armed_render
if workflow_context.execution_mode == "shadow":
payload["notification_mode"] = "shadow_suppressed"
@@ -1136,12 +1369,15 @@ def _execute_notify(
]
if not armed_node_ids:
payload["notification_mode"] = "not_armed"
if require_armed_render:
return payload, "failed", "No graph render task is configured for notification handoff"
return payload, "skipped", "No graph render task is configured for notification handoff"
payload["notification_mode"] = "deferred_to_render_task"
payload["armed_node_ids"] = armed_node_ids
payload["armed_node_count"] = len(armed_node_ids)
return payload, "completed", None
payload["handoff_state"] = "armed"
return payload, "pending", None
_BRIDGE_EXECUTORS = {
@@ -10,7 +10,17 @@ from app.core.process_steps import StepName
StepCategory = Literal["input", "processing", "rendering", "output"]
FieldType = Literal["number", "select", "boolean", "text"]
ExecutionKind = Literal["native", "bridge"]
WorkflowNodeFamily = Literal["cad_file", "order_line"]
WorkflowNodeFamily = Literal["cad_file", "order_line", "shared"]
TextFormat = Literal[
"plain",
"uuid",
"absolute_path",
"absolute_blend_path",
"absolute_glb_path",
"float_string",
"hex_color",
"safe_filename_suffix",
]
class WorkflowNodeFieldOption(BaseModel):
@@ -30,6 +40,9 @@ class WorkflowNodeFieldDefinition(BaseModel):
step: float | None = None
unit: str | None = None
options: list[WorkflowNodeFieldOption] = []
allow_blank: bool = True
max_length: int | None = None
text_format: TextFormat = "plain"
class WorkflowNodeDefinition(BaseModel):
@@ -65,6 +78,9 @@ def _field(
step: float | None = None,
unit: str | None = None,
options: list[tuple[str | int | float | bool, str]] | None = None,
allow_blank: bool = True,
max_length: int | None = None,
text_format: TextFormat = "plain",
) -> WorkflowNodeFieldDefinition:
return WorkflowNodeFieldDefinition(
key=key,
@@ -81,6 +97,9 @@ def _field(
WorkflowNodeFieldOption(value=value, label=option_label)
for value, option_label in (options or [])
],
allow_blank=allow_blank,
max_length=max_length,
text_format=text_format,
)
@@ -169,7 +188,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"cad_file",
"cad.export_glb",
"processing",
"Convert STEP geometry into GLB for previews and downstream rendering.",
"Convert STEP geometry into GLB for previews and downstream rendering. Uses the system tessellation profile; this node does not expose per-node overrides yet.",
node_type="processNode",
icon="refresh-cw",
execution_kind="bridge",
@@ -181,10 +200,10 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
_definition(
StepName.GLB_BBOX,
"Compute Bounding Box",
"order_line",
"shared",
"geometry.compute_bbox",
"processing",
"Compute the model bounding box from the exported GLB for framing decisions.",
"Compute the model bounding box from a prepared GLB artifact for framing decisions in either CAD-intake or order-line workflows.",
node_type="processNode",
icon="layers",
execution_kind="bridge",
@@ -196,10 +215,24 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
description="Optional absolute path to a specific GLB file. Leave empty to reuse the prepared preview/export artifact automatically.",
section="Inputs",
default="",
text_format="absolute_glb_path",
),
_field(
"source_preference",
"Source Preference",
"select",
description="Prefer a prepared GLB, force STEP fallback, or fail when no GLB artifact is available.",
section="Inputs",
default="auto",
options=[
("auto", "Auto"),
("step_only", "STEP Only"),
("glb_only", "GLB Only"),
],
),
],
input_contract={"context": "order_line", "requires": ["glb_preview"]},
output_contract={"context": "order_line", "provides": ["bbox"]},
input_contract={"requires": ["glb_preview"]},
output_contract={"provides": ["bbox"]},
artifact_roles_consumed=["glb_preview"],
artifact_roles_produced=["bbox"],
),
@@ -213,6 +246,25 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
node_type="processNode",
icon="layers",
execution_kind="bridge",
defaults={"disable_materials": False, "material_override": ""},
fields=[
_field(
"disable_materials",
"Disable Materials",
"boolean",
description="Bypass template and alias-based material mapping for this node.",
section="Materials",
default=False,
),
_field(
"material_override",
"Material Override",
"text",
description="Optional material name forced onto every detected part before rendering.",
section="Materials",
default="",
),
],
input_contract={"context": "order_line", "requires": ["order_line_context", "cad_materials"]},
output_contract={"context": "order_line", "provides": ["material_assignments"]},
artifact_roles_consumed=["order_line_context", "cad_materials"],
@@ -228,6 +280,37 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
node_type="processNode",
icon="layers",
execution_kind="bridge",
defaults={
"persist_updates": True,
"refresh_material_source": True,
"include_populated_products": False,
},
fields=[
_field(
"persist_updates",
"Persist Updates",
"boolean",
description="Write discovered part-material mappings back to product records in graph mode.",
section="Behavior",
default=True,
),
_field(
"refresh_material_source",
"Refresh Material Source",
"boolean",
description="Reload product material mappings into the workflow context after persistence.",
section="Behavior",
default=True,
),
_field(
"include_populated_products",
"Rewrite Populated Products",
"boolean",
description="Also rebuild material mappings for products that already have non-empty assignments.",
section="Behavior",
default=False,
),
],
input_contract={"context": "order_line", "requires": ["cad_materials"]},
output_contract={"context": "order_line", "provides": ["material_catalog_updates"]},
artifact_roles_consumed=["cad_materials"],
@@ -306,7 +389,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"cad_file",
"media.save_thumbnail",
"output",
"Persist the generated thumbnail back onto the CAD file record.",
"Persist the generated thumbnail back onto the CAD file record. Rendering settings are supplied by the connected upstream thumbnail request node.",
node_type="outputNode",
icon="download",
execution_kind="bridge",
@@ -360,6 +443,113 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
node_type="processNode",
icon="layers",
execution_kind="bridge",
defaults={
"template_id_override": "",
"material_library_path": "",
"require_template": False,
"disable_materials": False,
"target_collection": "",
"material_replace_mode": "inherit",
"lighting_only_mode": "inherit",
"shadow_catcher_mode": "inherit",
"camera_orbit_mode": "inherit",
},
fields=[
_field(
"template_id_override",
"Template ID Override",
"text",
description="Optional render-template UUID to force for this workflow node instead of category/output-type resolution.",
section="Template",
default="",
text_format="uuid",
),
_field(
"require_template",
"Require Template",
"boolean",
description="Fail this node when no active render template can be resolved.",
section="Template",
default=False,
),
_field(
"material_library_path",
"Material Library Path",
"text",
description="Optional absolute .blend path used instead of the active asset library.",
section="Materials",
default="",
text_format="absolute_blend_path",
),
_field(
"disable_materials",
"Disable Materials",
"boolean",
description="Resolve the template but skip material-map generation for downstream nodes.",
section="Materials",
default=False,
),
_field(
"target_collection",
"Target Collection Override",
"text",
description="Optional collection name override applied after template resolution. Leave blank to inherit from the template.",
section="Template Overrides",
default="",
),
_field(
"material_replace_mode",
"Material Replace",
"select",
description="Override whether template material replacement is active for downstream nodes.",
section="Template Overrides",
default="inherit",
options=[
("inherit", "Inherit Template"),
("enabled", "Force Enabled"),
("disabled", "Force Disabled"),
],
),
_field(
"lighting_only_mode",
"Lighting Only",
"select",
description="Override the template lighting-only flag for downstream nodes.",
section="Template Overrides",
default="inherit",
options=[
("inherit", "Inherit Template"),
("enabled", "Force Enabled"),
("disabled", "Force Disabled"),
],
),
_field(
"shadow_catcher_mode",
"Shadow Catcher",
"select",
description="Override the template shadow-catcher flag for downstream nodes.",
section="Template Overrides",
default="inherit",
options=[
("inherit", "Inherit Template"),
("enabled", "Force Enabled"),
("disabled", "Force Disabled"),
],
),
_field(
"camera_orbit_mode",
"Camera Orbit",
"select",
description="Override whether turntable renders orbit the camera or rotate the object.",
section="Template Overrides",
default="inherit",
options=[
("inherit", "Inherit Template"),
("enabled", "Force Camera Orbit"),
("disabled", "Force Object Rotation"),
],
),
],
input_contract={"context": "order_line", "requires": ["order_line_context"]},
output_contract={
"context": "order_line",
@@ -372,6 +562,8 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"use_materials",
"override_material",
"category_key",
"workflow_input_schema",
"template_inputs",
],
},
artifact_roles_consumed=["order_line_context"],
@@ -384,6 +576,8 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"use_materials",
"override_material",
"category_key",
"workflow_input_schema",
"template_inputs",
],
),
_definition(
@@ -420,7 +614,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"select",
description="Force CPU, GPU, or automatic device selection.",
section="Render",
default="auto",
default="gpu",
options=_CYCLES_DEVICE_OPTIONS,
),
_field(
@@ -451,6 +645,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
description="Optional Cycles adaptive sampling threshold, for example 0.01.",
section="Denoising",
default="",
text_format="float_string",
),
_field(
"denoiser",
@@ -606,7 +801,11 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
defaults={
"use_custom_render_settings": False,
"fps": 24,
"frame_count": 120,
"duration_s": 5,
"turntable_degrees": 360,
"turntable_axis": "world_z",
"camera_orbit": True,
"rotation_z": 0,
},
fields=[
@@ -664,8 +863,20 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
description="Optional hex color used during FFmpeg compositing, for example #FFFFFF.",
section="Output",
default="",
text_format="hex_color",
),
_field("fps", "FPS", "number", section="Animation", default=24, min=1, max=120, step=1),
_field(
"frame_count",
"Frame Count",
"number",
description="Explicit total frame count for the rendered turntable clip.",
section="Animation",
default=120,
min=1,
max=7200,
step=1,
),
_field(
"duration_s",
"Duration",
@@ -818,6 +1029,32 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
node_type="outputNode",
icon="download",
execution_kind="bridge",
defaults={"expected_artifact_role": "", "require_upstream_artifact": False},
fields=[
_field(
"expected_artifact_role",
"Expected Artifact Role",
"select",
description="Restrict this node to a specific upstream render artifact type.",
section="Output",
default="",
options=[
("", "Any Connected Artifact"),
("render_output", "Still Output"),
("turntable_output", "Turntable Output"),
("blend_export", "Blend Export"),
("thumbnail_output", "Thumbnail Output"),
],
),
_field(
"require_upstream_artifact",
"Require Upstream Artifact",
"boolean",
description="Fail the node when no matching upstream artifact is connected.",
section="Output",
default=False,
),
],
input_contract={
"context": "order_line",
"requires": ["order_line_context"],
@@ -833,7 +1070,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"order_line",
"media.export_blend",
"output",
"Persist the generated .blend file as a downloadable media asset.",
"Persist the generated .blend file as a downloadable media asset. Only the optional filename suffix is workflow-configurable today.",
node_type="outputNode",
icon="download",
defaults={"output_name_suffix": ""},
@@ -845,6 +1082,8 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
description="Optional suffix appended to the generated `.blend` filename.",
section="Output",
default="",
text_format="safe_filename_suffix",
max_length=64,
),
],
execution_kind="bridge",
@@ -859,7 +1098,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"cad_file",
"cad.generate_stl_cache",
"processing",
"Generate and cache STL derivatives next to the STEP source.",
"Compatibility node for legacy CAD flows. HartOMat graph execution uses direct OCC/GLB export instead, so this node intentionally performs no per-node-configurable cache generation.",
node_type="convertNode",
icon="refresh-cw",
execution_kind="bridge",
@@ -877,7 +1116,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
"Emit a user-visible notification for workflow completion or failure.",
node_type="outputNode",
icon="bell",
defaults={"channel": "audit_log"},
defaults={"channel": "audit_log", "require_armed_render": False},
fields=[
_field(
"channel",
@@ -888,6 +1127,14 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [
default="audit_log",
options=[("audit_log", "Audit Log")],
),
_field(
"require_armed_render",
"Require Armed Render",
"boolean",
description="Fail this node when no upstream graph render task is configured to hand off notifications.",
section="Notification",
default=False,
),
],
execution_kind="bridge",
input_contract={
File diff suppressed because it is too large Load Diff
@@ -5,7 +5,7 @@ import re
import shutil
import uuid
from dataclasses import dataclass, field
from datetime import datetime
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Callable, Literal
@@ -13,7 +13,11 @@ from sqlalchemy import select, update as sql_update
from sqlalchemy.orm import Session, joinedload
from app.config import settings as app_settings
from app.core.render_paths import resolve_result_path, result_path_to_storage_key
from app.core.render_paths import (
ensure_group_writable_dir,
resolve_result_path,
result_path_to_storage_key,
)
from app.domains.media.models import MediaAsset, MediaAssetType
from app.domains.orders.models import Order, OrderLine, OrderStatus
from app.domains.products.models import CadFile, Product
@@ -37,6 +41,199 @@ logger = logging.getLogger(__name__)
EmitFn = Callable[..., None] | None
SetupStatus = Literal["ready", "skip", "failed", "missing"]
QueueThumbnailFn = Callable[[str, dict[str, str]], None] | None
TEMPLATE_INPUT_PARAM_PREFIX = "template_input__"
_PNG_SIGNATURE = b"\x89PNG\r\n\x1a\n"
_VOLATILE_PNG_CHUNK_TYPES = {b"tEXt", b"zTXt", b"iTXt", b"tIME"}
def _slugify_material_lookup_key(value: str) -> str:
return re.sub(r"[^a-z0-9]+", "_", value).strip("_")
def _build_authoritative_material_lookup(materials_source: list[dict[str, Any]]) -> dict[str, str]:
    """Build a part-name -> material-name lookup with slug and suffix-stripped aliases.

    Each source entry contributes its normalized part name plus a slugified
    form; when the name carries trailing "_afNN" instance suffixes, the bare
    name (and its slug) are indexed as well. First entry wins on key clashes.
    """
    lookup: dict[str, str] = {}
    for entry in materials_source:
        part_raw = entry.get("part_name")
        material_raw = entry.get("material")
        if not part_raw or not material_raw:
            continue
        normalized_part = str(part_raw).lower().strip()
        if not normalized_part:
            continue
        material_name = str(material_raw)
        candidate_keys = [normalized_part, _slugify_material_lookup_key(normalized_part)]
        # "_afNN[_NN]" suffixes are CAD instance markers; index the bare name too.
        bare_part = re.sub(r"(_af\d+(_\d+)?)+$", "", normalized_part, flags=re.IGNORECASE)
        if bare_part != normalized_part:
            candidate_keys.append(bare_part)
            candidate_keys.append(_slugify_material_lookup_key(bare_part))
        for key in candidate_keys:
            if key:
                lookup.setdefault(key, material_name)
    return lookup
def _common_prefix_length(left: str, right: str) -> int:
limit = min(len(left), len(right))
idx = 0
while idx < limit and left[idx] == right[idx]:
idx += 1
return idx
def _lookup_material_by_prefix(query: str, material_lookup: dict[str, str]) -> str | None:
if not query or not material_lookup:
return None
contenders: list[tuple[int, str]] = []
for key, material_name in material_lookup.items():
if len(key) >= 5 and len(query) >= 5 and (query.startswith(key) or key.startswith(query)):
contenders.append((len(key), material_name))
if not contenders:
return None
contenders.sort(reverse=True)
top_length = contenders[0][0]
close_materials = {
material_name
for key_length, material_name in contenders
if key_length >= top_length - 2
}
return contenders[0][1] if len(close_materials) == 1 else None
def _lookup_material_by_common_prefix(query: str, material_lookup: dict[str, str]) -> str | None:
    """Fuzzy-match *query* against lookup keys by longest shared prefix.

    A key qualifies when it shares at least 12 leading characters with the
    query and the shared length covers at least 68% of the longer string.
    Returns the best match only when no near-best key maps to a different
    material; otherwise None.
    """
    if not query or not material_lookup:
        return None
    scored: list[tuple[float, int, int, str]] = []
    for key, material in material_lookup.items():
        shared = _common_prefix_length(query, key)
        if shared < 12:
            continue
        coverage = shared / max(len(query), len(key))
        if coverage < 0.68:
            continue
        scored.append((coverage, shared, len(key), material))
    if not scored:
        return None
    best_coverage, best_shared, _, best_material = max(scored)
    near_best = {
        material
        for coverage, shared, _, material in scored
        if coverage >= best_coverage - 0.02 and shared >= best_shared - 2
    }
    return best_material if len(near_best) == 1 else None
def _resolve_authoritative_material_name(
    raw_name: str | None,
    material_lookup: dict[str, str],
    *fallback_names: str | None,
) -> str | None:
    """Resolve a part/material name against the authoritative lookup.

    Tries *raw_name* first, then each fallback name. For every candidate a
    list of normalized variants is generated (lowercased/stripped, "_afNN"
    instance suffixes removed, trailing "_<number>" removed, slugified), then
    exact, mutual-prefix, and common-prefix lookups are attempted in that
    order. Returns the first material hit, or None when nothing matches.
    """
    candidates = [raw_name, *fallback_names]
    # Variants already tried for an earlier candidate are skipped via `seen`.
    seen: set[str] = set()
    for candidate in candidates:
        if not candidate:
            continue
        normalized = str(candidate).lower().strip()
        variants = [normalized]
        # Drop repeated "_afNN" / "_afNN_NN" CAD instance suffixes.
        stripped = re.sub(r"(_af\d+(_\d+)?)+$", "", normalized, flags=re.IGNORECASE)
        if stripped != normalized:
            variants.append(stripped)
        # Also try without a trailing "_<number>" instance counter.
        no_instance = re.sub(r"_\d+$", "", stripped)
        if no_instance and no_instance not in variants:
            variants.append(no_instance)
        # Add slugified forms; iterate over a copy because we append while looping.
        for variant in list(variants):
            slug_variant = _slugify_material_lookup_key(variant)
            if slug_variant and slug_variant not in variants:
                variants.append(slug_variant)
        # Order-preserving dedupe: `seen.add` returns None, so the `or` both
        # records the variant in `seen` and keeps it in the result.
        deduped_variants = [variant for variant in variants if variant and not (variant in seen or seen.add(variant))]
        # Pass 1: exact lookup.
        for variant in deduped_variants:
            material_name = material_lookup.get(variant)
            if material_name:
                return material_name
        # Pass 2: unambiguous mutual-prefix match.
        for variant in deduped_variants:
            material_name = _lookup_material_by_prefix(variant, material_lookup)
            if material_name:
                return material_name
        # Pass 3: unambiguous longest-common-prefix match.
        for variant in deduped_variants:
            material_name = _lookup_material_by_common_prefix(variant, material_lookup)
            if material_name:
                return material_name
    return None
def _utcnow_naive() -> datetime:
"""Return UTC as a naive datetime for legacy TIMESTAMP WITHOUT TIME ZONE columns."""
return datetime.now(timezone.utc).replace(tzinfo=None)
def extract_template_input_overrides(params: dict[str, Any] | None) -> dict[str, Any]:
    """Collect template-input override values from prefixed node parameters.

    Keys of the form ``template_input__<name>`` are mapped to ``<name>`` (with
    surrounding whitespace trimmed); everything else is ignored. Returns an
    empty dict for falsy input.
    """
    if not params:
        return {}
    prefix_length = len(TEMPLATE_INPUT_PARAM_PREFIX)
    overrides: dict[str, Any] = {}
    for raw_key, raw_value in params.items():
        if not isinstance(raw_key, str) or not raw_key.startswith(TEMPLATE_INPUT_PARAM_PREFIX):
            continue
        trimmed_key = raw_key[prefix_length:].strip()
        if trimmed_key:
            overrides[trimmed_key] = raw_value
    return overrides
def _normalize_template_input_schema(template: RenderTemplate | None) -> list[dict[str, Any]]:
    """Return the sanitized workflow-input field dicts declared on *template*.

    Non-list schemas, non-dict entries, and entries without a non-blank "key"
    are dropped. Each surviving field is shallow-copied so callers cannot
    mutate the template's stored schema.
    """
    schema = getattr(template, "workflow_input_schema", None) if template is not None else None
    if not isinstance(schema, list):
        return []
    normalized_fields: list[dict[str, Any]] = []
    for candidate in schema:
        if not isinstance(candidate, dict):
            continue
        if not str(candidate.get("key") or "").strip():
            continue
        normalized_fields.append(dict(candidate))
    return normalized_fields
def _resolve_template_input_values(
schema: list[dict[str, Any]],
overrides: dict[str, Any] | None,
) -> dict[str, Any]:
raw_overrides = overrides or {}
resolved: dict[str, Any] = {}
for field in schema:
key = str(field.get("key") or "").strip()
if not key:
continue
if key in raw_overrides:
resolved[key] = raw_overrides[key]
continue
if "default" in field:
resolved[key] = field.get("default")
return resolved
@dataclass(slots=True)
@@ -75,8 +272,14 @@ class TemplateResolutionResult:
material_map: dict[str, str] | None
use_materials: bool
override_material: str | None
target_collection: str
lighting_only: bool
shadow_catcher: bool
camera_orbit: bool
category_key: str | None
output_type_id: str | None
workflow_input_schema: list[dict[str, Any]] = field(default_factory=list)
template_inputs: dict[str, Any] = field(default_factory=dict)
@dataclass(slots=True)
@@ -159,6 +362,7 @@ class OrderLineRenderInvocation:
sensor_width_mm: float | None = None
usd_path: str | None = None
material_override: str | None = None
template_inputs: dict[str, Any] = field(default_factory=dict)
def task_defaults(self) -> dict[str, Any]:
payload: dict[str, Any] = {
@@ -196,9 +400,10 @@ class OrderLineRenderInvocation:
"sensor_width_mm": self.sensor_width_mm,
"usd_path": self.usd_path,
"material_override": self.material_override,
"template_inputs": self.template_inputs,
}
for key, value in optional_values.items():
if value not in (None, ""):
if value not in (None, "", {}, [], ()):
payload[key] = value
return payload
@@ -242,6 +447,7 @@ class OrderLineRenderInvocation:
"focal_length_mm": self.focal_length_mm,
"sensor_width_mm": self.sensor_width_mm,
"material_override": self.material_override,
"template_inputs": self.template_inputs,
}
def as_turntable_renderer_kwargs(
@@ -285,6 +491,7 @@ class OrderLineRenderInvocation:
"focal_length_mm": self.focal_length_mm,
"sensor_width_mm": self.sensor_width_mm,
"material_override": self.material_override,
"template_inputs": self.template_inputs,
}
def as_cinematic_renderer_kwargs(
@@ -324,6 +531,7 @@ class OrderLineRenderInvocation:
"focal_length_mm": self.focal_length_mm,
"sensor_width_mm": self.sensor_width_mm,
"material_override": self.material_override,
"template_inputs": self.template_inputs,
"log_callback": log_callback,
}
@@ -341,7 +549,61 @@ def _resolve_asset_path(storage_key: str | None) -> Path | None:
return resolve_result_path(storage_key)
def _usd_master_refresh_reason(cad_file: CadFile) -> str | None:
def _usd_master_file_refresh_reason(usd_render_path: Path | None) -> str | None:
if usd_render_path is None:
return "missing USD master file"
if not usd_render_path.exists():
return "missing USD master file"
try:
usd_bytes = usd_render_path.read_bytes()
except OSError:
logger.exception("render_order_line: failed to inspect usd_master %s", usd_render_path)
return "unreadable USD master file"
usd_bytes_lower = usd_bytes.lower()
if b"schaeffler:" in usd_bytes_lower:
return "legacy Schaeffler USD primvars"
if b"hartomat:" in usd_bytes_lower:
return None
# Binary USD (`PXR-USDC`) stores HartOMat customData in a form that is not
# reliably discoverable via a raw byte grep. For those files we rely on the
# cache fingerprint plus the upstream resolved material metadata checks.
if usd_bytes.startswith(b"PXR-USDC") or b"\x00" in usd_bytes[:256]:
return None
# Textual USD payloads without any HartOMat markers are legacy/stale in the
# current pipeline and should be refreshed before they are reused.
try:
usd_bytes.decode("utf-8")
except UnicodeDecodeError:
return None
return "missing HartOMat USD markers"
def _usd_master_cache_refresh_reason(usd_asset: MediaAsset | None) -> str | None:
    """Check the cached USD asset's fingerprint format.

    Returns a refresh reason when the cache key is absent or predates the
    render-script fingerprint, None when the cached asset is acceptable (or
    when there is no cached asset to judge).
    """
    if usd_asset is None:
        return None
    config = usd_asset.render_config
    if not isinstance(config, dict):
        config = {}
    cache_key = config.get("cache_key")
    if not isinstance(cache_key, str) or not cache_key.strip():
        return "missing USD cache fingerprint"
    # New-format keys carry the render-script fingerprint as a sixth
    # colon-delimited segment; shorter keys predate that scheme.
    if cache_key.count(":") < 5:
        return "legacy USD cache fingerprint"
    return None
def _usd_master_refresh_reason(
cad_file: CadFile,
*,
usd_asset: MediaAsset | None = None,
usd_render_path: Path | None = None,
) -> str | None:
resolved = cad_file.resolved_material_assignments
if not isinstance(resolved, dict) or not resolved:
return "missing resolved material assignments"
@@ -350,7 +612,7 @@ def _usd_master_refresh_reason(cad_file: CadFile) -> str | None:
for meta in resolved.values():
if not isinstance(meta, dict):
continue
canonical = meta.get("canonical_material")
canonical = meta.get("canonical_material") or meta.get("material")
if isinstance(canonical, str) and canonical.strip():
canonical_materials.append(canonical.strip())
@@ -360,6 +622,14 @@ def _usd_master_refresh_reason(cad_file: CadFile) -> str | None:
if any(material.upper().startswith("SCHAEFFLER_") for material in canonical_materials):
return "legacy Schaeffler material metadata"
cache_reason = _usd_master_cache_refresh_reason(usd_asset)
if cache_reason is not None:
return cache_reason
file_reason = _usd_master_file_refresh_reason(usd_render_path)
if file_reason is not None:
return file_reason
return None
@@ -502,6 +772,27 @@ def _coerce_bool(value: Any) -> bool:
return bool(value)
def _resolve_tristate_mode(
value: Any,
*,
field_name: str,
fallback: bool | None = None,
) -> bool | None:
if value in (None, "", "inherit"):
return fallback
if isinstance(value, bool):
return value
if isinstance(value, str):
normalized = value.strip().lower()
if normalized in {"enabled", "true", "1", "yes", "on"}:
return True
if normalized in {"disabled", "false", "0", "no", "off"}:
return False
raise ValueError(
f"{field_name} must be one of: inherit, enabled, disabled"
)
def _resolve_render_output_extension(line: OrderLine) -> str:
output_type = line.output_type
output_extension = "jpg"
@@ -582,7 +873,7 @@ def build_order_line_render_invocation(
denoising_quality = str(render_settings.get("denoising_quality", ""))
denoising_use_gpu = str(render_settings.get("denoising_use_gpu", ""))
transparent_bg = bool(output_type and output_type.transparent_bg)
cycles_device = (output_type.cycles_device or "auto") if output_type is not None else "auto"
cycles_device = (output_type.cycles_device or "gpu") if output_type is not None else "gpu"
render_overrides = getattr(line, "render_overrides", None)
if isinstance(render_overrides, dict):
@@ -682,22 +973,14 @@ def build_order_line_render_invocation(
part_colors=dict(setup.part_colors or {}),
part_names_ordered=part_names_ordered,
template_path=template_context.template.blend_file_path if template_context and template_context.template else None,
target_collection=(
template_context.template.target_collection
if template_context and template_context.template and template_context.template.target_collection
else "Product"
),
target_collection=template_context.target_collection if template_context else "Product",
material_library_path=(
template_context.material_library if template_context and use_materials else None
),
material_map=material_map,
lighting_only=bool(template_context.template.lighting_only) if template_context and template_context.template else False,
shadow_catcher=(
bool(template_context.template.shadow_catcher_enabled)
if template_context and template_context.template
else False
),
camera_orbit=bool(template_context.template.camera_orbit) if template_context and template_context.template else True,
lighting_only=template_context.lighting_only if template_context else False,
shadow_catcher=template_context.shadow_catcher if template_context else False,
camera_orbit=template_context.camera_orbit if template_context else True,
rotation_x=position.rotation_x,
rotation_y=position.rotation_y,
rotation_z=position.rotation_z,
@@ -705,6 +988,7 @@ def build_order_line_render_invocation(
sensor_width_mm=position.sensor_width_mm,
usd_path=str(setup.usd_render_path) if setup.usd_render_path is not None else None,
material_override=material_override,
template_inputs=dict(template_context.template_inputs) if template_context is not None else {},
)
@@ -727,10 +1011,49 @@ def _canonical_public_output_path(line: OrderLine, output_path: str) -> str:
return str(upload_root / "renders" / str(line.id) / filename)
def _strip_volatile_png_metadata(output_path: Path) -> None:
    """Rewrite a PNG file in place with volatile ancillary chunks removed.

    Non-``.png`` paths, missing files, and payloads lacking the PNG signature
    are left untouched.  A truncated chunk aborts the rewrite entirely so a
    mangled file is never persisted, and the file is only rewritten when at
    least one chunk listed in ``_VOLATILE_PNG_CHUNK_TYPES`` was dropped.
    """
    if output_path.suffix.lower() != ".png" or not output_path.is_file():
        return
    payload = output_path.read_bytes()
    if not payload.startswith(_PNG_SIGNATURE):
        return
    offset = len(_PNG_SIGNATURE)
    surviving: list[bytes] = []
    dropped_any = False
    # Each chunk is: 4-byte big-endian length, 4-byte type, <length> data
    # bytes, then a 4-byte CRC (12 bytes of framing overall).
    while offset + 12 <= len(payload):
        data_length = int.from_bytes(payload[offset : offset + 4], "big")
        end = offset + 12 + data_length
        if end > len(payload):
            # Truncated chunk — bail out without touching the file.
            return
        kind = payload[offset + 4 : offset + 8]
        if kind in _VOLATILE_PNG_CHUNK_TYPES:
            dropped_any = True
        else:
            surviving.append(payload[offset:end])
        offset = end
        if kind == b"IEND":
            break
    if not dropped_any:
        return
    output_path.write_bytes(_PNG_SIGNATURE + b"".join(surviving))
def _normalize_output_artifact(output_path: str) -> None:
    """Apply deterministic post-processing (PNG metadata stripping) to a render output."""
    artifact = Path(output_path)
    _strip_volatile_png_metadata(artifact)
def _materialize_public_output(line: OrderLine, output_path: str) -> str:
    """Copy a render result to its canonical public location and return that path.

    The canonical directory is created (and made group-writable) up front;
    the copy is skipped when the source already lives at the canonical path.
    """
    source = Path(output_path)
    destination = Path(_canonical_public_output_path(line, output_path))
    destination.parent.mkdir(parents=True, exist_ok=True)
    ensure_group_writable_dir(destination.parent)
    if source != destination:
        shutil.copy2(source, destination)
    return str(destination)
@@ -765,6 +1088,7 @@ def persist_order_line_media_asset(
resolved_workflow_run_id = _resolve_existing_workflow_run_id(session, workflow_run_id)
if success:
_normalize_output_artifact(output_path)
storage_key = _normalize_storage_key(output_path)
output_file = Path(output_path)
existing_asset = session.execute(
@@ -906,13 +1230,14 @@ def persist_order_line_output(
) -> OutputSaveResult:
"""Persist the render result for an order line and publish the media asset if needed."""
status: Literal["completed", "failed"] = "completed" if success else "failed"
completed_at = render_completed_at or datetime.utcnow()
completed_at = render_completed_at or _utcnow_naive()
persisted_output_path = output_path
line.render_status = status
line.render_completed_at = completed_at
line.render_log = render_log
if success:
_normalize_output_artifact(output_path)
persisted_output_path = _materialize_public_output(line, output_path)
line.result_path = persisted_output_path if success else None
session.flush()
@@ -1084,7 +1409,7 @@ def prepare_order_line_render_context(
reason="missing_cad_file",
)
render_start = datetime.utcnow() if persist_state else None
render_start = _utcnow_naive() if persist_state else None
if persist_state:
session.execute(
sql_update(OrderLine)
@@ -1111,7 +1436,12 @@ def prepare_order_line_render_context(
.limit(1)
).scalar_one_or_none()
if usd_asset:
refresh_reason = _usd_master_refresh_reason(cad_file)
usd_candidate_path = _resolve_asset_path(usd_asset.storage_key)
refresh_reason = _usd_master_refresh_reason(
cad_file,
usd_asset=usd_asset,
usd_render_path=usd_candidate_path,
)
if refresh_reason is not None:
logger.warning(
"render_order_line: ignoring stale usd_master for cad %s (%s)",
@@ -1127,7 +1457,7 @@ def prepare_order_line_render_context(
if _queue_usd_master_refresh(str(cad_file.id)):
_emit(emit, order_line_id, "Queued USD master regeneration in background")
else:
usd_render_path = _resolve_asset_path(usd_asset.storage_key)
usd_render_path = usd_candidate_path
if usd_render_path:
logger.info(
"render_order_line: using usd_master %s for cad %s",
@@ -1203,6 +1533,12 @@ def resolve_order_line_template_context(
material_library_path_override: str | None = None,
require_template: bool = False,
disable_materials: bool = False,
target_collection_override: str | None = None,
material_replace_mode: str | None = None,
lighting_only_mode: str | None = None,
shadow_catcher_mode: str | None = None,
camera_orbit_mode: str | None = None,
template_input_overrides: dict[str, Any] | None = None,
) -> TemplateResolutionResult:
"""Resolve render template, material library, and material map for a prepared order line."""
if not setup.is_ready:
@@ -1242,6 +1578,7 @@ def resolve_order_line_template_context(
if isinstance(material_library_path_override, str) and material_library_path_override.strip()
else get_material_library_path_for_session(session)
)
material_replace_override = _resolve_tristate_mode(material_replace_mode, field_name="material_replace_mode")
material_resolution = resolve_order_line_material_map(
line,
cad_file,
@@ -1250,8 +1587,36 @@ def resolve_order_line_template_context(
template=template,
emit=emit,
disable_materials=disable_materials,
material_replace_enabled_override=material_replace_override,
)
resolved_target_collection = (
target_collection_override.strip()
if isinstance(target_collection_override, str) and target_collection_override.strip()
else (
template.target_collection
if template is not None and template.target_collection
else "Product"
)
)
resolved_lighting_only = _resolve_tristate_mode(
lighting_only_mode,
field_name="lighting_only_mode",
fallback=bool(template.lighting_only) if template is not None else False,
)
resolved_shadow_catcher = _resolve_tristate_mode(
shadow_catcher_mode,
field_name="shadow_catcher_mode",
fallback=bool(template.shadow_catcher_enabled) if template is not None else False,
)
resolved_camera_orbit = _resolve_tristate_mode(
camera_orbit_mode,
field_name="camera_orbit_mode",
fallback=bool(template.camera_orbit) if template is not None else True,
)
workflow_input_schema = _normalize_template_input_schema(template)
template_inputs = _resolve_template_input_values(workflow_input_schema, template_input_overrides)
if template:
_emit(
emit,
@@ -1267,6 +1632,8 @@ def resolve_order_line_template_context(
template.blend_file_path,
template.lighting_only,
)
if template_inputs:
logger.info("Render template inputs resolved for '%s': %s", template.name, sorted(template_inputs))
if not template:
_emit(emit, str(line.id), "No render template found — using factory settings (Mode A)")
logger.info(
@@ -1281,8 +1648,14 @@ def resolve_order_line_template_context(
material_map=material_resolution.material_map,
use_materials=material_resolution.use_materials,
override_material=material_resolution.override_material,
target_collection=resolved_target_collection,
lighting_only=resolved_lighting_only,
shadow_catcher=resolved_shadow_catcher,
camera_orbit=resolved_camera_orbit,
category_key=category_key,
output_type_id=output_type_id,
workflow_input_schema=workflow_input_schema,
template_inputs=template_inputs,
)
@@ -1296,6 +1669,7 @@ def resolve_order_line_material_map(
emit: EmitFn = None,
material_override: str | None = None,
disable_materials: bool = False,
material_replace_enabled_override: bool | None = None,
) -> MaterialResolutionResult:
"""Resolve the effective order-line material map with legacy precedence rules."""
if disable_materials:
@@ -1311,11 +1685,15 @@ def resolve_order_line_material_map(
raw_material_count = 0
raw_material_map = _build_effective_material_lookup(cad_file, materials_source)
use_materials = bool(material_library and raw_material_map)
if template and not template.material_replace_enabled:
if material_replace_enabled_override is not None:
use_materials = bool(material_replace_enabled_override and material_library and raw_material_map)
elif template and not template.material_replace_enabled:
use_materials = False
if use_materials:
raw_material_count = len(raw_material_map)
material_map = resolve_material_map(raw_material_map)
if cad_file:
material_map = _overlay_scene_manifest_material_map(cad_file, material_map)
line_override = getattr(line, "material_override", None)
output_override = line.output_type.material_override if line.output_type else None
@@ -1344,21 +1722,55 @@ def resolve_order_line_material_map(
)
def _overlay_scene_manifest_material_map(
    cad_file: CadFile,
    material_map: dict[str, str],
) -> dict[str, str]:
    """Overlay authoritative scene-manifest materials onto a resolved material map.

    Low-level lookups still retain legacy/product source assignments so older
    fallback paths keep working.  The final order-line material map, however,
    must prefer the scene manifest's effective assignments wherever the USD/CAD
    pipeline has already established authoritative part identity.
    """
    if not material_map:
        return material_map
    overlaid = dict(material_map)
    for entry in build_scene_manifest(cad_file).get("parts", []):
        if not isinstance(entry, dict):
            continue
        effective = entry.get("effective_material")
        if not isinstance(effective, str) or not effective.strip():
            continue
        # Emit both source-name and part-key aliases when present and non-blank.
        for key_field in ("source_name", "part_key"):
            candidate = entry.get(key_field)
            if isinstance(candidate, str) and candidate.strip():
                overlaid[candidate] = effective
    return overlaid
def _build_effective_material_lookup(
cad_file: CadFile | None,
materials_source: list[dict[str, Any]],
) -> dict[str, str]:
"""Build a renderer-compatible material lookup from all available layers.
Authoritative scene-manifest assignments win when present, but we emit both
source-name and part-key keys so USD and GLB/STEP fallback paths resolve the
same effective material map.
Product/Excel CAD assignments stay authoritative for overlapping source-name
keys so legacy renders, thumbnails, and viewer previews keep parity with the
pre-USD pipeline. Scene-manifest assignments still fill gaps and emit part-key
aliases so USD and GLB/STEP fallback paths resolve the same effective map.
"""
raw_material_map: dict[str, str] = {
str(material["part_name"]): str(material["material"])
for material in materials_source
if material.get("part_name") and material.get("material")
}
authoritative_lookup = _build_authoritative_material_lookup(materials_source)
if not cad_file:
return raw_material_map
@@ -1372,10 +1784,16 @@ def _build_effective_material_lookup(
continue
source_name = part.get("source_name")
part_key = part.get("part_key")
if source_name:
raw_material_map[str(source_name)] = str(effective_material)
authoritative_material = _resolve_authoritative_material_name(
str(source_name) if source_name else None,
authoritative_lookup,
str(part_key) if part_key else None,
)
merged_material = authoritative_material or str(effective_material)
if source_name and str(source_name) not in raw_material_map:
raw_material_map[str(source_name)] = merged_material
if part_key:
raw_material_map[str(part_key)] = str(effective_material)
raw_material_map.setdefault(str(part_key), merged_material)
return raw_material_map
+159 -14
View File
@@ -18,6 +18,7 @@ Example config::
"""
from collections import deque
from typing import Any, Literal
from uuid import UUID
from pydantic import BaseModel, Field, field_validator, model_validator
@@ -29,6 +30,14 @@ from app.domains.rendering.workflow_node_registry import (
)
_WORKFLOW_META_PARAM_KEYS = {"retry_policy", "failure_policy"}
_TEMPLATE_INPUT_PARAM_PREFIX = "template_input__"
_HEX_COLOR_LENGTHS = {7, 9}
_SAFE_FILENAME_SUFFIX_CHARS = set(
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789._-"
)
def _context_seed_artifacts(definition: WorkflowNodeDefinition) -> set[str]:
if definition.family == "order_line":
return {"order_line_record"}
@@ -37,10 +46,43 @@ def _context_seed_artifacts(definition: WorkflowNodeDefinition) -> set[str]:
return set()
def _infer_concrete_workflow_family(
    definitions: list[WorkflowNodeDefinition],
) -> Literal["cad_file", "order_line", "mixed"] | None:
    """Infer the concrete workflow family from a set of node definitions.

    Returns ``None`` when no definition belongs to a concrete family,
    ``"mixed"`` when both concrete families appear, and the single family
    name otherwise.
    """
    observed: set[str] = set()
    for definition in definitions:
        if definition.family in {"cad_file", "order_line"}:
            observed.add(definition.family)
    if not observed:
        return None
    if len(observed) == 1:
        return observed.pop()
    return "mixed"
def _coerce_node_label(node: "WorkflowNode") -> str:
return f"{node.id!r} ({node.step.value})"
def _require_node_definition(node: "WorkflowNode") -> WorkflowNodeDefinition:
    """Look up a node's registry definition, failing loudly when unregistered."""
    definition = get_node_definition(node.step)
    if definition is not None:
        return definition
    raise ValueError(
        f"node {_coerce_node_label(node)} is not registered in workflow_node_registry"
    )
def _is_dynamic_template_input_param(node: "WorkflowNode", key: str) -> bool:
    """Report whether *key* is a dynamic ``template_input__<name>`` param.

    Only resolve-template nodes accept these keys, and the part after the
    prefix must be non-blank.
    """
    if node.step != StepName.RESOLVE_TEMPLATE:
        return False
    if not isinstance(key, str) or not key.startswith(_TEMPLATE_INPUT_PARAM_PREFIX):
        return False
    suffix = key[len(_TEMPLATE_INPUT_PARAM_PREFIX):]
    return suffix.strip() != ""
def _validate_param_value(
*,
node: "WorkflowNode",
@@ -72,6 +114,105 @@ def _validate_param_value(
if value not in valid_values:
allowed_values = ", ".join(repr(option) for option in sorted(valid_values, key=repr))
raise ValueError(f"{field_label} must be one of: {allowed_values}")
return
if field_definition.type == "text":
if not isinstance(value, str):
raise ValueError(f"{field_label} must be a string")
stripped_value = value.strip()
if stripped_value == "":
if field_definition.allow_blank:
return
raise ValueError(f"{field_label} may not be blank")
if field_definition.max_length is not None and len(value) > field_definition.max_length:
raise ValueError(
f"{field_label} must be at most {field_definition.max_length} characters"
)
if field_definition.text_format == "plain":
return
if field_definition.text_format == "uuid":
try:
UUID(stripped_value)
except ValueError as exc:
raise ValueError(f"{field_label} must be a valid UUID") from exc
return
if field_definition.text_format == "absolute_path":
if not stripped_value.startswith("/"):
raise ValueError(f"{field_label} must be an absolute path")
return
if field_definition.text_format == "absolute_blend_path":
if not stripped_value.startswith("/"):
raise ValueError(f"{field_label} must be an absolute path")
if not stripped_value.lower().endswith(".blend"):
raise ValueError(f"{field_label} must point to a .blend file")
return
if field_definition.text_format == "absolute_glb_path":
if not stripped_value.startswith("/"):
raise ValueError(f"{field_label} must be an absolute path")
if not stripped_value.lower().endswith(".glb"):
raise ValueError(f"{field_label} must point to a .glb file")
return
if field_definition.text_format == "float_string":
try:
float(stripped_value)
except ValueError as exc:
raise ValueError(f"{field_label} must be a valid numeric string") from exc
return
if field_definition.text_format == "hex_color":
if len(stripped_value) not in _HEX_COLOR_LENGTHS or not stripped_value.startswith("#"):
raise ValueError(f"{field_label} must be a hex color like #FFFFFF or #FFFFFFFF")
color_digits = stripped_value[1:]
if any(character not in "0123456789abcdefABCDEF" for character in color_digits):
raise ValueError(f"{field_label} must be a hex color like #FFFFFF or #FFFFFFFF")
return
if field_definition.text_format == "safe_filename_suffix":
if any(character not in _SAFE_FILENAME_SUFFIX_CHARS for character in stripped_value):
raise ValueError(
f"{field_label} may only contain letters, numbers, '.', '-' or '_'"
)
return
raise ValueError(
f"{field_label} uses unsupported text format {field_definition.text_format!r}"
)
def _validate_meta_param_value(*, node: "WorkflowNode", key: str, value: Any) -> None:
    """Validate a workflow meta param (``retry_policy`` / ``failure_policy``).

    Raises ``ValueError`` with a node-specific label for unsupported meta
    keys, unknown sub-keys, or mis-typed / out-of-range field values.
    """
    field_label = f"node {_coerce_node_label(node)} meta param {key!r}"
    if key == "retry_policy":
        if not isinstance(value, dict):
            raise ValueError(f"{field_label} must be an object")
        extra = sorted(raw_key for raw_key in value if raw_key not in {"max_attempts"})
        if extra:
            joined = ", ".join(repr(raw_key) for raw_key in extra)
            raise ValueError(f"{field_label} uses unknown key(s): {joined}")
        max_attempts = value.get("max_attempts", 1)
        # bool is an int subclass, so reject it explicitly before the int check.
        if isinstance(max_attempts, bool) or not isinstance(max_attempts, int):
            raise ValueError(f"{field_label} field 'max_attempts' must be an integer")
        if not 1 <= max_attempts <= 5:
            raise ValueError(f"{field_label} field 'max_attempts' must be between 1 and 5")
        return
    if key == "failure_policy":
        if not isinstance(value, dict):
            raise ValueError(f"{field_label} must be an object")
        allowed_keys = {"halt_workflow", "fallback_to_legacy"}
        extra = sorted(raw_key for raw_key in value if raw_key not in allowed_keys)
        if extra:
            joined = ", ".join(repr(raw_key) for raw_key in extra)
            raise ValueError(f"{field_label} uses unknown key(s): {joined}")
        for bool_key in allowed_keys:
            if bool_key in value and not isinstance(value[bool_key], bool):
                raise ValueError(f"{field_label} field {bool_key!r} must be a boolean")
        return
    raise ValueError(f"{field_label} is not supported")
class WorkflowPosition(BaseModel):
@@ -149,18 +290,25 @@ class WorkflowConfig(BaseModel):
@model_validator(mode="after")
def node_params_match_registry(self) -> "WorkflowConfig":
for node in self.nodes:
definition = get_node_definition(node.step)
if definition is None:
continue
definition = _require_node_definition(node)
field_definitions = {field.key: field for field in definition.fields}
allowed_keys = {field.key for field in definition.fields}
unknown_keys = sorted(key for key in node.params if key not in allowed_keys)
allowed_keys = {field.key for field in definition.fields} | _WORKFLOW_META_PARAM_KEYS
unknown_keys = sorted(
key
for key in node.params
if key not in allowed_keys and not _is_dynamic_template_input_param(node, key)
)
if unknown_keys:
joined = ", ".join(repr(key) for key in unknown_keys)
raise ValueError(
f"node {node.id!r} ({node.step.value}) uses unknown param key(s): {joined}"
)
for key, value in node.params.items():
if _is_dynamic_template_input_param(node, key):
continue
if key in _WORKFLOW_META_PARAM_KEYS:
_validate_meta_param_value(node=node, key=key, value=value)
continue
field_definition = field_definitions.get(key)
if field_definition is None:
continue
@@ -173,20 +321,19 @@ class WorkflowConfig(BaseModel):
@model_validator(mode="after")
def ui_family_matches_node_families(self) -> "WorkflowConfig":
families = {
definition.family
for node in self.nodes
if (definition := get_node_definition(node.step)) is not None
}
definitions = [_require_node_definition(node) for node in self.nodes]
families = {definition.family for definition in definitions}
inferred_family = _infer_concrete_workflow_family(definitions)
if not families:
return self
inferred_family = "mixed" if len(families) > 1 else next(iter(families))
execution_mode = self.ui.execution_mode if self.ui is not None else "legacy"
if execution_mode in {"graph", "shadow"} and inferred_family == "mixed":
raise ValueError(
"workflow ui.execution_mode must stay single-family for graph/shadow execution"
)
if inferred_family is None:
return self
if self.ui is None or self.ui.family is None:
return self
if self.ui.family != inferred_family:
@@ -220,9 +367,7 @@ class WorkflowConfig(BaseModel):
node_id = queue.popleft()
processed += 1
node = node_by_id[node_id]
definition = get_node_definition(node.step)
if definition is None:
continue
definition = _require_node_definition(node)
node_inputs = available_artifacts[node_id] | _context_seed_artifacts(definition)
required = set(definition.input_contract.get("requires", []))