feat: unify order-line render invocation paths

This commit is contained in:
2026-04-08 21:57:37 +02:00
parent 042f62fe55
commit dde04fcaa5
5 changed files with 3016 additions and 278 deletions
@@ -2,6 +2,7 @@ from __future__ import annotations
import logging
import time
import uuid
from dataclasses import dataclass, field
from datetime import datetime
from pathlib import Path
@@ -10,7 +11,9 @@ from typing import Any
from sqlalchemy import select
from sqlalchemy.orm import Session, selectinload
from app.config import settings
from app.core.process_steps import StepName
from app.domains.products.models import CadFile
from app.domains.rendering.models import WorkflowNodeResult, WorkflowRun
from app.domains.rendering.workflow_executor import STEP_TASK_MAP, WorkflowContext, WorkflowDispatchResult
from app.domains.rendering.workflow_node_registry import get_node_definition
@@ -21,10 +24,12 @@ from app.domains.rendering.workflow_runtime_services import (
OrderLineRenderSetupResult,
TemplateResolutionResult,
auto_populate_materials_for_cad,
build_order_line_render_invocation,
prepare_order_line_render_context,
resolve_cad_bbox,
resolve_order_line_material_map,
resolve_order_line_template_context,
resolve_render_position_context,
)
logger = logging.getLogger(__name__)
@@ -37,6 +42,7 @@ class WorkflowGraphRuntimeError(RuntimeError):
@dataclass(slots=True)
class WorkflowGraphState:
setup: OrderLineRenderSetupResult | None = None
cad_file: CadFile | None = None
template: TemplateResolutionResult | None = None
materials: MaterialResolutionResult | None = None
auto_populate: AutoPopulateMaterialsResult | None = None
@@ -52,6 +58,119 @@ _ORDER_LINE_RENDER_STEPS = {
StepName.NOTIFY,
}
# Keyword-argument whitelist for the Blender still-image render task; any other
# node param is dropped before dispatch (see _build_task_kwargs).
_STILL_TASK_KEYS = {
    "width",
    "height",
    "engine",
    "samples",
    "smooth_angle",
    "cycles_device",
    "transparent_bg",
    "part_colors",
    "template_path",
    "target_collection",
    "material_library_path",
    "material_map",
    "part_names_ordered",
    "lighting_only",
    "shadow_catcher",
    "rotation_x",
    "rotation_y",
    "rotation_z",
    "noise_threshold",
    "denoiser",
    "denoising_input_passes",
    "denoising_prefilter",
    "denoising_quality",
    "denoising_use_gpu",
    "usd_path",
    "focal_length_mm",
    "sensor_width_mm",
    "material_override",
    "render_engine",
    "resolution",
}
# Keyword-argument whitelist for the Blender turntable (animation) render task.
_TURNTABLE_TASK_KEYS = {
    "output_name",
    "engine",
    "samples",
    "smooth_angle",
    "cycles_device",
    "transparent_bg",
    "width",
    "height",
    "frame_count",
    "fps",
    "turntable_degrees",
    "turntable_axis",
    "bg_color",
    "template_path",
    "target_collection",
    "material_library_path",
    "material_map",
    "part_names_ordered",
    "lighting_only",
    "shadow_catcher",
    "camera_orbit",
    "rotation_x",
    "rotation_y",
    "rotation_z",
    "focal_length_mm",
    "sensor_width_mm",
    "material_override",
}
# Keyword-argument whitelist for the thumbnail-save task.
_THUMBNAIL_TASK_KEYS = {
    "renderer",
    "render_engine",
    "samples",
    "width",
    "height",
    "transparent_bg",
}
# Render-quality settings considered authoritative (inherited from the output
# type). They are stripped from node params unless the node opts in with
# use_custom_render_settings (see _filter_graph_render_overrides).
_AUTHORITATIVE_RENDER_SETTING_KEYS = {
    "render_engine",
    "engine",
    "samples",
    "width",
    "height",
    "transparent_bg",
    "cycles_device",
    "noise_threshold",
    "denoiser",
    "denoising_input_passes",
    "denoising_prefilter",
    "denoising_quality",
    "denoising_use_gpu",
    "camera_orbit",
    "focal_length_mm",
    "sensor_width_mm",
    "bg_color",
}
def _filter_graph_render_overrides(step: StepName, params: dict[str, Any]) -> dict[str, Any]:
    """Strip authoritative render-quality keys from node params.

    The ``use_custom_render_settings`` flag is always consumed. When it is
    truthy, the remaining params pass through untouched; otherwise every key in
    ``_AUTHORITATIVE_RENDER_SETTING_KEYS`` is removed so render quality inherits
    from the output type. For turntable nodes, timing-related keys are restored
    because they stay workflow-specific.

    Args:
        step: The workflow step the params belong to.
        params: Raw node params; not mutated.

    Returns:
        A new dict with the filtered params.
    """
    normalized = dict(params)
    if bool(normalized.pop("use_custom_render_settings", False)):
        return normalized
    # One pass instead of copy-then-pop: drop every authoritative key.
    filtered = {
        key: value
        for key, value in normalized.items()
        if key not in _AUTHORITATIVE_RENDER_SETTING_KEYS
    }
    if step == StepName.BLENDER_TURNTABLE:
        # Turntable timing remains workflow-specific even when render quality inherits from the output type.
        for key in ("fps", "duration_s", "frame_count", "turntable_degrees", "turntable_axis"):
            value = normalized.get(key)
            if value not in (None, ""):
                filtered[key] = value
    return filtered
def find_unsupported_graph_nodes(workflow_context: WorkflowContext) -> list[str]:
unsupported: list[str] = []
@@ -119,6 +238,7 @@ def execute_graph_workflow(
session=session,
workflow_context=workflow_context,
state=state,
node=node,
node_params=node.params,
)
except Exception as exc:
@@ -208,14 +328,12 @@ def execute_graph_workflow(
from app.tasks.celery_app import celery_app
task_kwargs = dict(node.params)
task_kwargs["workflow_run_id"] = str(workflow_context.workflow_run_id)
task_kwargs["workflow_node_id"] = node.id
if workflow_context.execution_mode == "shadow":
task_kwargs["publish_asset_enabled"] = False
task_kwargs["emit_events"] = False
task_kwargs["job_document_enabled"] = False
task_kwargs["output_name_suffix"] = f"shadow-{str(workflow_context.workflow_run_id)[:8]}"
task_kwargs = _build_task_kwargs(
session=session,
workflow_context=workflow_context,
state=state,
node=node,
)
result = celery_app.send_task(
task_name,
@@ -228,10 +346,19 @@ def execute_graph_workflow(
metadata["attempt_count"] = 1
metadata["max_attempts"] = retry_policy["max_attempts"]
metadata["execution_mode"] = workflow_context.execution_mode
predicted_output = _predict_task_output_metadata(
workflow_context=workflow_context,
state=state,
node=node,
task_kwargs=task_kwargs,
)
if predicted_output:
metadata.update(predicted_output)
node_result.status = "queued"
node_result.output = metadata
node_result.log = None
node_result.duration_s = None
state.node_outputs[node.id] = dict(metadata)
session.flush()
task_ids.append(result.id)
node_task_ids[node.id] = result.id
@@ -377,13 +504,330 @@ def _serialize_bbox_result(result: BBoxResolutionResult) -> dict[str, Any]:
}
def _serialize_cad_file_result(cad_file: CadFile) -> dict[str, Any]:
parsed_objects = cad_file.parsed_objects or {}
objects = parsed_objects.get("objects")
object_count = len(objects) if isinstance(objects, list) else None
return {
"cad_file_id": str(cad_file.id),
"step_path": cad_file.stored_path,
"original_name": cad_file.original_name,
"processing_status": cad_file.processing_status.value if getattr(cad_file, "processing_status", None) else None,
"object_count": object_count,
"has_parsed_objects": bool(parsed_objects),
"gltf_path": cad_file.gltf_path,
}
def _workflow_node_ids(workflow_context: WorkflowContext, step: StepName) -> list[str]:
return [node.id for node in workflow_context.ordered_nodes if node.step == step]
def _workflow_node_map(workflow_context: WorkflowContext) -> dict[str, Any]:
return {node.id: node for node in workflow_context.ordered_nodes}
def _upstream_node_ids(workflow_context: WorkflowContext, node_id: str) -> list[str]:
return [edge.from_node for edge in workflow_context.edges if edge.to_node == node_id]
def _downstream_node_ids(workflow_context: WorkflowContext, node_id: str) -> list[str]:
return [edge.to_node for edge in workflow_context.edges if edge.from_node == node_id]
def _connected_node_ids_by_step(
    workflow_context: WorkflowContext,
    *,
    node_id: str,
    step: StepName,
    direction: str,
) -> list[str]:
    """Return the neighbor ids of *node_id* in *direction* whose node step equals *step*.

    Raises:
        ValueError: If *direction* is neither ``"upstream"`` nor ``"downstream"``.
    """
    if direction == "upstream":
        neighbor_ids = _upstream_node_ids(workflow_context, node_id)
    elif direction == "downstream":
        neighbor_ids = _downstream_node_ids(workflow_context, node_id)
    else:
        raise ValueError(f"Unsupported graph direction: {direction}")
    nodes_by_id = _workflow_node_map(workflow_context)
    matched: list[str] = []
    for neighbor_id in neighbor_ids:
        neighbor = nodes_by_id.get(neighbor_id)
        if neighbor is not None and neighbor.step == step:
            matched.append(neighbor_id)
    return matched
def _connected_upstream_artifacts(
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node_id: str,
) -> list[dict[str, Any]]:
    """Return the collected artifacts produced by direct upstream neighbors of *node_id*."""
    upstream_ids = set(_upstream_node_ids(workflow_context, node_id))
    if not upstream_ids:
        # A node with no incoming edges has no upstream artifacts by definition.
        return []
    return [
        artifact
        for artifact in _collect_upstream_artifacts(state)
        if artifact["node_id"] in upstream_ids
    ]
def _predict_task_output_metadata(
    *,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
    task_kwargs: dict[str, Any],
) -> dict[str, Any]:
    """Predict where a queued Celery task will write its artifact.

    The prediction (artifact role, output path, asset type, plus the graph
    handoff flags mirrored from *task_kwargs*) is stored on the node result
    before the task runs. Returns an empty dict when no prediction is possible
    for this node's step. ``node`` is a workflow graph node (untyped here);
    only its ``step`` and ``id`` attributes are read.
    """
    if node.step == StepName.THUMBNAIL_SAVE:
        renderer = str(task_kwargs.get("renderer") or "blender")
        # three.js output and transparent backgrounds need an alpha channel, hence PNG.
        output_format = "png" if renderer == "threejs" or bool(task_kwargs.get("transparent_bg")) else "jpg"
        output_dir = Path(settings.upload_dir) / "thumbnails"
        return {
            "artifact_role": "thumbnail_output",
            "predicted_output_path": str(output_dir / f"{workflow_context.context_id}.{output_format}"),
            "predicted_asset_type": "thumbnail",
            "publish_asset_enabled": True,
            "graph_authoritative_output_enabled": True,
            "graph_output_node_ids": [node.id],
            "notify_handoff_enabled": False,
        }
    # All remaining predictions require an order line and its CAD file from setup.
    if state.setup is None or state.setup.order_line is None or state.setup.cad_file is None:
        return {}
    step_path = Path(state.setup.cad_file.stored_path)
    output_name_suffix = task_kwargs.get("output_name_suffix")
    order_line_id = str(state.setup.order_line.id)
    if node.step == StepName.BLENDER_STILL:
        # Stills land next to the CAD file, named after the order line.
        output_dir = step_path.parent / "renders"
        output_filename = f"line_{order_line_id}.png"
        if output_name_suffix:
            output_filename = f"line_{order_line_id}_{output_name_suffix}.png"
        return {
            "artifact_role": "render_output",
            "predicted_output_path": str(output_dir / output_filename),
            "predicted_asset_type": "still",
            "publish_asset_enabled": bool(task_kwargs.get("publish_asset_enabled", True)),
            "graph_authoritative_output_enabled": bool(
                task_kwargs.get("graph_authoritative_output_enabled", False)
            ),
            "graph_output_node_ids": list(task_kwargs.get("graph_output_node_ids") or []),
            "notify_handoff_enabled": bool(task_kwargs.get("emit_legacy_notifications", False)),
            "graph_notify_node_ids": list(task_kwargs.get("graph_notify_node_ids") or []),
        }
    if node.step == StepName.EXPORT_BLEND:
        output_filename = f"{step_path.stem}_production.blend"
        if output_name_suffix:
            output_filename = f"{step_path.stem}_production_{output_name_suffix}.blend"
        return {
            "artifact_role": "blend_export",
            "predicted_output_path": str(step_path.parent / output_filename),
            "predicted_asset_type": "blend_production",
            "publish_asset_enabled": bool(task_kwargs.get("publish_asset_enabled", True)),
            "graph_authoritative_output_enabled": bool(
                task_kwargs.get("graph_authoritative_output_enabled", False)
            ),
            "graph_output_node_ids": list(task_kwargs.get("graph_output_node_ids") or []),
            "notify_handoff_enabled": bool(task_kwargs.get("emit_legacy_notifications", False)),
            "graph_notify_node_ids": list(task_kwargs.get("graph_notify_node_ids") or []),
        }
    if node.step == StepName.BLENDER_TURNTABLE:
        output_name = str(task_kwargs.get("output_name") or "turntable")
        output_name_suffix = task_kwargs.get("output_name_suffix")
        if output_name_suffix:
            output_name = f"{output_name}_{output_name_suffix}"
        # An explicit non-blank output_dir param wins over the default renders dir.
        output_dir = task_kwargs.get("output_dir")
        predicted_output_path = None
        if isinstance(output_dir, str) and output_dir.strip():
            predicted_output_path = str(Path(output_dir) / f"{output_name}.mp4")
        else:
            predicted_output_path = str(step_path.parent / "renders" / f"{output_name}.mp4")
        return {
            "artifact_role": "turntable_output",
            "predicted_output_path": predicted_output_path,
            "predicted_asset_type": "turntable",
            "publish_asset_enabled": bool(task_kwargs.get("publish_asset_enabled", True)),
            "graph_authoritative_output_enabled": bool(
                task_kwargs.get("graph_authoritative_output_enabled", False)
            ),
            "graph_output_node_ids": list(task_kwargs.get("graph_output_node_ids") or []),
            "notify_handoff_enabled": bool(task_kwargs.get("emit_legacy_notifications", False)),
            "graph_notify_node_ids": list(task_kwargs.get("graph_notify_node_ids") or []),
        }
    # Other steps have no predictable file output.
    return {}
def _collect_upstream_artifacts(state: WorkflowGraphState) -> list[dict[str, Any]]:
artifacts: list[dict[str, Any]] = []
for node_id, output in state.node_outputs.items():
predicted_output_path = output.get("predicted_output_path")
artifact_role = output.get("artifact_role")
if not artifact_role and not predicted_output_path:
continue
artifacts.append(
{
"node_id": node_id,
"artifact_role": artifact_role,
"predicted_output_path": predicted_output_path,
"predicted_asset_type": output.get("predicted_asset_type"),
"publish_asset_enabled": bool(output.get("publish_asset_enabled", False)),
"graph_authoritative_output_enabled": bool(
output.get("graph_authoritative_output_enabled", False)
),
"graph_output_node_ids": list(output.get("graph_output_node_ids") or []),
"notify_handoff_enabled": bool(output.get("notify_handoff_enabled", False)),
"task_id": output.get("task_id"),
**(
{"graph_notify_node_ids": list(output.get("graph_notify_node_ids") or [])}
if output.get("graph_notify_node_ids")
else {}
),
}
)
return artifacts
def _resolve_cad_file_context(
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
) -> CadFile:
    """Load the CadFile addressed by the workflow context id, memoizing it on *state*.

    Raises:
        WorkflowGraphRuntimeError: If the context id is missing, not a UUID, or
            no matching CadFile row exists.
    """
    if state.cad_file is not None:
        return state.cad_file
    try:
        raw_context_id = workflow_context.context_id
    except AttributeError as exc:
        raise WorkflowGraphRuntimeError("cad_file context_id is missing") from exc
    try:
        cad_file_uuid = uuid.UUID(raw_context_id)
    except ValueError as exc:
        raise WorkflowGraphRuntimeError(f"cad_file context is not a valid UUID: {raw_context_id}") from exc
    loaded = session.get(CadFile, cad_file_uuid)
    if loaded is None:
        raise WorkflowGraphRuntimeError(f"cad_file context not found: {raw_context_id}")
    state.cad_file = loaded
    return loaded
def _resolve_thumbnail_request(
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node_id: str,
) -> dict[str, Any] | None:
    """Find the thumbnail render request this node should consume, if any.

    Direct upstream neighbors are preferred (latest in graph order wins); when
    none of them issued a request, the newest request from any executed node is
    used. Returns ``None`` when no request exists.
    """
    direct_upstream = set(_upstream_node_ids(workflow_context, node_id))
    if direct_upstream:
        for graph_node in reversed(workflow_context.ordered_nodes):
            if graph_node.id in direct_upstream:
                candidate = state.node_outputs.get(graph_node.id)
                if candidate and candidate.get("thumbnail_request") is True:
                    return candidate
    # Fall back to the most recently recorded request from any node.
    for candidate in reversed(list(state.node_outputs.values())):
        if candidate.get("thumbnail_request") is True:
            return candidate
    return None
def _build_task_kwargs(
    *,
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
) -> dict[str, Any]:
    """Assemble the Celery task kwargs for a dispatchable graph node.

    Merges order-line render defaults under the node's own params (node params
    win), filters the merge to the task's keyword whitelist, stamps workflow
    identifiers, arms downstream output_save/notify handoff flags in graph
    mode, and applies shadow-mode suppression last. ``node`` is a workflow
    graph node; its ``params``, ``step`` and ``id`` attributes are read.
    """
    task_kwargs = dict(node.params)
    connected_output_node_ids: list[str] = []
    connected_notify_node_ids: list[str] = []
    render_defaults: dict[str, Any] = {}
    if state.setup is not None and state.setup.is_ready and state.setup.order_line is not None:
        # Unified invocation path: derive render defaults from the order line's
        # template/position/material context.
        render_invocation = build_order_line_render_invocation(
            state.setup,
            template_context=state.template,
            position_context=resolve_render_position_context(session, state.setup.order_line),
            material_context=state.materials,
        )
        render_defaults = render_invocation.task_defaults()
    if node.step == StepName.BLENDER_STILL:
        task_kwargs = _filter_graph_render_overrides(StepName.BLENDER_STILL, task_kwargs)
        # Node params override defaults; keys outside the whitelist are dropped.
        task_kwargs = {
            key: value
            for key, value in {
                **render_defaults,
                **task_kwargs,
            }.items()
            if key in _STILL_TASK_KEYS
        }
    elif node.step == StepName.BLENDER_TURNTABLE:
        task_kwargs = _filter_graph_render_overrides(StepName.BLENDER_TURNTABLE, task_kwargs)
        task_kwargs = {
            key: value
            for key, value in {
                **render_defaults,
                **task_kwargs,
            }.items()
            if key in _TURNTABLE_TASK_KEYS
        }
    elif node.step == StepName.THUMBNAIL_SAVE:
        # Thumbnail tasks merge the upstream render request instead of render defaults.
        thumbnail_request = _resolve_thumbnail_request(workflow_context, state, node.id) or {}
        task_kwargs = {
            key: value
            for key, value in {
                **thumbnail_request,
                **task_kwargs,
            }.items()
            if key in _THUMBNAIL_TASK_KEYS
        }
    task_kwargs["workflow_run_id"] = str(workflow_context.workflow_run_id)
    task_kwargs["workflow_node_id"] = node.id
    # Only fully graph-managed runs hand output publication / notification off
    # to connected downstream nodes.
    if workflow_context.execution_mode == "graph" and node.step in {
        StepName.BLENDER_STILL,
        StepName.EXPORT_BLEND,
        StepName.BLENDER_TURNTABLE,
    }:
        connected_output_node_ids = _connected_node_ids_by_step(
            workflow_context,
            node_id=node.id,
            step=StepName.OUTPUT_SAVE,
            direction="downstream",
        )
        connected_notify_node_ids = _connected_node_ids_by_step(
            workflow_context,
            node_id=node.id,
            step=StepName.NOTIFY,
            direction="downstream",
        )
        if connected_output_node_ids:
            # A connected output_save node makes the graph authoritative for saving.
            task_kwargs["publish_asset_enabled"] = False
            task_kwargs["graph_authoritative_output_enabled"] = True
            task_kwargs["graph_output_node_ids"] = connected_output_node_ids
        if connected_notify_node_ids:
            task_kwargs["emit_legacy_notifications"] = True
            task_kwargs["graph_notify_node_ids"] = connected_notify_node_ids
    if workflow_context.execution_mode == "shadow":
        # Shadow runs observe only: no publication, events, or job documents,
        # and outputs are suffixed so they never collide with real artifacts.
        task_kwargs["publish_asset_enabled"] = False
        task_kwargs["emit_events"] = False
        task_kwargs["job_document_enabled"] = False
        task_kwargs["output_name_suffix"] = f"shadow-{str(workflow_context.workflow_run_id)[:8]}"
    return task_kwargs
def _execute_order_line_setup(
*,
session: Session,
workflow_context: WorkflowContext,
state: WorkflowGraphState,
node,
node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
del node
del node_params
shadow_mode = workflow_context.execution_mode == "shadow"
if shadow_mode:
@@ -409,8 +853,10 @@ def _execute_resolve_template(
session: Session,
workflow_context: WorkflowContext,
state: WorkflowGraphState,
node,
node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
del node
del workflow_context, node_params
if state.setup is None or not state.setup.is_ready:
if state.setup is not None and state.setup.status == "skip":
@@ -426,8 +872,10 @@ def _execute_material_map_resolve(
session: Session,
workflow_context: WorkflowContext,
state: WorkflowGraphState,
node,
node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
del node
del session, workflow_context, node_params
if state.setup is None or not state.setup.is_ready:
if state.setup is not None and state.setup.status == "skip":
@@ -457,8 +905,10 @@ def _execute_auto_populate_materials(
session: Session,
workflow_context: WorkflowContext,
state: WorkflowGraphState,
node,
node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
del node
del node_params
if state.setup is None or state.setup.cad_file is None:
if state.setup is not None and state.setup.status == "skip":
@@ -487,8 +937,10 @@ def _execute_glb_bbox(
session: Session,
workflow_context: WorkflowContext,
state: WorkflowGraphState,
node,
node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
del node
del session, workflow_context
if state.setup is None or state.setup.cad_file is None:
if state.setup is not None and state.setup.status == "skip":
@@ -510,10 +962,198 @@ def _execute_glb_bbox(
return _serialize_bbox_result(result), "completed", None
def _execute_resolve_step_path(
    *,
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
    node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
    """Bridge executor: resolve the CAD file for the run and report its paths."""
    del node, node_params
    resolved = _resolve_cad_file_context(session, workflow_context, state)
    payload = _serialize_cad_file_result(resolved)
    return payload, "completed", None
def _execute_stl_cache_generate(
    *,
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
    node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
    """Bridge executor: compatibility no-op for the legacy STL cache step."""
    del node, node_params
    cad_file = _resolve_cad_file_context(session, workflow_context, state)
    cache_dir = Path(cad_file.stored_path).parent / "stl_cache"
    payload = _serialize_cad_file_result(cad_file)
    payload["cache_mode"] = "compatibility_noop"
    payload["cache_required"] = False
    payload["stl_cache_dir"] = str(cache_dir)
    payload["reason"] = "HartOMat CAD graph uses direct OCC/GLB export instead of legacy STL cache generation."
    return payload, "completed", None
def _execute_thumbnail_render_request(
    *,
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
    node_params: dict[str, Any],
    renderer: str,
) -> tuple[dict[str, Any], str, str | None]:
    """Bridge executor: record a thumbnail render request for a later save node."""
    del node
    cad_file = _resolve_cad_file_context(session, workflow_context, state)
    payload: dict[str, Any] = {
        "cad_file_id": str(cad_file.id),
        "step_path": cad_file.stored_path,
        "renderer": renderer,
        "thumbnail_request": True,
    }
    # Pass through only explicitly supplied render options.
    for option in ("width", "height", "transparent_bg", "render_engine", "samples"):
        supplied = node_params.get(option)
        if supplied not in (None, ""):
            payload[option] = supplied
    return payload, "completed", None
def _execute_blender_thumbnail_render(
    *,
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
    node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
    """Bridge executor: thumbnail render request using the Blender renderer."""
    forwarded = {
        "session": session,
        "workflow_context": workflow_context,
        "state": state,
        "node": node,
        "node_params": node_params,
    }
    return _execute_thumbnail_render_request(renderer="blender", **forwarded)
def _execute_threejs_thumbnail_render(
    *,
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
    node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
    """Bridge executor: thumbnail render request using the three.js renderer."""
    forwarded = {
        "session": session,
        "workflow_context": workflow_context,
        "state": state,
        "node": node,
        "node_params": node_params,
    }
    return _execute_thumbnail_render_request(renderer="threejs", **forwarded)
def _execute_output_save(
    *,
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
    node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
    """Bridge executor: record output publication state; actual saving is deferred."""
    del session, node_params
    setup = state.setup
    if setup is None or setup.order_line is None:
        raise WorkflowGraphRuntimeError("output_save requires an order_line_setup result")
    if setup.status == "skip":
        return _serialize_setup_result(setup), "skipped", setup.reason
    if not setup.is_ready:
        return _serialize_setup_result(setup), "failed", setup.reason or "output_save_blocked"
    is_shadow = workflow_context.execution_mode == "shadow"
    order_line = setup.order_line
    payload: dict[str, Any] = {
        "order_line_id": str(order_line.id),
        "authoritative_result_path": order_line.result_path,
        "shadow_mode": is_shadow,
    }
    artifacts = _connected_upstream_artifacts(workflow_context, state, node.id)
    if is_shadow:
        payload["publication_mode"] = "shadow_observer_only"
    elif any(entry["publish_asset_enabled"] for entry in artifacts):
        payload["publication_mode"] = "deferred_to_render_task"
    else:
        payload["publication_mode"] = "awaiting_graph_authoritative_save"
    if artifacts:
        payload["artifact_count"] = len(artifacts)
        payload["upstream_artifacts"] = artifacts
    if state.template is not None and state.template.template is not None:
        payload["template_name"] = state.template.template.name
    if state.materials is not None:
        payload["material_map_count"] = len(state.materials.material_map or {})
    return payload, "completed", None
def _execute_notify(
    *,
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
    node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
    """Bridge executor: report whether notification handoff is armed for this node."""
    del session, node_params
    setup = state.setup
    if setup is None or setup.order_line is None:
        raise WorkflowGraphRuntimeError("notify requires an order_line_setup result")
    if setup.status == "skip":
        return _serialize_setup_result(setup), "skipped", setup.reason
    if not setup.is_ready:
        return _serialize_setup_result(setup), "failed", setup.reason or "notify_blocked"
    is_shadow = workflow_context.execution_mode == "shadow"
    payload: dict[str, Any] = {
        "order_line_id": str(setup.order_line.id),
        "shadow_mode": is_shadow,
        "channel": "audit_log",
    }
    if is_shadow:
        payload["notification_mode"] = "shadow_suppressed"
        return payload, "skipped", "shadow mode suppresses user notifications"
    armed = [
        entry["node_id"]
        for entry in _connected_upstream_artifacts(workflow_context, state, node.id)
        if entry["notify_handoff_enabled"]
    ]
    if not armed:
        payload["notification_mode"] = "not_armed"
        return payload, "skipped", "No graph render task is configured for notification handoff"
    payload["notification_mode"] = "deferred_to_render_task"
    payload["armed_node_ids"] = armed
    payload["armed_node_count"] = len(armed)
    return payload, "completed", None
# Maps each in-process ("bridge") workflow step to its synchronous executor.
# Steps absent here are presumably dispatched as Celery tasks instead
# (STEP_TASK_MAP is imported above) — verify against execute_graph_workflow.
_BRIDGE_EXECUTORS = {
    StepName.RESOLVE_STEP_PATH: _execute_resolve_step_path,
    StepName.BLENDER_RENDER: _execute_blender_thumbnail_render,
    StepName.THREEJS_RENDER: _execute_threejs_thumbnail_render,
    StepName.ORDER_LINE_SETUP: _execute_order_line_setup,
    StepName.RESOLVE_TEMPLATE: _execute_resolve_template,
    StepName.MATERIAL_MAP_RESOLVE: _execute_material_map_resolve,
    StepName.AUTO_POPULATE_MATERIALS: _execute_auto_populate_materials,
    StepName.GLB_BBOX: _execute_glb_bbox,
    StepName.STL_CACHE_GENERATE: _execute_stl_cache_generate,
    StepName.OUTPUT_SAVE: _execute_output_save,
    StepName.NOTIFY: _execute_notify,
}