1396 lines
52 KiB
Python
1396 lines
52 KiB
Python
from __future__ import annotations
|
|
|
|
import logging
|
|
import time
|
|
import uuid
|
|
from dataclasses import dataclass, field
|
|
from datetime import datetime
|
|
from pathlib import Path
|
|
from typing import Any
|
|
|
|
from sqlalchemy import select
|
|
from sqlalchemy.orm import Session, selectinload
|
|
|
|
from app.config import settings
|
|
from app.core.render_paths import build_order_line_export_path, build_order_line_step_render_path
|
|
from app.core.process_steps import StepName
|
|
from app.domains.products.models import CadFile
|
|
from app.domains.rendering.models import WorkflowNodeResult, WorkflowRun
|
|
from app.domains.rendering.workflow_executor import (
|
|
STEP_TASK_MAP,
|
|
WorkflowContext,
|
|
WorkflowDispatchResult,
|
|
WorkflowTaskDispatchSpec,
|
|
)
|
|
from app.domains.rendering.workflow_node_registry import get_node_definition
|
|
from app.domains.rendering.workflow_runtime_services import (
|
|
_resolve_render_output_extension,
|
|
AutoPopulateMaterialsResult,
|
|
BBoxResolutionResult,
|
|
MaterialResolutionResult,
|
|
OrderLineRenderSetupResult,
|
|
TemplateResolutionResult,
|
|
auto_populate_materials_for_cad,
|
|
build_order_line_render_invocation,
|
|
extract_template_input_overrides,
|
|
prepare_order_line_render_context,
|
|
resolve_cad_bbox,
|
|
resolve_order_line_material_map,
|
|
resolve_order_line_template_context,
|
|
resolve_render_position_context,
|
|
)
|
|
|
|
# Module-level logger; messages in this module use the "[WORKFLOW]" prefix convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
|
class WorkflowGraphRuntimeError(RuntimeError):
    """Raised when a workflow graph node fails terminally during execution.

    Replaces the bare ``pass`` body with a docstring so the exception's
    purpose is self-documenting; behavior is unchanged.
    """
|
|
|
|
|
|
@dataclass(slots=True)
class WorkflowGraphState:
    """Mutable scratch state shared across nodes while one graph run executes."""

    # Result of the order_line_setup bridge step; gates downstream render dispatch.
    setup: OrderLineRenderSetupResult | None = None
    # Lazily resolved CAD file for cad_file-scoped workflows (see _resolve_cad_file_context).
    cad_file: CadFile | None = None
    # Template resolution result consumed when building render task kwargs.
    template: TemplateResolutionResult | None = None
    # Material-map resolution result consumed when building render task kwargs.
    materials: MaterialResolutionResult | None = None
    auto_populate: AutoPopulateMaterialsResult | None = None
    bbox: BBoxResolutionResult | None = None
    # Per-node output payloads keyed by node id; read by downstream nodes.
    node_outputs: dict[str, dict[str, Any]] = field(default_factory=dict)
|
|
|
|
|
|
# Steps that depend on a successful order_line_setup; they are skipped when the
# setup bridge step did not complete with a "ready" status.
_ORDER_LINE_RENDER_STEPS = {
    StepName.BLENDER_STILL,
    StepName.BLENDER_TURNTABLE,
    StepName.EXPORT_BLEND,
    StepName.OUTPUT_SAVE,
    StepName.NOTIFY,
}
|
|
|
|
# Whitelist of kwargs forwarded to the Blender still-render Celery task; any
# other key in the merged defaults/params is dropped (see _build_task_kwargs).
_STILL_TASK_KEYS = {
    "width",
    "height",
    "engine",
    "samples",
    "smooth_angle",
    "cycles_device",
    "transparent_bg",
    "part_colors",
    "template_path",
    "target_collection",
    "material_library_path",
    "material_map",
    "part_names_ordered",
    "lighting_only",
    "shadow_catcher",
    "rotation_x",
    "rotation_y",
    "rotation_z",
    "noise_threshold",
    "denoiser",
    "denoising_input_passes",
    "denoising_prefilter",
    "denoising_quality",
    "denoising_use_gpu",
    "usd_path",
    "focal_length_mm",
    "sensor_width_mm",
    "material_override",
    "render_engine",
    "resolution",
    "template_inputs",
}
|
|
|
|
# Whitelist of kwargs forwarded to the Blender turntable Celery task
# (see _build_task_kwargs); includes animation timing keys like fps/duration_s.
_TURNTABLE_TASK_KEYS = {
    "output_name",
    "engine",
    "render_engine",
    "samples",
    "smooth_angle",
    "cycles_device",
    "transparent_bg",
    "width",
    "height",
    "frame_count",
    "fps",
    "turntable_degrees",
    "turntable_axis",
    "bg_color",
    "template_path",
    "target_collection",
    "material_library_path",
    "material_map",
    "part_names_ordered",
    "lighting_only",
    "shadow_catcher",
    "camera_orbit",
    "rotation_x",
    "rotation_y",
    "rotation_z",
    "focal_length_mm",
    "sensor_width_mm",
    "material_override",
    "template_inputs",
    "duration_s",
}
|
|
|
|
# Whitelist of kwargs forwarded to the thumbnail-save Celery task
# (see the THUMBNAIL_SAVE branch of _build_task_kwargs).
_THUMBNAIL_TASK_KEYS = {
    "renderer",
    "render_engine",
    "samples",
    "width",
    "height",
    "transparent_bg",
}
|
|
|
|
# Render-quality settings owned by the output type; node params may only
# override these when use_custom_render_settings is set
# (see _filter_graph_render_overrides).
_AUTHORITATIVE_RENDER_SETTING_KEYS = {
    "render_engine",
    "engine",
    "samples",
    "width",
    "height",
    "transparent_bg",
    "cycles_device",
    "noise_threshold",
    "denoiser",
    "denoising_input_passes",
    "denoising_prefilter",
    "denoising_quality",
    "denoising_use_gpu",
    "focal_length_mm",
    "sensor_width_mm",
    "bg_color",
}
|
|
|
|
|
|
def _inspect_active_worker_queues(timeout: float = 1.0) -> set[str]:
    """Best-effort snapshot of queue names currently consumed by Celery workers.

    Returns an empty set when the broker cannot be inspected (logged at INFO,
    never raised), so callers can treat "unknown" the same as "none".
    """
    from app.tasks.celery_app import celery_app

    try:
        inspect_result = celery_app.control.inspect(timeout=timeout)
        active_queues = inspect_result.active_queues() or {}
    except Exception as exc:
        # Broker/worker inspection is advisory only; degrade gracefully.
        logger.info("[WORKFLOW] Could not inspect active Celery queues: %s", exc)
        return set()

    discovered: set[str] = set()
    for worker_queues in active_queues.values():
        for queue_info in worker_queues or []:
            if not isinstance(queue_info, dict):
                continue
            raw_name = queue_info.get("name")
            if isinstance(raw_name, str) and raw_name.strip():
                discovered.add(raw_name.strip())
    return discovered
|
|
|
|
|
|
def _resolve_shadow_render_queue(
    *,
    workflow_context: WorkflowContext,
    node,
    active_queue_names: set[str],
) -> str | None:
    """Pick an alternate Celery queue for shadow-mode render nodes.

    Returns the configured shadow queue only when (a) the run is in shadow
    mode, (b) the node is a render/export step, (c) a non-default queue is
    configured, and (d) a worker is actively consuming it. Otherwise returns
    None to fall back to default routing.
    """
    if workflow_context.execution_mode != "shadow":
        return None

    render_steps = {
        StepName.BLENDER_STILL,
        StepName.BLENDER_TURNTABLE,
        StepName.EXPORT_BLEND,
    }
    if node.step not in render_steps:
        return None

    preferred_queue = (settings.workflow_shadow_render_queue or "").strip()
    # "asset_pipeline" is the default route, so requesting it is a no-op.
    if not preferred_queue or preferred_queue == "asset_pipeline":
        return None
    if preferred_queue in active_queue_names:
        return preferred_queue

    logger.info(
        "[WORKFLOW] Preferred shadow render queue %s unavailable for node %s; using default routing",
        preferred_queue,
        node.id,
    )
    return None
|
|
|
|
|
|
def _filter_graph_render_overrides(step: StepName, params: dict[str, Any]) -> dict[str, Any]:
    """Strip authoritative render-quality settings from node params.

    When the node opts in via ``use_custom_render_settings`` all params pass
    through unchanged (minus the flag itself). Otherwise, keys owned by the
    output type are removed, while turntable timing keys are preserved.
    """
    normalized = dict(params)
    if bool(normalized.pop("use_custom_render_settings", False)):
        return normalized

    filtered = {
        key: value
        for key, value in normalized.items()
        if key not in _AUTHORITATIVE_RENDER_SETTING_KEYS
    }

    if step == StepName.BLENDER_TURNTABLE:
        # Turntable timing remains workflow-specific even when render quality inherits from the output type.
        for timing_key in ("fps", "duration_s", "frame_count", "turntable_degrees", "turntable_axis"):
            timing_value = normalized.get(timing_key)
            if timing_value not in (None, ""):
                filtered[timing_key] = timing_value

    return filtered
|
|
|
|
|
|
def find_unsupported_graph_nodes(workflow_context: WorkflowContext) -> list[str]:
    """Return ids of nodes that neither a bridge executor nor a Celery task can run."""

    def _is_supported(node) -> bool:
        # A node is runnable if it has an in-process bridge executor or a mapped task.
        return node.step in _BRIDGE_EXECUTORS or STEP_TASK_MAP.get(node.step) is not None

    return [node.id for node in workflow_context.ordered_nodes if not _is_supported(node)]
|
|
|
|
|
|
def execute_graph_workflow(
    session: Session,
    workflow_context: WorkflowContext,
    *,
    dispatch_tasks: bool = True,
) -> WorkflowDispatchResult:
    """Execute a workflow graph run node-by-node in topological order.

    Bridge-executor steps run synchronously in-process (with retry handling);
    task-mapped steps are dispatched to Celery (or recorded as dry-run specs
    when ``dispatch_tasks`` is False). Node results and the run status are
    persisted via ``session.flush()`` as execution progresses.

    Raises:
        ValueError: if ``workflow_context.workflow_run_id`` is missing.
        WorkflowGraphRuntimeError: when a bridge node fails after exhausting
            its retry budget.
    """
    if workflow_context.workflow_run_id is None:
        raise ValueError("workflow_context.workflow_run_id is required for graph execution")

    # Load the run plus all its node-result rows in one query.
    run = session.execute(
        select(WorkflowRun)
        .where(WorkflowRun.id == workflow_context.workflow_run_id)
        .options(selectinload(WorkflowRun.node_results))
    ).scalar_one()

    node_results = {node_result.node_name: node_result for node_result in run.node_results}
    state = WorkflowGraphState()
    task_ids: list[str] = []
    node_task_ids: dict[str, str] = {}
    skipped_node_ids: list[str] = []
    task_specs: list[WorkflowTaskDispatchSpec] = []
    # Queue inspection is only needed to route shadow-mode render nodes.
    active_queue_names = (
        _inspect_active_worker_queues()
        if workflow_context.execution_mode == "shadow"
        else set()
    )

    for node in workflow_context.ordered_nodes:
        node_result = node_results.get(node.id)
        if node_result is None:
            # Result rows are created ahead of execution; a missing row is unexpected.
            logger.warning(
                "[WORKFLOW] Missing WorkflowNodeResult row for node %s on run %s",
                node.id,
                run.id,
            )
            continue

        retry_policy = _retry_policy(node.params)
        failure_policy = _failure_policy(node.params)
        metadata = _base_output(node_result.output, node)
        metadata["retry_policy"] = retry_policy
        metadata["failure_policy"] = failure_policy
        definition = get_node_definition(node.step)
        bridge_executor = _BRIDGE_EXECUTORS.get(node.step)

        if bridge_executor is not None:
            # --- Synchronous in-process execution with bounded retries ---
            max_attempts = retry_policy["max_attempts"]
            last_error: str | None = None

            for attempt in range(1, max_attempts + 1):
                started = time.perf_counter()
                attempt_output = dict(metadata)
                attempt_output["attempt_count"] = attempt
                attempt_output["max_attempts"] = max_attempts
                node_result.status = "running"
                node_result.output = attempt_output
                session.flush()

                try:
                    payload, status, log_message = bridge_executor(
                        session=session,
                        workflow_context=workflow_context,
                        state=state,
                        node=node,
                        node_params=node.params,
                    )
                except Exception as exc:
                    # Exception path: retry while budget remains, else persist
                    # the failure and halt the whole run.
                    last_error = str(exc)[:2000]
                    if attempt < max_attempts:
                        retry_output = dict(attempt_output)
                        retry_output["last_error"] = last_error
                        retry_output["retry_state"] = "retrying"
                        node_result.status = "retrying"
                        node_result.log = f"Attempt {attempt}/{max_attempts} failed: {last_error}"
                        node_result.output = retry_output
                        node_result.duration_s = round(time.perf_counter() - started, 4)
                        session.flush()
                        continue

                    failed_output = dict(attempt_output)
                    failed_output["last_error"] = last_error
                    failed_output["retry_exhausted"] = True
                    node_result.status = "failed"
                    node_result.log = last_error
                    node_result.duration_s = round(time.perf_counter() - started, 4)
                    node_result.output = failed_output
                    session.flush()
                    raise WorkflowGraphRuntimeError(
                        f"Node '{node.id}' ({node.step.value}) failed: {exc}"
                    ) from exc

                # Successful call: expose the payload to downstream nodes.
                if payload:
                    metadata.update(payload)
                    state.node_outputs[node.id] = payload

                final_output = dict(metadata)
                final_output["attempt_count"] = attempt
                final_output["max_attempts"] = max_attempts
                if last_error is not None:
                    # An earlier attempt failed but this one succeeded.
                    final_output["last_error"] = last_error
                    final_output["retry_state"] = "recovered"

                node_result.status = status
                node_result.log = log_message
                node_result.output = final_output
                node_result.duration_s = round(time.perf_counter() - started, 4)
                session.flush()

                if status == "failed":
                    # Executor returned a soft failure (no exception raised);
                    # same retry/halt handling as the exception path.
                    last_error = (log_message or "unknown error")[:2000]
                    if attempt < max_attempts:
                        retry_output = dict(final_output)
                        retry_output["last_error"] = last_error
                        retry_output["retry_state"] = "retrying"
                        node_result.status = "retrying"
                        node_result.log = f"Attempt {attempt}/{max_attempts} failed: {last_error}"
                        node_result.output = retry_output
                        session.flush()
                        continue

                    failed_output = dict(final_output)
                    failed_output["last_error"] = last_error
                    failed_output["retry_exhausted"] = True
                    node_result.status = "failed"
                    node_result.log = last_error
                    node_result.output = failed_output
                    session.flush()
                    raise WorkflowGraphRuntimeError(
                        f"Node '{node.id}' ({node.step.value}) failed: {last_error}"
                    )

                if status == "skipped":
                    skipped_node_ids.append(node.id)
                # Success or skip: leave the retry loop.
                break
            continue

        task_name = STEP_TASK_MAP.get(node.step)
        if task_name is not None:
            # --- Asynchronous execution via Celery task dispatch ---
            if node.step in _ORDER_LINE_RENDER_STEPS and state.setup is not None and not state.setup.is_ready:
                # Render steps are pointless without a ready order-line setup.
                metadata["blocked_by"] = "order_line_setup"
                node_result.status = "skipped"
                node_result.output = metadata
                node_result.log = (
                    f"Skipped because order_line_setup did not complete successfully "
                    f"({state.setup.status})"
                )
                node_result.duration_s = None
                session.flush()
                skipped_node_ids.append(node.id)
                continue

            task_kwargs = _build_task_kwargs(
                session=session,
                workflow_context=workflow_context,
                state=state,
                node=node,
            )

            target_queue = _resolve_shadow_render_queue(
                workflow_context=workflow_context,
                node=node,
                active_queue_names=active_queue_names,
            )
            if dispatch_tasks:
                from app.tasks.celery_app import celery_app

                if target_queue:
                    result = celery_app.send_task(
                        task_name,
                        args=[workflow_context.context_id],
                        kwargs=task_kwargs,
                        queue=target_queue,
                    )
                else:
                    result = celery_app.send_task(
                        task_name,
                        args=[workflow_context.context_id],
                        kwargs=task_kwargs,
                    )
                task_id = result.id
            else:
                # Dry-run mode: record what would be dispatched, with a synthetic id.
                task_id = str(uuid.uuid4())
                task_specs.append(
                    WorkflowTaskDispatchSpec(
                        node_id=node.id,
                        task_name=task_name,
                        args=[workflow_context.context_id],
                        kwargs=dict(task_kwargs),
                        task_id=task_id,
                        queue=target_queue,
                    )
                )
            metadata["task_id"] = task_id
            metadata["task_queue"] = target_queue or "asset_pipeline"
            if definition is not None:
                metadata["execution_kind"] = definition.execution_kind
            metadata["attempt_count"] = 1
            metadata["max_attempts"] = retry_policy["max_attempts"]
            metadata["execution_mode"] = workflow_context.execution_mode
            # Predicted artifact paths let downstream nodes reference this
            # node's output before the task has actually run.
            predicted_output = _predict_task_output_metadata(
                workflow_context=workflow_context,
                state=state,
                node=node,
                task_kwargs=task_kwargs,
            )
            if predicted_output:
                metadata.update(predicted_output)
            node_result.status = "queued"
            node_result.output = metadata
            node_result.log = None
            node_result.duration_s = None
            state.node_outputs[node.id] = dict(metadata)
            session.flush()
            task_ids.append(task_id)
            node_task_ids[node.id] = task_id
            logger.info(
                "[WORKFLOW] Dispatched node %r (step=%s, mode=%s, run=%s) -> Celery task %s",
                node.id,
                node.step,
                workflow_context.execution_mode,
                workflow_context.workflow_run_id,
                task_id,
            )
            continue

        # --- Neither a bridge executor nor a mapped task: mark as skipped ---
        metadata["execution_kind"] = definition.execution_kind if definition is not None else "bridge"
        node_result.status = "skipped"
        node_result.output = metadata
        node_result.log = f"Graph runtime not implemented for step '{node.step.value}'"
        node_result.duration_s = None
        session.flush()
        skipped_node_ids.append(node.id)

    # Summarize the run: failed > pending (tasks in flight) > completed.
    run.celery_task_id = task_ids[0] if task_ids else None
    if any(node_result.status == "failed" for node_result in run.node_results):
        run.status = "failed"
        run.completed_at = datetime.utcnow()
    elif task_ids:
        run.status = "pending"
        run.completed_at = None
    else:
        run.status = "completed"
        run.completed_at = datetime.utcnow()
    session.flush()

    return WorkflowDispatchResult(
        context=workflow_context,
        task_ids=task_ids,
        node_task_ids=node_task_ids,
        skipped_node_ids=skipped_node_ids,
        task_specs=task_specs,
    )
|
|
|
|
|
|
def _base_output(existing: dict[str, Any] | None, node) -> dict[str, Any]:
    """Seed a node-output dict with step, label, and execution-kind defaults.

    Existing keys are preserved; defaults are only filled in when absent.
    """
    metadata = dict(existing) if existing else {}
    metadata.setdefault("step", node.step.value)
    ui = node.ui
    if ui and ui.label:
        metadata.setdefault("label", ui.label)
    definition = get_node_definition(node.step)
    if definition is not None:
        metadata.setdefault("execution_kind", definition.execution_kind)
    return metadata
|
|
|
|
|
|
def _retry_policy(node_params: dict[str, Any]) -> dict[str, Any]:
|
|
raw = node_params.get("retry_policy")
|
|
if not isinstance(raw, dict):
|
|
raw = {}
|
|
try:
|
|
max_attempts = int(raw.get("max_attempts", 1))
|
|
except (TypeError, ValueError):
|
|
max_attempts = 1
|
|
return {
|
|
"max_attempts": max(1, min(max_attempts, 5)),
|
|
}
|
|
|
|
|
|
def _failure_policy(node_params: dict[str, Any]) -> dict[str, Any]:
|
|
raw = node_params.get("failure_policy")
|
|
if not isinstance(raw, dict):
|
|
raw = {}
|
|
return {
|
|
"halt_workflow": bool(raw.get("halt_workflow", True)),
|
|
"fallback_to_legacy": bool(raw.get("fallback_to_legacy", False)),
|
|
}
|
|
|
|
|
|
def _serialize_setup_result(result: OrderLineRenderSetupResult) -> dict[str, Any]:
|
|
payload: dict[str, Any] = {
|
|
"setup_status": result.status,
|
|
"reason": result.reason,
|
|
"materials_source_count": len(result.materials_source or []),
|
|
"part_colors_count": len(result.part_colors or {}),
|
|
"usd_render_path": str(result.usd_render_path) if result.usd_render_path else None,
|
|
"glb_reuse_path": str(result.glb_reuse_path) if result.glb_reuse_path else None,
|
|
}
|
|
if result.order_line is not None:
|
|
payload["order_line_id"] = str(result.order_line.id)
|
|
payload["product_id"] = str(result.order_line.product_id) if result.order_line.product_id else None
|
|
payload["output_type_id"] = str(result.order_line.output_type_id) if result.order_line.output_type_id else None
|
|
if result.order is not None:
|
|
payload["order_id"] = str(result.order.id)
|
|
payload["order_status"] = result.order.status.value if getattr(result.order, "status", None) else None
|
|
if result.cad_file is not None:
|
|
payload["cad_file_id"] = str(result.cad_file.id)
|
|
payload["step_path"] = result.cad_file.stored_path
|
|
return payload
|
|
|
|
|
|
def _serialize_template_result(result: TemplateResolutionResult) -> dict[str, Any]:
|
|
return {
|
|
"template_id": str(result.template.id) if result.template is not None else None,
|
|
"template_name": result.template.name if result.template is not None else None,
|
|
"template_path": result.template.blend_file_path if result.template is not None else None,
|
|
"material_library": result.material_library,
|
|
"material_map": result.material_map,
|
|
"material_map_count": len(result.material_map or {}),
|
|
"use_materials": result.use_materials,
|
|
"override_material": result.override_material,
|
|
"target_collection": result.target_collection,
|
|
"lighting_only": result.lighting_only,
|
|
"shadow_catcher": result.shadow_catcher,
|
|
"camera_orbit": result.camera_orbit,
|
|
"category_key": result.category_key,
|
|
"output_type_id": result.output_type_id,
|
|
"workflow_input_schema": result.workflow_input_schema,
|
|
"template_inputs": result.template_inputs,
|
|
"template_input_count": len(result.template_inputs or {}),
|
|
}
|
|
|
|
|
|
def _serialize_material_result(result: MaterialResolutionResult) -> dict[str, Any]:
|
|
return {
|
|
"material_map": result.material_map,
|
|
"material_map_count": len(result.material_map or {}),
|
|
"use_materials": result.use_materials,
|
|
"override_material": result.override_material,
|
|
"source_material_count": result.source_material_count,
|
|
"resolved_material_count": result.resolved_material_count,
|
|
}
|
|
|
|
|
|
def _serialize_auto_populate_result(result: AutoPopulateMaterialsResult) -> dict[str, Any]:
|
|
return {
|
|
"cad_file_id": result.cad_file_id,
|
|
"updated_product_ids": result.updated_product_ids,
|
|
"updated_product_count": len(result.updated_product_ids),
|
|
"queued_thumbnail_regeneration": result.queued_thumbnail_regeneration,
|
|
"part_colors": result.part_colors,
|
|
"part_colors_count": len(result.part_colors or {}),
|
|
"cad_parts": result.cad_parts,
|
|
}
|
|
|
|
|
|
def _serialize_bbox_result(result: BBoxResolutionResult) -> dict[str, Any]:
|
|
return {
|
|
"bbox_data": result.bbox_data,
|
|
"has_bbox": result.has_bbox,
|
|
"source_kind": result.source_kind,
|
|
"step_path": result.step_path,
|
|
"glb_path": result.glb_path,
|
|
}
|
|
|
|
|
|
def _serialize_cad_file_result(cad_file: CadFile) -> dict[str, Any]:
|
|
parsed_objects = cad_file.parsed_objects or {}
|
|
objects = parsed_objects.get("objects")
|
|
object_count = len(objects) if isinstance(objects, list) else None
|
|
return {
|
|
"cad_file_id": str(cad_file.id),
|
|
"step_path": cad_file.stored_path,
|
|
"original_name": cad_file.original_name,
|
|
"processing_status": cad_file.processing_status.value if getattr(cad_file, "processing_status", None) else None,
|
|
"object_count": object_count,
|
|
"has_parsed_objects": bool(parsed_objects),
|
|
"gltf_path": cad_file.gltf_path,
|
|
}
|
|
|
|
|
|
def _workflow_node_ids(workflow_context: WorkflowContext, step: StepName) -> list[str]:
|
|
return [node.id for node in workflow_context.ordered_nodes if node.step == step]
|
|
|
|
|
|
def _workflow_node_map(workflow_context: WorkflowContext) -> dict[str, Any]:
|
|
return {node.id: node for node in workflow_context.ordered_nodes}
|
|
|
|
|
|
def _upstream_node_ids(workflow_context: WorkflowContext, node_id: str) -> list[str]:
|
|
return [edge.from_node for edge in workflow_context.edges if edge.to_node == node_id]
|
|
|
|
|
|
def _downstream_node_ids(workflow_context: WorkflowContext, node_id: str) -> list[str]:
|
|
return [edge.to_node for edge in workflow_context.edges if edge.from_node == node_id]
|
|
|
|
|
|
def _connected_node_ids_by_step(
    workflow_context: WorkflowContext,
    *,
    node_id: str,
    step: StepName,
    direction: str,
) -> list[str]:
    """Return directly connected node ids (one hop) whose step equals *step*.

    *direction* must be "upstream" or "downstream"; anything else raises
    ValueError.
    """
    resolvers = {
        "upstream": _upstream_node_ids,
        "downstream": _downstream_node_ids,
    }
    resolver = resolvers.get(direction)
    if resolver is None:
        raise ValueError(f"Unsupported graph direction: {direction}")

    node_map = _workflow_node_map(workflow_context)
    matched: list[str] = []
    for candidate_id in resolver(workflow_context, node_id):
        candidate = node_map.get(candidate_id)
        if candidate is not None and candidate.step == step:
            matched.append(candidate_id)
    return matched
|
|
|
|
|
|
def _connected_upstream_artifacts(
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node_id: str,
) -> list[dict[str, Any]]:
    """Return artifact summaries from nodes directly upstream of *node_id*.

    Fix: the original collected every artifact in *state* before checking
    whether *node_id* has any upstream edges at all. The empty check now
    happens first, so the artifact scan is skipped when the result is
    guaranteed to be [] (behavior is otherwise identical —
    _collect_upstream_artifacts has no side effects).
    """
    preferred_upstream_ids = set(_upstream_node_ids(workflow_context, node_id))
    if not preferred_upstream_ids:
        # No incoming edges: nothing can be connected, skip the scan.
        return []
    artifacts = _collect_upstream_artifacts(state)
    return [artifact for artifact in artifacts if artifact["node_id"] in preferred_upstream_ids]
|
|
|
|
|
|
def _predict_task_output_metadata(
    *,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
    task_kwargs: dict[str, Any],
) -> dict[str, Any]:
    """Predict where a queued task will write its artifact, before it runs.

    Returns a metadata dict (artifact role, predicted path/asset type, and
    output/notify handoff flags mirrored from *task_kwargs*) for thumbnail,
    still, blend-export, and turntable steps; {} for any other step or when
    the order-line setup context is not available.
    """
    if node.step == StepName.THUMBNAIL_SAVE:
        # Three.js renders and transparent backgrounds require PNG; otherwise JPG.
        renderer = str(task_kwargs.get("renderer") or "blender")
        output_format = "png" if renderer == "threejs" or bool(task_kwargs.get("transparent_bg")) else "jpg"
        output_dir = Path(settings.upload_dir) / "thumbnails"
        return {
            "artifact_role": "thumbnail_output",
            "predicted_output_path": str(output_dir / f"{workflow_context.context_id}.{output_format}"),
            "predicted_asset_type": "thumbnail",
            "publish_asset_enabled": True,
            "graph_authoritative_output_enabled": True,
            "graph_output_node_ids": [node.id],
            "notify_handoff_enabled": False,
        }

    # The remaining predictions all need the order-line render context.
    if state.setup is None or state.setup.order_line is None or state.setup.cad_file is None:
        return {}

    step_path = Path(state.setup.cad_file.stored_path)
    # Shadow runs set a suffix so outputs never collide with real renders.
    output_name_suffix = task_kwargs.get("output_name_suffix")
    order_line_id = str(state.setup.order_line.id)

    if node.step == StepName.BLENDER_STILL:
        output_extension = _resolve_render_output_extension(state.setup.order_line)
        # Defend against unexpected extensions from the output type.
        if output_extension not in {"png", "jpg", "webp"}:
            output_extension = "png"
        output_filename = f"line_{order_line_id}.{output_extension}"
        if output_name_suffix:
            output_filename = f"line_{order_line_id}_{output_name_suffix}.{output_extension}"
        return {
            "artifact_role": "render_output",
            "predicted_output_path": str(
                build_order_line_step_render_path(step_path, order_line_id, output_filename)
            ),
            "predicted_asset_type": "still",
            "publish_asset_enabled": bool(task_kwargs.get("publish_asset_enabled", True)),
            "graph_authoritative_output_enabled": bool(
                task_kwargs.get("graph_authoritative_output_enabled", False)
            ),
            "graph_output_node_ids": list(task_kwargs.get("graph_output_node_ids") or []),
            "notify_handoff_enabled": bool(task_kwargs.get("emit_legacy_notifications", False)),
            "graph_notify_node_ids": list(task_kwargs.get("graph_notify_node_ids") or []),
        }

    if node.step == StepName.EXPORT_BLEND:
        output_filename = f"{step_path.stem}_production.blend"
        if output_name_suffix:
            output_filename = f"{step_path.stem}_production_{output_name_suffix}.blend"
        predicted_output_path = str(build_order_line_export_path(order_line_id, output_filename))
        return {
            "artifact_role": "blend_export",
            "predicted_output_path": predicted_output_path,
            "predicted_asset_type": "blend_production",
            "publish_asset_enabled": bool(task_kwargs.get("publish_asset_enabled", True)),
            "graph_authoritative_output_enabled": bool(
                task_kwargs.get("graph_authoritative_output_enabled", False)
            ),
            "graph_output_node_ids": list(task_kwargs.get("graph_output_node_ids") or []),
            "notify_handoff_enabled": bool(task_kwargs.get("emit_legacy_notifications", False)),
            "graph_notify_node_ids": list(task_kwargs.get("graph_notify_node_ids") or []),
        }

    if node.step == StepName.BLENDER_TURNTABLE:
        output_name = str(task_kwargs.get("output_name") or "turntable")
        output_name_suffix = task_kwargs.get("output_name_suffix")
        if output_name_suffix:
            output_name = f"{output_name}_{output_name_suffix}"
        # An explicit output_dir in the task kwargs wins over the derived path.
        output_dir = task_kwargs.get("output_dir")
        predicted_output_path = None
        if isinstance(output_dir, str) and output_dir.strip():
            predicted_output_path = str(Path(output_dir) / f"{output_name}.mp4")
        else:
            predicted_output_path = str(
                build_order_line_step_render_path(step_path, order_line_id, f"{output_name}.mp4")
            )
        return {
            "artifact_role": "turntable_output",
            "predicted_output_path": predicted_output_path,
            "predicted_asset_type": "turntable",
            "publish_asset_enabled": bool(task_kwargs.get("publish_asset_enabled", True)),
            "graph_authoritative_output_enabled": bool(
                task_kwargs.get("graph_authoritative_output_enabled", False)
            ),
            "graph_output_node_ids": list(task_kwargs.get("graph_output_node_ids") or []),
            "notify_handoff_enabled": bool(task_kwargs.get("emit_legacy_notifications", False)),
            "graph_notify_node_ids": list(task_kwargs.get("graph_notify_node_ids") or []),
        }

    return {}
|
|
|
|
|
|
def _collect_upstream_artifacts(state: WorkflowGraphState) -> list[dict[str, Any]]:
|
|
artifacts: list[dict[str, Any]] = []
|
|
for node_id, output in state.node_outputs.items():
|
|
predicted_output_path = output.get("predicted_output_path")
|
|
artifact_role = output.get("artifact_role")
|
|
if not artifact_role and not predicted_output_path:
|
|
continue
|
|
artifacts.append(
|
|
{
|
|
"node_id": node_id,
|
|
"artifact_role": artifact_role,
|
|
"predicted_output_path": predicted_output_path,
|
|
"predicted_asset_type": output.get("predicted_asset_type"),
|
|
"publish_asset_enabled": bool(output.get("publish_asset_enabled", False)),
|
|
"graph_authoritative_output_enabled": bool(
|
|
output.get("graph_authoritative_output_enabled", False)
|
|
),
|
|
"graph_output_node_ids": list(output.get("graph_output_node_ids") or []),
|
|
"notify_handoff_enabled": bool(output.get("notify_handoff_enabled", False)),
|
|
"task_id": output.get("task_id"),
|
|
**(
|
|
{"graph_notify_node_ids": list(output.get("graph_notify_node_ids") or [])}
|
|
if output.get("graph_notify_node_ids")
|
|
else {}
|
|
),
|
|
}
|
|
)
|
|
return artifacts
|
|
|
|
|
|
def _resolve_cad_file_context(
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
) -> CadFile:
    """Load (and memoize on *state*) the CAD file named by the workflow context.

    Raises WorkflowGraphRuntimeError when the context id is missing, not a
    valid UUID, or does not match a CadFile row.
    """
    cached = state.cad_file
    if cached is not None:
        return cached

    try:
        cad_file_id = workflow_context.context_id
    except AttributeError as exc:
        raise WorkflowGraphRuntimeError("cad_file context_id is missing") from exc

    try:
        parsed_cad_file_id = uuid.UUID(cad_file_id)
    except ValueError as exc:
        raise WorkflowGraphRuntimeError(f"cad_file context is not a valid UUID: {cad_file_id}") from exc

    cad_file = session.get(CadFile, parsed_cad_file_id)
    if cad_file is None:
        raise WorkflowGraphRuntimeError(f"cad_file context not found: {cad_file_id}")

    # Memoize so subsequent nodes in this run reuse the same instance.
    state.cad_file = cad_file
    return cad_file
|
|
|
|
|
|
def _resolve_thumbnail_request(
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node_id: str,
) -> dict[str, Any] | None:
    """Find the most recent node output flagged as a thumbnail request.

    Prefers direct upstream neighbors of *node_id* (scanned latest-first in
    graph order), then falls back to any stored output, newest first.
    """
    upstream_ids = set(_upstream_node_ids(workflow_context, node_id))
    if upstream_ids:
        for candidate in reversed(workflow_context.ordered_nodes):
            if candidate.id not in upstream_ids:
                continue
            candidate_output = state.node_outputs.get(candidate.id)
            if candidate_output and candidate_output.get("thumbnail_request") is True:
                return candidate_output

    # Fallback: newest stored output anywhere in the run that requested a thumbnail.
    return next(
        (
            output
            for output in reversed(list(state.node_outputs.values()))
            if output.get("thumbnail_request") is True
        ),
        None,
    )
|
|
|
|
|
|
def _normalize_turntable_task_kwargs(task_kwargs: dict[str, Any]) -> dict[str, Any]:
|
|
normalized = dict(task_kwargs)
|
|
raw_duration = normalized.get("duration_s")
|
|
if raw_duration in (None, ""):
|
|
return normalized
|
|
|
|
try:
|
|
duration_s = float(raw_duration)
|
|
except (TypeError, ValueError):
|
|
return normalized
|
|
|
|
try:
|
|
fps = int(float(normalized.get("fps", 0)))
|
|
except (TypeError, ValueError):
|
|
return normalized
|
|
|
|
if duration_s <= 0 or fps <= 0:
|
|
return normalized
|
|
|
|
normalized["duration_s"] = duration_s
|
|
normalized["frame_count"] = max(1, int(round(duration_s * fps)))
|
|
return normalized
|
|
|
|
|
|
def _build_task_kwargs(
    *,
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
) -> dict[str, Any]:
    """Assemble the kwargs dict for a node's Celery task dispatch.

    Merges (in precedence order) the node's own params over order-line render
    defaults, filters the result through the per-step key whitelist, then
    layers on graph-routing flags (output/notify handoff) and shadow-mode
    safety overrides.
    """
    task_kwargs = dict(node.params)
    connected_output_node_ids: list[str] = []
    connected_notify_node_ids: list[str] = []
    render_defaults: dict[str, Any] = {}

    # Render defaults come from the order-line invocation when setup is ready.
    if state.setup is not None and state.setup.is_ready and state.setup.order_line is not None:
        render_invocation = build_order_line_render_invocation(
            state.setup,
            template_context=state.template,
            position_context=resolve_render_position_context(session, state.setup.order_line),
            material_context=state.materials,
            artifact_kind_override=_artifact_kind_override_for_step(node.step),
        )
        render_defaults = render_invocation.task_defaults()

    if node.step == StepName.BLENDER_STILL:
        # Node params win over defaults; then keep only still-task keys.
        task_kwargs = _filter_graph_render_overrides(StepName.BLENDER_STILL, task_kwargs)
        task_kwargs = {
            key: value
            for key, value in {
                **render_defaults,
                **task_kwargs,
            }.items()
            if key in _STILL_TASK_KEYS
        }
    elif node.step == StepName.BLENDER_TURNTABLE:
        task_kwargs = _filter_graph_render_overrides(StepName.BLENDER_TURNTABLE, task_kwargs)
        task_kwargs = {
            key: value
            for key, value in {
                **render_defaults,
                **task_kwargs,
            }.items()
            if key in _TURNTABLE_TASK_KEYS
        }
        # Derive frame_count from duration_s * fps when both are valid.
        task_kwargs = _normalize_turntable_task_kwargs(task_kwargs)
        if state.setup is not None and state.setup.is_ready and state.setup.cad_file is not None:
            task_kwargs["output_dir"] = str(
                build_order_line_step_render_path(
                    state.setup.cad_file.stored_path,
                    str(state.setup.order_line.id),
                    "turntable.mp4",
                ).parent
            )
    elif node.step == StepName.THUMBNAIL_SAVE:
        # Thumbnail kwargs inherit from the upstream thumbnail request, if any.
        thumbnail_request = _resolve_thumbnail_request(workflow_context, state, node.id) or {}
        task_kwargs = {
            key: value
            for key, value in {
                **thumbnail_request,
                **task_kwargs,
            }.items()
            if key in _THUMBNAIL_TASK_KEYS
        }

    task_kwargs["workflow_run_id"] = str(workflow_context.workflow_run_id)
    task_kwargs["workflow_node_id"] = node.id
    # Graph/shadow modes route output saving and notification through
    # dedicated downstream nodes instead of the task's built-in behavior.
    if workflow_context.execution_mode in {"graph", "shadow"} and node.step in {
        StepName.BLENDER_STILL,
        StepName.EXPORT_BLEND,
        StepName.BLENDER_TURNTABLE,
    }:
        connected_output_node_ids = _connected_node_ids_by_step(
            workflow_context,
            node_id=node.id,
            step=StepName.OUTPUT_SAVE,
            direction="downstream",
        )
        if connected_output_node_ids:
            # A downstream OUTPUT_SAVE node owns asset publishing.
            task_kwargs["publish_asset_enabled"] = False
            task_kwargs["graph_output_node_ids"] = connected_output_node_ids
            if workflow_context.execution_mode == "graph":
                task_kwargs["graph_authoritative_output_enabled"] = True
            else:
                task_kwargs["observer_output_enabled"] = True
        if workflow_context.execution_mode == "graph":
            connected_notify_node_ids = _connected_node_ids_by_step(
                workflow_context,
                node_id=node.id,
                step=StepName.NOTIFY,
                direction="downstream",
            )
            if connected_notify_node_ids:
                task_kwargs["emit_legacy_notifications"] = True
                task_kwargs["graph_notify_node_ids"] = connected_notify_node_ids
    if workflow_context.execution_mode == "shadow":
        # Shadow runs must have no externally visible side effects.
        task_kwargs["publish_asset_enabled"] = False
        task_kwargs["emit_events"] = False
        task_kwargs["job_document_enabled"] = False
        task_kwargs["output_name_suffix"] = f"shadow-{str(workflow_context.workflow_run_id)[:8]}"
    return task_kwargs
|
|
|
|
|
|
def _artifact_kind_override_for_step(step: StepName) -> str | None:
    """Return the artifact-kind label for render-producing steps, or ``None``.

    Only the three Blender render/export steps carry an override; every other
    step falls through to the default artifact kind.
    """
    overrides = {
        StepName.BLENDER_TURNTABLE: "turntable_video",
        StepName.BLENDER_STILL: "still_image",
        StepName.EXPORT_BLEND: "blend_asset",
    }
    return overrides.get(step)
|
|
|
|
|
|
def _execute_order_line_setup(
    *,
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
    node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
    """Resolve the order-line render context and stash it on the graph state.

    Returns the common bridge-executor triple ``(payload, status, reason)``.
    """
    del node, node_params

    shadow_mode = workflow_context.execution_mode == "shadow"
    # Shadow runs must not persist state; otherwise rely on the helper's
    # default persistence behavior.
    setup_kwargs: dict[str, Any] = {}
    if shadow_mode:
        setup_kwargs["persist_state"] = False
    setup = prepare_order_line_render_context(
        session,
        workflow_context.context_id,
        **setup_kwargs,
    )

    state.setup = setup
    payload = _serialize_setup_result(setup)
    payload["shadow_mode"] = shadow_mode

    if setup.status == "ready":
        return payload, "completed", None
    if setup.status == "skip":
        return payload, "skipped", setup.reason
    return payload, "failed", setup.reason or "order_line_setup_failed"
|
|
|
|
|
|
def _execute_resolve_template(
    *,
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
    node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
    """Resolve template and material-library context for a ready order line."""
    del node, workflow_context

    setup = state.setup
    if setup is None or not setup.is_ready:
        # Propagate an upstream "skip" verdict; anything else is a wiring error.
        if setup is not None and setup.status == "skip":
            return _serialize_setup_result(setup), "skipped", setup.reason
        raise WorkflowGraphRuntimeError("resolve_template requires a ready order_line_setup result")

    resolved = resolve_order_line_template_context(
        session,
        setup,
        template_id_override=node_params.get("template_id_override"),
        material_library_path_override=node_params.get("material_library_path"),
        require_template=bool(node_params.get("require_template", False)),
        disable_materials=bool(node_params.get("disable_materials", False)),
        target_collection_override=node_params.get("target_collection"),
        material_replace_mode=node_params.get("material_replace_mode"),
        lighting_only_mode=node_params.get("lighting_only_mode"),
        shadow_catcher_mode=node_params.get("shadow_catcher_mode"),
        camera_orbit_mode=node_params.get("camera_orbit_mode"),
        template_input_overrides=extract_template_input_overrides(node_params),
    )
    state.template = resolved
    return _serialize_template_result(resolved), "completed", None
|
|
|
|
|
|
def _execute_material_map_resolve(
    *,
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
    node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
    """Build the per-part material map for the order line."""
    del node, session, workflow_context

    setup = state.setup
    if setup is None or not setup.is_ready:
        # An upstream "skip" is propagated; a missing/failed setup is a wiring error.
        if setup is not None and setup.status == "skip":
            return _serialize_setup_result(setup), "skipped", setup.reason
        raise WorkflowGraphRuntimeError("material_map_resolve requires a ready order_line_setup result")

    order_line = setup.order_line
    if order_line is None:
        raise WorkflowGraphRuntimeError("material_map_resolve requires an order line")

    # Template context is optional — materials can resolve without it.
    template_ctx = state.template
    resolved = resolve_order_line_material_map(
        order_line,
        setup.cad_file,
        setup.materials_source,
        material_library=template_ctx.material_library if template_ctx is not None else None,
        template=template_ctx.template if template_ctx is not None else None,
        material_override=node_params.get("material_override"),
        disable_materials=bool(node_params.get("disable_materials", False)),
    )
    state.materials = resolved
    return _serialize_material_result(resolved), "completed", None
|
|
|
|
|
|
def _execute_auto_populate_materials(
    *,
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
    node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
    """Auto-populate CAD part materials, optionally persisting the result.

    Shadow runs never persist. Fix over the previous version: the shadow and
    non-shadow branches both called ``auto_populate_materials_for_cad`` with
    identical arguments (``persist_updates`` is already forced ``False`` in
    shadow mode), so the duplicated call is collapsed into one.

    Returns the common bridge-executor triple ``(payload, status, reason)``.
    """
    del node
    if state.setup is None or state.setup.cad_file is None:
        # Propagate an upstream "skip" verdict; otherwise this node is miswired.
        if state.setup is not None and state.setup.status == "skip":
            return _serialize_setup_result(state.setup), "skipped", state.setup.reason
        raise WorkflowGraphRuntimeError("auto_populate_materials requires a resolved cad_file")

    shadow_mode = workflow_context.execution_mode == "shadow"
    # Shadow mode overrides any node param; non-shadow defaults to True.
    persist_updates = False if shadow_mode else bool(node_params.get("persist_updates", True))
    refresh_material_source = bool(node_params.get("refresh_material_source", True))
    include_populated_products = bool(node_params.get("include_populated_products", False))

    result = auto_populate_materials_for_cad(
        session,
        str(state.setup.cad_file.id),
        persist_updates=persist_updates,
        include_populated_products=include_populated_products,
    )
    state.auto_populate = result

    # After a persisted update, re-read the product so downstream nodes see
    # the freshly populated material source.
    if (
        persist_updates
        and refresh_material_source
        and not shadow_mode
        and state.setup.order_line is not None
        and state.setup.order_line.product is not None
    ):
        session.refresh(state.setup.order_line.product)
        state.setup.materials_source = state.setup.order_line.product.cad_part_materials or []

    payload = _serialize_auto_populate_result(result)
    payload["shadow_mode"] = shadow_mode
    payload["persist_updates"] = persist_updates
    payload["refresh_material_source"] = refresh_material_source
    payload["include_populated_products"] = include_populated_products
    return payload, "completed", None
|
|
|
|
|
|
def _execute_glb_bbox(
    *,
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
    node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
    """Resolve the CAD bounding box, preferring a GLB artifact when allowed.

    Returns the common bridge-executor triple ``(payload, status, reason)``:
    ``completed`` with serialized bbox data, ``skipped`` when upstream setup
    was skipped, or ``failed`` when ``glb_only`` is requested but no GLB
    artifact can be found.
    """
    del node
    del session, workflow_context
    if state.setup is None or state.setup.cad_file is None:
        # Propagate an upstream "skip" verdict; anything else is a wiring error.
        if state.setup is not None and state.setup.status == "skip":
            return _serialize_setup_result(state.setup), "skipped", state.setup.reason
        raise WorkflowGraphRuntimeError("glb_bbox requires a resolved cad_file")

    step_path = state.setup.cad_file.stored_path
    glb_path = node_params.get("glb_path")
    source_preference = str(node_params.get("source_preference") or "auto")
    # GLB discovery order (unless "step_only"): explicit node param, then the
    # setup's reuse path, then a sibling "<stem>_thumbnail.glb" on disk.
    if glb_path is None and source_preference != "step_only" and state.setup.glb_reuse_path is not None:
        glb_path = str(state.setup.glb_reuse_path)
    elif glb_path is None and source_preference != "step_only":
        step_file = Path(step_path)
        fallback_glb = step_file.parent / f"{step_file.stem}_thumbnail.glb"
        if fallback_glb.exists():
            glb_path = str(fallback_glb)

    # NOTE(review): an explicitly supplied ``glb_path`` param is still passed
    # to resolve_cad_bbox even when source_preference == "step_only" — confirm
    # that explicit-override-wins behavior is intended.
    if source_preference == "glb_only" and not glb_path:
        payload = {
            "bbox_data": None,
            "has_bbox": False,
            "source_kind": "none",
            "step_path": step_path,
            "glb_path": None,
            "source_preference": source_preference,
        }
        return payload, "failed", "glb_only requested but no GLB artifact is available"

    result = resolve_cad_bbox(step_path, glb_path=glb_path)
    state.bbox = result
    payload = _serialize_bbox_result(result)
    payload["source_preference"] = source_preference
    return payload, "completed", None
|
|
|
|
|
|
def _execute_resolve_step_path(
    *,
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
    node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
    """Resolve the CAD file for the workflow context and report it."""
    del node, node_params
    resolved = _resolve_cad_file_context(session, workflow_context, state)
    return _serialize_cad_file_result(resolved), "completed", None
|
|
|
|
|
|
def _execute_stl_cache_generate(
    *,
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
    node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
    """Compatibility no-op for the legacy STL cache step.

    Reports the would-be cache directory but performs no generation.
    """
    del node, node_params
    cad = _resolve_cad_file_context(session, workflow_context, state)
    cache_dir = Path(cad.stored_path).parent / "stl_cache"

    payload = _serialize_cad_file_result(cad)
    payload["cache_mode"] = "compatibility_noop"
    payload["cache_required"] = False
    payload["stl_cache_dir"] = str(cache_dir)
    payload["reason"] = "HartOMat CAD graph uses direct OCC/GLB export instead of legacy STL cache generation."
    return payload, "completed", None
|
|
|
|
|
|
def _execute_thumbnail_render_request(
    *,
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
    node_params: dict[str, Any],
    renderer: str,
) -> tuple[dict[str, Any], str, str | None]:
    """Build a thumbnail render request payload for the given renderer."""
    del node
    cad_file = _resolve_cad_file_context(session, workflow_context, state)
    payload: dict[str, Any] = {
        "cad_file_id": str(cad_file.id),
        "step_path": cad_file.stored_path,
        "renderer": renderer,
        "thumbnail_request": True,
    }
    # Forward only render options that were actually provided.
    optional_keys = ("width", "height", "transparent_bg", "render_engine", "samples")
    payload.update(
        {key: node_params[key] for key in optional_keys if node_params.get(key) not in (None, "")}
    )
    return payload, "completed", None
|
|
|
|
|
|
def _execute_blender_thumbnail_render(
    *,
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
    node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
    """Thumbnail render request using the Blender renderer."""
    forwarded = dict(
        session=session,
        workflow_context=workflow_context,
        state=state,
        node=node,
        node_params=node_params,
    )
    return _execute_thumbnail_render_request(renderer="blender", **forwarded)
|
|
|
|
|
|
def _execute_threejs_thumbnail_render(
    *,
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
    node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
    """Thumbnail render request using the three.js renderer."""
    forwarded = dict(
        session=session,
        workflow_context=workflow_context,
        state=state,
        node=node,
        node_params=node_params,
    )
    return _execute_thumbnail_render_request(renderer="threejs", **forwarded)
|
|
|
|
|
|
def _execute_output_save(
    *,
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
    node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
    """Summarize output publication state for a graph output node.

    Collects connected upstream render artifacts, decides the publication
    mode (shadow observer / deferred to the render task / graph-authoritative),
    and returns ``pending`` while deferred handoffs are still outstanding.

    Fix over the previous version: ``artifact["publish_asset_enabled"]`` used
    direct indexing while every other artifact access in this block uses
    ``.get(...)``; a missing key could raise KeyError. Now consistently
    ``.get``-based, treating an absent key as falsy.
    """
    del session
    if state.setup is None or state.setup.order_line is None:
        raise WorkflowGraphRuntimeError("output_save requires an order_line_setup result")

    if state.setup.status == "skip":
        return _serialize_setup_result(state.setup), "skipped", state.setup.reason
    if not state.setup.is_ready:
        return _serialize_setup_result(state.setup), "failed", state.setup.reason or "output_save_blocked"

    order_line = state.setup.order_line
    payload: dict[str, Any] = {
        "order_line_id": str(order_line.id),
        "authoritative_result_path": order_line.result_path,
        "shadow_mode": workflow_context.execution_mode == "shadow",
    }
    upstream_artifacts = _connected_upstream_artifacts(workflow_context, state, node.id)
    expected_artifact_role = str(node_params.get("expected_artifact_role") or "").strip() or None
    require_upstream_artifact = bool(node_params.get("require_upstream_artifact", False))
    if expected_artifact_role is not None:
        # Only keep artifacts matching the role this output node expects.
        upstream_artifacts = [
            artifact for artifact in upstream_artifacts if artifact.get("artifact_role") == expected_artifact_role
        ]
    if workflow_context.execution_mode == "shadow":
        payload["publication_mode"] = "shadow_observer_only"
    elif any(artifact.get("publish_asset_enabled") for artifact in upstream_artifacts):
        payload["publication_mode"] = "deferred_to_render_task"
    else:
        payload["publication_mode"] = "awaiting_graph_authoritative_save"
    payload["expected_artifact_role"] = expected_artifact_role
    payload["require_upstream_artifact"] = require_upstream_artifact
    if upstream_artifacts:
        payload["artifact_count"] = len(upstream_artifacts)
        payload["upstream_artifacts"] = upstream_artifacts
    elif require_upstream_artifact:
        payload["artifact_count"] = 0
        return payload, "failed", "No upstream render artifact is connected to this output node"
    if state.template is not None and state.template.template is not None:
        payload["template_name"] = state.template.template.name
    if state.materials is not None:
        payload["material_map_count"] = len(state.materials.material_map or {})

    # Render tasks that already carry a task id will perform the save
    # themselves; this node stays "pending" until those handoffs resolve.
    deferred_handoff_node_ids = [
        str(artifact.get("node_id"))
        for artifact in upstream_artifacts
        if artifact.get("task_id")
    ]
    if deferred_handoff_node_ids:
        payload["handoff_state"] = "armed"
        payload["handoff_node_ids"] = deferred_handoff_node_ids
        payload["handoff_node_count"] = len(deferred_handoff_node_ids)
        return payload, "pending", None

    return payload, "completed", None
|
|
|
|
|
|
def _execute_notify(
    *,
    session: Session,
    workflow_context: WorkflowContext,
    state: WorkflowGraphState,
    node,
    node_params: dict[str, Any],
) -> tuple[dict[str, Any], str, str | None]:
    """Decide how user notification for this graph run is dispatched.

    Shadow runs suppress notifications entirely. Otherwise the node looks for
    connected render artifacts armed for notification handoff and stays
    ``pending`` until those render tasks notify.

    Fix over the previous version: ``artifact["node_id"]`` and
    ``artifact["notify_handoff_enabled"]`` used direct indexing while sibling
    code paths access artifact dicts via ``.get(...)``; a missing key could
    raise KeyError. Now consistently ``.get``-based (absent key == not armed).
    """
    del session
    if state.setup is None or state.setup.order_line is None:
        raise WorkflowGraphRuntimeError("notify requires an order_line_setup result")

    if state.setup.status == "skip":
        return _serialize_setup_result(state.setup), "skipped", state.setup.reason
    if not state.setup.is_ready:
        return _serialize_setup_result(state.setup), "failed", state.setup.reason or "notify_blocked"

    payload: dict[str, Any] = {
        "order_line_id": str(state.setup.order_line.id),
        "shadow_mode": workflow_context.execution_mode == "shadow",
        "channel": str(node_params.get("channel") or "audit_log"),
    }
    require_armed_render = bool(node_params.get("require_armed_render", False))
    payload["require_armed_render"] = require_armed_render

    if workflow_context.execution_mode == "shadow":
        payload["notification_mode"] = "shadow_suppressed"
        return payload, "skipped", "shadow mode suppresses user notifications"

    connected_artifacts = _connected_upstream_artifacts(workflow_context, state, node.id)
    armed_node_ids = [
        artifact.get("node_id")
        for artifact in connected_artifacts
        if artifact.get("notify_handoff_enabled")
    ]
    if not armed_node_ids:
        payload["notification_mode"] = "not_armed"
        if require_armed_render:
            return payload, "failed", "No graph render task is configured for notification handoff"
        return payload, "skipped", "No graph render task is configured for notification handoff"

    payload["notification_mode"] = "deferred_to_render_task"
    payload["armed_node_ids"] = armed_node_ids
    payload["armed_node_count"] = len(armed_node_ids)
    payload["handoff_state"] = "armed"
    return payload, "pending", None
|
|
|
|
|
|
# Dispatch table mapping each graph step to its in-process bridge executor.
# Every executor shares the signature
#   (*, session, workflow_context, state, node, node_params)
#   -> (payload, status, reason)
_BRIDGE_EXECUTORS = {
    StepName.RESOLVE_STEP_PATH: _execute_resolve_step_path,
    StepName.BLENDER_RENDER: _execute_blender_thumbnail_render,
    StepName.THREEJS_RENDER: _execute_threejs_thumbnail_render,
    StepName.ORDER_LINE_SETUP: _execute_order_line_setup,
    StepName.RESOLVE_TEMPLATE: _execute_resolve_template,
    StepName.MATERIAL_MAP_RESOLVE: _execute_material_map_resolve,
    StepName.AUTO_POPULATE_MATERIALS: _execute_auto_populate_materials,
    StepName.GLB_BBOX: _execute_glb_bbox,
    StepName.STL_CACHE_GENERATE: _execute_stl_cache_generate,
    StepName.OUTPUT_SAVE: _execute_output_save,
    StepName.NOTIFY: _execute_notify,
}
|