84 lines
2.6 KiB
Python
84 lines
2.6 KiB
Python
from __future__ import annotations
|
|
|
|
from datetime import datetime
|
|
|
|
from sqlalchemy.orm import Session
|
|
|
|
from app.domains.rendering.models import WorkflowNodeResult, WorkflowRun
|
|
from app.domains.rendering.workflow_executor import WorkflowContext, WorkflowDispatchResult
|
|
|
|
|
|
def create_workflow_run(
    session: Session,
    *,
    workflow_def_id,
    order_line_id,
    workflow_context: WorkflowContext,
    execution_mode: str | None = None,
) -> WorkflowRun:
    """Persist a new ``WorkflowRun`` with one pending node-result row per node.

    The run starts in status ``"pending"`` with ``started_at`` stamped now.
    After the first flush (which assigns ``run.id``), the run id is written
    back onto ``workflow_context`` and a ``WorkflowNodeResult`` in status
    ``"pending"`` is staged for every node in ``workflow_context.ordered_nodes``,
    carrying the node's step (and UI label, when present) in ``output``.

    Args:
        session: Active SQLAlchemy session; rows are flushed, not committed.
        workflow_def_id: FK of the workflow definition being executed.
        order_line_id: FK of the order line this run belongs to.
        workflow_context: Execution context providing the ordered node list;
            mutated in place (``workflow_run_id`` is set).
        execution_mode: Optional override; falls back to the context's mode
            when falsy.

    Returns:
        The flushed (id-bearing) ``WorkflowRun`` instance.
    """
    mode = execution_mode if execution_mode else workflow_context.execution_mode
    run = WorkflowRun(
        workflow_def_id=workflow_def_id,
        order_line_id=order_line_id,
        execution_mode=mode,
        status="pending",
        started_at=datetime.utcnow(),
    )
    session.add(run)
    # Flush so the database assigns run.id before we reference it below.
    session.flush()

    workflow_context.workflow_run_id = run.id
    for node in workflow_context.ordered_nodes:
        seed_output = {"step": node.step.value}
        label = node.ui.label if node.ui else None
        if label:
            seed_output["label"] = label
        session.add(
            WorkflowNodeResult(
                run_id=run.id,
                node_name=node.id,
                status="pending",
                output=seed_output,
            )
        )

    session.flush()
    return run
|
|
|
|
|
|
def apply_graph_dispatch_result(
    run: WorkflowRun,
    workflow_context: WorkflowContext,
    dispatch_result: WorkflowDispatchResult,
) -> None:
    """Fold a Celery dispatch outcome back into the run's node-result rows.

    Sets ``run.celery_task_id`` to the first dispatched task id (or ``None``
    when nothing was dispatched), then for each node-result row whose node is
    known to the context:

    * dispatched nodes become ``"queued"`` and record their ``task_id``;
    * nodes listed in ``skipped_node_ids`` become ``"skipped"`` with an
      explanatory ``log`` line;
    * all other rows are left untouched.

    ``output`` metadata is rebuilt from the existing row content, keeping any
    values already present (``setdefault``) for ``step`` and ``label``.
    Mutates ``run`` and its node results in place; returns nothing.
    """
    nodes_by_id = {n.id: n for n in workflow_context.ordered_nodes}
    results_by_name = {r.node_name: r for r in run.node_results}

    run.celery_task_id = next(iter(dispatch_result.task_ids), None)

    for name, node_result in results_by_name.items():
        node = nodes_by_id.get(name)
        if node is None:
            # Row for a node the context no longer knows about — skip it.
            continue

        meta = {**(node_result.output or {})}
        meta.setdefault("step", node.step.value)
        label = node.ui.label if node.ui else None
        if label:
            meta.setdefault("label", label)

        task_id = dispatch_result.node_task_ids.get(name)
        if task_id is not None:
            meta["task_id"] = task_id
            node_result.status = "queued"
            node_result.output = meta
        elif name in dispatch_result.skipped_node_ids:
            node_result.status = "skipped"
            node_result.output = meta
            node_result.log = f"No Celery task mapping for step '{node.step.value}'"
|
|
|
|
|
|
def mark_workflow_run_failed(run: WorkflowRun, error_message: str) -> None:
    """Mark *run* as failed, stamping completion time and a capped message.

    Mutates ``run`` in place: status becomes ``"failed"``, ``completed_at``
    is set to the current UTC time, and ``error_message`` is truncated to
    2000 characters (presumably the DB column width — TODO confirm).
    """
    run.completed_at = datetime.utcnow()
    run.status = "failed"
    run.error_message = error_message[:2000]
|