# Tests for workflow-based render dispatch (service-level and API endpoint).
from __future__ import annotations
|
|
|
|
import uuid
|
|
from pathlib import Path
|
|
|
|
import pytest
|
|
from sqlalchemy import select
|
|
from sqlalchemy.orm import selectinload
|
|
|
|
from app.config import settings
|
|
from app.domains.orders.models import Order, OrderLine
|
|
from app.domains.products.models import CadFile, Product
|
|
from app.domains.rendering.dispatch_service import dispatch_render_with_workflow
|
|
from app.domains.rendering.models import OutputType, WorkflowDefinition, WorkflowRun
|
|
from app.domains.rendering.workflow_config_utils import build_preset_workflow_config
|
|
|
|
|
|
def _use_test_database(monkeypatch) -> None:
    """Repoint the application settings at the dedicated test database."""
    overrides = (
        ("postgres_host", "postgres"),
        ("postgres_port", 5432),
        ("postgres_user", "hartomat"),
        ("postgres_password", "hartomat"),
        ("postgres_db", "hartomat_test"),
    )
    for attribute, value in overrides:
        monkeypatch.setattr(settings, attribute, value)
|
|
|
|
|
|
async def _seed_order_line(
    db,
    admin_user,
    *,
    workflow_config: dict | None = None,
) -> dict[str, object]:
    """Seed a product, output type, order and one order line for dispatch tests.

    When ``workflow_config`` is supplied, an active ``WorkflowDefinition`` is
    created as well and linked back to the output type. The committed objects
    are returned keyed by ``order_line``, ``workflow_definition`` and
    ``output_type``.
    """

    def _token(length: int) -> str:
        # Fresh random suffix per field so unique DB constraints never collide.
        return uuid.uuid4().hex[:length]

    product = Product(pim_id=f"PIM-{_token(8)}", name="Workflow Test Product")
    output_type = OutputType(
        name=f"Workflow Output {_token(8)}",
        render_backend="auto",
    )
    order = Order(order_number=f"WF-{_token(10)}", created_by=admin_user.id)
    db.add_all([product, output_type, order])
    await db.flush()

    definition = None
    if workflow_config is not None:
        definition = WorkflowDefinition(
            name=f"Workflow {_token(8)}",
            output_type_id=output_type.id,
            config=workflow_config,
            is_active=True,
        )
        db.add(definition)
        # Flush to obtain the generated id before wiring it onto the output type.
        await db.flush()
        output_type.workflow_definition_id = definition.id

    line = OrderLine(
        order_id=order.id,
        product_id=product.id,
        output_type_id=output_type.id,
    )
    db.add(line)
    await db.commit()

    return {
        "order_line": line,
        "workflow_definition": definition,
        "output_type": output_type,
    }
|
|
|
|
|
|
async def _seed_renderable_order_line(
    db,
    admin_user,
    tmp_path: Path,
) -> OrderLine:
    """Seed an order line whose product carries an on-disk STEP CAD file.

    A tiny placeholder STEP file is written under ``tmp_path`` so code reading
    ``CadFile.stored_path`` finds a real file. The order line is committed,
    refreshed and returned.
    """
    step_file = tmp_path / "dispatch" / "product.step"
    step_file.parent.mkdir(parents=True, exist_ok=True)
    step_file.write_text("STEP", encoding="utf-8")

    cad = CadFile(
        original_name="product.step",
        stored_path=str(step_file),
        file_hash=f"hash-{uuid.uuid4().hex}",
        parsed_objects={"objects": ["Body"]},
    )
    product = Product(
        pim_id=f"PIM-{uuid.uuid4().hex[:8]}",
        name="Dispatch Product",
        category_key="dispatch",
        cad_file=cad,
        cad_part_materials=[{"part_name": "Body", "material": "Steel"}],
    )
    output_type = OutputType(
        name=f"Workflow Output {uuid.uuid4().hex[:8]}",
        render_backend="auto",
    )
    order = Order(
        order_number=f"WF-{uuid.uuid4().hex[:10]}",
        created_by=admin_user.id,
    )
    # Relationship kwargs let SQLAlchemy resolve the FK ids on flush.
    line = OrderLine(order=order, product=product, output_type=output_type)

    db.add_all([cad, product, output_type, order, line])
    await db.commit()
    await db.refresh(line)
    return line
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_dispatch_render_with_workflow_falls_back_to_legacy_without_workflow_definition(
    db,
    admin_user,
    monkeypatch,
):
    """Without a workflow definition the dispatcher must take the legacy path."""
    _use_test_database(monkeypatch)
    seeded = await _seed_order_line(db, admin_user)

    # Stub the legacy dispatcher so no real Celery task is queued.
    def _fake_legacy(order_line_id):
        return {"backend": "legacy", "order_line_id": order_line_id}

    monkeypatch.setattr(
        "app.domains.rendering.dispatch_service._legacy_dispatch",
        _fake_legacy,
    )

    line_id = str(seeded["order_line"].id)
    result = dispatch_render_with_workflow(line_id)

    await db.rollback()

    assert result == {"backend": "legacy", "order_line_id": line_id}
    # The fallback path must not leave any WorkflowRun rows behind.
    runs = (await db.execute(select(WorkflowRun))).scalars().all()
    assert runs == []
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_dispatch_render_with_workflow_creates_run_and_node_results_for_preset_dispatch(
    db,
    admin_user,
    monkeypatch,
):
    """A preset workflow dispatch records a run plus pending node results."""
    _use_test_database(monkeypatch)
    preset = build_preset_workflow_config("still", {"width": 1024, "height": 1024})
    seeded = await _seed_order_line(db, admin_user, workflow_config=preset)

    # Stub the workflow dispatcher so the test never touches Celery.
    monkeypatch.setattr(
        "app.domains.rendering.workflow_builder.dispatch_workflow",
        lambda workflow_type, order_line_id, params=None: "canvas-123",
    )

    result = dispatch_render_with_workflow(str(seeded["order_line"].id))

    await db.rollback()

    run_query = (
        select(WorkflowRun)
        .where(WorkflowRun.id == uuid.UUID(result["workflow_run_id"]))
        .options(selectinload(WorkflowRun.node_results))
    )
    run = (await db.execute(run_query)).scalar_one()

    assert result["backend"] == "workflow"
    assert result["workflow_type"] == "still"
    assert result["celery_task_id"] == "canvas-123"
    assert run.workflow_def_id == seeded["workflow_definition"].id
    assert run.order_line_id == seeded["order_line"].id
    assert run.celery_task_id == "canvas-123"
    node_names = {node.node_name for node in run.node_results}
    assert node_names == {"setup", "template", "render", "output"}
    assert all(node.status == "pending" for node in run.node_results)
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_dispatch_render_with_workflow_falls_back_when_workflow_runtime_preparation_is_invalid(
    db,
    admin_user,
    monkeypatch,
):
    """A workflow graph with an edge from a missing node falls back to legacy."""
    _use_test_database(monkeypatch)
    # Deliberately broken graph: the edge references a node id that does not exist.
    broken_config = {
        "version": 1,
        "nodes": [
            {"id": "render", "step": "blender_still", "params": {}},
        ],
        "edges": [
            {"from": "missing", "to": "render"},
        ],
    }
    seeded = await _seed_order_line(db, admin_user, workflow_config=broken_config)

    monkeypatch.setattr(
        "app.domains.rendering.dispatch_service._legacy_dispatch",
        lambda order_line_id: {"backend": "legacy", "order_line_id": order_line_id},
    )

    line_id = str(seeded["order_line"].id)
    result = dispatch_render_with_workflow(line_id)

    await db.rollback()

    assert result == {"backend": "legacy", "order_line_id": line_id}
    # The invalid graph must not create any WorkflowRun rows.
    assert (await db.execute(select(WorkflowRun))).scalars().all() == []
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_workflow_dispatch_endpoint_returns_workflow_run_with_node_results(
    client,
    db,
    admin_user,
    auth_headers,
    tmp_path,
    monkeypatch,
):
    """Dispatching a graph workflow via the API queues tasks and reports node state.

    Celery's ``send_task`` is stubbed to capture the dispatched task
    names/args/kwargs; the test then asserts the endpoint's response describes
    a workflow run with queued render/blend nodes, completed setup/template
    nodes and a skipped output node.
    """
    # Keep upload artifacts inside the test's tmp directory.
    monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads"))
    order_line = await _seed_renderable_order_line(db, admin_user, tmp_path)
    workflow_definition = WorkflowDefinition(
        name=f"Dispatch Workflow {uuid.uuid4().hex[:8]}",
        config=build_preset_workflow_config("still_with_exports", {"width": 640, "height": 640}),
        is_active=True,
    )
    db.add(workflow_definition)
    await db.commit()
    await db.refresh(workflow_definition)

    # Records every Celery dispatch as (task_name, args, kwargs).
    calls: list[tuple[str, list[str], dict]] = []

    def _fake_send_task(task_name: str, args: list[str], kwargs: dict):
        calls.append((task_name, args, kwargs))
        # Minimal stand-in for the task result; only ``id`` appears to be
        # consumed downstream (it surfaces in ``task_ids`` below).
        return type("Result", (), {"id": f"task-{len(calls)}"})()

    context_id = str(order_line.id)
    monkeypatch.setattr("app.tasks.celery_app.celery_app.send_task", _fake_send_task)
    response = await client.post(
        f"/api/workflows/{workflow_definition.id}/dispatch",
        params={"context_id": context_id},
        headers=auth_headers,
    )

    assert response.status_code == 200
    body = response.json()

    assert body["context_id"] == context_id
    assert body["execution_mode"] == "graph"
    # "still_with_exports" is expected to fan out into exactly two tasks:
    # the still render plus the blend export, in that order.
    assert body["dispatched"] == 2
    assert body["task_ids"] == ["task-1", "task-2"]
    assert calls == [
        (
            "app.domains.rendering.tasks.render_order_line_still_task",
            [context_id],
            {"width": 640, "height": 640},
        ),
        (
            "app.domains.rendering.tasks.export_blend_for_order_line_task",
            [context_id],
            {},
        ),
    ]

    # Index node results by name for targeted per-node assertions.
    node_results = {node["node_name"]: node for node in body["workflow_run"]["node_results"]}
    assert body["workflow_run"]["status"] == "pending"
    # The run tracks the first dispatched task as its primary task id.
    assert body["workflow_run"]["celery_task_id"] == "task-1"
    assert node_results["render"]["status"] == "queued"
    assert node_results["render"]["output"]["task_id"] == "task-1"
    assert node_results["blend"]["status"] == "queued"
    assert node_results["blend"]["output"]["task_id"] == "task-2"
    assert node_results["setup"]["status"] == "completed"
    assert node_results["setup"]["output"]["order_line_id"] == str(order_line.id)
    assert node_results["template"]["status"] == "completed"
    assert node_results["template"]["output"]["use_materials"] is False
    assert node_results["output"]["status"] == "skipped"
|