chore: snapshot workflow migration progress

2026-04-12 11:49:04 +02:00
parent 0cd02513d5
commit 3e810c74a3
163 changed files with 31774 additions and 2753 deletions
+23 -10
@@ -113,32 +113,44 @@ def parsed_anschlagplatten(parsed_excel_all):
# ── Test DB (uses a separate test database) ──────────────────────────────────
import os
import uuid
import pytest_asyncio
from typing import AsyncGenerator
from httpx import AsyncClient, ASGITransport
from sqlalchemy.engine import make_url
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
TEST_DB_URL = os.environ.get(
"TEST_DATABASE_URL",
"postgresql+asyncpg://hartomat:hartomat@localhost:5432/hartomat_test"
)
from app.config import settings
from tests.db_test_utils import reset_public_schema_async, resolve_test_db_url
def _resolve_test_db_url() -> str:
return resolve_test_db_url(async_driver=True)
def _sync_settings_to_test_database() -> None:
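"""Mirror the resolved test DB URL into app settings so code paths that build connections from settings also hit the test database."""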
resolved = make_url(resolve_test_db_url(async_driver=False))
settings.postgres_host = resolved.host or settings.postgres_host
settings.postgres_port = int(resolved.port or settings.postgres_port)
settings.postgres_user = resolved.username or settings.postgres_user
settings.postgres_password = resolved.password or settings.postgres_password
settings.postgres_db = resolved.database or settings.postgres_db
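# Executed at import time so every fixture and app code path resolves the test DB.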
_sync_settings_to_test_database()
@pytest_asyncio.fixture
async def test_engine():
from app.database import Base
from sqlalchemy import text
import app.models # noqa - register all models
-engine = create_async_engine(TEST_DB_URL, echo=False)
+engine = create_async_engine(_resolve_test_db_url(), echo=False)
async with engine.begin() as conn:
await reset_public_schema_async(conn)
await conn.run_sync(Base.metadata.create_all)
yield engine
-# Use CASCADE to handle circular FK dependencies in drop
async with engine.begin() as conn:
-await conn.execute(text("DROP SCHEMA public CASCADE"))
-await conn.execute(text("CREATE SCHEMA public"))
+await reset_public_schema_async(conn)
await engine.dispose()
@@ -229,6 +241,7 @@ def mock_celery_tasks(monkeypatch):
task_paths = [
"app.domains.materials.tasks.refresh_asset_library_catalog",
"app.tasks.step_tasks.process_step_file",
"app.tasks.step_tasks.render_graph_thumbnail",
"app.tasks.step_tasks.render_step_thumbnail",
"app.domains.imports.tasks.validate_excel_import",
"app.domains.rendering.tasks.render_still_task",
+85
@@ -0,0 +1,85 @@
from __future__ import annotations
from contextlib import contextmanager
import importlib
import os
from typing import Iterator
from sqlalchemy import text
from sqlalchemy.engine import make_url
from sqlalchemy.orm import Session
from sqlalchemy import create_engine
from app.database import Base
def resolve_test_db_url(*, async_driver: bool) -> str:
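"""Build the test database URL: TEST_DATABASE_URL wins outright; otherwise TEST_POSTGRES_* parts with POSTGRES_* fallbacks. Refuses any database whose name does not end in '_test'."""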
explicit_url = os.environ.get("TEST_DATABASE_URL")
if explicit_url:
db_url = explicit_url
else:
host = os.environ.get("TEST_POSTGRES_HOST") or os.environ.get("POSTGRES_HOST") or "localhost"
port = os.environ.get("TEST_POSTGRES_PORT") or os.environ.get("POSTGRES_PORT") or "5432"
user = os.environ.get("TEST_POSTGRES_USER") or os.environ.get("POSTGRES_USER") or "hartomat"
password = os.environ.get("TEST_POSTGRES_PASSWORD") or os.environ.get("POSTGRES_PASSWORD") or "hartomat"
default_db = f"{os.environ.get('POSTGRES_DB', 'hartomat')}_test"
database = os.environ.get("TEST_POSTGRES_DB") or os.environ.get("TEST_DB_NAME") or default_db
driver = "postgresql+asyncpg" if async_driver else "postgresql"
db_url = f"{driver}://{user}:{password}@{host}:{port}/{database}"
normalized_url = db_url if async_driver else db_url.replace("+asyncpg", "")
database_name = make_url(normalized_url).database or ""
if not database_name.endswith("_test"):
raise RuntimeError(
f"Refusing to run destructive test database setup against non-test database '{database_name}'."
)
return normalized_url
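# Destructive reset helpers: DROP ... CASCADE also clears tables with circular FK dependencies.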
def reset_public_schema_sync(connection) -> None:
connection.execute(text("DROP SCHEMA IF EXISTS public CASCADE"))
connection.execute(text("CREATE SCHEMA public"))
async def reset_public_schema_async(connection) -> None:
await connection.execute(text("DROP SCHEMA IF EXISTS public CASCADE"))
await connection.execute(text("CREATE SCHEMA public"))
def import_all_model_modules() -> None:
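"""Import every model module so all tables are registered on Base.metadata before create_all."""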
module_names = (
"app.domains.tenants.models",
"app.domains.auth.models",
"app.domains.imports.models",
"app.domains.products.models",
"app.domains.orders.models",
"app.domains.notifications.models",
"app.domains.billing.models",
"app.domains.rendering.models",
"app.domains.materials.models",
"app.domains.media.models",
"app.domains.admin.models",
"app.models.system_setting",
"app.models.worker_config",
"app.models.chat",
)
for module_name in module_names:
importlib.import_module(module_name)
@contextmanager
def sync_test_session() -> Iterator[Session]:
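"""Yield a synchronous Session against a freshly reset public schema; the schema is reset again and the engine disposed on exit."""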
import_all_model_modules()
engine = create_engine(resolve_test_db_url(async_driver=False))
with engine.begin() as conn:
reset_public_schema_sync(conn)
Base.metadata.create_all(conn)
session = Session(engine)
try:
yield session
finally:
session.close()
with engine.begin() as conn:
reset_public_schema_sync(conn)
engine.dispose()
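For orientation, a minimal sketch of how a test might consume this helper (hypothetical test, not part of this commit):
from sqlalchemy import text
from tests.db_test_utils import sync_test_session
def test_select_one_roundtrip():
    # Each entry gets a freshly reset schema; everything is dropped again on exit.
    with sync_test_session() as session:
        assert session.execute(text("SELECT 1")).scalar_one() == 1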
@@ -1,6 +1,10 @@
"""Tests for notification config service."""
import pytest
from sqlalchemy import select
from app.domains.notifications.models import AuditLog
from app.domains.notifications.service import (
emit_notification,
upsert_notification_config,
get_notification_configs,
)
@@ -25,3 +29,25 @@ async def test_upsert_updates_existing(db, admin_user):
cfg = next((c for c in configs if c.event_type == "order_submitted"), None)
assert cfg is not None
assert cfg.enabled is False
@pytest.mark.asyncio
async def test_emit_notification_persists_naive_utc_timestamp(db, admin_user):
"""Notification writes must match the legacy naive Postgres timestamp columns."""
await emit_notification(
db,
actor_user_id=admin_user.id,
target_user_id=admin_user.id,
action="order.submitted",
entity_type="order",
entity_id="order-123",
details={"order_number": "SA-2026-00001"},
)
row = (
await db.execute(
select(AuditLog).where(AuditLog.action == "order.submitted")
)
).scalar_one()
assert row.timestamp.tzinfo is None
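A common way to produce such naive UTC values (an assumption about the service internals, which this diff does not show):
from datetime import datetime, timezone
def naive_utc_now() -> datetime:
    # Drop tzinfo after converting to UTC so the value fits legacy TIMESTAMP WITHOUT TIME ZONE columns.
    return datetime.now(timezone.utc).replace(tzinfo=None)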
@@ -34,9 +34,138 @@ async def test_create_output_type_infers_artifact_kind_from_format_and_animation
payload = response.json()
assert payload["workflow_family"] == "order_line"
assert payload["artifact_kind"] == "turntable_video"
assert payload["workflow_rollout_mode"] == "legacy_only"
assert payload["invocation_overrides"] == {}
@pytest.mark.asyncio
async def test_output_type_contract_catalog_exposes_backend_authored_rules(
client,
auth_headers,
):
response = await client.get(
"/api/output-types/contract-catalog",
headers=auth_headers,
)
assert response.status_code == 200, response.text
payload = response.json()
assert payload["workflow_families"] == ["order_line", "cad_file"]
assert payload["workflow_rollout_modes"] == ["legacy_only", "shadow", "graph"]
assert payload["artifact_kinds"] == [
"still_image",
"turntable_video",
"model_export",
"thumbnail_image",
"blend_asset",
"package",
"custom",
]
assert payload["allowed_artifact_kinds_by_family"]["cad_file"] == [
"model_export",
"thumbnail_image",
"package",
"custom",
]
assert payload["allowed_output_formats_by_family"]["order_line"] == [
"png",
"jpg",
"jpeg",
"webp",
"mp4",
"webm",
"mov",
"blend",
]
assert payload["allowed_invocation_override_keys_by_artifact_kind"]["turntable_video"] == [
"width",
"height",
"engine",
"samples",
"bg_color",
"noise_threshold",
"denoiser",
"denoising_input_passes",
"denoising_prefilter",
"denoising_quality",
"denoising_use_gpu",
"frame_count",
"fps",
"turntable_axis",
]
assert payload["default_output_format_by_artifact_kind"]["blend_asset"] == "blend"
assert payload["parameter_ownership"]["output_type_profile_keys"] == [
"transparent_bg",
"cycles_device",
"material_override",
]
assert payload["parameter_ownership"]["template_runtime_keys"] == [
"target_collection",
"lighting_only",
"shadow_catcher",
"camera_orbit",
"template_inputs",
]
assert payload["parameter_ownership"]["workflow_node_keys_by_step"]["resolve_template"] == [
"template_id_override",
"require_template",
"material_library_path",
"disable_materials",
"target_collection",
"material_replace_mode",
"lighting_only_mode",
"shadow_catcher_mode",
"camera_orbit_mode",
]
assert "target_collection" in payload["parameter_ownership"]["workflow_node_keys_by_step"]["blender_still"]
assert "camera_orbit" in payload["parameter_ownership"]["workflow_node_keys_by_step"]["blender_turntable"]
@pytest.mark.asyncio
async def test_create_output_type_infers_blend_asset_from_blend_format(
client,
auth_headers,
):
response = await client.post(
"/api/output-types",
json={
"name": f"Blend {uuid.uuid4().hex[:8]}",
"renderer": "blender",
"output_format": "blend",
"render_backend": "celery",
"workflow_family": "order_line",
},
headers=auth_headers,
)
assert response.status_code == 201, response.text
payload = response.json()
assert payload["workflow_family"] == "order_line"
assert payload["artifact_kind"] == "blend_asset"
@pytest.mark.asyncio
async def test_create_output_type_rejects_non_blend_artifact_for_blend_format(
client,
auth_headers,
):
response = await client.post(
"/api/output-types",
json={
"name": f"Bad Blend {uuid.uuid4().hex[:8]}",
"renderer": "blender",
"output_format": "blend",
"render_backend": "celery",
"workflow_family": "order_line",
"artifact_kind": "still_image",
},
headers=auth_headers,
)
assert response.status_code == 400, response.text
assert response.json()["detail"] == "Output format 'blend' requires artifact kind 'blend_asset'"
@pytest.mark.asyncio
async def test_create_output_type_rejects_workflow_family_mismatch(
client,
@@ -69,6 +198,52 @@ async def test_create_output_type_rejects_workflow_family_mismatch(
assert "Workflow family mismatch" in response.json()["detail"]
@pytest.mark.asyncio
async def test_create_output_type_rejects_workflow_artifact_mismatch(
client,
db,
auth_headers,
):
workflow = WorkflowDefinition(
name=f"Blend Export {uuid.uuid4().hex[:8]}",
config={
"version": 1,
"ui": {"preset": "custom", "execution_mode": "graph"},
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "template", "step": "resolve_template", "params": {}},
{"id": "blend", "step": "export_blend", "params": {}},
],
"edges": [
{"from": "setup", "to": "template"},
{"from": "template", "to": "blend"},
],
},
is_active=True,
)
db.add(workflow)
await db.commit()
await db.refresh(workflow)
response = await client.post(
"/api/output-types",
json={
"name": f"Still {uuid.uuid4().hex[:8]}",
"renderer": "blender",
"output_format": "png",
"render_backend": "celery",
"workflow_family": "order_line",
"artifact_kind": "still_image",
"workflow_definition_id": str(workflow.id),
},
headers=auth_headers,
)
assert response.status_code == 400, response.text
assert "Workflow artifact mismatch" in response.json()["detail"]
assert "blend_asset" in response.json()["detail"]
@pytest.mark.asyncio
async def test_create_output_type_rejects_artifact_kind_incompatible_with_family(
client,
@@ -91,6 +266,53 @@ async def test_create_output_type_rejects_artifact_kind_incompatible_with_family
assert "not allowed for workflow_family" in response.json()["detail"]
@pytest.mark.asyncio
async def test_create_output_type_rejects_output_format_incompatible_with_family(
client,
auth_headers,
):
response = await client.post(
"/api/output-types",
json={
"name": f"Bad CAD Blend {uuid.uuid4().hex[:8]}",
"renderer": "blender",
"output_format": "blend",
"render_backend": "celery",
"workflow_family": "cad_file",
"artifact_kind": "custom",
},
headers=auth_headers,
)
assert response.status_code == 400, response.text
assert "Output format 'blend' is not allowed for workflow_family 'cad_file'" in response.json()["detail"]
@pytest.mark.asyncio
async def test_create_output_type_preserves_legacy_safe_custom_png_output_type(
client,
auth_headers,
):
response = await client.post(
"/api/output-types",
json={
"name": f"Legacy Custom Still {uuid.uuid4().hex[:8]}",
"renderer": "blender",
"output_format": "png",
"render_backend": "celery",
"workflow_family": "order_line",
"artifact_kind": "custom",
},
headers=auth_headers,
)
assert response.status_code == 201, response.text
payload = response.json()
assert payload["workflow_family"] == "order_line"
assert payload["artifact_kind"] == "custom"
assert payload["output_format"] == "png"
@pytest.mark.asyncio
async def test_create_output_type_rejects_turntable_video_without_animation(
client,
@@ -159,6 +381,99 @@ async def test_update_output_type_rejects_mixed_family_workflow(
assert response.json()["detail"] == "Output types cannot link mixed-family workflows"
@pytest.mark.asyncio
async def test_patch_output_type_rejects_workflow_artifact_mismatch(
client,
db,
auth_headers,
):
output_type_response = await client.post(
"/api/output-types",
json={
"name": f"Still {uuid.uuid4().hex[:8]}",
"renderer": "blender",
"output_format": "png",
"render_backend": "celery",
"workflow_family": "order_line",
"artifact_kind": "still_image",
},
headers=auth_headers,
)
assert output_type_response.status_code == 201, output_type_response.text
output_type = output_type_response.json()
workflow = WorkflowDefinition(
name=f"Blend Export {uuid.uuid4().hex[:8]}",
config={
"version": 1,
"ui": {"preset": "custom", "execution_mode": "graph"},
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "template", "step": "resolve_template", "params": {}},
{"id": "blend", "step": "export_blend", "params": {}},
],
"edges": [
{"from": "setup", "to": "template"},
{"from": "template", "to": "blend"},
],
},
is_active=True,
)
db.add(workflow)
await db.commit()
await db.refresh(workflow)
response = await client.patch(
f"/api/output-types/{output_type['id']}",
json={"workflow_definition_id": str(workflow.id)},
headers=auth_headers,
)
assert response.status_code == 400, response.text
assert "Workflow artifact mismatch" in response.json()["detail"]
assert "blend_asset" in response.json()["detail"]
@pytest.mark.asyncio
async def test_patch_output_type_updates_workflow_rollout_mode(
client,
db,
auth_headers,
):
workflow = WorkflowDefinition(
name=f"Still Graph {uuid.uuid4().hex[:8]}",
config=build_preset_workflow_config("still_graph"),
is_active=True,
)
db.add(workflow)
await db.commit()
await db.refresh(workflow)
create_response = await client.post(
"/api/output-types",
json={
"name": f"Rollout {uuid.uuid4().hex[:8]}",
"renderer": "blender",
"output_format": "png",
"render_backend": "celery",
"workflow_family": "order_line",
"workflow_definition_id": str(workflow.id),
},
headers=auth_headers,
)
assert create_response.status_code == 201, create_response.text
output_type = create_response.json()
patch_response = await client.patch(
f"/api/output-types/{output_type['id']}",
json={"workflow_rollout_mode": "graph"},
headers=auth_headers,
)
assert patch_response.status_code == 200, patch_response.text
assert patch_response.json()["workflow_rollout_mode"] == "graph"
@pytest.mark.asyncio
async def test_create_output_type_backfills_invocation_overrides_from_legacy_render_settings(
client,
@@ -189,6 +504,25 @@ async def test_create_output_type_backfills_invocation_overrides_from_legacy_ren
"height": 900,
"engine": "cycles",
}
assert payload["invocation_profile"]["artifact_kind"] == "still_image"
assert payload["invocation_profile"]["allowed_override_keys"] == [
"width",
"height",
"engine",
"samples",
"bg_color",
"noise_threshold",
"denoiser",
"denoising_input_passes",
"denoising_prefilter",
"denoising_quality",
"denoising_use_gpu",
]
assert payload["invocation_profile"]["invocation_overrides"] == {
"width": 1600,
"height": 900,
"engine": "cycles",
}
assert payload["render_settings"]["width"] == 1600
assert payload["render_settings"]["height"] == 900
assert payload["render_settings"]["engine"] == "cycles"
@@ -235,6 +569,57 @@ async def test_patch_output_type_invocation_overrides_syncs_legacy_render_settin
assert payload["render_settings"]["engine"] == "cycles"
@pytest.mark.asyncio
async def test_create_output_type_rejects_unknown_invocation_override_key(
client,
auth_headers,
):
response = await client.post(
"/api/output-types",
json={
"name": f"Bad Override {uuid.uuid4().hex[:8]}",
"renderer": "blender",
"output_format": "png",
"render_backend": "celery",
"workflow_family": "order_line",
"invocation_overrides": {
"width": 1600,
"bogus": "value",
},
},
headers=auth_headers,
)
assert response.status_code == 400, response.text
assert "Unsupported invocation override keys" in response.json()["detail"]
@pytest.mark.asyncio
async def test_create_output_type_rejects_disallowed_invocation_override_for_blend_asset(
client,
auth_headers,
):
response = await client.post(
"/api/output-types",
json={
"name": f"Blend Override {uuid.uuid4().hex[:8]}",
"renderer": "blender",
"output_format": "blend",
"render_backend": "celery",
"workflow_family": "order_line",
"invocation_overrides": {
"width": 1600,
},
},
headers=auth_headers,
)
assert response.status_code == 400, response.text
assert response.json()["detail"] == (
"Invocation overrides not allowed for artifact kind 'blend_asset': width"
)
@pytest.mark.asyncio
async def test_patch_output_type_recomputes_artifact_kind_when_switching_family(
client,
@@ -266,3 +651,35 @@ async def test_patch_output_type_recomputes_artifact_kind_when_switching_family(
payload = response.json()
assert payload["workflow_family"] == "cad_file"
assert payload["artifact_kind"] == "thumbnail_image"
@pytest.mark.asyncio
async def test_patch_output_type_rejects_output_format_incompatible_with_family(
client,
auth_headers,
):
output_type_response = await client.post(
"/api/output-types",
json={
"name": f"Still {uuid.uuid4().hex[:8]}",
"renderer": "blender",
"output_format": "png",
"render_backend": "celery",
"workflow_family": "order_line",
},
headers=auth_headers,
)
assert output_type_response.status_code == 201, output_type_response.text
output_type = output_type_response.json()
response = await client.patch(
f"/api/output-types/{output_type['id']}",
json={
"output_format": "gltf",
"artifact_kind": "custom",
},
headers=auth_headers,
)
assert response.status_code == 400, response.text
assert "Output format 'gltf' is not allowed for workflow_family 'order_line'" in response.json()["detail"]
@@ -0,0 +1,574 @@
from __future__ import annotations
import importlib.util
import selectors
import sys
from pathlib import Path
from types import SimpleNamespace
import pytest
def test_resolve_render_samples_uses_system_settings_when_omitted(monkeypatch):
from app.services.render_blender import _resolve_render_samples
monkeypatch.setattr(
"app.services.step_processor._get_all_settings",
lambda: {
"blender_cycles_samples": "32",
"blender_eevee_samples": "12",
},
)
assert _resolve_render_samples("cycles", None) == 32
assert _resolve_render_samples("eevee", None) == 12
assert _resolve_render_samples("cycles", 48) == 48
def test_resolve_tessellation_settings_uses_profile_specific_values(monkeypatch):
from app.services.render_blender import resolve_tessellation_settings
monkeypatch.setattr(
"app.services.step_processor._get_all_settings",
lambda: {
"tessellation_engine": "occ",
"scene_linear_deflection": "0.1",
"scene_angular_deflection": "0.1",
"render_linear_deflection": "0.03",
"render_angular_deflection": "0.05",
},
)
assert resolve_tessellation_settings("scene") == (0.1, 0.1, "occ")
assert resolve_tessellation_settings("render") == (0.03, 0.05, "occ")
def test_render_still_passes_resolved_samples_to_blender_cli(tmp_path, monkeypatch):
from app.services.render_blender import build_tessellated_glb_path, render_still
step_path = tmp_path / "bearing.step"
step_path.write_text("STEP", encoding="utf-8")
glb_path = build_tessellated_glb_path(step_path, "render", "occ", 0.03, 0.05)
glb_path.parent.mkdir(parents=True, exist_ok=True)
glb_path.write_text("GLB", encoding="utf-8")
output_path = tmp_path / "render.png"
output_path.write_text("PNG", encoding="utf-8")
scripts_dir = tmp_path / "render-scripts"
scripts_dir.mkdir()
(scripts_dir / "blender_render.py").write_text("# test stub\n", encoding="utf-8")
captured: dict[str, object] = {}
class _FakeProc:
def __init__(self) -> None:
self.stdout = object()
self.stderr = object()
self.pid = 1234
self.returncode = 0
def wait(self, timeout: int | None = None) -> int:
del timeout
return self.returncode
class _FakeSelector:
def register(self, *_args, **_kwargs) -> None:
return None
def get_map(self) -> dict:
return {}
def close(self) -> None:
return None
def _fake_popen(cmd, stdout, stderr, text, env, start_new_session):
captured["cmd"] = cmd
captured["env"] = env
return _FakeProc()
monkeypatch.setenv("RENDER_SCRIPTS_DIR", str(scripts_dir))
monkeypatch.setattr("app.services.render_blender.find_blender", lambda: "/usr/bin/blender")
monkeypatch.setattr("app.services.render_blender.ensure_group_writable_dir", lambda _path: None)
monkeypatch.setattr("app.services.render_blender._resolve_render_samples", lambda engine, samples: 32)
monkeypatch.setattr("app.services.render_blender.subprocess.Popen", _fake_popen)
monkeypatch.setattr(selectors, "DefaultSelector", _FakeSelector)
result = render_still(
step_path=step_path,
output_path=output_path,
engine="cycles",
samples=None,
width=640,
height=480,
)
assert captured["cmd"][10] == "32"
assert captured["env"]["BLENDER_DEFAULT_SAMPLES"] == "32"
assert result["engine_used"] == "cycles"
def test_render_still_passes_template_inputs_to_blender_cli(tmp_path, monkeypatch):
from app.services.render_blender import build_tessellated_glb_path, render_still
step_path = tmp_path / "bearing.step"
step_path.write_text("STEP", encoding="utf-8")
glb_path = build_tessellated_glb_path(step_path, "render", "occ", 0.03, 0.05)
glb_path.parent.mkdir(parents=True, exist_ok=True)
glb_path.write_text("GLB", encoding="utf-8")
output_path = tmp_path / "render.png"
output_path.write_text("PNG", encoding="utf-8")
scripts_dir = tmp_path / "render-scripts"
scripts_dir.mkdir()
(scripts_dir / "blender_render.py").write_text("# test stub\n", encoding="utf-8")
captured: dict[str, object] = {}
class _FakeProc:
def __init__(self) -> None:
self.stdout = object()
self.stderr = object()
self.pid = 1234
self.returncode = 0
def wait(self, timeout: int = 10) -> int:
return self.returncode
class _FakeSelector:
def register(self, *_args, **_kwargs) -> None:
return None
def get_map(self) -> dict:
return {}
def close(self) -> None:
return None
def _fake_popen(cmd, stdout, stderr, text, env, start_new_session):
captured["cmd"] = cmd
return _FakeProc()
monkeypatch.setenv("RENDER_SCRIPTS_DIR", str(scripts_dir))
monkeypatch.setattr("app.services.render_blender.find_blender", lambda: "/usr/bin/blender")
monkeypatch.setattr("app.services.render_blender.ensure_group_writable_dir", lambda _path: None)
monkeypatch.setattr("app.services.render_blender._resolve_render_samples", lambda engine, samples: 32)
monkeypatch.setattr("app.services.render_blender.subprocess.Popen", _fake_popen)
monkeypatch.setattr(selectors, "DefaultSelector", _FakeSelector)
render_still(
step_path=step_path,
output_path=output_path,
engine="cycles",
samples=None,
width=640,
height=480,
template_inputs={"studio_variant": "warm"},
)
assert "--template-inputs" in captured["cmd"]
idx = captured["cmd"].index("--template-inputs")
assert captured["cmd"][idx + 1] == '{"studio_variant": "warm"}'
def test_render_still_uses_settings_sensitive_render_glb_path(tmp_path, monkeypatch):
from app.services.render_blender import build_tessellated_glb_path, render_still
step_path = tmp_path / "bearing.step"
step_path.write_text("STEP", encoding="utf-8")
output_path = tmp_path / "render.png"
output_path.write_text("PNG", encoding="utf-8")
scripts_dir = tmp_path / "render-scripts"
scripts_dir.mkdir()
(scripts_dir / "blender_render.py").write_text("# test stub\n", encoding="utf-8")
captured: dict[str, object] = {}
class _FakeProc:
def __init__(self) -> None:
self.stdout = object()
self.stderr = object()
self.pid = 1234
self.returncode = 0
def wait(self, timeout: int = 10) -> int:
return self.returncode
class _FakeSelector:
def register(self, *_args, **_kwargs) -> None:
return None
def get_map(self) -> dict:
return {}
def close(self) -> None:
return None
def _fake_glb_from_step(step_path, glb_path, tessellation_engine="occ", tessellation_profile="render"):
captured["glb_path"] = glb_path
captured["tessellation_engine"] = tessellation_engine
captured["tessellation_profile"] = tessellation_profile
glb_path.write_text("GLB", encoding="utf-8")
def _fake_popen(cmd, stdout, stderr, text, env, start_new_session):
captured["cmd"] = cmd
return _FakeProc()
monkeypatch.setenv("RENDER_SCRIPTS_DIR", str(scripts_dir))
monkeypatch.setattr("app.services.render_blender.find_blender", lambda: "/usr/bin/blender")
monkeypatch.setattr("app.services.render_blender.ensure_group_writable_dir", lambda _path: None)
monkeypatch.setattr("app.services.render_blender._resolve_render_samples", lambda engine, samples: 32)
monkeypatch.setattr(
"app.services.step_processor._get_all_settings",
lambda: {
"tessellation_engine": "occ",
"render_linear_deflection": "0.03",
"render_angular_deflection": "0.05",
"blender_cycles_samples": "32",
"blender_eevee_samples": "12",
},
)
monkeypatch.setattr("app.services.render_blender._glb_from_step", _fake_glb_from_step)
monkeypatch.setattr("app.services.render_blender.subprocess.Popen", _fake_popen)
monkeypatch.setattr(selectors, "DefaultSelector", _FakeSelector)
render_still(
step_path=step_path,
output_path=output_path,
engine="cycles",
samples=None,
width=640,
height=480,
)
expected_glb_path = build_tessellated_glb_path(step_path, "render", "occ", 0.03, 0.05)
assert captured["glb_path"] == expected_glb_path
assert captured["tessellation_profile"] == "render"
assert captured["cmd"][5] == str(expected_glb_path)
def test_render_turntable_passes_template_inputs_to_blender_cli(tmp_path, monkeypatch):
from app.services.render_blender import build_tessellated_glb_path, render_turntable_to_file
step_path = tmp_path / "bearing.step"
step_path.write_text("STEP", encoding="utf-8")
glb_path = build_tessellated_glb_path(step_path, "render", "occ", 0.03, 0.05)
glb_path.parent.mkdir(parents=True, exist_ok=True)
glb_path.write_text("GLB", encoding="utf-8")
output_path = tmp_path / "turntable.mp4"
output_path.parent.mkdir(parents=True, exist_ok=True)
scripts_dir = tmp_path / "render-scripts"
scripts_dir.mkdir()
(scripts_dir / "turntable_render.py").write_text("# test stub\n", encoding="utf-8")
captured: dict[str, object] = {}
class _FakeProc:
def __init__(self) -> None:
self.pid = 1234
self.returncode = 0
def communicate(self, timeout: int | None = None) -> tuple[str, str]:
frames_dir = Path(captured["cmd"][6])
frames_dir.mkdir(parents=True, exist_ok=True)
(frames_dir / "frame_0001.png").write_text("PNG", encoding="utf-8")
return ("[turntable_render] ok\n", "")
def _fake_popen(cmd, stdout, stderr, text, env, start_new_session):
captured["cmd"] = cmd
return _FakeProc()
def _fake_ffmpeg(cmd, capture_output, text, timeout):
output_path.write_text("MP4", encoding="utf-8")
return SimpleNamespace(returncode=0, stdout="", stderr="")
monkeypatch.setenv("RENDER_SCRIPTS_DIR", str(scripts_dir))
monkeypatch.setattr("app.services.render_blender.find_blender", lambda: "/usr/bin/blender")
monkeypatch.setattr("app.services.render_blender.ensure_group_writable_dir", lambda _path: None)
monkeypatch.setattr("app.services.render_blender.subprocess.Popen", _fake_popen)
monkeypatch.setattr("app.services.render_blender.subprocess.run", _fake_ffmpeg)
monkeypatch.setattr("app.services.render_blender.build_turntable_ffmpeg_cmd", lambda *args, **kwargs: ["ffmpeg", str(output_path)])
monkeypatch.setattr("app.services.render_blender.resolve_tessellation_settings", lambda *args, **kwargs: (0.03, 0.05, "occ"))
render_turntable_to_file(
step_path=step_path,
output_path=output_path,
engine="cycles",
samples=32,
template_inputs={"studio_variant": "warm"},
)
assert "--template-inputs" in captured["cmd"]
idx = captured["cmd"].index("--template-inputs")
assert captured["cmd"][idx + 1] == '{"studio_variant": "warm"}'
def test_render_cinematic_passes_template_inputs_to_blender_cli(tmp_path, monkeypatch):
from app.services.render_blender import build_tessellated_glb_path, render_cinematic_to_file
step_path = tmp_path / "bearing.step"
step_path.write_text("STEP", encoding="utf-8")
glb_path = build_tessellated_glb_path(step_path, "render", "occ", 0.03, 0.05)
glb_path.parent.mkdir(parents=True, exist_ok=True)
glb_path.write_text("GLB", encoding="utf-8")
output_path = tmp_path / "cinematic.mp4"
output_path.parent.mkdir(parents=True, exist_ok=True)
scripts_dir = tmp_path / "render-scripts"
scripts_dir.mkdir()
(scripts_dir / "cinematic_render.py").write_text("# test stub\n", encoding="utf-8")
captured: dict[str, object] = {}
class _FakeProc:
def __init__(self) -> None:
self.stdout = object()
self.stderr = object()
self.pid = 1234
self.returncode = 0
def wait(self, timeout: int | None = None) -> int:
del timeout
return self.returncode
class _FakeSelector:
def __init__(self) -> None:
self._registered: list[object] = []
self._delivered = False
def register(self, fileobj, _event, data):
self._registered.append((fileobj, data))
def unregister(self, fileobj):
self._registered = [item for item in self._registered if item[0] is not fileobj]
def get_map(self) -> dict[int, object]:
return {idx: item for idx, item in enumerate(self._registered)}
def select(self, timeout=None):
del timeout
if self._delivered:
for fileobj, _data in list(self._registered):
if hasattr(fileobj, "readline"):
fileobj.readline = lambda: ""
self._registered.clear()
return []
self._delivered = True
events = []
for fileobj, data in list(self._registered):
events.append((SimpleNamespace(fileobj=fileobj, data=data), None))
return events
def close(self):
return None
class _FakeStream:
def __init__(self, lines: list[str]) -> None:
self._lines = list(lines)
def readline(self) -> str:
if not self._lines:
return ""
return self._lines.pop(0)
def _fake_popen(cmd, stdout, stderr, text, env, start_new_session):
captured["cmd"] = cmd
frames_dir = Path(cmd[6])
frames_dir.mkdir(parents=True, exist_ok=True)
(frames_dir / "frame_0001.png").write_text("PNG", encoding="utf-8")
proc = _FakeProc()
proc.stdout = _FakeStream(["[cinematic_render] ok\n"])
proc.stderr = _FakeStream([])
return proc
def _fake_ffmpeg(cmd, capture_output, text, timeout):
output_path.write_text("MP4", encoding="utf-8")
return SimpleNamespace(returncode=0, stdout="", stderr="")
monkeypatch.setenv("RENDER_SCRIPTS_DIR", str(scripts_dir))
monkeypatch.setattr("app.services.render_blender.find_blender", lambda: "/usr/bin/blender")
monkeypatch.setattr("app.services.render_blender.ensure_group_writable_dir", lambda _path: None)
monkeypatch.setattr("app.services.render_blender.subprocess.Popen", _fake_popen)
monkeypatch.setattr("app.services.render_blender.subprocess.run", _fake_ffmpeg)
monkeypatch.setattr("app.services.render_blender.build_turntable_ffmpeg_cmd", lambda *args, **kwargs: ["ffmpeg", str(output_path)])
monkeypatch.setattr("app.services.render_blender.resolve_tessellation_settings", lambda *args, **kwargs: (0.03, 0.05, "occ"))
monkeypatch.setattr("selectors.DefaultSelector", _FakeSelector)
render_cinematic_to_file(
step_path=step_path,
output_path=output_path,
engine="cycles",
samples=32,
template_inputs={"studio_variant": "warm"},
)
assert "--template-inputs" in captured["cmd"]
idx = captured["cmd"].index("--template-inputs")
assert captured["cmd"][idx + 1] == '{"studio_variant": "warm"}'
def test_render_still_task_keeps_samples_unset_until_render_service(tmp_path, monkeypatch):
from app.domains.rendering.tasks import render_still_task
step_path = tmp_path / "bearing.step"
step_path.write_text("STEP", encoding="utf-8")
output_path = tmp_path / "render.png"
captured: dict[str, object] = {}
def _fake_render_still(**kwargs):
captured.update(kwargs)
return {"total_duration_s": 0.1}
monkeypatch.setattr("app.domains.rendering.tasks.log_task_event", lambda *args, **kwargs: None)
monkeypatch.setattr("app.services.render_blender.render_still", _fake_render_still)
task_self = SimpleNamespace(
request=SimpleNamespace(id="task-still"),
retry=lambda *, exc, countdown: (_ for _ in ()).throw(exc),
)
result = render_still_task.run.__func__(task_self, str(step_path), str(output_path))
assert captured["samples"] is None
assert result["total_duration_s"] == 0.1
def test_blender_args_prefers_backend_default_samples_env(monkeypatch):
module_path = (
Path(__file__).resolve().parents[2]
/ "render-worker"
/ "scripts"
/ "_blender_args.py"
)
if not module_path.exists():
pytest.skip(f"{module_path} not present in this runtime")
spec = importlib.util.spec_from_file_location("test_blender_args_module", module_path)
assert spec is not None
assert spec.loader is not None
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
monkeypatch.setenv("BLENDER_DEFAULT_SAMPLES", "32")
monkeypatch.setattr(
sys,
"argv",
[
"blender_render.py",
"--",
"input.glb",
"output.png",
"512",
"512",
"cycles",
"",
],
)
args = module.parse_args()
assert args.samples == 32
def test_blender_args_parses_template_inputs(monkeypatch):
module_path = (
Path(__file__).resolve().parents[2]
/ "render-worker"
/ "scripts"
/ "_blender_args.py"
)
if not module_path.exists():
pytest.skip(f"{module_path} not present in this runtime")
spec = importlib.util.spec_from_file_location("test_blender_args_module_template_inputs", module_path)
assert spec is not None
assert spec.loader is not None
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
monkeypatch.setattr(
sys,
"argv",
[
"blender_render.py",
"--",
"input.glb",
"output.png",
"512",
"512",
"cycles",
"64",
"30",
"auto",
"0",
"",
"Product",
"",
"{}",
"[]",
"0",
"0",
"0",
"0",
"",
"",
"",
"",
"",
"",
"--template-inputs",
'{"studio_variant":"warm"}',
],
)
args = module.parse_args()
assert args.template_inputs == {"studio_variant": "warm"}
def test_render_to_file_preserves_explicit_zero_samples(tmp_path, monkeypatch):
from app.services.step_processor import render_to_file
step_path = tmp_path / "bearing.step"
step_path.write_text("STEP", encoding="utf-8")
output_path = tmp_path / "render.png"
captured: dict[str, object] = {}
monkeypatch.setattr(
"app.services.step_processor._get_all_settings",
lambda: {
"thumbnail_renderer": "blender",
"thumbnail_format": "png",
"blender_engine": "cycles",
"blender_cycles_samples": "32",
"blender_eevee_samples": "12",
"cycles_device": "auto",
"blender_smooth_angle": "30",
"tessellation_engine": "occ",
},
)
monkeypatch.setattr("app.services.step_processor.ensure_group_writable_dir", lambda _path: None)
monkeypatch.setattr("app.services.render_blender.is_blender_available", lambda: True)
def _fake_render_still(**kwargs):
captured.update(kwargs)
kwargs["output_path"].write_text("PNG", encoding="utf-8")
return {"total_duration_s": 0.1, "engine_used": kwargs["engine"]}
monkeypatch.setattr("app.services.render_blender.render_still", _fake_render_still)
success, render_log = render_to_file(
str(step_path),
str(output_path),
samples=0,
)
assert success is True
assert captured["samples"] == 0
assert render_log["samples"] == 0
@@ -0,0 +1,152 @@
from __future__ import annotations
import os
import uuid
from contextlib import contextmanager
from pathlib import Path
import pytest
from sqlalchemy import select, text
from sqlalchemy.orm import Session
from app.domains.auth.models import User, UserRole
from app.domains.media.models import MediaAsset, MediaAssetType
from app.domains.orders.models import Order, OrderLine, OrderStatus
from app.domains.products.models import CadFile, Product
from app.domains.rendering.models import OutputType
from tests.db_test_utils import sync_test_session as sync_test_session_ctx
@pytest.fixture
def sync_session():
with sync_test_session_ctx() as session:
yield session
def _seed_order_line(session: Session, tmp_path: Path) -> OrderLine:
step_path = tmp_path / "parts" / "bearing.step"
step_path.parent.mkdir(parents=True, exist_ok=True)
step_path.write_text("STEP", encoding="utf-8")
user = User(
id=uuid.uuid4(),
email=f"publish-{uuid.uuid4().hex[:8]}@test.local",
password_hash="hash",
full_name="Publish Tester",
role=UserRole.admin,
is_active=True,
)
cad_file = CadFile(
id=uuid.uuid4(),
original_name="bearing.step",
stored_path=str(step_path),
file_hash=f"hash-{uuid.uuid4().hex}",
)
product = Product(
id=uuid.uuid4(),
pim_id="P-2000",
name="Bearing Publish",
category_key="bearings",
cad_file_id=cad_file.id,
cad_file=cad_file,
)
output_type = OutputType(
id=uuid.uuid4(),
name="HQ Still",
renderer="blender",
output_format="png",
render_settings={"width": 1600, "height": 900},
)
order = Order(
id=uuid.uuid4(),
order_number=f"ORD-{uuid.uuid4().hex[:8]}",
status=OrderStatus.processing,
created_by=user.id,
)
line = OrderLine(
id=uuid.uuid4(),
order_id=order.id,
product_id=product.id,
product=product,
output_type_id=output_type.id,
output_type=output_type,
render_status="processing",
)
session.add_all([user, cad_file, product, output_type, order, line])
session.commit()
return line
def test_publish_asset_canonicalizes_still_outputs(sync_session, tmp_path, monkeypatch):
from app.config import settings
from app.domains.rendering.tasks import publish_asset
upload_dir = tmp_path / "uploads"
monkeypatch.setattr(settings, "upload_dir", str(upload_dir))
line = _seed_order_line(sync_session, tmp_path)
source_output = tmp_path / "parts" / "renders" / "line.png"
source_output.parent.mkdir(parents=True, exist_ok=True)
source_output.write_bytes(b"png")
@contextmanager
def _session_ctx():
yield sync_session
monkeypatch.setattr("app.core.db_utils.get_sync_session", _session_ctx)
asset_id = publish_asset.run(
str(line.id),
"still",
str(source_output),
render_config={"renderer": "blender", "engine_used": "cycles"},
)
sync_session.expire_all()
stored_line = sync_session.get(OrderLine, line.id)
stored_asset = sync_session.execute(
select(MediaAsset).where(MediaAsset.id == uuid.UUID(asset_id))
).scalar_one()
assert stored_line.result_path == f"{upload_dir}/renders/{line.id}/Bearing_Publish_HQ_Still.png"
assert Path(stored_line.result_path).is_file()
assert stored_asset.storage_key == f"renders/{line.id}/Bearing_Publish_HQ_Still.png"
assert stored_asset.asset_type == MediaAssetType.still
def test_publish_asset_canonicalizes_blend_storage_key_without_touching_order_line(sync_session, tmp_path, monkeypatch):
from app.config import settings
from app.domains.rendering.tasks import publish_asset
upload_dir = tmp_path / "uploads"
monkeypatch.setattr(settings, "upload_dir", str(upload_dir))
line = _seed_order_line(sync_session, tmp_path)
source_output = tmp_path / "parts" / "bearing_production.blend"
source_output.parent.mkdir(parents=True, exist_ok=True)
source_output.write_bytes(b"blend")
@contextmanager
def _session_ctx():
yield sync_session
monkeypatch.setattr("app.core.db_utils.get_sync_session", _session_ctx)
asset_id = publish_asset.run(
str(line.id),
"blend_production",
str(source_output),
render_config={"artifact_type": "blend_production"},
)
sync_session.expire_all()
stored_line = sync_session.get(OrderLine, line.id)
stored_asset = sync_session.execute(
select(MediaAsset).where(MediaAsset.id == uuid.UUID(asset_id))
).scalar_one()
assert stored_line.result_path is None
assert stored_asset.storage_key == str(source_output)
assert stored_asset.asset_type == MediaAssetType.blend_production
File diff suppressed because it is too large
@@ -0,0 +1,77 @@
from app.domains.rendering.template_input_audit import (
extract_template_input_marker,
suggest_workflow_input_schema,
)
def test_extract_template_input_marker_from_combined_property() -> None:
marker = extract_template_input_marker(props={"template_input": "studio_variant=warm"})
assert marker == ("studio_variant", "warm")
def test_extract_template_input_marker_from_json_property() -> None:
marker = extract_template_input_marker(
props={"hartomat_template_input": '{"key":"lighting_profile","value":"shadow"}'}
)
assert marker == ("lighting_profile", "shadow")
def test_extract_template_input_marker_from_split_properties() -> None:
marker = extract_template_input_marker(
props={"template_input_key": "alpha_mode", "template_input_value": "transparent"}
)
assert marker == ("alpha_mode", "transparent")
def test_extract_template_input_marker_from_name_pattern() -> None:
marker = extract_template_input_marker(name="template-input:studio_variant=warm")
assert marker == ("studio_variant", "warm")
def test_suggest_workflow_input_schema_builds_select_fields() -> None:
schema = suggest_workflow_input_schema(
[
("studio_variant", "warm"),
("studio_variant", "cool"),
("alpha_mode", "transparent"),
("alpha_mode", "opaque"),
]
)
assert schema == [
{
"default": "opaque",
"key": "alpha_mode",
"label": "Alpha Mode",
"options": [
{"label": "Opaque", "value": "opaque"},
{"label": "Transparent", "value": "transparent"},
],
"section": "Template Inputs",
"type": "select",
},
{
"default": "cool",
"key": "studio_variant",
"label": "Studio Variant",
"options": [
{"label": "Cool", "value": "cool"},
{"label": "Warm", "value": "warm"},
],
"section": "Template Inputs",
"type": "select",
},
]
def test_suggest_workflow_input_schema_builds_boolean_field() -> None:
schema = suggest_workflow_input_schema([("shadow_pass", "true"), ("shadow_pass", "false")])
assert schema == [
{
"default": False,
"key": "shadow_pass",
"label": "Shadow Pass",
"section": "Template Inputs",
"type": "boolean",
}
]
@@ -40,8 +40,9 @@ def test_build_preset_workflow_config_creates_graph_still_variant():
assert config["ui"]["execution_mode"] == "graph"
assert [node["step"] for node in config["nodes"]] == [
"order_line_setup",
"auto_populate_materials",
"resolve_template",
"auto_populate_materials",
"glb_bbox",
"material_map_resolve",
"blender_still",
"output_save",
@@ -51,6 +52,7 @@ def test_build_preset_workflow_config_creates_graph_still_variant():
assert render_node["params"]["width"] == 1600
assert render_node["params"]["height"] == 900
assert render_node["params"]["samples"] == 128
assert render_node["params"]["use_custom_render_settings"] is False
def test_canonicalize_workflow_config_migrates_legacy_preset():
@@ -215,6 +217,111 @@ def test_canonicalize_workflow_config_defaults_execution_mode_for_canonical_conf
assert canonical["ui"]["execution_mode"] == "legacy"
def test_canonicalize_workflow_config_rebuilds_canonical_still_graph_preset():
canonical = canonicalize_workflow_config(
{
"version": 1,
"ui": {"preset": "still_graph", "execution_mode": "graph"},
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "resolve_materials", "step": "material_map_resolve", "params": {}},
{"id": "template", "step": "resolve_template", "params": {}},
{"id": "render", "step": "blender_still", "params": {"width": 1280, "height": 720, "samples": 32}},
{"id": "output", "step": "output_save", "params": {}},
{"id": "notify", "step": "notify", "params": {}},
],
"edges": [
{"from": "setup", "to": "resolve_materials"},
{"from": "resolve_materials", "to": "template"},
{"from": "template", "to": "render"},
{"from": "render", "to": "output"},
{"from": "render", "to": "notify"},
],
}
)
assert canonical["ui"]["preset"] == "still_graph"
assert canonical["ui"]["execution_mode"] == "graph"
assert [node["step"] for node in canonical["nodes"]] == [
"order_line_setup",
"resolve_template",
"auto_populate_materials",
"glb_bbox",
"material_map_resolve",
"blender_still",
"output_save",
"notify",
]
render_node = next(node for node in canonical["nodes"] if node["step"] == "blender_still")
assert render_node["params"]["width"] == 1280
assert render_node["params"]["height"] == 720
assert render_node["params"]["samples"] == 32
assert render_node["params"]["use_custom_render_settings"] is False
def test_build_workflow_blueprint_config_cad_intake_supplies_bbox_to_threejs_thumbnail():
config = build_workflow_blueprint_config("cad_intake")
assert config["ui"]["family"] == "cad_file"
assert [node["step"] for node in config["nodes"]] == [
"resolve_step_path",
"occ_object_extract",
"occ_glb_export",
"glb_bbox",
"stl_cache_generate",
"blender_render",
"threejs_render",
"thumbnail_save",
"thumbnail_save",
]
assert {"from": "export_glb", "to": "bbox"} in config["edges"]
assert {"from": "bbox", "to": "threejs_thumb"} in config["edges"]
def test_canonicalize_workflow_config_rebuilds_reference_blueprints():
canonical = canonicalize_workflow_config(
{
"version": 1,
"ui": {"preset": "custom", "execution_mode": "legacy", "blueprint": "order_rendering"},
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
],
"edges": [],
}
)
assert canonical["ui"]["blueprint"] == "order_rendering"
assert canonical["ui"]["family"] == "order_line"
assert any(node["step"] == "blender_turntable" for node in canonical["nodes"])
assert any(node["step"] == "export_blend" for node in canonical["nodes"])
def test_canonicalize_workflow_config_rebuilds_starter_blueprints():
canonical = canonicalize_workflow_config(
{
"version": 1,
"ui": {"preset": "custom", "execution_mode": "legacy", "blueprint": "starter_cad_intake"},
"nodes": [],
"edges": [],
}
)
assert canonical["ui"]["blueprint"] == "starter_cad_intake"
assert canonical["ui"]["family"] == "cad_file"
assert canonical["nodes"] == [
{
"id": "resolve_step",
"step": "resolve_step_path",
"params": {},
"ui": {
"type": "inputNode",
"position": {"x": 120, "y": 140},
"label": "Resolve STEP Path",
},
}
]
def test_workflow_config_requires_canonicalization_for_legacy_payloads():
assert workflow_config_requires_canonicalization(
{
@@ -235,11 +342,13 @@ def test_build_workflow_blueprint_config_creates_cad_intake_family_graph():
assert config["version"] == 1
assert config["ui"]["preset"] == "custom"
assert config["ui"]["family"] == "cad_file"
assert config["ui"]["blueprint"] == "cad_intake"
assert [node["step"] for node in config["nodes"]] == [
"resolve_step_path",
"occ_object_extract",
"occ_glb_export",
"glb_bbox",
"stl_cache_generate",
"blender_render",
"threejs_render",
@@ -253,6 +362,7 @@ def test_build_workflow_blueprint_config_creates_order_rendering_family_graph():
assert config["version"] == 1
assert config["ui"]["preset"] == "custom"
assert config["ui"]["family"] == "order_line"
assert config["ui"]["blueprint"] == "order_rendering"
assert any(node["step"] == "blender_still" for node in config["nodes"])
assert any(node["step"] == "blender_turntable" for node in config["nodes"])
@@ -260,11 +370,34 @@ def test_build_workflow_blueprint_config_creates_order_rendering_family_graph():
assert sum(1 for node in config["nodes"] if node["step"] == "notify") == 3
def test_build_workflow_blueprint_config_creates_still_graph_reference():
config = build_workflow_blueprint_config("still_graph_reference")
assert config["version"] == 1
assert config["ui"]["preset"] == "custom"
assert config["ui"]["family"] == "order_line"
assert config["ui"]["blueprint"] == "still_graph_reference"
assert config["ui"]["execution_mode"] == "graph"
assert [node["step"] for node in config["nodes"]] == [
"order_line_setup",
"resolve_template",
"auto_populate_materials",
"glb_bbox",
"material_map_resolve",
"blender_still",
"output_save",
"notify",
]
render_node = next(node for node in config["nodes"] if node["step"] == "blender_still")
assert render_node["params"]["use_custom_render_settings"] is False
def test_build_starter_workflow_config_creates_minimal_valid_custom_graph():
config = build_starter_workflow_config()
assert config["version"] == 1
assert config["ui"]["preset"] == "custom"
assert config["ui"]["family"] == "order_line"
assert config["ui"]["blueprint"] == "starter_order_rendering"
assert config["nodes"] == [
{
@@ -7,6 +7,7 @@ from types import SimpleNamespace
import pytest
from PIL import Image, PngImagePlugin
from sqlalchemy import select
from sqlalchemy.engine import make_url
from sqlalchemy.orm import selectinload
from app.config import settings
@@ -18,15 +19,128 @@ from app.domains.rendering.workflow_comparison_service import (
_build_artifact,
evaluate_rollout_gate,
)
-from app.domains.rendering.workflow_config_utils import build_preset_workflow_config
+from app.domains.rendering.workflow_config_utils import (
+build_preset_workflow_config,
+build_workflow_blueprint_config,
+)
+from tests.db_test_utils import resolve_test_db_url
def _use_test_database(monkeypatch) -> None:
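"""Point app settings at the resolved test database for code paths that open their own connections."""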
monkeypatch.setattr(settings, "postgres_host", "postgres")
monkeypatch.setattr(settings, "postgres_port", 5432)
monkeypatch.setattr(settings, "postgres_user", "hartomat")
monkeypatch.setattr(settings, "postgres_password", "hartomat")
monkeypatch.setattr(settings, "postgres_db", "hartomat_test")
resolved = make_url(resolve_test_db_url(async_driver=False))
monkeypatch.setattr(settings, "postgres_host", resolved.host or settings.postgres_host)
monkeypatch.setattr(settings, "postgres_port", int(resolved.port or settings.postgres_port))
monkeypatch.setattr(settings, "postgres_user", resolved.username or settings.postgres_user)
monkeypatch.setattr(settings, "postgres_password", resolved.password or settings.postgres_password)
monkeypatch.setattr(settings, "postgres_db", resolved.database or settings.postgres_db)
def _build_valid_custom_still_graph(
*,
execution_mode: str = "graph",
width: int = 1024,
height: int = 768,
include_output: bool = False,
include_notify: bool = False,
) -> dict[str, object]:
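"""Smallest custom still graph accepted by graph dispatch; the output_save / notify tails are optional."""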
nodes: list[dict[str, object]] = [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "template", "step": "resolve_template", "params": {}},
{"id": "populate_materials", "step": "auto_populate_materials", "params": {}},
{"id": "resolve_materials", "step": "material_map_resolve", "params": {}},
{"id": "render", "step": "blender_still", "params": {"width": width, "height": height}},
]
edges: list[dict[str, str]] = [
{"from": "setup", "to": "template"},
{"from": "setup", "to": "populate_materials"},
{"from": "template", "to": "resolve_materials"},
{"from": "populate_materials", "to": "resolve_materials"},
{"from": "template", "to": "render"},
{"from": "resolve_materials", "to": "render"},
]
if include_output:
nodes.append({"id": "output", "step": "output_save", "params": {}})
edges.append({"from": "render", "to": "output"})
if include_notify:
nodes.append({"id": "notify", "step": "notify", "params": {}})
edges.append({"from": "render", "to": "notify"})
return {
"version": 1,
"ui": {"preset": "custom", "execution_mode": execution_mode},
"nodes": nodes,
"edges": edges,
}
def _build_valid_custom_turntable_graph(
*,
execution_mode: str = "graph",
fps: int = 24,
frame_count: int = 96,
include_output: bool = False,
include_notify: bool = False,
) -> dict[str, object]:
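"""Turntable variant of the minimal custom graph; blender_turntable duration_s is derived as frame_count / fps."""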
duration_s = frame_count / fps
nodes: list[dict[str, object]] = [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "template", "step": "resolve_template", "params": {}},
{"id": "populate_materials", "step": "auto_populate_materials", "params": {}},
{"id": "bbox", "step": "glb_bbox", "params": {}},
{"id": "resolve_materials", "step": "material_map_resolve", "params": {}},
{"id": "turntable", "step": "blender_turntable", "params": {"fps": fps, "duration_s": duration_s}},
]
edges: list[dict[str, str]] = [
{"from": "setup", "to": "template"},
{"from": "setup", "to": "populate_materials"},
{"from": "setup", "to": "bbox"},
{"from": "template", "to": "resolve_materials"},
{"from": "populate_materials", "to": "resolve_materials"},
{"from": "bbox", "to": "turntable"},
{"from": "template", "to": "turntable"},
{"from": "resolve_materials", "to": "turntable"},
]
if include_output:
nodes.append({"id": "output", "step": "output_save", "params": {}})
edges.append({"from": "turntable", "to": "output"})
if include_notify:
nodes.append({"id": "notify", "step": "notify", "params": {}})
edges.append({"from": "turntable", "to": "notify"})
return {
"version": 1,
"ui": {"preset": "custom", "execution_mode": execution_mode},
"nodes": nodes,
"edges": edges,
}
def _build_valid_custom_blend_graph(*, include_output: bool = False) -> dict[str, object]:
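"""Minimal export_blend graph: setup -> resolve_template -> export_blend, optionally followed by output_save."""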
nodes: list[dict[str, object]] = [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "template", "step": "resolve_template", "params": {}},
{"id": "blend", "step": "export_blend", "params": {}},
]
edges: list[dict[str, str]] = [
{"from": "setup", "to": "template"},
{"from": "template", "to": "blend"},
]
if include_output:
nodes.append({"id": "output", "step": "output_save", "params": {}})
edges.append({"from": "blend", "to": "output"})
return {
"version": 1,
"ui": {"preset": "custom", "execution_mode": "graph"},
"nodes": nodes,
"edges": edges,
}
def _derive_rollout_mode_from_config(workflow_config: dict | None) -> str:
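"""Map a workflow config's ui.execution_mode onto the rollout mode: "graph" and "shadow" pass through, everything else is "legacy_only"."""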
execution_mode = ((workflow_config or {}).get("ui") or {}).get("execution_mode")
if execution_mode == "graph":
return "graph"
if execution_mode == "shadow":
return "shadow"
return "legacy_only"
async def _seed_order_line(
@@ -61,6 +175,7 @@ async def _seed_order_line(
db.add(workflow_definition)
await db.flush()
output_type.workflow_definition_id = workflow_definition.id
output_type.workflow_rollout_mode = _derive_rollout_mode_from_config(workflow_config)
order_line = OrderLine(
order_id=order.id,
@@ -148,6 +263,54 @@ async def test_dispatch_render_with_workflow_falls_back_to_legacy_without_workfl
assert runs == []
@pytest.mark.asyncio
async def test_dispatch_render_with_workflow_falls_back_on_artifact_contract_mismatch(
db,
admin_user,
monkeypatch,
):
_use_test_database(monkeypatch)
seeded = await _seed_order_line(
db,
admin_user,
workflow_config={
"version": 1,
"ui": {"preset": "custom", "execution_mode": "graph"},
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "template", "step": "resolve_template", "params": {}},
{"id": "blend", "step": "export_blend", "params": {}},
],
"edges": [
{"from": "setup", "to": "template"},
{"from": "template", "to": "blend"},
],
},
)
output_type = seeded["output_type"]
output_type.artifact_kind = "still_image"
await db.commit()
monkeypatch.setattr(
"app.domains.rendering.dispatch_service._legacy_dispatch",
lambda order_line_id: {"backend": "legacy", "order_line_id": order_line_id},
)
result = dispatch_render_with_workflow(str(seeded["order_line"].id))
await db.rollback()
assert result["backend"] == "legacy"
assert result["order_line_id"] == str(seeded["order_line"].id)
assert result["rollout_gate_status"] == "workflow_contract_mismatch"
assert result["workflow_rollout_ready"] is False
assert result["output_type_rollout_ready"] is False
assert any("Expected artifact kind: still_image." in reason for reason in result["rollout_gate_reasons"])
assert any("blend_asset" in reason for reason in result["rollout_gate_reasons"])
runs = (await db.execute(select(WorkflowRun))).scalars().all()
assert runs == []
@pytest.mark.asyncio
async def test_dispatch_render_with_workflow_creates_run_and_node_results_for_preset_dispatch(
db,
@@ -203,15 +366,11 @@ async def test_dispatch_render_with_workflow_falls_back_when_workflow_runtime_pr
seeded = await _seed_order_line(
db,
admin_user,
-workflow_config={
-"version": 1,
-"nodes": [
-{"id": "render", "step": "blender_still", "params": {}},
-],
-"edges": [
-{"from": "missing", "to": "render"},
-],
-},
+workflow_config=build_preset_workflow_config("still", {"width": 640, "height": 640}),
)
+monkeypatch.setattr(
+"app.domains.rendering.workflow_executor.prepare_workflow_context",
+lambda *_args, **_kwargs: (_ for _ in ()).throw(RuntimeError("prep exploded")),
+)
monkeypatch.setattr(
@@ -248,19 +407,7 @@ async def test_dispatch_render_with_workflow_graph_mode_dispatches_supported_cus
workflow_definition = WorkflowDefinition(
name=f"Graph Workflow {uuid.uuid4().hex[:8]}",
output_type_id=order_line.output_type_id,
-config={
-"version": 1,
-"ui": {"preset": "custom", "execution_mode": "graph"},
-"nodes": [
-{"id": "setup", "step": "order_line_setup", "params": {}},
-{"id": "template", "step": "resolve_template", "params": {}},
-{"id": "render", "step": "blender_still", "params": {"width": 1024, "height": 768}},
-],
-"edges": [
-{"from": "setup", "to": "template"},
-{"from": "template", "to": "render"},
-],
-},
+config=_build_valid_custom_still_graph(execution_mode="graph"),
is_active=True,
)
db.add(workflow_definition)
@@ -268,6 +415,7 @@ async def test_dispatch_render_with_workflow_graph_mode_dispatches_supported_cus
output_type = await db.get(OutputType, order_line.output_type_id)
assert output_type is not None
output_type.workflow_definition_id = workflow_definition.id
output_type.workflow_rollout_mode = "graph"
await db.commit()
monkeypatch.setattr(
@@ -315,21 +463,7 @@ async def test_dispatch_render_with_workflow_graph_mode_uses_output_save_as_auth
workflow_definition = WorkflowDefinition(
name=f"Graph Output Save {uuid.uuid4().hex[:8]}",
output_type_id=order_line.output_type_id,
config=_build_valid_custom_still_graph(execution_mode="graph", include_output=True),
is_active=True,
)
db.add(workflow_definition)
@@ -337,6 +471,7 @@ async def test_dispatch_render_with_workflow_graph_mode_uses_output_save_as_auth
output_type = await db.get(OutputType, order_line.output_type_id)
assert output_type is not None
output_type.workflow_definition_id = workflow_definition.id
output_type.workflow_rollout_mode = "graph"
await db.commit()
calls: list[tuple[str, list[str], dict]] = []
@@ -367,8 +502,10 @@ async def test_dispatch_render_with_workflow_graph_mode_uses_output_save_as_auth
assert calls[0][2]["publish_asset_enabled"] is False
assert calls[0][2]["graph_authoritative_output_enabled"] is True
assert calls[0][2]["graph_output_node_ids"] == ["output"]
assert node_results["output"].status == "completed"
assert node_results["output"].status == "pending"
assert node_results["output"].output["publication_mode"] == "awaiting_graph_authoritative_save"
assert node_results["output"].output["handoff_state"] == "armed"
assert node_results["output"].output["handoff_node_ids"] == ["render"]
@pytest.mark.asyncio
@@ -395,6 +532,7 @@ async def test_dispatch_render_with_workflow_graph_mode_canonicalizes_legacy_pre
output_type = await db.get(OutputType, order_line.output_type_id)
assert output_type is not None
output_type.workflow_definition_id = workflow_definition.id
output_type.workflow_rollout_mode = "graph"
await db.commit()
monkeypatch.setattr(
@@ -421,7 +559,7 @@ async def test_dispatch_render_with_workflow_graph_mode_canonicalizes_legacy_pre
assert node_results["setup"].status == "completed"
assert node_results["template"].status == "completed"
assert node_results["render"].status == "queued"
assert node_results["output"].status == "completed"
assert node_results["output"].status == "pending"
@pytest.mark.asyncio
@@ -436,21 +574,7 @@ async def test_dispatch_render_with_workflow_graph_mode_falls_back_to_legacy_on_
workflow_definition = WorkflowDefinition(
name=f"Graph Workflow {uuid.uuid4().hex[:8]}",
output_type_id=order_line.output_type_id,
config=_build_valid_custom_still_graph(execution_mode="graph"),
is_active=True,
)
db.add(workflow_definition)
@@ -458,6 +582,7 @@ async def test_dispatch_render_with_workflow_graph_mode_falls_back_to_legacy_on_
output_type = await db.get(OutputType, order_line.output_type_id)
assert output_type is not None
output_type.workflow_definition_id = workflow_definition.id
output_type.workflow_rollout_mode = "graph"
await db.commit()
monkeypatch.setattr(
@@ -490,6 +615,40 @@ async def test_dispatch_render_with_workflow_graph_mode_falls_back_to_legacy_on_
assert run.error_message == "graph dispatch exploded"
@pytest.mark.asyncio
async def test_dispatch_render_with_graph_capable_workflow_respects_legacy_only_rollout_mode(
db,
admin_user,
monkeypatch,
):
_use_test_database(monkeypatch)
seeded = await _seed_order_line(
db,
admin_user,
workflow_config=_build_valid_custom_still_graph(execution_mode="graph"),
)
output_type = seeded["output_type"]
output_type.workflow_rollout_mode = "legacy_only"
await db.commit()
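# legacy_only pins dispatch to the legacy backend even though the configured workflow itself is graph-capable.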
monkeypatch.setattr(
"app.domains.rendering.dispatch_service._legacy_dispatch",
lambda order_line_id: {"backend": "legacy", "order_line_id": order_line_id},
)
result = dispatch_render_with_workflow(str(seeded["order_line"].id))
await db.rollback()
assert result["backend"] == "legacy"
assert result["order_line_id"] == str(seeded["order_line"].id)
assert result["workflow_rollout_mode"] == "legacy_only"
assert result["configured_execution_mode"] == "graph"
assert result["rollout_gate_status"] == "rollout_legacy_only"
assert result["workflow_rollout_ready"] is False
assert result["output_type_rollout_ready"] is False
@pytest.mark.asyncio
async def test_dispatch_render_with_workflow_shadow_mode_keeps_legacy_authoritative_and_dispatches_graph_observer(
db,
@@ -502,19 +661,7 @@ async def test_dispatch_render_with_workflow_shadow_mode_keeps_legacy_authoritat
workflow_definition = WorkflowDefinition(
name=f"Shadow Workflow {uuid.uuid4().hex[:8]}",
output_type_id=order_line.output_type_id,
config=_build_valid_custom_still_graph(execution_mode="shadow"),
is_active=True,
)
db.add(workflow_definition)
@@ -522,6 +669,7 @@ async def test_dispatch_render_with_workflow_shadow_mode_keeps_legacy_authoritat
output_type = await db.get(OutputType, order_line.output_type_id)
assert output_type is not None
output_type.workflow_definition_id = workflow_definition.id
output_type.workflow_rollout_mode = "shadow"
await db.commit()
calls: list[tuple[str, list[str], dict]] = []
@@ -592,6 +740,7 @@ async def test_dispatch_render_with_workflow_shadow_mode_canonicalizes_legacy_pr
output_type = await db.get(OutputType, order_line.output_type_id)
assert output_type is not None
output_type.workflow_definition_id = workflow_definition.id
output_type.workflow_rollout_mode = "shadow"
await db.commit()
calls: list[tuple[str, list[str], dict]] = []
@@ -610,21 +759,13 @@ async def test_dispatch_render_with_workflow_shadow_mode_canonicalizes_legacy_pr
await db.rollback()
assert result["backend"] == "legacy"
assert result["execution_mode"] == "shadow"
assert result["shadow_status"] == "skipped"
assert result["rollout_gate_status"] == "shadow_skipped"
assert "shadow_workflow_run_id" not in result
assert "material_assignments" in result["shadow_error"]
assert calls == []
@pytest.mark.asyncio
@@ -639,17 +780,7 @@ async def test_dispatch_render_with_workflow_shadow_mode_ignores_graph_failures_
workflow_definition = WorkflowDefinition(
name=f"Shadow Workflow {uuid.uuid4().hex[:8]}",
output_type_id=order_line.output_type_id,
config=_build_valid_custom_still_graph(execution_mode="shadow"),
is_active=True,
)
db.add(workflow_definition)
@@ -657,6 +788,7 @@ async def test_dispatch_render_with_workflow_shadow_mode_ignores_graph_failures_
output_type = await db.get(OutputType, order_line.output_type_id)
assert output_type is not None
output_type.workflow_definition_id = workflow_definition.id
output_type.workflow_rollout_mode = "shadow"
await db.commit()
monkeypatch.setattr(
@@ -730,6 +862,32 @@ def test_evaluate_rollout_gate_warns_on_small_visual_delta(tmp_path: Path):
assert any("warn threshold" in reason for reason in gate["reasons"])
def test_evaluate_rollout_gate_passes_near_zero_visual_delta(tmp_path: Path):
authoritative = tmp_path / "authoritative.png"
observer = tmp_path / "observer.png"
Image.new("RGBA", (1024, 1024), color=(106, 106, 106, 255)).save(authoritative)
Image.new("RGBA", (1024, 1024), color=(106, 106, 106, 255)).save(observer)
with Image.open(observer) as image:
image.putpixel((444, 137), (106, 106, 107, 255))
image.putpixel((651, 142), (105, 106, 106, 255))
image.save(observer)
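# Two pixels differ by one step in a single channel each, so the normalized mean delta is 2 / (1024 * 1024 * 4 * 255) ~= 1.9e-9, far below the pass threshold.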
gate = evaluate_rollout_gate(
authoritative_output=_build_artifact(str(authoritative)),
observer_output=_build_artifact(str(observer)),
exact_match=False,
dimensions_match=True,
mean_pixel_delta=((1 + 1) / (1024 * 1024 * 4 * 255)),
)
assert gate["verdict"] == "pass"
assert gate["ready"] is True
assert gate["status"] == "ready_for_rollout"
assert any("pass threshold" in reason for reason in gate["reasons"])
def test_evaluate_rollout_gate_fails_on_missing_observer(tmp_path: Path):
authoritative = tmp_path / "authoritative.png"
Image.new("RGBA", (16, 16), color=(0, 128, 255, 255)).save(authoritative)
@@ -796,7 +954,11 @@ def test_dispatch_render_with_workflow_unit_marks_shadow_dispatch_as_pending_rol
workflow_def_id = uuid.uuid4()
fake_line = SimpleNamespace(
id=uuid.UUID(order_line_id),
output_type=SimpleNamespace(
id=output_type_id,
workflow_definition_id=workflow_def_id,
workflow_rollout_mode="shadow",
),
)
fake_workflow_def = SimpleNamespace(id=workflow_def_id, config={"version": 1}, is_active=True)
fake_run = SimpleNamespace(id=uuid.uuid4())
@@ -951,12 +1113,14 @@ async def test_workflow_dispatch_endpoint_returns_workflow_run_with_node_results
assert node_results["setup"]["output"]["order_line_id"] == str(order_line.id)
assert node_results["template"]["status"] == "completed"
assert node_results["template"]["output"]["use_materials"] is False
assert node_results["output"]["status"] == "completed"
assert node_results["output"]["status"] == "pending"
assert node_results["output"]["output"]["publication_mode"] == "awaiting_graph_authoritative_save"
assert node_results["output"]["output"]["handoff_state"] == "armed"
assert node_results["output"]["output"]["handoff_node_ids"] == ["render"]
@pytest.mark.asyncio
async def test_workflow_dispatch_endpoint_rejects_output_save_for_export_blend_only_graph(
client,
db,
admin_user,
@@ -968,18 +1132,7 @@ async def test_workflow_dispatch_endpoint_arms_output_save_for_export_blend(
order_line = await _seed_renderable_order_line(db, admin_user, tmp_path)
workflow_definition = WorkflowDefinition(
name=f"Blend Output Workflow {uuid.uuid4().hex[:8]}",
config=_build_valid_custom_blend_graph(include_output=True),
is_active=True,
)
db.add(workflow_definition)
@@ -1000,35 +1153,9 @@ async def test_workflow_dispatch_endpoint_arms_output_save_for_export_blend(
headers=auth_headers,
)
assert response.status_code == 422
assert "output_save" in response.json()["detail"]
assert calls == []
@pytest.mark.asyncio
@@ -1044,18 +1171,7 @@ async def test_workflow_dispatch_endpoint_arms_output_save_for_turntable(
order_line = await _seed_renderable_order_line(db, admin_user, tmp_path)
workflow_definition = WorkflowDefinition(
name=f"Turntable Output Workflow {uuid.uuid4().hex[:8]}",
config=_build_valid_custom_turntable_graph(include_output=True),
is_active=True,
)
db.add(workflow_definition)
@@ -1091,7 +1207,6 @@ async def test_workflow_dispatch_endpoint_arms_output_save_for_turntable(
assert calls[0][2]["graph_authoritative_output_enabled"] is True
assert calls[0][2]["graph_output_node_ids"] == ["output"]
assert calls[0][2]["fps"] == 24
assert calls[0][2]["frame_count"] == 96
node_results = {node["node_name"]: node for node in body["workflow_run"]["node_results"]}
assert node_results["turntable"]["status"] == "queued"
@@ -1099,8 +1214,10 @@ async def test_workflow_dispatch_endpoint_arms_output_save_for_turntable(
assert node_results["turntable"]["output"]["publish_asset_enabled"] is False
assert node_results["turntable"]["output"]["graph_authoritative_output_enabled"] is True
assert node_results["turntable"]["output"]["graph_output_node_ids"] == ["output"]
assert node_results["output"]["status"] == "completed"
assert node_results["output"]["status"] == "pending"
assert node_results["output"]["output"]["publication_mode"] == "awaiting_graph_authoritative_save"
assert node_results["output"]["output"]["handoff_state"] == "armed"
assert node_results["output"]["output"]["handoff_node_ids"] == ["turntable"]
@pytest.mark.asyncio
@@ -1116,18 +1233,7 @@ async def test_workflow_dispatch_endpoint_arms_notify_handoff_for_render_node(
order_line = await _seed_renderable_order_line(db, admin_user, tmp_path)
workflow_definition = WorkflowDefinition(
name=f"Notify Workflow {uuid.uuid4().hex[:8]}",
config=_build_valid_custom_still_graph(include_notify=True),
is_active=True,
)
db.add(workflow_definition)
@@ -1166,9 +1272,10 @@ async def test_workflow_dispatch_endpoint_arms_notify_handoff_for_render_node(
node_results = {node["node_name"]: node for node in body["workflow_run"]["node_results"]}
assert node_results["render"]["status"] == "queued"
assert node_results["render"]["output"]["graph_notify_node_ids"] == ["notify"]
assert node_results["notify"]["status"] == "completed"
assert node_results["notify"]["status"] == "pending"
assert node_results["notify"]["output"]["notification_mode"] == "deferred_to_render_task"
assert node_results["notify"]["output"]["armed_node_ids"] == ["render"]
assert node_results["notify"]["output"]["handoff_state"] == "armed"
@pytest.mark.asyncio
@@ -1246,19 +1353,7 @@ async def test_workflow_draft_dispatch_endpoint_dispatches_unsaved_render_graph(
json={
"workflow_id": str(workflow_definition.id),
"context_id": str(order_line.id),
"config": {
"version": 1,
"ui": {"preset": "custom", "execution_mode": "graph"},
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}, "ui": {"label": "Setup"}},
{"id": "template", "step": "resolve_template", "params": {}, "ui": {"label": "Template"}},
{"id": "render", "step": "blender_still", "params": {"width": 800, "height": 600}, "ui": {"label": "Render"}},
],
"edges": [
{"from": "setup", "to": "template"},
{"from": "template", "to": "render"},
],
},
"config": _build_valid_custom_still_graph(width=800, height=600),
},
)
@@ -1306,17 +1401,7 @@ async def test_workflow_draft_dispatch_endpoint_marks_submitted_order_processing
headers=auth_headers,
json={
"context_id": str(order_line.id),
"config": {
"version": 1,
"ui": {"preset": "custom", "execution_mode": "graph"},
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}, "ui": {"label": "Setup"}},
{"id": "render", "step": "blender_still", "params": {}, "ui": {"label": "Render"}},
],
"edges": [
{"from": "setup", "to": "render"},
],
},
"config": _build_valid_custom_still_graph(),
},
)
@@ -1413,19 +1498,7 @@ async def test_workflow_preflight_endpoint_supports_direct_cad_file_graphs(
)
workflow_definition = WorkflowDefinition(
name=f"CAD Workflow {uuid.uuid4().hex[:8]}",
config=build_workflow_blueprint_config("cad_intake"),
is_active=True,
)
db.add_all([cad_file, workflow_definition])
@@ -1443,7 +1516,7 @@ async def test_workflow_preflight_endpoint_supports_direct_cad_file_graphs(
assert body["context_kind"] == "cad_file"
assert body["expected_context_kind"] == "cad_file"
assert body["execution_mode"] == "graph"
assert body["execution_mode"] == "legacy"
assert body["graph_dispatch_allowed"] is True
assert body["resolved_cad_file_id"] == str(cad_file.id)
assert all(node["status"] == "ready" for node in body["nodes"])
@@ -1464,19 +1537,7 @@ async def test_workflow_draft_preflight_endpoint_validates_unsaved_render_graph(
headers=auth_headers,
json={
"context_id": str(order_line.id),
"config": {
"version": 1,
"ui": {"preset": "custom", "execution_mode": "graph"},
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}, "ui": {"label": "Setup"}},
{"id": "template", "step": "resolve_template", "params": {}, "ui": {"label": "Template"}},
{"id": "render", "step": "blender_still", "params": {"width": 640, "height": 640}, "ui": {"label": "Render"}},
],
"edges": [
{"from": "setup", "to": "template"},
{"from": "template", "to": "render"},
],
},
"config": _build_valid_custom_still_graph(width=640, height=640),
},
)
@@ -1489,7 +1550,13 @@ async def test_workflow_draft_preflight_endpoint_validates_unsaved_render_graph(
assert body["execution_mode"] == "graph"
assert body["graph_dispatch_allowed"] is True
assert body["resolved_order_line_id"] == str(order_line.id)
assert [node["node_id"] for node in body["nodes"]] == ["setup", "template", "render"]
assert [node["node_id"] for node in body["nodes"]] == [
"setup",
"template",
"populate_materials",
"resolve_materials",
"render",
]
@pytest.mark.asyncio
@@ -1646,7 +1713,9 @@ async def test_workflow_run_comparison_endpoint_reports_metadata_only_difference
assert body["exact_match"] is False
assert body["dimensions_match"] is True
assert body["mean_pixel_delta"] == 0.0
assert "metadata differs" in body["summary"]
assert body["summary"] == (
"Observer output matches the authoritative legacy output within the visual pass threshold."
)
@pytest.mark.asyncio
@@ -1695,7 +1764,9 @@ async def test_workflow_run_comparison_endpoint_finds_shadow_output_in_step_file
admin_user,
auth_headers,
tmp_path,
monkeypatch,
):
monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads"))
order_line = await _seed_renderable_order_line(db, admin_user, tmp_path)
workflow_run = WorkflowRun(
order_line_id=order_line.id,
@@ -1710,7 +1781,7 @@ async def test_workflow_run_comparison_endpoint_finds_shadow_output_in_step_file
authoritative_path = render_dir / "authoritative.png"
Image.new("RGBA", (12, 12), (32, 160, 255, 255)).save(authoritative_path)
step_shadow_dir = Path("/app/uploads/step_files/renders")
step_shadow_dir = Path(settings.upload_dir) / "step_files" / "renders" / str(order_line.id)
step_shadow_dir.mkdir(parents=True, exist_ok=True)
shadow_path = step_shadow_dir / f"line_{order_line.id}_shadow-{str(workflow_run.id)[:8]}.png"
Image.new("RGBA", (12, 12), (32, 160, 255, 255)).save(shadow_path)
@@ -1729,3 +1800,52 @@ async def test_workflow_run_comparison_endpoint_finds_shadow_output_in_step_file
assert body["status"] == "matched"
assert body["observer_output"]["exists"] is True
assert body["observer_output"]["path"] == str(shadow_path)
@pytest.mark.asyncio
async def test_workflow_run_comparison_endpoint_treats_near_zero_visual_delta_as_match(
client,
db,
admin_user,
auth_headers,
tmp_path,
):
order_line = await _seed_renderable_order_line(db, admin_user, tmp_path)
workflow_run = WorkflowRun(
order_line_id=order_line.id,
execution_mode="shadow",
status="completed",
)
db.add(workflow_run)
await db.flush()
render_dir = tmp_path / "comparison-near-zero" / str(order_line.id)
render_dir.mkdir(parents=True, exist_ok=True)
authoritative_path = render_dir / "authoritative.png"
shadow_path = render_dir / f"line_{order_line.id}_shadow-{str(workflow_run.id)[:8]}.png"
Image.new("RGBA", (1024, 1024), (106, 106, 106, 255)).save(authoritative_path)
Image.new("RGBA", (1024, 1024), (106, 106, 106, 255)).save(shadow_path)
with Image.open(shadow_path) as image:
image.putpixel((444, 137), (106, 106, 107, 255))
image.putpixel((651, 142), (105, 106, 106, 255))
image.save(shadow_path)
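# Same near-zero perturbation as the gate unit test: two one-step channel deltas across a 1024x1024 RGBA image keep the mean delta around 1.9e-9.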
order_line.result_path = str(authoritative_path)
order_line.render_status = "completed"
await db.commit()
response = await client.get(
f"/api/workflows/runs/{workflow_run.id}/comparison",
headers=auth_headers,
)
assert response.status_code == 200
body = response.json()
assert body["status"] == "matched"
assert body["exact_match"] is False
assert body["dimensions_match"] is True
assert body["mean_pixel_delta"] is not None
assert body["mean_pixel_delta"] <= 1e-6
assert "pass threshold" in body["summary"]
@@ -6,10 +6,10 @@ from pathlib import Path
from types import SimpleNamespace
import pytest
from sqlalchemy import select, text
from sqlalchemy.orm import Session, selectinload
from app.core.render_paths import build_order_line_export_path, build_order_line_step_render_path
from app.core.process_steps import StepName
from app.domains.auth.models import User, UserRole
from app.domains.materials.models import AssetLibrary
@@ -27,25 +27,13 @@ from app.domains.rendering.workflow_graph_runtime import (
from app.domains.rendering.workflow_run_service import create_workflow_run
from app.domains.rendering.workflow_runtime_services import OrderLineRenderSetupResult
import app.models # noqa: F401
from tests.db_test_utils import sync_test_session as sync_test_session_ctx
@pytest.fixture
def sync_session():
with sync_test_session_ctx() as session:
yield session
def _seed_renderable_order_line(
@@ -137,6 +125,19 @@ def _seed_renderable_order_line(
target_collection="Product",
material_replace_enabled=True,
lighting_only=False,
workflow_input_schema=[
{
"key": "studio_variant",
"label": "Studio Variant",
"type": "select",
"section": "Template Inputs",
"default": "default",
"options": [
{"value": "default", "label": "Default"},
{"value": "warm", "label": "Warm"},
],
}
],
is_active=True,
output_types=[output_type],
)
@@ -329,6 +330,193 @@ def test_execute_graph_workflow_routes_cad_thumbnail_save_using_upstream_threejs
assert node_results["save"].output["predicted_output_path"].endswith(f"{cad_file.id}.png")
def test_execute_graph_workflow_serializes_template_schema_and_template_inputs(
sync_session,
tmp_path,
monkeypatch,
):
line = _seed_renderable_order_line(sync_session, tmp_path)
template = sync_session.execute(select(RenderTemplate)).unique().scalar_one()
monkeypatch.setattr(
"app.domains.rendering.workflow_runtime_services.resolve_material_map",
lambda raw_map: {key: f"resolved:{value}" for key, value in raw_map.items()},
)
workflow_context = prepare_workflow_context(
{
"version": 1,
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{
"id": "template",
"step": "resolve_template",
"params": {
"template_id_override": str(template.id),
"template_input__studio_variant": "warm",
},
},
],
"edges": [
{"from": "setup", "to": "template"},
],
},
context_id=str(line.id),
execution_mode="graph",
)
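# Params prefixed with template_input__ should be stripped of the prefix and collected into the serialized template_inputs mapping.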
run = create_workflow_run(
sync_session,
workflow_def_id=None,
order_line_id=line.id,
workflow_context=workflow_context,
)
dispatch_result = execute_graph_workflow(sync_session, workflow_context)
sync_session.commit()
refreshed_run = sync_session.execute(
select(WorkflowRun)
.where(WorkflowRun.id == run.id)
.options(selectinload(WorkflowRun.node_results))
).scalar_one()
node_results = {node_result.node_name: node_result for node_result in refreshed_run.node_results}
assert dispatch_result.task_ids == []
assert node_results["template"].status == "completed"
assert node_results["template"].output["workflow_input_schema"] == template.workflow_input_schema
assert node_results["template"].output["template_inputs"] == {"studio_variant": "warm"}
assert node_results["template"].output["template_input_count"] == 1
def test_execute_graph_workflow_passes_template_inputs_to_still_task(
sync_session,
tmp_path,
monkeypatch,
):
line = _seed_renderable_order_line(sync_session, tmp_path)
template = sync_session.execute(select(RenderTemplate)).unique().scalar_one()
send_calls: list[tuple[str, list[str], dict[str, object]]] = []
def _fake_send_task(task_name: str, args: list[str], kwargs: dict[str, object]):
send_calls.append((task_name, args, kwargs))
return SimpleNamespace(id="task-still-template-inputs")
monkeypatch.setattr(
"app.tasks.celery_app.celery_app.send_task",
_fake_send_task,
)
workflow_context = prepare_workflow_context(
{
"version": 1,
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{
"id": "template",
"step": "resolve_template",
"params": {
"template_id_override": str(template.id),
"template_input__studio_variant": "warm",
},
},
{"id": "render", "step": "blender_still", "params": {}},
],
"edges": [
{"from": "setup", "to": "template"},
{"from": "template", "to": "render"},
],
},
context_id=str(line.id),
execution_mode="graph",
)
create_workflow_run(
sync_session,
workflow_def_id=None,
order_line_id=line.id,
workflow_context=workflow_context,
)
dispatch_result = execute_graph_workflow(sync_session, workflow_context)
sync_session.commit()
assert dispatch_result.task_ids == ["task-still-template-inputs"]
assert len(send_calls) == 1
assert send_calls[0][0] == "app.domains.rendering.tasks.render_order_line_still_task"
assert send_calls[0][1] == [str(line.id)]
assert send_calls[0][2]["template_inputs"] == {"studio_variant": "warm"}
def test_execute_graph_workflow_passes_template_inputs_and_duration_to_turntable_task(
sync_session,
tmp_path,
monkeypatch,
):
line = _seed_renderable_order_line(sync_session, tmp_path)
template = sync_session.execute(select(RenderTemplate)).unique().scalar_one()
send_calls: list[tuple[str, list[str], dict[str, object]]] = []
def _fake_send_task(task_name: str, args: list[str], kwargs: dict[str, object]):
send_calls.append((task_name, args, kwargs))
return SimpleNamespace(id="task-turntable-template-inputs")
monkeypatch.setattr(
"app.tasks.celery_app.celery_app.send_task",
_fake_send_task,
)
workflow_context = prepare_workflow_context(
{
"version": 1,
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{
"id": "template",
"step": "resolve_template",
"params": {
"template_id_override": str(template.id),
"template_input__studio_variant": "warm",
},
},
{
"id": "render",
"step": "blender_turntable",
"params": {
"fps": 12,
"duration_s": 7,
"frame_count": 999,
},
},
],
"edges": [
{"from": "setup", "to": "template"},
{"from": "template", "to": "render"},
],
},
context_id=str(line.id),
execution_mode="graph",
)
create_workflow_run(
sync_session,
workflow_def_id=None,
order_line_id=line.id,
workflow_context=workflow_context,
)
dispatch_result = execute_graph_workflow(sync_session, workflow_context)
sync_session.commit()
assert dispatch_result.task_ids == ["task-turntable-template-inputs"]
assert len(send_calls) == 1
assert send_calls[0][0] == "app.domains.rendering.tasks.render_turntable_task"
assert send_calls[0][1] == [str(line.id)]
assert send_calls[0][2]["template_inputs"] == {"studio_variant": "warm"}
assert send_calls[0][2]["duration_s"] == 7.0
assert send_calls[0][2]["fps"] == 12
assert send_calls[0][2]["frame_count"] == 84
def test_execute_graph_workflow_completes_cad_bridge_only_nodes_without_queueing(
sync_session,
tmp_path,
@@ -660,6 +848,108 @@ def test_build_task_kwargs_autoscales_default_samples_via_shared_render_invocati
assert kwargs["samples"] == 64
def test_build_task_kwargs_ignores_authoritative_still_overrides_without_opt_in(
tmp_path,
monkeypatch,
):
step_path = tmp_path / "cad" / "bearing.step"
step_path.parent.mkdir(parents=True, exist_ok=True)
step_path.write_text("STEP", encoding="utf-8")
output_type = OutputType(
id=uuid.uuid4(),
name="Still Preview",
renderer="blender",
output_format="png",
render_settings={
"width": 2048,
"height": 1536,
"engine": "cycles",
"samples": 128,
"noise_threshold": "0.05",
},
transparent_bg=True,
cycles_device="cuda",
)
cad_file = CadFile(
id=uuid.uuid4(),
original_name="bearing.step",
stored_path=str(step_path),
file_hash="hash-graph-2",
parsed_objects={"objects": ["InnerRing", "OuterRing"]},
)
product = Product(
id=uuid.uuid4(),
pim_id="P-graph-2",
name="Bearing G2",
category_key="bearings",
cad_file_id=cad_file.id,
cad_file=cad_file,
)
line = OrderLine(
id=uuid.uuid4(),
order_id=uuid.uuid4(),
product_id=product.id,
product=product,
output_type_id=output_type.id,
output_type=output_type,
)
state = WorkflowGraphState(
setup=OrderLineRenderSetupResult(
status="ready",
order_line=line,
cad_file=cad_file,
part_colors={"InnerRing": "Steel raw"},
)
)
workflow_context = SimpleNamespace(
workflow_run_id=uuid.uuid4(),
execution_mode="graph",
ordered_nodes=[],
edges=[],
)
node = SimpleNamespace(
id="render",
step=StepName.BLENDER_STILL,
params={
"width": 1024,
"height": 768,
"samples": 16,
"render_engine": "eevee",
"transparent_bg": False,
"cycles_device": "cpu",
"noise_threshold": "0.2",
},
)
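# Without use_custom_render_settings, the per-node overrides above should be ignored in favor of the output type's render_settings.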
monkeypatch.setattr(
"app.domains.rendering.workflow_graph_runtime.resolve_render_position_context",
lambda _session, _line: SimpleNamespace(
rotation_x=0.0,
rotation_y=0.0,
rotation_z=0.0,
focal_length_mm=None,
sensor_width_mm=None,
),
)
kwargs = _build_task_kwargs(
session=object(),
workflow_context=workflow_context,
state=state,
node=node,
)
assert kwargs["width"] == 2048
assert kwargs["height"] == 1536
assert kwargs["engine"] == "cycles"
assert kwargs["samples"] == 128
assert kwargs["transparent_bg"] is True
assert kwargs["cycles_device"] == "cuda"
assert kwargs["noise_threshold"] == "0.05"
assert "render_engine" not in kwargs
def test_execute_graph_workflow_respects_custom_render_settings_opt_in_for_still_task(
sync_session,
tmp_path,
@@ -838,6 +1128,221 @@ def test_execute_graph_workflow_preserves_turntable_timing_without_custom_render
assert kwargs["output_name_suffix"].startswith("shadow-")
def test_execute_graph_workflow_respects_custom_render_settings_opt_in_for_turntable_task(
sync_session,
tmp_path,
monkeypatch,
):
line = _seed_renderable_order_line(sync_session, tmp_path)
assert line.output_type is not None
line.output_type.render_settings = {
"width": 2048,
"height": 2048,
"engine": "cycles",
"samples": 128,
"fps": 30,
"frame_count": 180,
}
sync_session.commit()
send_calls: list[tuple[str, list[str], dict[str, object]]] = []
def _fake_send_task(task_name: str, args: list[str], kwargs: dict[str, object]):
send_calls.append((task_name, args, kwargs))
return SimpleNamespace(id="task-custom-turntable")
monkeypatch.setattr(
"app.tasks.celery_app.celery_app.send_task",
_fake_send_task,
)
workflow_context = prepare_workflow_context(
{
"version": 1,
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "template", "step": "resolve_template", "params": {}},
{
"id": "render",
"step": "blender_turntable",
"params": {
"use_custom_render_settings": True,
"width": 1024,
"height": 768,
"samples": 32,
"render_engine": "eevee",
"fps": 12,
"duration_s": 6,
},
},
],
"edges": [
{"from": "setup", "to": "template"},
{"from": "template", "to": "render"},
],
},
context_id=str(line.id),
execution_mode="graph",
)
create_workflow_run(
sync_session,
workflow_def_id=None,
order_line_id=line.id,
workflow_context=workflow_context,
)
dispatch_result = execute_graph_workflow(sync_session, workflow_context)
sync_session.commit()
assert dispatch_result.task_ids == ["task-custom-turntable"]
assert len(send_calls) == 1
task_name, args, kwargs = send_calls[0]
assert task_name == "app.domains.rendering.tasks.render_turntable_task"
assert args == [str(line.id)]
assert kwargs["width"] == 1024
assert kwargs["height"] == 768
assert kwargs["samples"] == 32
assert kwargs["render_engine"] == "eevee"
assert kwargs["engine"] == "cycles"
assert kwargs["fps"] == 12
assert kwargs["duration_s"] == 6.0
assert kwargs["frame_count"] == 72
def test_execute_graph_workflow_preserves_template_camera_orbit_without_custom_render_settings(
sync_session,
tmp_path,
monkeypatch,
):
line = _seed_renderable_order_line(sync_session, tmp_path)
template = sync_session.execute(select(RenderTemplate)).unique().scalar_one()
template.camera_orbit = False
assert line.output_type is not None
line.output_type.render_settings = {
"width": 2048,
"height": 2048,
"engine": "cycles",
"samples": 128,
"fps": 30,
"frame_count": 180,
}
sync_session.commit()
send_calls: list[tuple[str, list[str], dict[str, object]]] = []
def _fake_send_task(task_name: str, args: list[str], kwargs: dict[str, object]):
send_calls.append((task_name, args, kwargs))
return SimpleNamespace(id="task-turntable-camera-orbit")
monkeypatch.setattr(
"app.tasks.celery_app.celery_app.send_task",
_fake_send_task,
)
workflow_context = prepare_workflow_context(
{
"version": 1,
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "template", "step": "resolve_template", "params": {}},
{
"id": "render",
"step": "blender_turntable",
"params": {
"fps": 24,
"frame_count": 120,
},
},
],
"edges": [
{"from": "setup", "to": "template"},
{"from": "template", "to": "render"},
],
},
context_id=str(line.id),
execution_mode="graph",
)
create_workflow_run(
sync_session,
workflow_def_id=None,
order_line_id=line.id,
workflow_context=workflow_context,
)
dispatch_result = execute_graph_workflow(sync_session, workflow_context)
sync_session.commit()
assert dispatch_result.task_ids == ["task-turntable-camera-orbit"]
assert len(send_calls) == 1
assert send_calls[0][2]["camera_orbit"] is False
def test_execute_graph_workflow_serializes_template_override_modes(
sync_session,
tmp_path,
monkeypatch,
):
line = _seed_renderable_order_line(sync_session, tmp_path)
template = sync_session.execute(select(RenderTemplate)).unique().scalar_one()
template.target_collection = "TemplateCollection"
template.material_replace_enabled = False
template.lighting_only = False
template.shadow_catcher_enabled = False
template.camera_orbit = True
sync_session.commit()
workflow_context = prepare_workflow_context(
{
"version": 1,
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{
"id": "template",
"step": "resolve_template",
"params": {
"target_collection": "NodeCollection",
"material_library_path": "/libraries/materials.blend",
"material_replace_mode": "enabled",
"lighting_only_mode": "enabled",
"shadow_catcher_mode": "enabled",
"camera_orbit_mode": "disabled",
},
},
],
"edges": [
{"from": "setup", "to": "template"},
],
},
context_id=str(line.id),
execution_mode="graph",
)
run = create_workflow_run(
sync_session,
workflow_def_id=None,
order_line_id=line.id,
workflow_context=workflow_context,
)
dispatch_result = execute_graph_workflow(sync_session, workflow_context)
sync_session.commit()
refreshed_run = sync_session.execute(
select(WorkflowRun)
.where(WorkflowRun.id == run.id)
.options(selectinload(WorkflowRun.node_results))
).scalar_one()
node_results = {node_result.node_name: node_result for node_result in refreshed_run.node_results}
assert dispatch_result.task_ids == []
assert node_results["template"].status == "completed"
assert node_results["template"].output["target_collection"] == "NodeCollection"
assert node_results["template"].output["use_materials"] is True
assert node_results["template"].output["lighting_only"] is True
assert node_results["template"].output["shadow_catcher"] is True
assert node_results["template"].output["camera_orbit"] is False
def test_execute_graph_workflow_retries_bridge_node_and_persists_attempt_metadata(
sync_session,
monkeypatch,
@@ -1010,16 +1515,22 @@ def test_execute_graph_workflow_supports_output_save_bridge_node(
assert send_calls[0][2]["graph_authoritative_output_enabled"] is True
assert send_calls[0][2]["graph_output_node_ids"] == ["output"]
assert node_results["render"].status == "queued"
assert node_results["output"].status == "completed"
assert node_results["output"].status == "pending"
assert node_results["output"].output["publication_mode"] == "awaiting_graph_authoritative_save"
assert node_results["output"].output["order_line_id"] == str(line.id)
assert node_results["output"].output["handoff_state"] == "armed"
assert node_results["output"].output["handoff_node_ids"] == ["render"]
assert node_results["output"].output["artifact_count"] == 1
assert node_results["output"].output["upstream_artifacts"] == [
{
"node_id": "render",
"artifact_role": "render_output",
"predicted_output_path": str(
tmp_path / "cad" / "renders" / f"line_{line.id}.png"
build_order_line_step_render_path(
line.product.cad_file.stored_path,
str(line.id),
f"line_{line.id}.png",
)
),
"predicted_asset_type": "still",
"publish_asset_enabled": False,
@@ -1086,14 +1597,16 @@ def test_execute_graph_workflow_arms_output_save_handoff_for_export_blend(
assert send_calls[0][2]["graph_authoritative_output_enabled"] is True
assert send_calls[0][2]["graph_output_node_ids"] == ["output"]
assert node_results["blend"].status == "queued"
assert node_results["output"].status == "completed"
assert node_results["output"].status == "pending"
assert node_results["output"].output["publication_mode"] == "awaiting_graph_authoritative_save"
assert node_results["output"].output["handoff_state"] == "armed"
assert node_results["output"].output["handoff_node_ids"] == ["blend"]
assert node_results["output"].output["artifact_count"] == 1
assert node_results["output"].output["upstream_artifacts"] == [
{
"node_id": "blend",
"artifact_role": "blend_export",
"predicted_output_path": str(tmp_path / "cad" / "bearing_production.blend"),
"predicted_output_path": str(build_order_line_export_path(str(line.id), "bearing_production.blend")),
"predicted_asset_type": "blend_production",
"publish_asset_enabled": False,
"graph_authoritative_output_enabled": True,
@@ -1160,14 +1673,18 @@ def test_execute_graph_workflow_arms_output_save_handoff_for_turntable(
assert send_calls[0][2]["graph_output_node_ids"] == ["output"]
assert send_calls[0][2]["workflow_node_id"] == "turntable"
assert node_results["turntable"].status == "queued"
assert node_results["output"].status == "completed"
assert node_results["output"].status == "pending"
assert node_results["output"].output["publication_mode"] == "awaiting_graph_authoritative_save"
assert node_results["output"].output["handoff_state"] == "armed"
assert node_results["output"].output["handoff_node_ids"] == ["turntable"]
assert node_results["output"].output["artifact_count"] == 1
assert node_results["output"].output["upstream_artifacts"] == [
{
"node_id": "turntable",
"artifact_role": "turntable_output",
"predicted_output_path": str(tmp_path / "cad" / "renders" / "turntable.mp4"),
"predicted_output_path": str(
build_order_line_step_render_path(line.product.cad_file.stored_path, str(line.id), "turntable.mp4")
),
"predicted_asset_type": "turntable",
"publish_asset_enabled": False,
"graph_authoritative_output_enabled": True,
@@ -1178,6 +1695,150 @@ def test_execute_graph_workflow_arms_output_save_handoff_for_turntable(
]
def test_execute_graph_workflow_arms_shadow_output_save_handoff_for_turntable(
sync_session,
tmp_path,
monkeypatch,
):
line = _seed_renderable_order_line(sync_session, tmp_path)
send_calls: list[tuple[str, list[str], dict[str, object]]] = []
monkeypatch.setattr(
"app.tasks.celery_app.celery_app.send_task",
lambda task_name, args, kwargs: send_calls.append((task_name, args, kwargs))
or SimpleNamespace(id="task-shadow-turntable-output-save"),
)
workflow_context = prepare_workflow_context(
{
"version": 1,
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "turntable", "step": "blender_turntable", "params": {"fps": 24, "frame_count": 96}},
{"id": "output", "step": "output_save", "params": {}},
],
"edges": [
{"from": "setup", "to": "turntable"},
{"from": "turntable", "to": "output"},
],
},
context_id=str(line.id),
execution_mode="shadow",
)
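# Shadow mode should arm output_save as an observer-only handoff: the render task gets observer_output_enabled instead of the authoritative-output flag.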
run = create_workflow_run(
sync_session,
workflow_def_id=None,
order_line_id=line.id,
workflow_context=workflow_context,
)
dispatch_result = execute_graph_workflow(sync_session, workflow_context)
sync_session.commit()
refreshed_run = sync_session.execute(
select(WorkflowRun)
.where(WorkflowRun.id == run.id)
.options(selectinload(WorkflowRun.node_results))
).scalar_one()
node_results = {node_result.node_name: node_result for node_result in refreshed_run.node_results}
assert dispatch_result.task_ids == ["task-shadow-turntable-output-save"]
assert len(send_calls) == 1
assert send_calls[0][0] == "app.domains.rendering.tasks.render_turntable_task"
assert send_calls[0][1] == [str(line.id)]
assert send_calls[0][2]["publish_asset_enabled"] is False
assert send_calls[0][2]["observer_output_enabled"] is True
assert send_calls[0][2]["graph_output_node_ids"] == ["output"]
assert "graph_authoritative_output_enabled" not in send_calls[0][2]
assert node_results["turntable"].status == "queued"
assert node_results["output"].status == "pending"
assert node_results["output"].output["publication_mode"] == "shadow_observer_only"
assert node_results["output"].output["handoff_state"] == "armed"
assert node_results["output"].output["handoff_node_ids"] == ["turntable"]
assert node_results["output"].output["artifact_count"] == 1
assert node_results["output"].output["upstream_artifacts"] == [
{
"node_id": "turntable",
"artifact_role": "turntable_output",
"predicted_output_path": str(
build_order_line_step_render_path(
line.product.cad_file.stored_path,
str(line.id),
f"turntable_shadow-{str(run.id)[:8]}.mp4",
)
),
"predicted_asset_type": "turntable",
"publish_asset_enabled": False,
"graph_authoritative_output_enabled": False,
"graph_output_node_ids": ["output"],
"notify_handoff_enabled": False,
"task_id": "task-shadow-turntable-output-save",
}
]
def test_execute_graph_workflow_routes_shadow_render_tasks_to_light_queue_when_available(
sync_session,
tmp_path,
monkeypatch,
):
line = _seed_renderable_order_line(sync_session, tmp_path)
send_calls: list[tuple[str, list[str], dict[str, object], dict[str, object]]] = []
monkeypatch.setattr(
"app.domains.rendering.workflow_graph_runtime._inspect_active_worker_queues",
lambda timeout=1.0: {"asset_pipeline", "asset_pipeline_light"},
)
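# With a worker advertising asset_pipeline_light, shadow renders should route there instead of the default asset_pipeline queue.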
def _fake_send_task(task_name: str, args: list[str], kwargs: dict[str, object], **task_options):
send_calls.append((task_name, args, kwargs, task_options))
return SimpleNamespace(id="task-shadow-light-queue")
monkeypatch.setattr(
"app.tasks.celery_app.celery_app.send_task",
_fake_send_task,
)
workflow_context = prepare_workflow_context(
{
"version": 1,
"nodes": [
{"id": "setup", "step": "order_line_setup", "params": {}},
{"id": "turntable", "step": "blender_turntable", "params": {"fps": 24, "frame_count": 96}},
{"id": "output", "step": "output_save", "params": {}},
],
"edges": [
{"from": "setup", "to": "turntable"},
{"from": "turntable", "to": "output"},
],
},
context_id=str(line.id),
execution_mode="shadow",
)
run = create_workflow_run(
sync_session,
workflow_def_id=None,
order_line_id=line.id,
workflow_context=workflow_context,
)
dispatch_result = execute_graph_workflow(sync_session, workflow_context)
sync_session.commit()
refreshed_run = sync_session.execute(
select(WorkflowRun)
.where(WorkflowRun.id == run.id)
.options(selectinload(WorkflowRun.node_results))
).scalar_one()
node_results = {node_result.node_name: node_result for node_result in refreshed_run.node_results}
assert dispatch_result.task_ids == ["task-shadow-light-queue"]
assert len(send_calls) == 1
assert send_calls[0][0] == "app.domains.rendering.tasks.render_turntable_task"
assert send_calls[0][3]["queue"] == "asset_pipeline_light"
assert node_results["turntable"].output["task_queue"] == "asset_pipeline_light"
def test_execute_graph_workflow_routes_output_save_handoffs_per_connected_branch(
sync_session,
tmp_path,
@@ -1240,12 +1901,21 @@ def test_execute_graph_workflow_routes_output_save_handoffs_per_connected_branch
assert send_calls[0][2]["graph_output_node_ids"] == ["still_output"]
assert send_calls[1][0] == "app.domains.rendering.tasks.render_turntable_task"
assert send_calls[1][2]["graph_output_node_ids"] == ["turntable_output"]
assert node_results["still_output"].status == "pending"
assert node_results["still_output"].output["handoff_state"] == "armed"
assert node_results["still_output"].output["handoff_node_ids"] == ["still"]
assert node_results["still_output"].output["artifact_count"] == 1
assert node_results["still_output"].output["upstream_artifacts"] == [
{
"node_id": "still",
"artifact_role": "render_output",
"predicted_output_path": str(tmp_path / "cad" / "renders" / f"line_{line.id}.png"),
"predicted_output_path": str(
build_order_line_step_render_path(
line.product.cad_file.stored_path,
str(line.id),
f"line_{line.id}.png",
)
),
"predicted_asset_type": "still",
"publish_asset_enabled": False,
"graph_authoritative_output_enabled": True,
@@ -1254,12 +1924,17 @@ def test_execute_graph_workflow_routes_output_save_handoffs_per_connected_branch
"task_id": "task-branch-1",
}
]
assert node_results["turntable_output"].status == "pending"
assert node_results["turntable_output"].output["handoff_state"] == "armed"
assert node_results["turntable_output"].output["handoff_node_ids"] == ["turntable"]
assert node_results["turntable_output"].output["artifact_count"] == 1
assert node_results["turntable_output"].output["upstream_artifacts"] == [
{
"node_id": "turntable",
"artifact_role": "turntable_output",
"predicted_output_path": str(tmp_path / "cad" / "renders" / "turntable.mp4"),
"predicted_output_path": str(
build_order_line_step_render_path(line.product.cad_file.stored_path, str(line.id), "turntable.mp4")
),
"predicted_asset_type": "turntable",
"publish_asset_enabled": False,
"graph_authoritative_output_enabled": True,
@@ -1379,9 +2054,10 @@ def test_execute_graph_workflow_arms_notify_handoff_for_graph_render_task(
assert send_calls[0][2]["emit_legacy_notifications"] is True
assert send_calls[0][2]["graph_notify_node_ids"] == ["notify"]
assert node_results["render"].output["graph_notify_node_ids"] == ["notify"]
assert node_results["notify"].status == "completed"
assert node_results["notify"].status == "pending"
assert node_results["notify"].output["notification_mode"] == "deferred_to_render_task"
assert node_results["notify"].output["armed_node_ids"] == ["render"]
assert node_results["notify"].output["handoff_state"] == "armed"
def test_execute_graph_workflow_routes_notify_handoffs_per_connected_branch(
@@ -1451,10 +2127,14 @@ def test_execute_graph_workflow_routes_notify_handoffs_per_connected_branch(
assert send_calls[1][2]["graph_notify_node_ids"] == ["turntable_notify"]
assert node_results["still"].output["graph_notify_node_ids"] == ["still_notify"]
assert node_results["turntable"].output["graph_notify_node_ids"] == ["turntable_notify"]
assert node_results["still_notify"].status == "completed"
assert node_results["still_notify"].status == "pending"
assert node_results["still_notify"].output["notification_mode"] == "deferred_to_render_task"
assert node_results["still_notify"].output["armed_node_ids"] == ["still"]
assert node_results["turntable_notify"].status == "completed"
assert node_results["still_notify"].output["handoff_state"] == "armed"
assert node_results["turntable_notify"].status == "pending"
assert node_results["turntable_notify"].output["notification_mode"] == "deferred_to_render_task"
assert node_results["turntable_notify"].output["armed_node_ids"] == ["turntable"]
assert node_results["turntable_notify"].output["handoff_state"] == "armed"
def test_execute_graph_workflow_suppresses_notify_node_in_shadow_mode(
@@ -1,7 +1,9 @@
import pytest
from app.core.process_steps import StepName
from app.domains.rendering.models import OutputType, WorkflowDefinition, WorkflowRun
from app.domains.rendering.workflow_config_utils import build_preset_workflow_config
from app.domains.rendering.workflow_graph_runtime import _STILL_TASK_KEYS, _TURNTABLE_TASK_KEYS
from app.domains.rendering.workflow_node_registry import (
get_node_definition,
list_node_definitions,
@@ -14,11 +16,55 @@ def test_node_registry_covers_all_step_names():
expected_steps = {step.value for step in StepName}
assert registered_steps == expected_steps
assert all(definition.family in {"cad_file", "order_line"} for definition in definitions)
assert all(definition.family in {"cad_file", "order_line", "shared"} for definition in definitions)
assert all(definition.module_key for definition in definitions)
assert all(definition.legacy_source for definition in definitions)
def test_node_registry_module_keys_are_unique():
definitions = list_node_definitions()
module_keys = [definition.module_key for definition in definitions]
assert len(module_keys) == len(set(module_keys))
def test_node_registry_defaults_match_declared_fields():
definitions = list_node_definitions()
for definition in definitions:
field_keys = {field.key for field in definition.fields}
default_keys = set(definition.defaults)
assert default_keys <= field_keys
def test_node_registry_contracts_have_valid_shape():
definitions = list_node_definitions()
for definition in definitions:
input_context = definition.input_contract.get("context")
output_context = definition.output_contract.get("context")
if definition.family == "shared":
assert input_context is None
assert output_context is None
else:
assert input_context == definition.family
assert output_context == definition.family
required = definition.input_contract.get("requires", [])
required_any = definition.input_contract.get("requires_any", [])
provides = definition.output_contract.get("provides", [])
assert len(required) == len(set(required))
assert len(required_any) == len(set(required_any))
assert len(provides) == len(set(provides))
assert len(definition.artifact_roles_consumed) == len(set(definition.artifact_roles_consumed))
assert len(definition.artifact_roles_produced) == len(set(definition.artifact_roles_produced))
field_keys = [field.key for field in definition.fields]
assert len(field_keys) == len(set(field_keys))
def test_turntable_node_definition_exposes_expected_schema():
definition = get_node_definition(StepName.BLENDER_TURNTABLE)
@@ -27,7 +73,11 @@ def test_turntable_node_definition_exposes_expected_schema():
assert definition.module_key == "render.production.turntable"
assert definition.node_type == "renderFramesNode"
assert definition.defaults["fps"] == 24
assert definition.defaults["frame_count"] == 120
assert definition.defaults["duration_s"] == 5
assert definition.defaults["turntable_degrees"] == 360
assert definition.defaults["turntable_axis"] == "world_z"
assert definition.defaults["camera_orbit"] is True
assert definition.input_contract["context"] == "order_line"
assert definition.output_contract["provides"] == ["rendered_frames", "rendered_video"]
assert "material_assignments" in definition.artifact_roles_consumed
@@ -55,6 +105,22 @@ def test_turntable_node_definition_exposes_expected_schema():
}
def test_graph_render_node_fields_are_supported_by_runtime_dispatch():
still_definition = get_node_definition(StepName.BLENDER_STILL)
turntable_definition = get_node_definition(StepName.BLENDER_TURNTABLE)
assert still_definition is not None
assert turntable_definition is not None
still_runtime_fields = {field.key for field in still_definition.fields if field.key != "use_custom_render_settings"}
turntable_runtime_fields = {
field.key for field in turntable_definition.fields if field.key != "use_custom_render_settings"
}
assert still_runtime_fields <= _STILL_TASK_KEYS
assert turntable_runtime_fields <= _TURNTABLE_TASK_KEYS
def test_order_line_setup_and_template_contracts_expose_runtime_outputs():
setup = get_node_definition(StepName.ORDER_LINE_SETUP)
template = get_node_definition(StepName.RESOLVE_TEMPLATE)
@@ -87,12 +153,40 @@ def test_order_line_setup_and_template_contracts_expose_runtime_outputs():
"use_materials",
"override_material",
}
assert {field.key for field in bbox.fields} == {"glb_path"}
assert {field.key for field in bbox.fields} == {"glb_path", "source_preference"}
assert bbox.family == "shared"
assert bbox.input_contract == {"requires": ["glb_preview"]}
assert bbox.output_contract == {"provides": ["bbox"]}
assert {field.key for field in template.fields} == {
"template_id_override",
"require_template",
"material_library_path",
"disable_materials",
"target_collection",
"material_replace_mode",
"lighting_only_mode",
"shadow_catcher_mode",
"camera_orbit_mode",
}
assert {field.key for field in get_node_definition(StepName.MATERIAL_MAP_RESOLVE).fields} == {
"disable_materials",
"material_override",
}
assert {field.key for field in get_node_definition(StepName.AUTO_POPULATE_MATERIALS).fields} == {
"persist_updates",
"refresh_material_source",
"include_populated_products",
}
assert output.input_contract["requires"] == ["order_line_context"]
assert output.input_contract["requires_any"] == ["rendered_image", "rendered_frames", "rendered_video"]
assert set(output.output_contract["provides"]) >= {"media_asset", "workflow_result"}
assert {field.key for field in output.fields} == {
"expected_artifact_role",
"require_upstream_artifact",
}
assert export_blend.defaults["output_name_suffix"] == ""
assert {field.key for field in export_blend.fields} == {"output_name_suffix"}
assert notify.defaults == {"channel": "audit_log", "require_armed_render": False}
assert notify.input_contract["requires"] == ["order_line_context"]
assert notify.input_contract["requires_any"] == [
"rendered_image",
@@ -100,6 +194,58 @@ def test_order_line_setup_and_template_contracts_expose_runtime_outputs():
"rendered_video",
"workflow_result",
]
assert {field.key for field in notify.fields} == {"channel", "require_armed_render"}
def test_cad_and_export_contract_nodes_only_expose_supported_settings():
occ_glb_export = get_node_definition(StepName.OCC_GLB_EXPORT)
thumbnail_save = get_node_definition(StepName.THUMBNAIL_SAVE)
export_blend = get_node_definition(StepName.EXPORT_BLEND)
stl_cache_generate = get_node_definition(StepName.STL_CACHE_GENERATE)
assert occ_glb_export is not None
assert thumbnail_save is not None
assert export_blend is not None
assert stl_cache_generate is not None
assert occ_glb_export.family == "cad_file"
assert occ_glb_export.fields == []
assert occ_glb_export.defaults == {}
assert occ_glb_export.input_contract == {"context": "cad_file", "requires": ["step_path"]}
assert occ_glb_export.output_contract == {"context": "cad_file", "provides": ["glb_preview"]}
assert occ_glb_export.artifact_roles_consumed == ["step_path"]
assert occ_glb_export.artifact_roles_produced == ["glb_preview"]
assert "does not expose per-node overrides yet" in occ_glb_export.description
assert thumbnail_save.family == "cad_file"
assert thumbnail_save.fields == []
assert thumbnail_save.defaults == {}
assert thumbnail_save.input_contract == {"context": "cad_file", "requires": ["rendered_image"]}
assert thumbnail_save.output_contract == {"context": "cad_file", "provides": ["cad_thumbnail_media"]}
assert thumbnail_save.artifact_roles_consumed == ["rendered_image"]
assert thumbnail_save.artifact_roles_produced == ["cad_thumbnail_media"]
assert "connected upstream thumbnail request node" in thumbnail_save.description
assert export_blend.family == "order_line"
assert export_blend.defaults == {"output_name_suffix": ""}
assert {field.key for field in export_blend.fields} == {"output_name_suffix"}
assert export_blend.input_contract == {
"context": "order_line",
"requires": ["order_line_context", "render_template"],
}
assert export_blend.output_contract == {"context": "order_line", "provides": ["blend_asset"]}
assert export_blend.artifact_roles_consumed == ["order_line_context", "render_template"]
assert export_blend.artifact_roles_produced == ["blend_asset"]
assert "Only the optional filename suffix is workflow-configurable today." in export_blend.description
assert stl_cache_generate.family == "cad_file"
assert stl_cache_generate.fields == []
assert stl_cache_generate.defaults == {}
assert stl_cache_generate.input_contract == {"context": "cad_file", "requires": ["step_path"]}
assert stl_cache_generate.output_contract == {"context": "cad_file", "provides": ["stl_cache"]}
assert stl_cache_generate.artifact_roles_consumed == ["step_path"]
assert stl_cache_generate.artifact_roles_produced == ["stl_cache"]
assert "Compatibility node for legacy CAD flows." in stl_cache_generate.description
@pytest.mark.asyncio
@@ -146,6 +292,16 @@ async def test_node_definitions_endpoint_returns_registry(client, auth_headers):
"material_override",
}
blender_turntable = next(
definition for definition in body["definitions"] if definition["step"] == StepName.BLENDER_TURNTABLE.value
)
assert blender_turntable["defaults"]["fps"] == 24
assert blender_turntable["defaults"]["frame_count"] == 120
assert blender_turntable["defaults"]["duration_s"] == 5
assert blender_turntable["defaults"]["turntable_degrees"] == 360
assert blender_turntable["defaults"]["turntable_axis"] == "world_z"
assert blender_turntable["defaults"]["camera_orbit"] is True
glb_bbox = next(
definition for definition in body["definitions"] if definition["step"] == StepName.GLB_BBOX.value
)
@@ -162,7 +318,30 @@ async def test_node_definitions_endpoint_returns_registry(client, auth_headers):
"step": None,
"unit": None,
"options": [],
}
"allow_blank": True,
"max_length": None,
"text_format": "absolute_glb_path",
},
{
"key": "source_preference",
"label": "Source Preference",
"type": "select",
"description": "Prefer a prepared GLB, force STEP fallback, or fail when no GLB artifact is available.",
"section": "Inputs",
"default": "auto",
"min": None,
"max": None,
"step": None,
"unit": None,
"options": [
{"value": "auto", "label": "Auto"},
{"value": "step_only", "label": "STEP Only"},
{"value": "glb_only", "label": "GLB Only"},
],
"allow_blank": True,
"max_length": None,
"text_format": "plain",
},
]
@@ -203,6 +382,85 @@ async def test_workflow_crud_roundtrip_preserves_execution_mode(client, auth_hea
assert fetched["config"]["ui"]["execution_mode"] == "shadow"
@pytest.mark.asyncio
async def test_workflow_crud_exposes_supported_artifact_kinds(client, auth_headers):
create_response = await client.post(
"/api/workflows",
headers=auth_headers,
json={
"name": "Still Workflow Contract",
"config": build_preset_workflow_config("still_graph"),
"is_active": True,
},
)
assert create_response.status_code == 201, create_response.text
created = create_response.json()
assert created["family"] == "order_line"
assert created["supported_artifact_kinds"] == ["still_image"]
get_response = await client.get(f"/api/workflows/{created['id']}", headers=auth_headers)
assert get_response.status_code == 200
fetched = get_response.json()
assert fetched["supported_artifact_kinds"] == ["still_image"]
@pytest.mark.asyncio
async def test_workflow_crud_exposes_rollout_summary(client, db, auth_headers):
workflow = WorkflowDefinition(
name="Shadow Rollout Workflow",
config=build_preset_workflow_config("still_graph") | {
"ui": {
**(build_preset_workflow_config("still_graph").get("ui") or {}),
"execution_mode": "shadow",
}
},
is_active=True,
)
db.add(workflow)
await db.flush()
output_type = OutputType(
name="Shadow Still Output",
workflow_definition_id=workflow.id,
workflow_family="order_line",
artifact_kind="still_image",
workflow_rollout_mode="shadow",
render_backend="celery",
)
db.add(output_type)
await db.flush()
workflow_run = WorkflowRun(
workflow_def_id=workflow.id,
execution_mode="shadow",
status="completed",
)
db.add(workflow_run)
await db.commit()
response = await client.get(f"/api/workflows/{workflow.id}", headers=auth_headers)
assert response.status_code == 200, response.text
body = response.json()
assert body["rollout_summary"]["linked_output_type_count"] == 1
assert body["rollout_summary"]["linked_output_type_names"] == ["Shadow Still Output"]
assert body["rollout_summary"]["linked_output_types"] == [
{
"id": str(output_type.id),
"name": "Shadow Still Output",
"is_active": True,
"artifact_kind": "still_image",
"workflow_rollout_mode": "shadow",
}
]
assert body["rollout_summary"]["rollout_modes"] == ["shadow"]
assert body["rollout_summary"]["has_blocking_contracts"] is False
assert body["rollout_summary"]["latest_shadow_run"]["workflow_run_id"] == str(workflow_run.id)
assert body["rollout_summary"]["latest_shadow_run"]["execution_mode"] == "shadow"
@pytest.mark.asyncio
async def test_admin_backfill_workflows_rewrites_legacy_configs(client, db, auth_headers):
legacy = WorkflowDefinition(
@@ -5,6 +5,7 @@ import uuid
from pathlib import Path
import pytest
from PIL import Image, PngImagePlugin
from sqlalchemy import select, text
from sqlalchemy.orm import Session
@@ -15,6 +16,7 @@ from app.domains.orders.models import Order, OrderLine, OrderStatus
from app.domains.products.models import CadFile, Product
from app.domains.rendering.models import OutputType, RenderTemplate
from app.domains.rendering.workflow_runtime_services import (
_build_effective_material_lookup,
auto_populate_materials_for_cad,
build_order_line_render_invocation,
emit_order_line_render_notifications,
@@ -101,6 +103,75 @@ def _seed_order_line_graph(session: Session, tmp_path: Path) -> OrderLine:
return line
def _write_png_with_metadata(path: Path, *, rgba: tuple[int, int, int, int], date_text: str) -> None:
path.parent.mkdir(parents=True, exist_ok=True)
image = Image.new("RGBA", (8, 8), rgba)
metadata = PngImagePlugin.PngInfo()
metadata.add_text("Date", date_text)
metadata.add_text("Software", "Blender")
image.save(path, pnginfo=metadata)
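# The "Date" text chunk written here is deliberately volatile so the
# metadata-stripping test further down can assert it never survives
# persistence while the pixel data itself stays intact.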
def test_effective_material_lookup_keeps_product_assignments_authoritative_and_adds_manifest_aliases():
cad_file = CadFile(
id=uuid.uuid4(),
original_name="bearing.step",
stored_path="/tmp/bearing.step",
file_hash=f"hash-{uuid.uuid4().hex}",
resolved_material_assignments={
"inner_ring": {
"source_name": "InnerRing",
"prim_path": "/Root/Assembly/inner_ring",
"canonical_material": "HARTOMAT_010101_Steel-Bare",
},
"usd_only_part": {
"source_name": "UsdOnlyPart",
"prim_path": "/Root/Assembly/usd_only_part",
"canonical_material": "HARTOMAT_050101_Elastomer-Black",
},
},
)
effective = _build_effective_material_lookup(
cad_file,
[
{"part_name": "InnerRing", "material": "Steel raw"},
],
)
assert effective["InnerRing"] == "Steel raw"
assert effective["inner_ring"] == "Steel raw"
assert effective["UsdOnlyPart"] == "HARTOMAT_050101_Elastomer-Black"
assert effective["usd_only_part"] == "HARTOMAT_050101_Elastomer-Black"
def test_effective_material_lookup_backfills_manifest_part_keys_from_legacy_serialized_names():
cad_file = CadFile(
id=uuid.uuid4(),
original_name="bearing.step",
stored_path="/tmp/bearing.step",
file_hash=f"hash-{uuid.uuid4().hex}",
resolved_material_assignments={
"rwdr_b_f_802044_tr4_h122bk": {
"source_name": "RWDR_B_F-802044_TR4_H122BK",
"prim_path": "/Root/Assembly/rwdr_b_f_802044_tr4_h122bk",
"canonical_material": "HARTOMAT_010101_Steel-Bare",
},
},
)
effective = _build_effective_material_lookup(
cad_file,
[
{"part_name": "RWDR_B_F-802044_TR4_H122B-69186", "material": "Steel--Stahl"},
],
)
assert effective["RWDR_B_F-802044_TR4_H122B-69186"] == "Steel--Stahl"
assert effective["RWDR_B_F-802044_TR4_H122BK"] == "Steel--Stahl"
assert effective["rwdr_b_f_802044_tr4_h122bk"] == "Steel--Stahl"
def test_prepare_order_line_render_context_marks_line_processing_and_prefers_usd(sync_session, tmp_path, monkeypatch):
from app.config import settings
@@ -118,7 +189,10 @@ def test_prepare_order_line_render_context_marks_line_processing_and_prefers_usd
}
usd_asset_path = upload_dir / "usd" / "bearing.usd"
usd_asset_path.parent.mkdir(parents=True, exist_ok=True)
usd_asset_path.write_text("USD", encoding="utf-8")
usd_asset_path.write_text(
"hartomat:canonicalMaterialName\nhartomat:partKey\n",
encoding="utf-8",
)
sync_session.add(
MediaAsset(
@@ -127,6 +201,9 @@ def test_prepare_order_line_render_context_marks_line_processing_and_prefers_usd
product_id=line.product_id,
asset_type=MediaAssetType.usd_master,
storage_key="usd/bearing.usd",
render_config={
"cache_key": "stephash:0.03:0.05:20.0:materialhash:scriptfingerprint",
},
)
)
sync_session.commit()
@@ -230,6 +307,264 @@ def test_prepare_order_line_render_context_queues_refresh_for_legacy_usd(sync_se
assert line.render_status == "processing"
def test_prepare_order_line_render_context_queues_refresh_for_legacy_usd_cache_key(sync_session, tmp_path, monkeypatch):
from app.config import settings
monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads"))
upload_dir = Path(settings.upload_dir)
upload_dir.mkdir(parents=True, exist_ok=True)
line = _seed_order_line_graph(sync_session, tmp_path)
line.product.cad_file.resolved_material_assignments = {
"inner_ring": {
"source_name": "InnerRing",
"prim_path": "/Root/Assembly/inner_ring",
"canonical_material": "HARTOMAT_010101_Steel-Bare",
}
}
usd_asset_path = upload_dir / "usd" / "bearing.usd"
usd_asset_path.parent.mkdir(parents=True, exist_ok=True)
usd_asset_path.write_text(
"hartomat:canonicalMaterialName\nhartomat:partKey\n",
encoding="utf-8",
)
glb_asset_path = upload_dir / "step_files" / "bearing_thumbnail.glb"
glb_asset_path.parent.mkdir(parents=True, exist_ok=True)
glb_asset_path.write_text("GLB", encoding="utf-8")
sync_session.add_all(
[
MediaAsset(
id=uuid.uuid4(),
cad_file_id=line.product.cad_file_id,
product_id=line.product_id,
asset_type=MediaAssetType.usd_master,
storage_key="usd/bearing.usd",
render_config={
"cache_key": "stephash:0.03:0.05:20.0:materialhash",
},
),
MediaAsset(
id=uuid.uuid4(),
cad_file_id=line.product.cad_file_id,
product_id=line.product_id,
asset_type=MediaAssetType.gltf_geometry,
storage_key="step_files/bearing_thumbnail.glb",
),
]
)
sync_session.commit()
queued: list[str] = []
class _Task:
@staticmethod
def delay(cad_file_id: str) -> None:
queued.append(cad_file_id)
monkeypatch.setattr(
"app.tasks.step_tasks.generate_usd_master_task",
_Task(),
)
result = prepare_order_line_render_context(sync_session, str(line.id))
expected_glb = tmp_path / "parts" / "bearing_thumbnail.glb"
assert result.is_ready
assert result.usd_render_path is None
assert result.glb_reuse_path == expected_glb
assert expected_glb.exists()
assert queued == [str(line.product.cad_file_id)]
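# Cache keys look to be colon-separated segments; current keys end in a script
# fingerprint (see the six-segment keys elsewhere in this file), so the
# five-segment key above is treated as legacy: the setup falls back to the GLB
# and queues a USD master refresh instead of rendering from stale USD.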
def test_prepare_order_line_render_context_accepts_binary_usd_without_literal_hartomat_markers(sync_session, tmp_path, monkeypatch):
from app.config import settings
monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads"))
upload_dir = Path(settings.upload_dir)
upload_dir.mkdir(parents=True, exist_ok=True)
line = _seed_order_line_graph(sync_session, tmp_path)
line.product.cad_file.resolved_material_assignments = {
"inner_ring": {
"source_name": "InnerRing",
"prim_path": "/Root/Assembly/inner_ring",
"canonical_material": "HARTOMAT_010101_Steel-Bare",
}
}
usd_asset_path = upload_dir / "usd" / "bearing.usd"
usd_asset_path.parent.mkdir(parents=True, exist_ok=True)
usd_asset_path.write_bytes(b"PXR-USDC\x00binary-usd-with-customdata-not-greppable")
sync_session.add(
MediaAsset(
id=uuid.uuid4(),
cad_file_id=line.product.cad_file_id,
product_id=line.product_id,
asset_type=MediaAssetType.usd_master,
storage_key="usd/bearing.usd",
render_config={
"cache_key": "stephash:0.03:0.05:20.0:materialhash:scriptfingerprint",
},
)
)
sync_session.commit()
queued: list[str] = []
class _Task:
@staticmethod
def delay(cad_file_id: str) -> None:
queued.append(cad_file_id)
monkeypatch.setattr(
"app.tasks.step_tasks.generate_usd_master_task",
_Task(),
)
result = prepare_order_line_render_context(sync_session, str(line.id))
assert result.is_ready
assert result.usd_render_path == usd_asset_path
assert result.glb_reuse_path is None
assert queued == []
def test_prepare_order_line_render_context_queues_refresh_for_legacy_usd_file_markers(sync_session, tmp_path, monkeypatch):
from app.config import settings
monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads"))
upload_dir = Path(settings.upload_dir)
upload_dir.mkdir(parents=True, exist_ok=True)
line = _seed_order_line_graph(sync_session, tmp_path)
line.product.cad_file.resolved_material_assignments = {
"inner_ring": {
"source_name": "InnerRing",
"prim_path": "/Root/Assembly/inner_ring",
"canonical_material": "HARTOMAT_010101_Steel-Bare",
}
}
usd_asset_path = upload_dir / "usd" / "bearing.usd"
usd_asset_path.parent.mkdir(parents=True, exist_ok=True)
usd_asset_path.write_text("legacy-usd-without-hartomat-markers", encoding="utf-8")
glb_asset_path = upload_dir / "step_files" / "bearing_thumbnail.glb"
glb_asset_path.parent.mkdir(parents=True, exist_ok=True)
glb_asset_path.write_text("GLB", encoding="utf-8")
sync_session.add_all(
[
MediaAsset(
id=uuid.uuid4(),
cad_file_id=line.product.cad_file_id,
product_id=line.product_id,
asset_type=MediaAssetType.usd_master,
storage_key="usd/bearing.usd",
render_config={
"cache_key": "stephash:0.03:0.05:20.0:materialhash:scriptfingerprint",
},
),
MediaAsset(
id=uuid.uuid4(),
cad_file_id=line.product.cad_file_id,
product_id=line.product_id,
asset_type=MediaAssetType.gltf_geometry,
storage_key="step_files/bearing_thumbnail.glb",
),
]
)
sync_session.commit()
queued: list[str] = []
class _Task:
@staticmethod
def delay(cad_file_id: str) -> None:
queued.append(cad_file_id)
monkeypatch.setattr(
"app.tasks.step_tasks.generate_usd_master_task",
_Task(),
)
result = prepare_order_line_render_context(sync_session, str(line.id))
expected_glb = tmp_path / "parts" / "bearing_thumbnail.glb"
assert result.is_ready
assert result.usd_render_path is None
assert result.glb_reuse_path == expected_glb
assert expected_glb.exists()
assert queued == [str(line.product.cad_file_id)]
def test_prepare_order_line_render_context_queues_refresh_for_legacy_usd_material_field(sync_session, tmp_path, monkeypatch):
from app.config import settings
monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads"))
upload_dir = Path(settings.upload_dir)
upload_dir.mkdir(parents=True, exist_ok=True)
line = _seed_order_line_graph(sync_session, tmp_path)
line.product.cad_file.resolved_material_assignments = {
"inner_ring": {
"source_name": "InnerRing",
"prim_path": "/Root/Assembly/inner_ring",
"material": "SCHAEFFLER_010101_Steel-Bare",
}
}
usd_asset_path = upload_dir / "usd" / "bearing.usd"
usd_asset_path.parent.mkdir(parents=True, exist_ok=True)
usd_asset_path.write_text("USD", encoding="utf-8")
glb_asset_path = upload_dir / "step_files" / "bearing_thumbnail.glb"
glb_asset_path.parent.mkdir(parents=True, exist_ok=True)
glb_asset_path.write_text("GLB", encoding="utf-8")
sync_session.add_all(
[
MediaAsset(
id=uuid.uuid4(),
cad_file_id=line.product.cad_file_id,
product_id=line.product_id,
asset_type=MediaAssetType.usd_master,
storage_key="usd/bearing.usd",
),
MediaAsset(
id=uuid.uuid4(),
cad_file_id=line.product.cad_file_id,
product_id=line.product_id,
asset_type=MediaAssetType.gltf_geometry,
storage_key="step_files/bearing_thumbnail.glb",
),
]
)
sync_session.commit()
queued: list[str] = []
class _Task:
@staticmethod
def delay(cad_file_id: str) -> None:
queued.append(cad_file_id)
monkeypatch.setattr(
"app.tasks.step_tasks.generate_usd_master_task",
_Task(),
)
result = prepare_order_line_render_context(sync_session, str(line.id))
expected_glb = tmp_path / "parts" / "bearing_thumbnail.glb"
assert result.is_ready
assert result.usd_render_path is None
assert result.glb_reuse_path == expected_glb
assert expected_glb.exists()
assert queued == [str(line.product.cad_file_id)]
def test_prepare_order_line_render_context_skips_closed_orders(sync_session, tmp_path, monkeypatch):
from app.config import settings
@@ -322,6 +657,11 @@ def test_build_order_line_render_invocation_applies_output_and_line_overrides(tm
material_map={"InnerRing": "SteelPolished"},
use_materials=True,
override_material="Studio White",
target_collection="Assembly",
lighting_only=True,
shadow_catcher=True,
camera_orbit=False,
template_inputs={"studio_variant": "warm"},
category_key="bearings",
output_type_id=str(output_type.id),
),
@@ -357,6 +697,7 @@ def test_build_order_line_render_invocation_applies_output_and_line_overrides(tm
assert invocation.part_names_ordered == ["InnerRing", "OuterRing"]
assert invocation.rotation_x == 12.0
assert invocation.focal_length_mm == 50.0
assert invocation.template_inputs == {"studio_variant": "warm"}
still_kwargs = invocation.as_still_renderer_kwargs(
step_path=str(step_path),
@@ -374,6 +715,7 @@ def test_build_order_line_render_invocation_applies_output_and_line_overrides(tm
assert still_kwargs["cycles_device"] == "cuda"
assert still_kwargs["material_library_path"] == "/libraries/materials.blend"
assert still_kwargs["material_override"] == "Studio White"
assert still_kwargs["template_inputs"] == {"studio_variant": "warm"}
assert still_kwargs["job_id"] == "job-1"
assert still_kwargs["order_line_id"] == "line-1"
@@ -437,6 +779,11 @@ def test_build_order_line_render_invocation_autoscales_samples_and_prefers_mater
material_map={"InnerRing": "TemplateSteel"},
use_materials=True,
override_material="Template White",
target_collection="Product",
lighting_only=False,
shadow_catcher=False,
camera_orbit=True,
template_inputs={"studio_variant": "warm"},
category_key="bearings",
output_type_id=str(output_type.id),
),
@@ -480,11 +827,13 @@ def test_build_order_line_render_invocation_autoscales_samples_and_prefers_mater
assert turntable_kwargs["samples"] == 64
assert turntable_kwargs["material_map"] == {"InnerRing": "ResolvedSteel"}
assert turntable_kwargs["material_library_path"] is None
assert turntable_kwargs["template_inputs"] == {"studio_variant": "warm"}
assert cinematic_kwargs["width"] == 1024
assert cinematic_kwargs["height"] == 512
assert cinematic_kwargs["engine"] == "eevee"
assert cinematic_kwargs["samples"] == 64
assert cinematic_kwargs["material_override"] == "Resolved White"
assert cinematic_kwargs["template_inputs"] == {"studio_variant": "warm"}
def test_resolve_order_line_template_context_uses_exact_template_and_override(sync_session, tmp_path, monkeypatch):
@@ -584,6 +933,153 @@ def test_resolve_order_line_template_context_supports_explicit_template_and_libr
"InnerRing": "resolved:Steel raw",
"OuterRing": "resolved:Steel raw",
}
assert result.target_collection == "ForcedCollection"
assert result.lighting_only is False
assert result.shadow_catcher is False
assert result.camera_orbit is True
def test_resolve_order_line_template_context_applies_template_override_modes(
sync_session,
tmp_path,
monkeypatch,
):
from app.config import settings
monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads"))
line = _seed_order_line_graph(sync_session, tmp_path)
template = RenderTemplate(
id=uuid.uuid4(),
name="Overrideable Template",
category_key="bearings",
blend_file_path="/templates/overrideable.blend",
original_filename="overrideable.blend",
target_collection="TemplateCollection",
material_replace_enabled=False,
lighting_only=False,
shadow_catcher_enabled=False,
camera_orbit=True,
is_active=True,
output_types=[line.output_type],
)
sync_session.add(template)
sync_session.add(
AssetLibrary(
id=uuid.uuid4(),
name="Default Library",
blend_file_path="/libraries/materials.blend",
is_active=True,
)
)
sync_session.commit()
monkeypatch.setattr(
"app.domains.rendering.workflow_runtime_services.resolve_material_map",
lambda raw_map: {key: f"resolved:{value}" for key, value in raw_map.items()},
)
setup = prepare_order_line_render_context(sync_session, str(line.id))
result = resolve_order_line_template_context(
sync_session,
setup,
template_id_override=str(template.id),
material_library_path_override="/libraries/materials.blend",
target_collection_override="NodeCollection",
material_replace_mode="enabled",
lighting_only_mode="enabled",
shadow_catcher_mode="enabled",
camera_orbit_mode="disabled",
)
assert result.template is not None
assert result.use_materials is True
assert result.material_map == {
"InnerRing": "resolved:Steel raw",
"OuterRing": "resolved:Steel raw",
}
assert result.target_collection == "NodeCollection"
assert result.lighting_only is True
assert result.shadow_catcher is True
assert result.camera_orbit is False
def test_resolve_order_line_template_context_exposes_template_schema_and_invocation_inputs(
sync_session,
tmp_path,
monkeypatch,
):
from app.config import settings
monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads"))
line = _seed_order_line_graph(sync_session, tmp_path)
template = RenderTemplate(
id=uuid.uuid4(),
name="Schema Template",
category_key="bearings",
blend_file_path="/templates/schema-template.blend",
original_filename="schema-template.blend",
target_collection="Product",
material_replace_enabled=True,
lighting_only=False,
shadow_catcher_enabled=False,
camera_orbit=True,
workflow_input_schema=[
{
"key": "studio_variant",
"label": "Studio Variant",
"type": "select",
"section": "Template Inputs",
"default": "default",
"options": [
{"value": "default", "label": "Default"},
{"value": "warm", "label": "Warm"},
],
},
{
"key": "camera_profile",
"label": "Camera Profile",
"type": "text",
"section": "Template Inputs",
"default": "macro",
},
],
is_active=True,
output_types=[line.output_type],
)
sync_session.add(template)
sync_session.add(
AssetLibrary(
id=uuid.uuid4(),
name="Default Library",
blend_file_path="/libraries/materials.blend",
is_active=True,
)
)
sync_session.commit()
monkeypatch.setattr(
"app.domains.rendering.workflow_runtime_services.resolve_material_map",
lambda raw_map: {key: f"resolved:{value}" for key, value in raw_map.items()},
)
setup = prepare_order_line_render_context(sync_session, str(line.id))
template_context = resolve_order_line_template_context(
sync_session,
setup,
template_id_override=str(template.id),
template_input_overrides={"studio_variant": "warm"},
)
invocation = build_order_line_render_invocation(setup, template_context=template_context)
assert template_context.workflow_input_schema == template.workflow_input_schema
assert template_context.template_inputs == {
"studio_variant": "warm",
"camera_profile": "macro",
}
assert invocation.template_inputs == {
"studio_variant": "warm",
"camera_profile": "macro",
}
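# Merge order implied by the asserts: explicit template_input_overrides win per
# key, and schema fields without an override keep their declared default
# ("camera_profile" stays "macro") before the merged dict reaches the
# invocation unchanged.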
def test_resolve_order_line_template_context_can_disable_material_resolution(sync_session, tmp_path, monkeypatch):
@@ -1077,6 +1573,56 @@ def test_persist_order_line_output_canonicalizes_step_file_outputs(sync_session,
assert asset.storage_key == f"renders/{line.id}/{expected_path.name}"
def test_png_persistence_strips_volatile_metadata_for_primary_and_observer_outputs(
sync_session,
tmp_path,
monkeypatch,
):
from app.config import settings
upload_dir = tmp_path / "uploads"
monkeypatch.setattr(settings, "upload_dir", str(upload_dir))
line = _seed_order_line_graph(sync_session, tmp_path)
primary_source = upload_dir / "step_files" / "renders" / f"line_{line.id}.png"
observer_source = upload_dir / "step_files" / "renders" / f"line_{line.id}_shadow.png"
_write_png_with_metadata(
primary_source,
rgba=(12, 34, 56, 255),
date_text="2026/04/10 17:05:27",
)
_write_png_with_metadata(
observer_source,
rgba=(12, 34, 56, 255),
date_text="2026/04/10 17:06:30",
)
primary_result = persist_order_line_output(
sync_session,
line,
success=True,
output_path=str(primary_source),
render_log={"renderer": "blender", "engine_used": "cycles"},
)
observer_result = persist_order_line_media_asset(
sync_session,
line,
success=True,
output_path=str(observer_source),
asset_type=MediaAssetType.still,
render_log={"renderer": "blender", "engine_used": "cycles"},
)
primary_bytes = Path(primary_result.result_path or "").read_bytes()
observer_bytes = Path(observer_result.result_path or "").read_bytes()
assert primary_bytes == observer_bytes
assert b"Date" not in primary_bytes
assert b"Date" not in observer_bytes
assert Image.open(primary_result.result_path).getpixel((0, 0)) == (12, 34, 56, 255)
assert Image.open(observer_result.result_path).getpixel((0, 0)) == (12, 34, 56, 255)
def test_persist_order_line_output_classifies_blend_outputs_as_blend_assets(sync_session, tmp_path, monkeypatch):
from app.config import settings
@@ -1,6 +1,7 @@
import pytest
from pydantic import ValidationError
from app.core.process_steps import StepName
from app.domains.rendering.workflow_schema import WorkflowConfig
@@ -72,6 +73,35 @@ def test_workflow_schema_rejects_unknown_node_params():
)
def test_workflow_schema_rejects_unregistered_nodes_from_registry(monkeypatch):
from app.domains.rendering import workflow_schema as schema_module
original = schema_module.get_node_definition
def fake_get_node_definition(step):
if step == StepName.GLB_BBOX:
return None
return original(step)
monkeypatch.setattr(schema_module, "get_node_definition", fake_get_node_definition)
with pytest.raises(ValidationError, match="is not registered in workflow_node_registry"):
WorkflowConfig.model_validate(
{
"version": 1,
"nodes": [
{
"id": "bbox",
"step": StepName.GLB_BBOX.value,
"params": {},
},
],
"edges": [],
"ui": {"family": "order_line"},
}
)
def test_workflow_schema_accepts_known_node_params():
config = WorkflowConfig.model_validate(
{
@@ -92,6 +122,149 @@ def test_workflow_schema_accepts_known_node_params():
assert config.ui.family == "order_line"
def test_workflow_schema_rejects_invalid_glb_path_format():
with pytest.raises(ValidationError, match="must point to a .glb file"):
WorkflowConfig.model_validate(
{
"version": 1,
"nodes": [
{
"id": "bbox",
"step": "glb_bbox",
"params": {"glb_path": "/tmp/model.gltf"},
},
],
"edges": [],
}
)
def test_workflow_schema_rejects_invalid_template_id_override_format():
with pytest.raises(ValidationError, match="must be a valid UUID"):
WorkflowConfig.model_validate(
{
"version": 1,
"nodes": [
{
"id": "template",
"step": "resolve_template",
"params": {"template_id_override": "not-a-uuid"},
},
],
"edges": [],
}
)
def test_workflow_schema_rejects_invalid_material_library_path_format():
with pytest.raises(ValidationError, match="must point to a .blend file"):
WorkflowConfig.model_validate(
{
"version": 1,
"nodes": [
{
"id": "template",
"step": "resolve_template",
"params": {"material_library_path": "/tmp/library.txt"},
},
],
"edges": [],
}
)
def test_workflow_schema_rejects_invalid_noise_threshold_format():
with pytest.raises(ValidationError, match="must be a valid numeric string"):
WorkflowConfig.model_validate(
{
"version": 1,
"nodes": [
{
"id": "render",
"step": "blender_still",
"params": {"noise_threshold": "fast"},
},
],
"edges": [],
}
)
def test_workflow_schema_rejects_invalid_bg_color_format():
with pytest.raises(ValidationError, match="must be a hex color"):
WorkflowConfig.model_validate(
{
"version": 1,
"nodes": [
{
"id": "turntable",
"step": "blender_turntable",
"params": {"bg_color": "blue"},
},
],
"edges": [],
}
)
def test_workflow_schema_rejects_invalid_output_name_suffix_format():
with pytest.raises(ValidationError, match="may only contain letters, numbers"):
WorkflowConfig.model_validate(
{
"version": 1,
"nodes": [
{
"id": "blend",
"step": "export_blend",
"params": {"output_name_suffix": "../unsafe"},
},
],
"edges": [],
}
)
def test_workflow_schema_accepts_empty_optional_text_overrides():
config = WorkflowConfig.model_validate(
{
"version": 1,
"nodes": [
{
"id": "template",
"step": "resolve_template",
"params": {
"template_id_override": "",
"material_library_path": "",
},
},
{
"id": "render",
"step": "blender_still",
"params": {
"noise_threshold": "",
"material_override": "",
},
},
{
"id": "turntable",
"step": "blender_turntable",
"params": {"bg_color": ""},
},
{
"id": "blend",
"step": "export_blend",
"params": {"output_name_suffix": ""},
},
],
"edges": [],
"ui": {"family": "order_line"},
}
)
assert config.ui is not None
assert config.ui.family == "order_line"
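# Blank strings validate here presumably because these text fields declare
# allow_blank, so the format validators exercised above only reject non-empty
# values that fail their pattern.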
def test_workflow_schema_rejects_ui_family_mismatch():
with pytest.raises(ValidationError, match="ui.family"):
WorkflowConfig.model_validate(
@@ -226,6 +399,32 @@ def test_workflow_schema_accepts_transitive_contract_wiring():
assert config.ui.execution_mode == "graph"
def test_workflow_schema_accepts_cad_intake_contract_wiring_with_shared_bbox_node():
config = WorkflowConfig.model_validate(
{
"version": 1,
"nodes": [
{"id": "resolve_step", "step": "resolve_step_path", "params": {}},
{"id": "export_glb", "step": "occ_glb_export", "params": {}},
{"id": "bbox", "step": "glb_bbox", "params": {}},
{"id": "threejs_thumb", "step": "threejs_render", "params": {}},
{"id": "save", "step": "thumbnail_save", "params": {}},
],
"edges": [
{"from": "resolve_step", "to": "export_glb"},
{"from": "export_glb", "to": "bbox"},
{"from": "export_glb", "to": "threejs_thumb"},
{"from": "bbox", "to": "threejs_thumb"},
{"from": "threejs_thumb", "to": "save"},
],
"ui": {"family": "cad_file", "execution_mode": "graph"},
}
)
assert config.ui is not None
assert config.ui.family == "cad_file"
def test_workflow_schema_rejects_mixed_family_graph_execution():
with pytest.raises(ValidationError, match="single-family"):
WorkflowConfig.model_validate(
@@ -0,0 +1,227 @@
from __future__ import annotations
import importlib.util
from pathlib import Path
import sys
import types
def _load_render_pipeline_script():
candidates = [
Path(__file__).resolve().parents[3] / "scripts" / "test_render_pipeline.py",
Path("/compose/scripts/test_render_pipeline.py"),
]
script_path = next((candidate for candidate in candidates if candidate.exists()), None)
assert script_path is not None
if "requests" not in sys.modules:
requests_stub = types.ModuleType("requests")
requests_stub.Response = object
requests_stub.Session = object
requests_stub.exceptions = types.SimpleNamespace(
ConnectionError=RuntimeError,
ChunkedEncodingError=RuntimeError,
ReadTimeout=RuntimeError,
)
sys.modules["requests"] = requests_stub
spec = importlib.util.spec_from_file_location("test_render_pipeline_script", script_path)
assert spec is not None
assert spec.loader is not None
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
return module
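# Loading the CLI script by file path keeps it testable without packaging it;
# the requests stub only needs to satisfy whatever attribute access happens at
# import time, so no real HTTP dependency is pulled in here.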
def test_build_output_type_workflow_link_payload_sets_graph_rollout_mode_explicitly():
module = _load_render_pipeline_script()
payload = module.build_output_type_workflow_link_payload(
workflow_definition_id="workflow-graph-123",
execution_mode="graph",
)
assert payload == {
"workflow_definition_id": "workflow-graph-123",
"workflow_rollout_mode": "graph",
"is_active": True,
}
def test_build_output_type_workflow_link_payload_sets_shadow_rollout_mode_explicitly():
module = _load_render_pipeline_script()
payload = module.build_output_type_workflow_link_payload(
workflow_definition_id="workflow-shadow-123",
execution_mode="shadow",
)
assert payload == {
"workflow_definition_id": "workflow-shadow-123",
"workflow_rollout_mode": "shadow",
"is_active": True,
}
def test_build_output_type_workflow_link_payload_keeps_legacy_rollout_implicit():
module = _load_render_pipeline_script()
payload = module.build_output_type_workflow_link_payload(
workflow_definition_id="workflow-legacy-123",
execution_mode="legacy",
)
assert payload == {
"workflow_definition_id": "workflow-legacy-123",
"is_active": True,
}
def test_build_graph_still_config_matches_canonical_still_graph_contract():
module = _load_render_pipeline_script()
config = module.build_graph_still_config(
execution_mode="shadow",
render_params={
"resolution": [1920, 1080],
"engine": "cycles",
"samples": 128,
},
)
assert config["ui"] == {
"preset": "still_graph",
"execution_mode": "shadow",
"family": "order_line",
}
assert [node["id"] for node in config["nodes"]] == [
"setup",
"template",
"populate_materials",
"bbox",
"resolve_materials",
"render",
"output",
"notify",
]
assert config["edges"] == [
{"from": "setup", "to": "template"},
{"from": "setup", "to": "populate_materials"},
{"from": "setup", "to": "bbox"},
{"from": "template", "to": "resolve_materials"},
{"from": "populate_materials", "to": "resolve_materials"},
{"from": "resolve_materials", "to": "render"},
{"from": "bbox", "to": "render"},
{"from": "template", "to": "render"},
{"from": "render", "to": "output"},
{"from": "render", "to": "notify"},
]
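    # The edge list above encodes this DAG, written out for readability:
    #   setup -> {template, populate_materials, bbox}
    #   {template, populate_materials} -> resolve_materials
    #   {resolve_materials, bbox, template} -> render
    #   render -> {output, notify}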
render_node = next(node for node in config["nodes"] if node["id"] == "render")
assert render_node["params"] == {
"width": 1920,
"height": 1080,
"render_engine": "cycles",
"samples": 128,
"use_custom_render_settings": False,
}
def test_render_template_candidates_for_output_type_matches_m2m_and_legacy_fields():
module = _load_render_pipeline_script()
templates = [
{
"id": "template-active-m2m",
"is_active": True,
"output_type_ids": ["ot-1", "ot-2"],
"output_type_id": None,
},
{
"id": "template-active-legacy",
"is_active": True,
"output_type_ids": [],
"output_type_id": "ot-1",
},
{
"id": "template-inactive",
"is_active": False,
"output_type_ids": ["ot-1"],
"output_type_id": None,
},
]
matches = module.render_template_candidates_for_output_type(templates, "ot-1")
assert [template["id"] for template in matches] == [
"template-active-m2m",
"template-active-legacy",
]
def test_build_graph_still_config_can_inherit_output_type_render_settings():
module = _load_render_pipeline_script()
config = module.build_graph_still_config(
execution_mode="shadow",
use_custom_render_settings=False,
)
render_node = next(node for node in config["nodes"] if node["id"] == "render")
assert render_node["params"] == {
"use_custom_render_settings": False,
}
def test_choose_template_backed_output_type_prefers_requested_name():
module = _load_render_pipeline_script()
output_types = [
{
"id": "ot-1",
"name": "HQ-Blender-Alpha-HDR",
"renderer": "blender",
"artifact_kind": "still_image",
"is_animation": False,
},
{
"id": "ot-2",
"name": "Turntable",
"renderer": "blender",
"artifact_kind": "turntable_video",
"is_animation": True,
},
]
templates = [
{
"id": "template-1",
"is_active": True,
"output_type_ids": ["ot-1"],
"output_type_id": None,
}
]
output_type, matches = module.choose_template_backed_output_type(
output_types,
templates,
preferred_name="HQ-Blender-Alpha-HDR",
)
assert output_type["id"] == "ot-1"
assert [template["id"] for template in matches] == ["template-1"]
def test_build_output_type_workflow_snapshot_keeps_restore_contract():
module = _load_render_pipeline_script()
snapshot = module.build_output_type_workflow_snapshot(
{
"workflow_definition_id": "workflow-123",
"workflow_rollout_mode": "shadow",
"is_active": False,
}
)
assert snapshot == {
"workflow_definition_id": "workflow-123",
"workflow_rollout_mode": "shadow",
"is_active": False,
}
@@ -0,0 +1,48 @@
from __future__ import annotations
import uuid
import pytest
@pytest.mark.integration
@pytest.mark.asyncio
async def test_cad_model_endpoint_falls_back_to_gltf_geometry_asset(
client,
db,
auth_headers,
tmp_path,
):
from app.domains.media.models import MediaAsset, MediaAssetType
from app.domains.products.models import CadFile, ProcessingStatus
glb_path = tmp_path / "example.glb"
glb_path.write_bytes(b"glTF")
cad = CadFile(
id=uuid.uuid4(),
original_name="example.step",
stored_path=str(tmp_path / "example.step"),
file_hash="cad-model-endpoint-fallback",
file_size=123,
processing_status=ProcessingStatus.completed,
gltf_path=None,
)
db.add(cad)
await db.flush()
asset = MediaAsset(
id=uuid.uuid4(),
cad_file_id=cad.id,
asset_type=MediaAssetType.gltf_geometry,
storage_key=str(glb_path),
mime_type="model/gltf-binary",
)
db.add(asset)
await db.commit()
response = await client.get(f"/api/cad/{cad.id}/model", headers=auth_headers)
assert response.status_code == 200
assert response.headers["content-type"] == "model/gltf-binary"
assert response.content == b"glTF"
@@ -0,0 +1,30 @@
from __future__ import annotations
import uuid
import pytest
@pytest.mark.asyncio
async def test_batch_delete_assets_awaits_global_admin_guard(client, auth_headers, monkeypatch):
guard_calls: list[str] = []
deleted_asset_ids: list[str] = []
async def _guard(user):
guard_calls.append(str(user.id))
return user
async def _delete_media_asset(_db, asset_id):
deleted_asset_ids.append(str(asset_id))
return True
monkeypatch.setattr("app.utils.auth.require_global_admin", _guard)
monkeypatch.setattr("app.domains.media.service.delete_media_asset", _delete_media_asset)
asset_ids = [str(uuid.uuid4()), str(uuid.uuid4())]
response = await client.post("/api/media/batch-delete", json=asset_ids, headers=auth_headers)
assert response.status_code == 200, response.text
assert len(guard_calls) == 1
assert deleted_asset_ids == asset_ids
assert response.json() == {"deleted": 2, "requested": 2}
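# Because the monkeypatched guard is an async function, a passing test implies
# the route actually awaits it (an unawaited coroutine would never append to
# guard_calls), and the single recorded call shows the guard runs once for the
# whole batch rather than per asset.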
@@ -0,0 +1,12 @@
from app.api.routers.admin import SETTINGS_DEFAULTS, _settings_to_out
def test_settings_to_out_uses_consistent_tessellation_fallbacks() -> None:
raw = dict(SETTINGS_DEFAULTS)
raw.pop("scene_angular_deflection", None)
raw.pop("render_angular_deflection", None)
settings = _settings_to_out(raw)
assert settings.scene_angular_deflection == 0.1
assert settings.render_angular_deflection == 0.05
@@ -0,0 +1,67 @@
from pathlib import Path
from app.config import settings
from app.domains.materials.library_paths import (
asset_library_dir,
list_asset_library_blends,
resolve_asset_library_blend_path,
)
def test_asset_library_dir_uses_upload_dir(monkeypatch, tmp_path):
monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads"))
assert asset_library_dir() == tmp_path / "uploads" / "asset-libraries"
def test_resolve_asset_library_blend_path_prefers_existing_configured_path(monkeypatch, tmp_path):
upload_dir = tmp_path / "uploads"
library_dir = upload_dir / "asset-libraries"
library_dir.mkdir(parents=True, exist_ok=True)
configured = tmp_path / "external" / "materials.blend"
configured.parent.mkdir(parents=True, exist_ok=True)
configured.write_bytes(b"blend")
monkeypatch.setattr(settings, "upload_dir", str(upload_dir))
resolved = resolve_asset_library_blend_path(
blend_file_path=str(configured),
asset_library_id="ignored",
)
assert resolved == str(configured)
def test_resolve_asset_library_blend_path_falls_back_to_id_named_file(monkeypatch, tmp_path):
upload_dir = tmp_path / "uploads"
library_dir = upload_dir / "asset-libraries"
library_dir.mkdir(parents=True, exist_ok=True)
expected = library_dir / "1234.blend"
expected.write_bytes(b"blend")
monkeypatch.setattr(settings, "upload_dir", str(upload_dir))
resolved = resolve_asset_library_blend_path(
blend_file_path=str(library_dir / "missing.blend"),
asset_library_id="1234",
)
assert resolved == str(expected)
def test_resolve_asset_library_blend_path_falls_back_to_newest_available_file(monkeypatch, tmp_path):
upload_dir = tmp_path / "uploads"
library_dir = upload_dir / "asset-libraries"
library_dir.mkdir(parents=True, exist_ok=True)
older = library_dir / "older.blend"
newer = library_dir / "newer.blend"
older.write_bytes(b"older")
newer.write_bytes(b"newer")
newer.touch()
monkeypatch.setattr(settings, "upload_dir", str(upload_dir))
resolved = resolve_asset_library_blend_path(
blend_file_path=str(library_dir / "missing.blend"),
asset_library_id="missing",
)
assert resolved == str(newer)
assert list_asset_library_blends() == [newer, older]
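# Resolution order covered by the three tests above: an existing configured
# path wins, then a file named after the library id, then the newest .blend in
# the directory; list_asset_library_blends appears to sort newest-first.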
@@ -0,0 +1,63 @@
from __future__ import annotations
import uuid
from pathlib import Path
import app.models # noqa: F401 Ensures SQLAlchemy relationships are registered.
from app.domains.media.models import MediaAsset, MediaAssetType
from app.domains.pipeline.tasks.export_glb import _usd_cache_hit_refresh_reason
from app.domains.products.models import CadFile
def _build_cad_file() -> CadFile:
return CadFile(
id=uuid.uuid4(),
original_name="bearing.step",
stored_path="/tmp/bearing.step",
file_hash=f"hash-{uuid.uuid4().hex}",
resolved_material_assignments={
"inner_ring": {
"source_name": "InnerRing",
"prim_path": "/Root/Assembly/inner_ring",
"canonical_material": "HARTOMAT_010101_Steel-Bare",
}
},
)
def _build_usd_asset() -> MediaAsset:
return MediaAsset(
id=uuid.uuid4(),
cad_file_id=uuid.uuid4(),
asset_type=MediaAssetType.usd_master,
storage_key="step_files/bearing_master.usd",
render_config={
"cache_key": "stephash:0.03:0.05:20.0:materialhash:scriptfingerprint",
},
)
def test_usd_cache_hit_refresh_reason_accepts_binary_usd_without_literal_hartomat_tokens(tmp_path: Path):
cad_file = _build_cad_file()
usd_asset = _build_usd_asset()
usd_path = tmp_path / "bearing_master.usd"
usd_path.write_text("#usda 1.0\n", encoding="utf-8")
refresh_reason = _usd_cache_hit_refresh_reason(cad_file, usd_asset, usd_path)
assert refresh_reason is None
def test_usd_cache_hit_refresh_reason_accepts_current_hartomat_usd(tmp_path: Path):
cad_file = _build_cad_file()
usd_asset = _build_usd_asset()
usd_path = tmp_path / "bearing_master.usd"
usd_path.write_text(
"hartomat:canonicalMaterialName\nhartomat:partKey\n",
encoding="utf-8",
)
refresh_reason = _usd_cache_hit_refresh_reason(cad_file, usd_asset, usd_path)
assert refresh_reason is None
@@ -0,0 +1,220 @@
from __future__ import annotations
import importlib.util
import json
import struct
from pathlib import Path
def _load_export_module():
candidates = [
Path(__file__).resolve().parents[2] / "render-worker" / "scripts" / "export_step_to_gltf.py",
Path("/compose/render-worker/scripts/export_step_to_gltf.py"),
]
module_path = next((path for path in candidates if path.exists()), None)
assert module_path is not None
spec = importlib.util.spec_from_file_location("test_export_step_to_gltf", module_path)
assert spec is not None
assert spec.loader is not None
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
return module
def _write_minimal_glb(path: Path, payload: dict) -> None:
json_bytes = json.dumps(payload, separators=(",", ":")).encode()
pad = (4 - len(json_bytes) % 4) % 4
json_bytes += b" " * pad
chunk = struct.pack("<II", len(json_bytes), 0x4E4F534A) + json_bytes
header = struct.pack("<III", 0x46546C67, 2, 12 + len(chunk))
path.write_bytes(header + chunk)
def _read_glb_json(path: Path) -> dict:
data = path.read_bytes()
json_len = struct.unpack_from("<I", data, 12)[0]
return json.loads(data[20 : 20 + json_len])
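# GLB layout assumed by these helpers: a 12-byte header (magic 0x46546C67 ==
# b"glTF", version 2, total length), then an 8-byte chunk header (chunk length,
# chunk type 0x4E4F534A == b"JSON") and the 4-byte-aligned JSON body -- hence
# the reader takes the chunk length at offset 12 and slices the JSON from 20.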
def test_atomic_export_helpers_publish_temp_glb_over_existing_output(tmp_path: Path):
module = _load_export_module()
output_path = tmp_path / "existing.glb"
output_path.write_bytes(b"old")
temp_path = module._prepare_atomic_export_path(output_path)
assert temp_path != output_path
assert temp_path.parent == output_path.parent
assert not temp_path.exists()
temp_path.write_bytes(b"new")
module._finalize_atomic_export(temp_path, output_path)
assert output_path.read_bytes() == b"new"
assert not temp_path.exists()
def test_inject_glb_extras_preserves_exact_leaf_mesh_part_keys_when_available(tmp_path: Path):
module = _load_export_module()
glb_path = tmp_path / "instance.glb"
payload = {
"asset": {"version": "2.0"},
"scene": 0,
"scenes": [{"nodes": [0]}],
"nodes": [
{"name": "Assembly", "children": [1, 2, 3]},
{
"name": "KERO_Z-575693-QP-DRH_ISB_1_AF21",
"translation": [0.1, 0.2, 0.3],
"rotation": [0.0, 0.0, 0.0, 1.0],
},
{
"name": "KERO_Z-575693-QP-DRH_ISB_1_1",
"mesh": 0,
"translation": [0.1, 0.2, 0.3],
"rotation": [0.0, 0.0, 0.0, 1.0],
},
{
"name": "KERO_Z-575693-QP-DRH_ISB_1_AF6_",
"translation": [0.4, 0.5, 0.6],
"rotation": [0.0, 0.0, 0.0, 1.0],
},
],
"meshes": [{"primitives": []}],
}
_write_minimal_glb(glb_path, payload)
module._inject_glb_extras(
glb_path,
{"partKeyMap": {}},
part_key_map={
"KERO_Z-575693-QP-DRH_ISB_1": "kero_z_575693_qp_drh_isb_1",
"KERO_Z-575693-QP-DRH_ISB_1_1": "kero_z_575693_qp_drh_isb_1_1",
"KERO_Z-575693-QP-DRH_ISB_1_AF6_": "kero_z_575693_qp_drh_isb_1_af6",
},
part_key_occurrences={
"KERO_Z-575693-QP-DRH_ISB_1_1": ["kero_z_575693_qp_drh_isb_1_1"],
},
)
result = _read_glb_json(glb_path)
assert result["nodes"][1]["extras"]["partKey"] == "kero_z_575693_qp_drh_isb_1"
assert result["nodes"][2]["extras"]["partKey"] == "kero_z_575693_qp_drh_isb_1_1"
assert result["nodes"][3]["extras"]["partKey"] == "kero_z_575693_qp_drh_isb_1"
def test_inject_glb_extras_keeps_unique_leaf_mesh_part_keys_without_semantic_siblings(tmp_path: Path):
module = _load_export_module()
glb_path = tmp_path / "leaf.glb"
payload = {
"asset": {"version": "2.0"},
"scene": 0,
"scenes": [{"nodes": [0]}],
"nodes": [
{"name": "Assembly", "children": [1]},
{
"name": "UNIQUE_PART_1_1",
"mesh": 0,
"translation": [0.0, 0.0, 0.0],
"rotation": [0.0, 0.0, 0.0, 1.0],
},
],
"meshes": [{"primitives": []}],
}
_write_minimal_glb(glb_path, payload)
module._inject_glb_extras(
glb_path,
{"partKeyMap": {}},
part_key_map={
"UNIQUE_PART_1_1": "unique_part_1_1",
},
)
result = _read_glb_json(glb_path)
assert result["nodes"][1]["extras"]["partKey"] == "unique_part_1_1"
def test_inject_glb_extras_falls_back_to_semantic_siblings_when_exact_mesh_key_is_missing_even_if_instance_transforms_differ(tmp_path: Path):
module = _load_export_module()
glb_path = tmp_path / "instance-mismatch.glb"
payload = {
"asset": {"version": "2.0"},
"scene": 0,
"scenes": [{"nodes": [0]}],
"nodes": [
{"name": "Assembly", "children": [1, 2, 3]},
{
"name": "KERO_Z-575693-QP-DRH_ISB_1_AF21",
"translation": [0.1, 0.2, 0.3],
"rotation": [0.0, 0.0, 0.0, 1.0],
},
{
"name": "KERO_Z-575693-QP-DRH_ISB_1_AF22",
"translation": [-0.1, -0.2, 0.3],
"rotation": [0.0, 0.0, 0.0, 1.0],
},
{
"name": "KERO_Z-575693-QP-DRH_ISB_1_1",
"mesh": 0,
"translation": [0.9, 0.8, 0.3],
"rotation": [0.0, 0.0, 0.70710678, 0.70710678],
},
],
"meshes": [{"primitives": []}],
}
_write_minimal_glb(glb_path, payload)
module._inject_glb_extras(
glb_path,
{"partKeyMap": {}},
part_key_map={
"KERO_Z-575693-QP-DRH_ISB_1": "kero_z_575693_qp_drh_isb_1",
"KERO_Z-575693-QP-DRH_ISB_1_AF21": "kero_z_575693_qp_drh_isb_1_af21",
"KERO_Z-575693-QP-DRH_ISB_1_AF22": "kero_z_575693_qp_drh_isb_1_af22",
},
)
result = _read_glb_json(glb_path)
assert result["nodes"][3]["extras"]["partKey"] == "kero_z_575693_qp_drh_isb_1"
def test_inject_glb_extras_assigns_distinct_occurrence_keys_to_repeated_leaf_meshes(tmp_path: Path):
module = _load_export_module()
glb_path = tmp_path / "repeated.glb"
payload = {
"asset": {"version": "2.0"},
"scene": 0,
"scenes": [{"nodes": [0]}],
"nodes": [
{"name": "Assembly", "children": [1, 2, 3]},
{"name": "KERO_Z-575693-QP-DRH_ISB_1_1", "mesh": 0},
{"name": "KERO_Z-575693-QP-DRH_ISB_1_1", "mesh": 1},
{"name": "KERO_Z-575693-QP-DRH_ISB_1_1", "mesh": 2},
],
"meshes": [
{"primitives": []},
{"primitives": []},
{"primitives": []},
],
}
_write_minimal_glb(glb_path, payload)
module._inject_glb_extras(
glb_path,
{"partKeyMap": {}},
part_key_map={
"KERO_Z-575693-QP-DRH_ISB_1_1": "kero_z_575693_qp_drh_isb_1_1",
},
part_key_occurrences={
"KERO_Z-575693-QP-DRH_ISB_1_1": [
"kero_z_575693_qp_drh_isb_1_1",
"kero_z_575693_qp_drh_isb_1_1_2",
"kero_z_575693_qp_drh_isb_1_1_3",
],
},
)
result = _read_glb_json(glb_path)
assert result["nodes"][1]["extras"]["partKey"] == "kero_z_575693_qp_drh_isb_1_1"
assert result["nodes"][2]["extras"]["partKey"] == "kero_z_575693_qp_drh_isb_1_1_2"
assert result["nodes"][3]["extras"]["partKey"] == "kero_z_575693_qp_drh_isb_1_1_3"
@@ -0,0 +1,203 @@
from types import SimpleNamespace
from app.services.part_key_service import build_scene_manifest, get_effective_assignments
def test_build_scene_manifest_prefers_canonical_material_from_resolved_assignments():
cad_file = SimpleNamespace(
id="cad-1",
resolved_material_assignments={
"roller_part": {
"source_name": "KERO_Z-575693-QP-DRH_ISB_1_1",
"prim_path": "/Root/Assembly/roller_part",
"canonical_material": "HARTOMAT_010101_Steel-Bare",
}
},
manual_material_overrides=None,
source_material_assignments=None,
parsed_objects=None,
)
manifest = build_scene_manifest(cad_file)
assert manifest["parts"] == [
{
"part_key": "roller_part",
"source_name": "KERO_Z-575693-QP-DRH_ISB_1_1",
"prim_path": "/Root/Assembly/roller_part",
"effective_material": "HARTOMAT_010101_Steel-Bare",
"assignment_provenance": "auto",
"is_unassigned": False,
}
]
assert manifest["unassigned_parts"] == []
assert get_effective_assignments(cad_file) == {
"roller_part": "HARTOMAT_010101_Steel-Bare",
}
def test_build_scene_manifest_normalizes_legacy_schaeffler_material_names():
cad_file = SimpleNamespace(
id="cad-legacy",
resolved_material_assignments={
"roller_part": {
"source_name": "KERO_Z-575693-QP-DRH_ISB_1_1",
"prim_path": "/Root/Assembly/roller_part",
"canonical_material": "SCHAEFFLER_010101_Steel-Bare",
}
},
manual_material_overrides={"manual_part": "SCHAEFFLER_020101_Durotect-Blue"},
source_material_assignments={"roller": "SCHAEFFLER_030103_Elastomer-Black"},
parsed_objects=None,
)
manifest = build_scene_manifest(cad_file)
assert manifest["parts"] == [
{
"part_key": "roller_part",
"source_name": "KERO_Z-575693-QP-DRH_ISB_1_1",
"prim_path": "/Root/Assembly/roller_part",
"effective_material": "HARTOMAT_010101_Steel-Bare",
"assignment_provenance": "auto",
"is_unassigned": False,
}
]
assert get_effective_assignments(cad_file) == {
"roller_part": "HARTOMAT_010101_Steel-Bare",
}
def test_build_scene_manifest_adds_semantic_alias_for_deduplicated_instance_keys():
cad_file = SimpleNamespace(
id="cad-alias",
resolved_material_assignments={
"roller_part_2": {
"source_name": "RollerPart",
"prim_path": "/Root/Assembly/roller_part_2",
"canonical_material": "HARTOMAT_010101_Steel-Bare",
}
},
manual_material_overrides=None,
source_material_assignments=None,
parsed_objects=None,
)
manifest = build_scene_manifest(cad_file)
assert manifest["parts"] == [
{
"part_key": "roller_part_2",
"source_name": "RollerPart",
"prim_path": "/Root/Assembly/roller_part_2",
"effective_material": "HARTOMAT_010101_Steel-Bare",
"assignment_provenance": "auto",
"is_unassigned": False,
},
{
"part_key": "roller_part",
"source_name": "RollerPart",
"prim_path": "/Root/Assembly/roller_part_2",
"effective_material": "HARTOMAT_010101_Steel-Bare",
"assignment_provenance": "auto",
"is_unassigned": False,
},
]
assert manifest["unassigned_parts"] == []
assert get_effective_assignments(cad_file) == {
"roller_part_2": "HARTOMAT_010101_Steel-Bare",
"roller_part": "HARTOMAT_010101_Steel-Bare",
}
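# Dedup note: exporter part keys can carry a numeric instance suffix
# ("roller_part_2"); the manifest apparently adds a suffix-free semantic alias
# pointing at the same prim so lookups by the base name still resolve.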
def test_build_scene_manifest_skips_alias_when_canonical_key_already_exists():
cad_file = SimpleNamespace(
id="cad-existing-alias",
resolved_material_assignments={
"roller_part": {
"source_name": "RollerPart",
"prim_path": "/Root/Assembly/roller_part",
"canonical_material": "HARTOMAT_010101_Steel-Bare",
},
"roller_part_2": {
"source_name": "RollerPart",
"prim_path": "/Root/Assembly/roller_part_2",
"canonical_material": "HARTOMAT_020202_Steel-Bare",
},
},
manual_material_overrides=None,
source_material_assignments=None,
parsed_objects=None,
)
manifest = build_scene_manifest(cad_file)
assert [part["part_key"] for part in manifest["parts"]] == [
"roller_part",
"roller_part_2",
]
assert get_effective_assignments(cad_file) == {
"roller_part": "HARTOMAT_010101_Steel-Bare",
"roller_part_2": "HARTOMAT_020202_Steel-Bare",
}
def test_build_scene_manifest_alias_inherits_leaf_manual_override():
cad_file = SimpleNamespace(
id="cad-manual-alias",
resolved_material_assignments={
"roller_part_3": {
"source_name": "RollerPart",
"prim_path": "/Root/Assembly/roller_part_3",
"canonical_material": "HARTOMAT_010101_Steel-Bare",
}
},
manual_material_overrides={
"roller_part_3": "SCHAEFFLER_020101_Durotect-Blue",
},
source_material_assignments=None,
parsed_objects=None,
)
manifest = build_scene_manifest(cad_file)
alias_part = next(part for part in manifest["parts"] if part["part_key"] == "roller_part")
assert alias_part == {
"part_key": "roller_part",
"source_name": "RollerPart",
"prim_path": "/Root/Assembly/roller_part_3",
"effective_material": "HARTOMAT_020101_Durotect-Blue",
"assignment_provenance": "manual",
"is_unassigned": False,
}
assert get_effective_assignments(cad_file) == {
"roller_part_3": "HARTOMAT_020101_Durotect-Blue",
"roller_part": "HARTOMAT_020101_Durotect-Blue",
}
def test_build_scene_manifest_adds_semantic_alias_for_exporter_af_suffix_keys():
cad_file = SimpleNamespace(
id="cad-af-alias",
resolved_material_assignments={
"kero_z_575693_qp_drh_isb_1_af6": {
"source_name": "KERO_Z-575693-QP-DRH_ISB_1_AF6_",
"prim_path": "/Root/Assembly/kero_z_575693_qp_drh_isb_1_af6",
"canonical_material": "HARTOMAT_010101_Steel-Bare",
}
},
manual_material_overrides=None,
source_material_assignments=None,
parsed_objects=None,
)
manifest = build_scene_manifest(cad_file)
assert [part["part_key"] for part in manifest["parts"]] == [
"kero_z_575693_qp_drh_isb_1_af6",
"kero_z_575693_qp_drh_isb_1",
]
assert get_effective_assignments(cad_file) == {
"kero_z_575693_qp_drh_isb_1_af6": "HARTOMAT_010101_Steel-Bare",
"kero_z_575693_qp_drh_isb_1": "HARTOMAT_010101_Steel-Bare",
}
@@ -0,0 +1,81 @@
from pathlib import Path
from app.config import settings
from app.core.render_paths import (
ensure_group_writable_dir,
resolve_public_asset_url,
resolve_result_path,
result_path_to_storage_key,
result_path_to_public_url,
)
def test_result_path_to_public_url_for_canonical_render(monkeypatch, tmp_path):
upload_dir = tmp_path / "uploads"
render_file = upload_dir / "renders" / "line-1" / "bearing.png"
render_file.parent.mkdir(parents=True, exist_ok=True)
render_file.write_bytes(b"png")
monkeypatch.setattr(settings, "upload_dir", str(upload_dir))
result_path = str(render_file)
assert result_path_to_public_url(result_path, require_exists=True) == "/renders/line-1/bearing.png"
assert resolve_result_path(result_path) == render_file
assert resolve_public_asset_url("/renders/line-1/bearing.png") == render_file
def test_result_path_to_public_url_for_legacy_shared_render(monkeypatch, tmp_path):
upload_dir = tmp_path / "uploads"
render_file = upload_dir / "renders" / "line-2" / "legacy.png"
render_file.parent.mkdir(parents=True, exist_ok=True)
render_file.write_bytes(b"png")
monkeypatch.setattr(settings, "upload_dir", str(upload_dir))
legacy_path = "/shared/renders/line-2/legacy.png"
assert resolve_result_path(legacy_path) == render_file
assert result_path_to_public_url(legacy_path, require_exists=True) == "/renders/line-2/legacy.png"
def test_result_path_to_public_url_hides_missing_or_non_public_paths(monkeypatch, tmp_path):
upload_dir = tmp_path / "uploads"
step_render = upload_dir / "step_files" / "renders" / "line_123.png"
step_render.parent.mkdir(parents=True, exist_ok=True)
step_render.write_bytes(b"png")
monkeypatch.setattr(settings, "upload_dir", str(upload_dir))
missing_public = str(upload_dir / "renders" / "line-3" / "missing.png")
assert result_path_to_public_url(str(step_render), require_exists=True) is None
assert result_path_to_public_url(missing_public, require_exists=True) is None
def test_result_path_to_storage_key_normalizes_legacy_and_public_variants(monkeypatch, tmp_path):
upload_dir = tmp_path / "uploads"
render_file = upload_dir / "renders" / "line-4" / "normalized.png"
render_file.parent.mkdir(parents=True, exist_ok=True)
render_file.write_bytes(b"png")
monkeypatch.setattr(settings, "upload_dir", str(upload_dir))
assert result_path_to_storage_key(str(render_file)) == "renders/line-4/normalized.png"
assert result_path_to_storage_key("/shared/renders/line-4/normalized.png") == "renders/line-4/normalized.png"
assert result_path_to_storage_key("/renders/line-4/normalized.png") == "renders/line-4/normalized.png"
def test_ensure_group_writable_dir_normalizes_existing_upload_tree(monkeypatch, tmp_path):
upload_dir = tmp_path / "uploads"
target = upload_dir / "step_files" / "renders"
target.mkdir(parents=True, exist_ok=True)
upload_dir.chmod(0o755)
(upload_dir / "step_files").chmod(0o755)
target.chmod(0o755)
monkeypatch.setattr(settings, "upload_dir", str(upload_dir))
ensured = ensure_group_writable_dir(target)
assert ensured == target
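    # Expected bits: 0o020 (group write), 0o010 (group execute/search) and
    # 0o2000 (setgid, so entries created underneath inherit the group).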
for path in (upload_dir, upload_dir / "step_files", target):
mode = path.stat().st_mode & 0o7777
assert mode & 0o020
assert mode & 0o010
assert mode & 0o2000
+220
View File
@@ -0,0 +1,220 @@
from __future__ import annotations
import uuid
from contextlib import contextmanager
from pathlib import Path
from types import SimpleNamespace
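# Shared fixture: stubs out the DB session, tenant lookup, hashing, renderer, and follow-up hooks
# so the thumbnail tasks can run synchronously without Celery, Postgres, or Blender.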
def _patch_render_thumbnail_dependencies(monkeypatch, tmp_path: Path):
from app.domains.pipeline.tasks.render_thumbnail import render_graph_thumbnail, render_step_thumbnail
step_path = tmp_path / "bearing.step"
step_path.write_text("STEP", encoding="utf-8")
usd_path = tmp_path / "bearing.usdc"
usd_path.write_text("USD", encoding="utf-8")
cad_file = SimpleNamespace(
id=uuid.uuid4(),
stored_path=str(step_path),
step_file_hash=None,
mesh_attributes={},
tenant_id=uuid.uuid4(),
)
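# Minimal session stub: get() always returns the fake CAD file, commit() is a no-op.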
class _FakeSession:
def get(self, _model, _object_id):
return cad_file
def commit(self):
return None
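# Context-manager stand-ins for the pipeline session scope and the thumbnail sample cap.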
@contextmanager
def _fake_pipeline_session(_tenant_id=None):
yield _FakeSession()
@contextmanager
def _fake_sample_cap():
yield
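# Recorders capture side effects (GLB exports, workflow updates, post-processing, regenerate calls) for the tests to assert on.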
queued_glb_exports: list[str] = []
workflow_updates: list[tuple[str, str, str | None, str | None]] = []
postprocess_calls: list[str] = []
regenerate_calls: list[tuple[tuple, dict]] = []
monkeypatch.setattr(
"app.domains.pipeline.tasks.render_thumbnail._pipeline_session",
_fake_pipeline_session,
)
monkeypatch.setattr(
"app.domains.pipeline.tasks.render_thumbnail._capped_thumbnail_samples",
_fake_sample_cap,
)
monkeypatch.setattr(
"app.core.tenant_context.resolve_tenant_id_for_cad",
lambda cad_file_id: "tenant-1",
)
monkeypatch.setattr(
"app.domains.products.cache_service.compute_step_hash",
lambda _path: "hash-123",
)
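# The tuple expression records the call and still evaluates to True, the stub's success value.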
monkeypatch.setattr(
"app.services.step_processor.regenerate_cad_thumbnail",
lambda *args, **kwargs: (regenerate_calls.append((args, kwargs)), True)[1],
)
monkeypatch.setattr(
"app.domains.pipeline.tasks.render_thumbnail._resolve_thumbnail_render_context",
lambda _session, _cad: {
"material_library_path": "/tmp/materials.blend",
"material_map": {"part-a": "HARTOMAT_010101_Steel-Bare"},
"part_names_ordered": ["part-a", "part-b"],
"usd_path": usd_path,
},
)
monkeypatch.setattr(
"app.services.step_processor.extract_mesh_edge_data",
lambda _step_path: {"sharp_edge_pairs": []},
)
monkeypatch.setattr(
"app.domains.rendering.workflow_runtime_services.resolve_cad_bbox",
lambda step_path, glb_path=None: (
postprocess_calls.append("bbox"),
SimpleNamespace(
bbox_data={
"dimensions_mm": {"x": 1.0, "y": 2.0, "z": 3.0},
"bbox_center_mm": {"x": 0.5, "y": 1.0, "z": 1.5},
}
),
)[1],
)
monkeypatch.setattr(
"app.domains.pipeline.tasks.extract_metadata._auto_populate_materials_for_cad",
lambda cad_file_id, tenant_id=None: postprocess_calls.append("auto_populate"),
)
monkeypatch.setattr(
"app.core.websocket.publish_event_sync",
lambda tenant_id, payload: postprocess_calls.append("websocket"),
)
monkeypatch.setattr(
"app.domains.pipeline.tasks.export_glb.generate_gltf_geometry_task.delay",
lambda cad_file_id: queued_glb_exports.append(cad_file_id),
)
monkeypatch.setattr(
"app.domains.rendering.tasks._update_workflow_run_status",
lambda order_line_id, status, error=None, *, workflow_run_id=None, workflow_node_id=None: workflow_updates.append(
(order_line_id, status, workflow_run_id, workflow_node_id)
),
)
return (
render_step_thumbnail,
render_graph_thumbnail,
queued_glb_exports,
workflow_updates,
postprocess_calls,
regenerate_calls,
)
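# A workflow-run invocation must skip the legacy GLB follow-up but still complete the run node,
# run the post-processing chain, and forward the render options to regenerate_cad_thumbnail.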
def test_render_step_thumbnail_skips_legacy_glb_follow_up_for_graph_runs(monkeypatch, tmp_path):
(
render_step_thumbnail,
_render_graph_thumbnail,
queued_glb_exports,
workflow_updates,
postprocess_calls,
regenerate_calls,
) = _patch_render_thumbnail_dependencies(monkeypatch, tmp_path)
render_step_thumbnail.run(
"cad-123",
workflow_run_id="run-123",
workflow_node_id="save-thumb",
renderer="threejs",
width=512,
height=512,
transparent_bg=True,
)
assert queued_glb_exports == []
assert workflow_updates == [("cad-123", "completed", "run-123", "save-thumb")]
assert postprocess_calls == ["bbox", "auto_populate", "websocket"]
assert regenerate_calls == [(
("cad-123",),
{
"part_colors": {},
"renderer": "threejs",
"render_engine": None,
"samples": None,
"width": 512,
"height": 512,
"transparent_bg": True,
"material_library_path": "/tmp/materials.blend",
"material_map": {"part-a": "HARTOMAT_010101_Steel-Bare"},
"part_names_ordered": ["part-a", "part-b"],
"usd_path": tmp_path / "bearing.usdc",
},
)]
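# Without a workflow run, the legacy GLB follow-up must still be queued.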
def test_render_step_thumbnail_keeps_legacy_glb_follow_up_without_workflow_run(monkeypatch, tmp_path):
(
render_step_thumbnail,
_render_graph_thumbnail,
queued_glb_exports,
workflow_updates,
postprocess_calls,
_regenerate_calls,
) = _patch_render_thumbnail_dependencies(monkeypatch, tmp_path)
render_step_thumbnail.run("cad-123")
assert queued_glb_exports == ["cad-123"]
assert workflow_updates == [("cad-123", "completed", None, None)]
assert postprocess_calls == ["bbox", "auto_populate", "websocket"]
def test_render_graph_thumbnail_skips_legacy_postprocess_and_glb_follow_up(monkeypatch, tmp_path):
(
_render_step_thumbnail,
render_graph_thumbnail,
queued_glb_exports,
workflow_updates,
postprocess_calls,
_regenerate_calls,
) = _patch_render_thumbnail_dependencies(monkeypatch, tmp_path)
render_graph_thumbnail.run(
"cad-123",
workflow_run_id="run-123",
workflow_node_id="save-thumb",
renderer="threejs",
width=512,
height=512,
transparent_bg=True,
)
assert queued_glb_exports == []
assert workflow_updates == [("cad-123", "completed", "run-123", "save-thumb")]
assert postprocess_calls == []
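# A missing CAD resource is permanent, so the task must not schedule a Celery retry.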
def test_regenerate_thumbnail_skips_retry_for_missing_cad_resource(monkeypatch):
from app.domains.pipeline.tasks.render_thumbnail import regenerate_thumbnail
from app.services.step_processor import MissingCadResourceError
retry_calls: list[tuple[tuple, dict]] = []
monkeypatch.setattr("app.core.task_logs.log_task_event", lambda *args, **kwargs: None)
monkeypatch.setattr(
"app.core.tenant_context.resolve_tenant_id_for_cad",
lambda cad_file_id: "tenant-1",
)
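# A lambda cannot contain a raise statement; the empty-generator .throw() idiom raises instead.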
monkeypatch.setattr(
"app.services.step_processor.regenerate_cad_thumbnail",
lambda *args, **kwargs: (_ for _ in ()).throw(MissingCadResourceError("CAD file not found: cad-123")),
)
monkeypatch.setattr(
regenerate_thumbnail,
"retry",
lambda *args, **kwargs: retry_calls.append((args, kwargs)),
)
regenerate_thumbnail.run(
"cad-123",
{},
renderer="blender",
width=512,
height=512,
)
assert retry_calls == []
+83
View File
@@ -0,0 +1,83 @@
from __future__ import annotations
import importlib.util
from pathlib import Path
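# The render-worker scripts are not an installed package, so load the module from its file path:
# first relative to the repo checkout, then from the compose mount.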
def _load_blender_materials_module():
candidates = [
Path(__file__).resolve().parents[1] / "render-worker" / "scripts" / "_blender_materials.py",
Path("/compose/render-worker/scripts/_blender_materials.py"),
]
module_path = next((path for path in candidates if path.exists()), None)
assert module_path is not None
spec = importlib.util.spec_from_file_location("test_blender_materials", module_path)
assert spec is not None
assert spec.loader is not None
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
return module
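# Map keys carry per-instance serial suffixes (-69186, _AF0, ...); lookup should still resolve
# the object names via suffix stripping and fuzzy matching.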
def test_lookup_material_name_matches_usd_part_keys_without_serial_suffixes():
module = _load_blender_materials_module()
mat_map = module.build_mat_map_lower(
{
"RWDR_B_F-802044_TR4_H122B-69186": "Steel--Stahl",
"RWDR_B_F-802044_TR4_H122B-72661": "Steel--Stahl",
"O_RING_RG_F-802044_TR4_H-120220": "Eslastomer_black--Elastomer_schwarz",
"O_RING_RG_F-802044_TR4_H-120399": "Eslastomer_black--Elastomer_schwarz",
"F-802044-3001_IR_TR2-H_A1-25921_AF0": "Steel--Stahl",
"F-802044-3001_IR_TR2-H_A1-53810_AF0": "Steel--Stahl",
}
)
assert (
module.lookup_material_name(
"RWDR_B_F-802044_TR4_H122BK",
mat_map,
"rwdr_b_f_802044_tr4_h122bk",
)
== "Steel--Stahl"
)
assert (
module.lookup_material_name(
"O_RING_RG_F-802044_TR4_H122BK_1",
mat_map,
"o_ring_rg_f_802044_tr4_h122bk_1",
)
== "Eslastomer_black--Elastomer_schwarz"
)
assert (
module.lookup_material_name(
"F-802044-3001_IR_TR2-H_A1_04",
mat_map,
"f_802044_3001_ir_tr2_h_a1_04",
)
== "Steel--Stahl"
)
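# When the stripped name matches entries with different materials, lookup must return None rather than guess.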
def test_lookup_material_name_keeps_ambiguous_fuzzy_matches_unresolved():
module = _load_blender_materials_module()
mat_map = module.build_mat_map_lower(
{
"PART_ALPHA-11111": "Steel--Stahl",
"PART_ALPHA-22222": "Bronze--Bronze",
}
)
assert module.lookup_material_name("PART_ALPHA", mat_map) is None
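# Blender appends .001-style suffixes to duplicates; the variant iterator strips them as a fallback.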
def test_iter_object_name_variants_strips_blender_duplicate_suffix():
module = _load_blender_materials_module()
assert list(module._iter_object_name_variants("BearingHousing.001")) == [
"BearingHousing.001",
"BearingHousing",
]
assert list(module._iter_object_name_variants("BearingHousing")) == [
"BearingHousing",
]