chore: snapshot workflow migration progress

This commit is contained in:
2026-04-12 11:49:04 +02:00
parent 0cd02513d5
commit 3e810c74a3
163 changed files with 31774 additions and 2753 deletions
+86 -15
View File
@@ -3,10 +3,12 @@ import uuid
from datetime import datetime, timedelta
from typing import Any, Optional
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, update as sql_update, func, case, distinct, and_, extract
from pydantic import BaseModel
from pydantic import BaseModel, ValidationError
from app.database import get_db
from app.core.render_paths import resolve_result_path, result_path_to_storage_key
from app.models.user import User
from app.models.system_setting import SystemSetting
from app.models.cad_file import CadFile, ProcessingStatus
@@ -27,7 +29,7 @@ SETTINGS_DEFAULTS: dict[str, str] = {
"blender_eevee_samples": "64",
"thumbnail_format": "jpg",
"blender_smooth_angle": "30",
"cycles_device": "auto",
"cycles_device": "gpu",
"render_backend": "celery",
"blender_max_concurrent_renders": "3",
"product_thumbnail_priority": '["latest_render","cad_thumbnail"]',
@@ -63,7 +65,7 @@ class SettingsOut(BaseModel):
blender_eevee_samples: int = 64
thumbnail_format: str = "jpg"
blender_smooth_angle: int = 30
cycles_device: str = "auto"
cycles_device: str = "gpu"
render_backend: str = "celery"
blender_max_concurrent_renders: int = 3
product_thumbnail_priority: str = '["latest_render","cad_thumbnail"]'
@@ -225,9 +227,9 @@ def _settings_to_out(raw: dict[str, str]) -> SettingsOut:
smtp_password=raw.get("smtp_password", ""),
smtp_from_address=raw.get("smtp_from_address", ""),
scene_linear_deflection=float(raw.get("scene_linear_deflection", "0.1")),
scene_angular_deflection=float(raw.get("scene_angular_deflection", "0.5")),
scene_angular_deflection=float(raw.get("scene_angular_deflection", "0.1")),
render_linear_deflection=float(raw.get("render_linear_deflection", "0.03")),
render_angular_deflection=float(raw.get("render_angular_deflection", "0.2")),
render_angular_deflection=float(raw.get("render_angular_deflection", "0.05")),
gltf_scale_factor=float(raw.get("gltf_scale_factor", "0.001")),
gltf_smooth_normals=raw.get("gltf_smooth_normals", "true") == "true",
viewer_max_distance=float(raw.get("viewer_max_distance", "50")),
@@ -680,7 +682,10 @@ async def seed_workflows(
):
"""Create the standard workflow definitions if they do not already exist."""
from app.domains.rendering.models import WorkflowDefinition
from app.domains.rendering.workflow_config_utils import build_preset_workflow_config
from app.domains.rendering.workflow_config_utils import (
build_preset_workflow_config,
build_workflow_blueprint_config,
)
STANDARD_WORKFLOWS = [
{
@@ -697,6 +702,13 @@ async def seed_workflows(
{"render_engine": "eevee", "samples": 64, "resolution": [1920, 1080]},
),
},
{
"name": "Still Image — Graph",
"config": build_preset_workflow_config(
"still_graph",
{"render_engine": "cycles", "samples": 256, "resolution": [1920, 1080]},
),
},
{
"name": "Turntable Animation",
"config": build_preset_workflow_config(
@@ -711,6 +723,18 @@ async def seed_workflows(
{"render_engine": "cycles", "samples": 128, "angles": [0, 45, 90]},
),
},
{
"name": "CAD Intake Blueprint",
"config": build_workflow_blueprint_config("cad_intake"),
},
{
"name": "Order Rendering Blueprint",
"config": build_workflow_blueprint_config("order_rendering"),
},
{
"name": "Still Graph Blueprint",
"config": build_workflow_blueprint_config("still_graph_reference"),
},
]
existing_result = await db.execute(select(WorkflowDefinition))
@@ -730,6 +754,57 @@ async def seed_workflows(
return {"created": created, "message": f"Created {created} workflow definition(s)"}
@router.post("/settings/backfill-workflows", status_code=status.HTTP_200_OK)
async def backfill_workflows(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Rewrite persisted legacy workflow configs into canonical DAG form.

    Scans every WorkflowDefinition in creation order, canonicalizes any
    config that still uses the legacy shape, and validates the result
    against WorkflowConfig before persisting it. Definitions that fail
    canonicalization or validation are collected and reported instead of
    aborting the whole batch.
    """
    from app.domains.rendering.models import WorkflowDefinition
    from app.domains.rendering.workflow_config_utils import (
        canonicalize_workflow_config,
        workflow_config_requires_canonicalization,
    )
    from app.domains.rendering.workflow_schema import WorkflowConfig

    rows = await db.execute(
        select(WorkflowDefinition).order_by(WorkflowDefinition.created_at)
    )
    workflows = rows.scalars().all()

    updated: list[dict[str, str]] = []
    invalid: list[dict[str, str]] = []
    for workflow in workflows:
        if not workflow_config_requires_canonicalization(workflow.config):
            continue  # already canonical — leave untouched
        try:
            normalized = canonicalize_workflow_config(workflow.config)
            WorkflowConfig.model_validate(normalized)
        except (ValidationError, ValueError) as exc:
            invalid.append(
                {"id": str(workflow.id), "name": workflow.name, "error": str(exc)}
            )
            continue
        workflow.config = normalized
        # In-place JSON column mutation is invisible to the unit of work;
        # flag_modified forces SQLAlchemy to emit the UPDATE.
        flag_modified(workflow, "config")
        updated.append({"id": str(workflow.id), "name": workflow.name})

    await db.commit()
    return {
        "scanned": len(workflows),
        "updated": len(updated),
        "invalid": invalid,
        "workflows": updated,
        "message": f"Canonicalized {len(updated)} workflow definition(s)",
    }
@router.get("/settings/renderer-status")
async def renderer_status(
admin: User = Depends(require_global_admin),
@@ -756,13 +831,10 @@ async def import_existing_media_assets(
created = 0
skipped = 0
from app.config import settings as _app_settings
def _normalize_key(path: str) -> str:
"""Strip UPLOAD_DIR prefix to store relative storage keys."""
key = str(path)
prefix = str(_app_settings.upload_dir).rstrip("/") + "/"
return key[len(prefix):] if key.startswith(prefix) else key
"""Normalize mixed legacy/canonical paths to a stable relative storage key."""
key = result_path_to_storage_key(path)
return key or str(path)
# 1. CadFiles with thumbnail_path
await db.execute(text("SET LOCAL app.current_tenant_id = 'bypass'"))
@@ -843,7 +915,6 @@ async def purge_render_media(
"""
import logging
from pathlib import Path
from app.config import settings
from app.core.storage import get_storage
from app.domains.media.models import MediaAsset, MediaAssetType
@@ -865,8 +936,8 @@ async def purge_render_media(
# Delete backing file
key = asset.storage_key
try:
candidate = Path(key) if Path(key).is_absolute() else Path(settings.upload_dir) / key
if candidate.exists():
candidate = resolve_result_path(key)
if candidate is not None and candidate.exists():
freed_bytes += candidate.stat().st_size
candidate.unlink()
deleted_files += 1
+37 -9
View File
@@ -13,6 +13,9 @@ from sqlalchemy import select
from sqlalchemy.orm import selectinload
from app.database import get_db
from app.core.render_paths import resolve_result_path
from app.config import settings
from app.domains.media.models import MediaAsset, MediaAssetType
from app.models.cad_file import CadFile, ProcessingStatus
from app.models.order import Order
from app.models.order_item import OrderItem
@@ -191,6 +194,38 @@ async def _get_cad_file(cad_id: uuid.UUID, db: AsyncSession) -> CadFile:
return cad
async def _resolve_gltf_path(cad: CadFile, db: AsyncSession) -> Path | None:
    """Resolve the best available GLTF/GLB path for a CAD file.

    Prefers the legacy cad_files.gltf_path for compatibility, then falls
    back to the newest non-archived media_assets.gltf_geometry record
    written by the newer export pipeline. Returns None when no backing
    file exists on disk.
    """
    if cad.gltf_path:
        legacy = resolve_result_path(cad.gltf_path) or Path(cad.gltf_path)
        if legacy.exists():
            return legacy

    lookup = await db.execute(
        select(MediaAsset)
        .where(
            MediaAsset.cad_file_id == cad.id,
            MediaAsset.asset_type == MediaAssetType.gltf_geometry,
            MediaAsset.is_archived == False,  # noqa: E712 — SQLAlchemy needs `==`, not `is`
        )
        .order_by(MediaAsset.created_at.desc())
    )
    asset = lookup.scalars().first()
    if asset is None or not asset.storage_key:
        return None

    candidate = resolve_result_path(asset.storage_key)
    if candidate is None:
        # Unresolvable keys are treated as relative to the upload root.
        candidate = Path(settings.upload_dir) / asset.storage_key.lstrip("/")
    return candidate if candidate.exists() else None
@router.get("/{id}/thumbnail")
async def get_thumbnail(
id: uuid.UUID,
@@ -228,20 +263,13 @@ async def get_model(
):
"""Serve the glTF file for a CAD file."""
cad = await _get_cad_file(id, db)
if not cad.gltf_path:
gltf_path = await _resolve_gltf_path(cad, db)
if gltf_path is None:
raise HTTPException(
status_code=404,
detail="glTF model not yet generated for this CAD file",
)
gltf_path = Path(cad.gltf_path)
if not gltf_path.exists():
raise HTTPException(
status_code=404,
detail="glTF file missing from storage",
)
# glTF files may be either .gltf (JSON) or .glb (binary)
suffix = gltf_path.suffix.lower()
if suffix == ".glb":
+5 -19
View File
@@ -30,6 +30,7 @@ from app.schemas.order_line import OrderLineCreate, OrderLineOut
from app.schemas.product import ProductOut
from app.schemas.output_type import OutputTypeOut
from app.services.order_service import generate_order_number
from app.core.render_paths import resolve_result_path, result_path_to_public_url
from app.utils.auth import get_current_user, require_admin_or_pm, require_pm_or_above
router = APIRouter(prefix="/orders", tags=["orders"])
@@ -41,13 +42,7 @@ def _is_privileged(user: User) -> bool:
def _result_path_to_url(result_path: str) -> str | None:
"""Convert an internal result_path to a servable static URL."""
if "/renders/" in result_path:
idx = result_path.index("/renders/")
return result_path[idx:]
if "/thumbnails/" in result_path:
idx = result_path.index("/thumbnails/")
return result_path[idx:]
return None
return result_path_to_public_url(result_path, require_exists=True)
def _build_line_out(line: OrderLine) -> OrderLineOut:
@@ -1544,15 +1539,6 @@ async def download_renders(
if not lines:
raise HTTPException(404, detail="No completed renders found for this order")
from app.config import settings as app_settings
def _resolve_path(p: str) -> str:
"""Translate container-relative paths to backend filesystem paths."""
# Flamenco worker mounts the uploads volume at /shared, backend at /app/uploads
if p.startswith("/shared/"):
return app_settings.upload_dir + p[len("/shared"):]
return p
buf = io.BytesIO()
# Track names used to avoid duplicates
name_counts: dict[str, int] = {}
@@ -1561,8 +1547,8 @@ async def download_renders(
for line in lines:
if not line.result_path:
continue
fs_path = _resolve_path(line.result_path)
if not os.path.isfile(fs_path):
resolved_path = resolve_result_path(line.result_path)
if resolved_path is None or not resolved_path.is_file():
continue
# Build a meaningful filename
product_name = (line.product.name or line.product.pim_id or "product") if line.product else "product"
@@ -1587,7 +1573,7 @@ async def download_renders(
name_counts[base_name] = 0
archive_name = base_name
zf.write(fs_path, archive_name)
zf.write(resolved_path, archive_name)
if not zf.infolist():
raise HTTPException(404, detail="No render files found on disk")
+128 -11
View File
@@ -12,6 +12,7 @@ from app.models.order_line import OrderLine
from app.models.output_type import (
OUTPUT_TYPE_ARTIFACT_KINDS,
OUTPUT_TYPE_WORKFLOW_FAMILIES,
OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES,
OutputType,
VALID_RENDER_BACKENDS,
)
@@ -21,12 +22,19 @@ from app.models.user import User
from app.domains.rendering.models import WorkflowDefinition
from app.domains.rendering.output_type_contracts import (
apply_invocation_overrides_to_render_settings,
build_output_type_contract_catalog,
build_output_type_invocation_profile,
derive_supported_artifact_kinds_from_workflow_config,
infer_output_type_artifact_kind,
infer_workflow_family_from_config,
InvalidInvocationOverridesError,
merge_output_type_invocation_overrides,
normalize_invocation_overrides,
resolve_output_type_invocation_overrides,
validate_and_normalize_invocation_overrides,
validate_output_type_contract,
)
from app.domains.rendering.schemas import OutputTypeContractCatalogOut, OutputTypeInvocationProfileOut
router = APIRouter(prefix="/output-types", tags=["output-types"])
@@ -34,6 +42,34 @@ router = APIRouter(prefix="/output-types", tags=["output-types"])
def _ot_to_out(ot: OutputType) -> OutputTypeOut:
"""Convert an OutputType ORM instance to OutputTypeOut with pricing convenience fields."""
out = OutputTypeOut.model_validate(ot)
resolved_invocation_overrides = resolve_output_type_invocation_overrides(
ot.render_settings,
getattr(ot, "invocation_overrides", None),
artifact_kind=ot.artifact_kind,
is_animation=ot.is_animation,
)
out.invocation_overrides = resolved_invocation_overrides
out.render_settings = apply_invocation_overrides_to_render_settings(
ot.render_settings,
resolved_invocation_overrides,
)
out.invocation_profile = OutputTypeInvocationProfileOut.model_validate(
build_output_type_invocation_profile(
renderer=ot.renderer,
render_backend=ot.render_backend,
workflow_family=ot.workflow_family,
artifact_kind=ot.artifact_kind,
output_format=ot.output_format,
is_animation=ot.is_animation,
workflow_definition_id=ot.workflow_definition_id,
workflow_rollout_mode=getattr(ot, "workflow_rollout_mode", "legacy_only"),
transparent_bg=ot.transparent_bg,
cycles_device=ot.cycles_device,
material_override=ot.material_override,
render_settings=ot.render_settings,
invocation_overrides=getattr(ot, "invocation_overrides", None),
)
)
if ot.pricing_tier:
out.pricing_tier_name = f"{ot.pricing_tier.category_key}/{ot.pricing_tier.quality_level}"
out.price_per_item = float(ot.pricing_tier.price_per_item)
@@ -62,6 +98,7 @@ async def _validate_output_type_workflow_link(
*,
workflow_definition_id: uuid.UUID | None,
workflow_family: str,
artifact_kind: str,
) -> None:
if workflow_definition_id is None:
return
@@ -86,6 +123,17 @@ async def _validate_output_type_workflow_link(
),
)
supported_artifact_kinds = derive_supported_artifact_kinds_from_workflow_config(workflow_definition.config)
if artifact_kind not in supported_artifact_kinds:
supported = ", ".join(supported_artifact_kinds) if supported_artifact_kinds else "none"
raise HTTPException(
400,
detail=(
f"Workflow artifact mismatch: output type expects '{artifact_kind}', "
f"but workflow '{workflow_definition.name}' supports [{supported}]"
),
)
def _ensure_output_type_contract_is_valid(
*,
@@ -105,6 +153,23 @@ def _ensure_output_type_contract_is_valid(
raise HTTPException(400, detail=str(exc)) from exc
def _normalize_explicit_invocation_overrides(
    raw: dict | None,
    *,
    artifact_kind: str,
    is_animation: bool,
) -> dict:
    """Validate caller-supplied invocation overrides.

    Delegates to validate_and_normalize_invocation_overrides with strict
    unknown-key rejection, translating domain validation failures into
    HTTP 400 responses so route handlers don't have to.
    """
    try:
        normalized = validate_and_normalize_invocation_overrides(
            raw,
            artifact_kind=artifact_kind,
            is_animation=is_animation,
            reject_unknown_keys=True,
        )
    except InvalidInvocationOverridesError as exc:
        raise HTTPException(400, detail=str(exc)) from exc
    return normalized
@router.get("", response_model=list[OutputTypeOut])
async def list_output_types(
include_inactive: bool = Query(False),
@@ -133,6 +198,13 @@ async def list_output_types(
return await _enrich_workflow_names(db, items)
@router.get("/contract-catalog", response_model=OutputTypeContractCatalogOut)
async def get_output_type_contract_catalog(
    user: User = Depends(get_current_user),
):
    """Expose the static output-type contract catalog to any authenticated user."""
    catalog = build_output_type_contract_catalog()
    return OutputTypeContractCatalogOut.model_validate(catalog)
@router.post("", response_model=OutputTypeOut, status_code=status.HTTP_201_CREATED)
async def create_output_type(
body: OutputTypeCreate,
@@ -146,25 +218,39 @@ async def create_output_type(
400,
detail=f"Invalid workflow_family. Choose: {', '.join(sorted(OUTPUT_TYPE_WORKFLOW_FAMILIES))}",
)
if body.workflow_rollout_mode not in OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES:
raise HTTPException(
400,
detail=f"Invalid workflow_rollout_mode. Choose: {', '.join(sorted(OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES))}",
)
existing = await db.execute(select(OutputType).where(OutputType.name == body.name))
if existing.scalar_one_or_none():
raise HTTPException(409, detail=f"Output type '{body.name}' already exists")
data = body.model_dump()
explicit_invocation = normalize_invocation_overrides(body.invocation_overrides)
if not explicit_invocation:
explicit_invocation = normalize_invocation_overrides(body.render_settings)
data["invocation_overrides"] = explicit_invocation
data["render_settings"] = apply_invocation_overrides_to_render_settings(
body.render_settings,
explicit_invocation,
)
data["artifact_kind"] = data.get("artifact_kind") or infer_output_type_artifact_kind(
body.output_format,
body.is_animation,
body.workflow_family,
)
explicit_invocation = _normalize_explicit_invocation_overrides(
body.invocation_overrides,
artifact_kind=data["artifact_kind"],
is_animation=body.is_animation,
)
if not explicit_invocation:
explicit_invocation = normalize_invocation_overrides(body.render_settings)
data["invocation_overrides"] = resolve_output_type_invocation_overrides(
body.render_settings,
explicit_invocation,
artifact_kind=data["artifact_kind"],
is_animation=body.is_animation,
)
data["render_settings"] = apply_invocation_overrides_to_render_settings(
body.render_settings,
data["invocation_overrides"],
)
if data["artifact_kind"] not in OUTPUT_TYPE_ARTIFACT_KINDS:
raise HTTPException(
400,
@@ -180,7 +266,10 @@ async def create_output_type(
db,
workflow_definition_id=body.workflow_definition_id,
workflow_family=body.workflow_family,
artifact_kind=data["artifact_kind"],
)
if body.workflow_definition_id is None:
data["workflow_rollout_mode"] = "legacy_only"
ot = OutputType(**data)
db.add(ot)
@@ -214,6 +303,11 @@ async def update_output_type(
400,
detail=f"Invalid workflow_family. Choose: {', '.join(sorted(OUTPUT_TYPE_WORKFLOW_FAMILIES))}",
)
if "workflow_rollout_mode" in data and data["workflow_rollout_mode"] not in OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES:
raise HTTPException(
400,
detail=f"Invalid workflow_rollout_mode. Choose: {', '.join(sorted(OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES))}",
)
candidate_workflow_family = data.get("workflow_family", ot.workflow_family)
candidate_workflow_definition_id = data.get("workflow_definition_id", ot.workflow_definition_id)
@@ -226,16 +320,25 @@ async def update_output_type(
if render_settings_supplied or invocation_supplied:
candidate_render_settings = data.get("render_settings", ot.render_settings)
if invocation_supplied:
candidate_invocation_overrides = normalize_invocation_overrides(data.get("invocation_overrides"))
candidate_invocation_overrides = _normalize_explicit_invocation_overrides(
data.get("invocation_overrides"),
artifact_kind=candidate_artifact_kind,
is_animation=candidate_is_animation,
)
else:
candidate_invocation_overrides = merge_output_type_invocation_overrides(
candidate_render_settings,
None,
)
data["invocation_overrides"] = candidate_invocation_overrides
data["render_settings"] = apply_invocation_overrides_to_render_settings(
data["invocation_overrides"] = resolve_output_type_invocation_overrides(
candidate_render_settings,
candidate_invocation_overrides,
artifact_kind=candidate_artifact_kind,
is_animation=candidate_is_animation,
)
data["render_settings"] = apply_invocation_overrides_to_render_settings(
candidate_render_settings,
data["invocation_overrides"],
)
should_recompute_artifact_kind = (
@@ -263,12 +366,26 @@ async def update_output_type(
output_format=candidate_output_format,
is_animation=candidate_is_animation,
)
if render_settings_supplied or invocation_supplied or should_recompute_artifact_kind:
data["invocation_overrides"] = resolve_output_type_invocation_overrides(
data.get("render_settings", ot.render_settings),
data.get("invocation_overrides", ot.invocation_overrides),
artifact_kind=candidate_artifact_kind,
is_animation=candidate_is_animation,
)
data["render_settings"] = apply_invocation_overrides_to_render_settings(
data.get("render_settings", ot.render_settings),
data["invocation_overrides"],
)
await _validate_output_type_workflow_link(
db,
workflow_definition_id=candidate_workflow_definition_id,
workflow_family=candidate_workflow_family,
artifact_kind=candidate_artifact_kind,
)
if candidate_workflow_definition_id is None:
data["workflow_rollout_mode"] = "legacy_only"
for field_name, value in data.items():
setattr(ot, field_name, value)
+9 -18
View File
@@ -16,6 +16,11 @@ from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload, joinedload
from app.config import settings
from app.core.render_paths import (
resolve_result_path,
resolve_public_asset_url,
result_path_to_public_url,
)
from app.database import get_db
from app.models.cad_file import CadFile, ProcessingStatus
from app.models.material import Material
@@ -829,24 +834,12 @@ VIDEO_EXTENSIONS = {".mp4", ".webm", ".avi", ".mov"}
def _result_path_to_url(result_path: str) -> str | None:
"""Convert an internal result_path to a servable static URL."""
# Flamenco / shared renders: /shared/renders/X/file.jpg → /renders/X/file.jpg
if "/renders/" in result_path:
idx = result_path.index("/renders/")
return result_path[idx:]
# Celery renders stored as thumbnails: /app/uploads/thumbnails/X.png → /thumbnails/X.png
if "/thumbnails/" in result_path:
idx = result_path.index("/thumbnails/")
return result_path[idx:]
return None
return result_path_to_public_url(result_path, require_exists=False)
def _resolve_disk_path(url: str) -> Path | None:
"""Given a servable URL like /renders/X/file.jpg, resolve to disk path."""
if url.startswith("/renders/"):
return Path(settings.upload_dir) / "renders" / url[len("/renders/"):]
if url.startswith("/thumbnails/"):
return Path(settings.upload_dir) / "thumbnails" / url[len("/thumbnails/"):]
return None
return resolve_public_asset_url(url)
@router.get("/{product_id}/renders")
@@ -983,9 +976,8 @@ async def download_product_renders(
raise HTTPException(404, detail="No completed renders found for the selected lines")
def _resolve_path(p: str) -> str:
if p.startswith("/shared/"):
return settings.upload_dir + p[len("/shared"):]
return p
resolved = resolve_result_path(p)
return str(resolved) if resolved is not None else p
def _safe(s: str) -> str:
return re.sub(r"[^\w\-.]", "_", s).strip("_")
@@ -1147,4 +1139,3 @@ async def delete_render_position(
raise HTTPException(404, detail="Render position not found")
await db.delete(pos)
await db.commit()
+34 -1
View File
@@ -1,17 +1,20 @@
"""Render Templates API — CRUD + .blend file upload/download + material library."""
import json
import uuid
import shutil
from datetime import datetime
from pathlib import Path
from typing import Any
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form, status
from fastapi.responses import FileResponse
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, update as sql_update, delete as sql_delete
from pydantic import BaseModel
from pydantic import BaseModel, TypeAdapter, ValidationError
from app.database import get_db
from app.config import settings as app_settings
from app.domains.rendering.workflow_node_registry import WorkflowNodeFieldDefinition
from app.models.user import User
from app.models.render_template import RenderTemplate
from app.models.output_type import OutputType
@@ -46,6 +49,7 @@ class RenderTemplateOut(BaseModel):
lighting_only: bool
shadow_catcher_enabled: bool
camera_orbit: bool
workflow_input_schema: list[WorkflowNodeFieldDefinition]
is_active: bool
created_at: str
updated_at: str
@@ -62,6 +66,7 @@ class RenderTemplateUpdate(BaseModel):
lighting_only: bool | None = None
shadow_catcher_enabled: bool | None = None
camera_orbit: bool | None = None
workflow_input_schema: list[WorkflowNodeFieldDefinition] | None = None
is_active: bool | None = None
@@ -72,6 +77,29 @@ class MaterialLibraryInfo(BaseModel):
path: str | None = None
# Shared pydantic adapter for validating workflow input schema payloads.
_workflow_input_schema_adapter = TypeAdapter(list[WorkflowNodeFieldDefinition])


def _normalize_workflow_input_schema(schema: Any) -> list[dict[str, Any]]:
    """Validate a workflow input schema payload and return it JSON-ready.

    None, "" and "null" all mean "no schema" and yield an empty list.
    Raises HTTP 422 carrying pydantic error details when the payload does
    not match list[WorkflowNodeFieldDefinition].
    """
    if schema in (None, "", "null"):
        return []
    try:
        parsed_fields = _workflow_input_schema_adapter.validate_python(schema)
    except ValidationError as exc:
        raise HTTPException(
            status_code=422, detail={"workflow_input_schema": exc.errors()}
        ) from exc
    return [field.model_dump(mode="json") for field in parsed_fields]
def _parse_form_workflow_input_schema(raw_schema: str | None) -> list[dict[str, Any]]:
if raw_schema in (None, "", "null"):
return []
try:
payload = json.loads(raw_schema)
except json.JSONDecodeError as exc:
raise HTTPException(status_code=422, detail="workflow_input_schema must be valid JSON") from exc
return _normalize_workflow_input_schema(payload)
def _to_out(t: RenderTemplate) -> dict:
ot_name = None
if t.output_type:
@@ -94,6 +122,7 @@ def _to_out(t: RenderTemplate) -> dict:
"lighting_only": t.lighting_only,
"shadow_catcher_enabled": t.shadow_catcher_enabled,
"camera_orbit": t.camera_orbit,
"workflow_input_schema": t.workflow_input_schema or [],
"is_active": t.is_active,
"created_at": t.created_at.isoformat() if t.created_at else "",
"updated_at": t.updated_at.isoformat() if t.updated_at else "",
@@ -126,6 +155,7 @@ async def create_render_template(
lighting_only: bool = Form(False),
shadow_catcher_enabled: bool = Form(False),
camera_orbit: bool = Form(True),
workflow_input_schema: str | None = Form(None),
user: User = Depends(require_admin_or_pm),
db: AsyncSession = Depends(get_db),
):
@@ -182,6 +212,7 @@ async def create_render_template(
lighting_only=lighting_only,
shadow_catcher_enabled=shadow_catcher_enabled,
camera_orbit=camera_orbit,
workflow_input_schema=_parse_form_workflow_input_schema(workflow_input_schema),
)
db.add(tmpl)
await db.flush()
@@ -224,6 +255,8 @@ async def update_render_template(
# Normalise empty strings to None for nullable fields
if "category_key" in updates and updates["category_key"] in ("", "null"):
updates["category_key"] = None
if "workflow_input_schema" in updates:
updates["workflow_input_schema"] = _normalize_workflow_input_schema(updates["workflow_input_schema"])
# Handle M2M output_type_ids
new_ot_ids: list[str] | None = updates.pop("output_type_ids", None)
+15 -1
View File
@@ -519,6 +519,12 @@ async def trigger_gpu_probe(current_user: User = Depends(require_global_admin)):
return {"task_id": str(result.id), "queued": True}
@router.post("/gpu-probe", status_code=http_status.HTTP_202_ACCEPTED)
async def trigger_gpu_probe_legacy_alias(current_user: User = Depends(require_global_admin)):
    """Backward-compatible alias used by the current admin frontend."""
    # Delegates to trigger_gpu_probe so both route paths share one implementation.
    return await trigger_gpu_probe(current_user)
@router.get("/probe/gpu/result")
async def get_gpu_probe_result(
current_user: User = Depends(require_global_admin),
@@ -535,6 +541,15 @@ async def get_gpu_probe_result(
return json.loads(setting.value)
@router.get("/gpu-probe")
async def get_gpu_probe_result_legacy_alias(
    current_user: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Backward-compatible alias used by the current admin frontend."""
    # Delegates to get_gpu_probe_result (/probe/gpu/result) so both routes share one implementation.
    return await get_gpu_probe_result(current_user, db)
# ---------------------------------------------------------------------------
# Render health check
# ---------------------------------------------------------------------------
@@ -733,4 +748,3 @@ async def update_worker_config(
enabled=cfg.enabled,
updated_at=cfg.updated_at.isoformat(),
)