From 3e810c74a3a20f86cbfb75f5ee0939675ee49ea0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hartmut=20N=C3=B6renberg?= Date: Sun, 12 Apr 2026 11:49:04 +0200 Subject: [PATCH] chore: snapshot workflow migration progress --- .env.example | 14 + .gitignore | 30 +- .../067_output_type_workflow_rollout_mode.py | 41 + .../068_material_metadata_brand_cleanup.py | 55 + ...9_render_template_workflow_input_schema.py | 31 + backend/app/api/routers/admin.py | 101 +- backend/app/api/routers/cad.py | 46 +- backend/app/api/routers/orders.py | 24 +- backend/app/api/routers/output_types.py | 139 +- backend/app/api/routers/products.py | 27 +- backend/app/api/routers/render_templates.py | 35 +- backend/app/api/routers/worker.py | 16 +- backend/app/config.py | 1 + backend/app/core/config_service.py | 2 +- backend/app/core/render_paths.py | 194 ++ backend/app/database.py | 33 +- .../app/domains/materials/library_paths.py | 51 + backend/app/domains/materials/tasks.py | 16 +- backend/app/domains/media/router.py | 55 +- backend/app/domains/notifications/service.py | 13 +- backend/app/domains/orders/service.py | 11 +- .../app/domains/pipeline/tasks/export_glb.py | 113 +- .../pipeline/tasks/extract_metadata.py | 28 +- .../pipeline/tasks/render_order_line.py | 3 +- .../pipeline/tasks/render_thumbnail.py | 445 ++- .../app/domains/rendering/dispatch_service.py | 168 +- backend/app/domains/rendering/models.py | 32 +- .../rendering/output_type_contracts.py | 505 +++- backend/app/domains/rendering/schemas.py | 146 +- backend/app/domains/rendering/tasks.py | 1678 +++++++++-- .../domains/rendering/template_input_audit.py | 146 + .../app/domains/rendering/workflow_builder.py | 1 + .../rendering/workflow_comparison_service.py | 29 +- .../rendering/workflow_config_utils.py | 24 +- .../domains/rendering/workflow_executor.py | 48 +- .../rendering/workflow_graph_runtime.py | 320 ++- .../rendering/workflow_node_registry.py | 269 +- .../app/domains/rendering/workflow_router.py | 934 ++++++- 
.../rendering/workflow_runtime_services.py | 478 +++- .../app/domains/rendering/workflow_schema.py | 173 +- backend/app/main.py | 7 +- backend/app/models/output_type.py | 2 + backend/app/services/chat_service.py | 8 +- backend/app/services/part_key_service.py | 162 +- backend/app/services/render_blender.py | 353 ++- backend/app/services/step_processor.py | 190 +- backend/app/services/template_service.py | 20 +- backend/app/tasks/celery_app.py | 11 +- backend/app/tasks/step_tasks.py | 1 + backend/pyproject.toml | 1 + backend/start.sh | 7 +- backend/tests/conftest.py | 33 +- backend/tests/db_test_utils.py | 85 + .../domains/test_notifications_service.py | 26 + .../tests/domains/test_output_types_api.py | 417 +++ .../domains/test_render_blender_samples.py | 574 ++++ .../domains/test_rendering_publish_asset.py | 152 + .../tests/domains/test_rendering_service.py | 1339 +++++++++ .../domains/test_template_input_audit.py | 77 + .../domains/test_workflow_config_utils.py | 135 +- .../domains/test_workflow_dispatch_service.py | 564 ++-- .../domains/test_workflow_graph_runtime.py | 734 ++++- .../domains/test_workflow_node_registry.py | 266 +- .../domains/test_workflow_runtime_services.py | 548 +++- backend/tests/domains/test_workflow_schema.py | 199 ++ .../domains/test_workflow_smoke_harness.py | 227 ++ .../integration/test_cad_model_endpoint.py | 48 + .../integration/test_media_batch_delete.py | 30 + backend/tests/test_admin_settings_defaults.py | 12 + backend/tests/test_asset_library_paths.py | 67 + backend/tests/test_export_glb_task.py | 63 + backend/tests/test_export_step_to_gltf.py | 220 ++ backend/tests/test_part_key_service.py | 203 ++ backend/tests/test_render_paths.py | 81 + backend/tests/test_render_thumbnail_task.py | 220 ++ .../test_render_worker_material_lookup.py | 83 + backend/uv.lock | 2470 +++++++++++++++++ docker-compose.yml | 36 +- docs/workflows/CURRENT_EXECUTION_BATCH.md | 106 +- docs/workflows/FIRST_WAVE_EXECUTION.md | 252 ++ 
docs/workflows/NEXT_BATCH_ORCHESTRATION.md | 399 +++ .../NODE_BASED_PRODUCTION_ARCHITECTURE.md | 245 ++ docs/workflows/TEMPLATE_INPUT_AUDIT.md | 52 + docs/workflows/WORKFLOW_DELIVERY_CHECKLIST.md | 32 +- .../WORKFLOW_IMPLEMENTATION_BACKLOG.md | 26 +- docs/workflows/WORKFLOW_MIGRATION_PLAN.md | 21 + docs/workflows/template-inputs.md | 82 + frontend/src/App.tsx | 226 +- .../src/__tests__/api/outputTypes.test.ts | 253 ++ frontend/src/__tests__/api/workflows.test.ts | 121 +- .../components/WorkflowNodeInspector.test.tsx | 307 ++ .../src/__tests__/components/cadUtils.test.ts | 159 +- .../outputTypeRolloutPresentation.test.ts | 49 + .../workflowAuthoringActions.test.ts | 50 + .../workflowAuthoringGuidance.test.ts | 227 ++ .../components/workflowEditorUi.test.tsx | 606 +++- .../components/workflowGraphDraft.test.ts | 270 +- .../components/workflowModuleBundles.test.ts | 414 +++ .../components/workflowNodeCatalog.test.ts | 158 ++ .../workflowNodePresentation.test.ts | 37 + frontend/src/api/outputTypes.ts | 628 ++++- frontend/src/api/renderTemplates.ts | 7 +- frontend/src/api/workflows.ts | 562 +++- .../src/components/admin/OutputTypeTable.tsx | 1142 +++++--- .../components/admin/RenderTemplateTable.tsx | 67 +- .../admin/outputTypeRolloutPresentation.ts | 80 + .../src/components/cad/InlineCadViewer.tsx | 163 +- frontend/src/components/cad/ThreeDViewer.tsx | 206 +- frontend/src/components/cad/cadUtils.ts | 407 ++- .../src/components/cad/useGeometryMerge.ts | 32 +- .../components/workflows/NodeCommandMenu.tsx | 100 +- .../workflows/NodeDefinitionsPanel.tsx | 196 +- .../workflows/WorkflowAuthoringOverview.tsx | 242 ++ .../WorkflowAuthoringSectionContent.tsx | 97 + .../workflows/WorkflowCanvasToolbar.tsx | 502 +++- .../WorkflowCanvasUtilitySidebar.tsx | 29 +- .../workflows/WorkflowListSidebar.tsx | 18 + .../workflows/WorkflowModuleBundlePanel.tsx | 83 + .../workflows/WorkflowNodeCatalogBrowser.tsx | 630 +++-- .../workflows/WorkflowNodeContractCard.tsx | 36 +- 
.../workflows/WorkflowNodeInspector.tsx | 339 ++- .../workflows/WorkflowPreflightPanel.tsx | 37 + .../WorkflowReferenceBundlePanel.tsx | 83 + .../workflows/WorkflowRunsPanel.tsx | 99 + .../workflows/WorkflowStarterPathPanel.tsx | 93 + .../workflows/WorkflowUtilityRail.tsx | 4 +- .../workflows/useWorkflowCanvasController.ts | 398 ++- .../workflows/workflowAuthoringActions.ts | 79 + .../workflows/workflowAuthoringGuidance.ts | 296 ++ .../workflows/workflowAuthoringSections.ts | 81 + .../workflows/workflowAuthoringSurface.ts | 145 + .../workflows/workflowBlueprints.ts | 15 +- .../workflows/workflowGraphDraft.ts | 679 ++++- .../workflows/workflowModuleBundles.ts | 156 ++ .../workflows/workflowNodeCatalog.ts | 278 +- .../workflows/workflowNodeLibrary.ts | 101 +- .../workflows/workflowNodePresentation.ts | 60 + .../workflows/workflowReferenceBundles.ts | 203 ++ .../workflows/workflowRolloutPresentation.ts | 76 + frontend/src/contexts/WebSocketContext.tsx | 15 +- frontend/src/hooks/useWebSocket.ts | 41 +- frontend/src/pages/Login.tsx | 2 + frontend/src/pages/Materials.tsx | 8 +- frontend/src/pages/WorkflowEditor.tsx | 535 +++- frontend/vite.config.ts | 39 + render-worker/scripts/_blender_args.py | 16 + render-worker/scripts/_blender_materials.py | 192 +- render-worker/scripts/_blender_scene_setup.py | 2 + .../scripts/_blender_template_inputs.py | 183 ++ render-worker/scripts/blender_render.py | 2 + render-worker/scripts/cinematic_render.py | 15 + render-worker/scripts/export_step_to_gltf.py | 381 ++- render-worker/scripts/export_step_to_usd.py | 89 +- render-worker/scripts/turntable_render.py | 15 + scripts/audit_render_templates.py | 220 ++ scripts/compare_live_cad_parity.py | 176 ++ scripts/compare_live_still_parity.py | 1456 ++++++++++ scripts/compare_live_turntable_parity.py | 543 ++++ scripts/recover_nvidia_runtime_pm.sh | 113 + scripts/repo_hygiene.sh | 254 ++ scripts/rerender_closed_legacy_still.sh | 246 ++ scripts/test_render_pipeline.py | 1169 +++++++- 
scripts/workflow_sequential_gates.sh | 128 + 163 files changed, 31774 insertions(+), 2753 deletions(-) create mode 100644 backend/alembic/versions/067_output_type_workflow_rollout_mode.py create mode 100644 backend/alembic/versions/068_material_metadata_brand_cleanup.py create mode 100644 backend/alembic/versions/069_render_template_workflow_input_schema.py create mode 100644 backend/app/core/render_paths.py create mode 100644 backend/app/domains/materials/library_paths.py create mode 100644 backend/app/domains/rendering/template_input_audit.py create mode 100644 backend/tests/db_test_utils.py create mode 100644 backend/tests/domains/test_render_blender_samples.py create mode 100644 backend/tests/domains/test_rendering_publish_asset.py create mode 100644 backend/tests/domains/test_template_input_audit.py create mode 100644 backend/tests/domains/test_workflow_smoke_harness.py create mode 100644 backend/tests/integration/test_cad_model_endpoint.py create mode 100644 backend/tests/integration/test_media_batch_delete.py create mode 100644 backend/tests/test_admin_settings_defaults.py create mode 100644 backend/tests/test_asset_library_paths.py create mode 100644 backend/tests/test_export_glb_task.py create mode 100644 backend/tests/test_export_step_to_gltf.py create mode 100644 backend/tests/test_part_key_service.py create mode 100644 backend/tests/test_render_paths.py create mode 100644 backend/tests/test_render_thumbnail_task.py create mode 100644 backend/tests/test_render_worker_material_lookup.py create mode 100644 backend/uv.lock create mode 100644 docs/workflows/FIRST_WAVE_EXECUTION.md create mode 100644 docs/workflows/NEXT_BATCH_ORCHESTRATION.md create mode 100644 docs/workflows/NODE_BASED_PRODUCTION_ARCHITECTURE.md create mode 100644 docs/workflows/TEMPLATE_INPUT_AUDIT.md create mode 100644 docs/workflows/template-inputs.md create mode 100644 frontend/src/__tests__/api/outputTypes.test.ts create mode 100644 
frontend/src/__tests__/components/WorkflowNodeInspector.test.tsx create mode 100644 frontend/src/__tests__/components/outputTypeRolloutPresentation.test.ts create mode 100644 frontend/src/__tests__/components/workflowAuthoringActions.test.ts create mode 100644 frontend/src/__tests__/components/workflowAuthoringGuidance.test.ts create mode 100644 frontend/src/__tests__/components/workflowModuleBundles.test.ts create mode 100644 frontend/src/__tests__/components/workflowNodeCatalog.test.ts create mode 100644 frontend/src/__tests__/components/workflowNodePresentation.test.ts create mode 100644 frontend/src/components/admin/outputTypeRolloutPresentation.ts create mode 100644 frontend/src/components/workflows/WorkflowAuthoringOverview.tsx create mode 100644 frontend/src/components/workflows/WorkflowAuthoringSectionContent.tsx create mode 100644 frontend/src/components/workflows/WorkflowModuleBundlePanel.tsx create mode 100644 frontend/src/components/workflows/WorkflowReferenceBundlePanel.tsx create mode 100644 frontend/src/components/workflows/WorkflowStarterPathPanel.tsx create mode 100644 frontend/src/components/workflows/workflowAuthoringActions.ts create mode 100644 frontend/src/components/workflows/workflowAuthoringGuidance.ts create mode 100644 frontend/src/components/workflows/workflowAuthoringSections.ts create mode 100644 frontend/src/components/workflows/workflowAuthoringSurface.ts create mode 100644 frontend/src/components/workflows/workflowModuleBundles.ts create mode 100644 frontend/src/components/workflows/workflowNodePresentation.ts create mode 100644 frontend/src/components/workflows/workflowReferenceBundles.ts create mode 100644 frontend/src/components/workflows/workflowRolloutPresentation.ts create mode 100644 render-worker/scripts/_blender_template_inputs.py create mode 100755 scripts/audit_render_templates.py create mode 100755 scripts/compare_live_cad_parity.py create mode 100644 scripts/compare_live_still_parity.py create mode 100644 
scripts/compare_live_turntable_parity.py create mode 100755 scripts/recover_nvidia_runtime_pm.sh create mode 100755 scripts/repo_hygiene.sh create mode 100755 scripts/rerender_closed_legacy_still.sh create mode 100755 scripts/workflow_sequential_gates.sh diff --git a/.env.example b/.env.example index 1c21c09..7559e9b 100644 --- a/.env.example +++ b/.env.example @@ -8,6 +8,18 @@ POSTGRES_PORT=5432 # Redis REDIS_URL=redis://redis:6379/0 +# Prevent Python services from writing __pycache__ into bind-mounted source dirs. +PYTHONDONTWRITEBYTECODE=1 +# Redirect any unavoidable bytecode writes away from bind mounts. +PYTHONPYCACHEPREFIX=/tmp/pycache +# Run Celery containers as your host user to avoid root-owned files on bind mounts. +# Typical Linux value: `id -u` +APP_UID=1000 + +# Docker defaults: +# - inside Compose, service discovery uses `postgres` / `redis` +# - host-side tools and tests are normalized to `localhost` automatically by backend/app/config.py + # JWT JWT_SECRET_KEY=your-secret-key-here-change-in-production JWT_ALGORITHM=HS256 @@ -39,5 +51,7 @@ MINIO_BUCKET=uploads # Blender >= 5.0.1 must be installed on the host at /opt/blender # The render-worker container mounts it read-only via volumes: - /opt/blender:/opt/blender:ro BLENDER_VERSION=5.0.1 +# Set explicitly to `cpu` on hosts without a usable NVIDIA/Cycles device to suppress startup warnings. 
+CYCLES_DEVICE=gpu # Set to host path if Blender is not at /opt/blender: # BLENDER_BIN=/usr/local/blender/blender diff --git a/.gitignore b/.gitignore index b933650..d18e8c9 100644 --- a/.gitignore +++ b/.gitignore @@ -1,28 +1,32 @@ -node_modules/ .env .env.local .DS_Store *.log -# core dump files (not directories named 'core') +.gstack/ + +# Local scratch area managed by scripts/repo_hygiene.sh +tmp/ + +# Core dumps managed by scripts/repo_hygiene.sh /core /blender-renderer/core +backend/core -# Python cache +# Python cache / environments managed by scripts/repo_hygiene.sh __pycache__/ *.py[cod] *.pyo - -# Node / Vite build output -dist/ -node_modules/ - -# Celery beat schedule +.venv/ +backend/.venv/ +.pytest_cache/ +.coverage celerybeat-schedule celerybeat.pid -# Test cache -.pytest_cache/ -.coverage +# Frontend dependencies and build output +node_modules/ +dist/ +frontend/dist/ # IDE .vscode/ @@ -37,9 +41,7 @@ celerybeat.pid *.step *.stl *.xls -+.xslx *.csv *.xlsx *.blend1 -backend/core diff --git a/backend/alembic/versions/067_output_type_workflow_rollout_mode.py b/backend/alembic/versions/067_output_type_workflow_rollout_mode.py new file mode 100644 index 0000000..5899d71 --- /dev/null +++ b/backend/alembic/versions/067_output_type_workflow_rollout_mode.py @@ -0,0 +1,41 @@ +"""Add workflow rollout mode to output types. 
+ +Revision ID: 067 +Revises: 066 +""" +from alembic import op +import sqlalchemy as sa + + +revision = "067" +down_revision = "066" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.add_column( + "output_types", + sa.Column( + "workflow_rollout_mode", + sa.String(length=20), + nullable=False, + server_default="legacy_only", + ), + ) + op.execute( + """ + UPDATE output_types AS ot + SET workflow_rollout_mode = CASE + WHEN coalesce(wd.config->'ui'->>'execution_mode', 'legacy') = 'graph' THEN 'graph' + WHEN coalesce(wd.config->'ui'->>'execution_mode', 'legacy') = 'shadow' THEN 'shadow' + ELSE 'legacy_only' + END + FROM workflow_definitions AS wd + WHERE ot.workflow_definition_id = wd.id + """ + ) + + +def downgrade() -> None: + op.drop_column("output_types", "workflow_rollout_mode") diff --git a/backend/alembic/versions/068_material_metadata_brand_cleanup.py b/backend/alembic/versions/068_material_metadata_brand_cleanup.py new file mode 100644 index 0000000..a6e569c --- /dev/null +++ b/backend/alembic/versions/068_material_metadata_brand_cleanup.py @@ -0,0 +1,55 @@ +"""Clean up persisted legacy Schaeffler material metadata. 
+ +Revision ID: 068 +Revises: 067 +""" +from alembic import op + + +revision = "068" +down_revision = "067" +branch_labels = None +depends_on = None + +_OLD_PREFIX = "SCHAEFFLER_" +_NEW_PREFIX = "HARTOMAT_" + + +def _replace_jsonb_prefix(table_name: str, column_name: str, old_prefix: str, new_prefix: str) -> None: + op.execute( + f""" + UPDATE {table_name} + SET {column_name} = replace({column_name}::text, '{old_prefix}', '{new_prefix}')::jsonb + WHERE {column_name} IS NOT NULL + AND {column_name}::text LIKE '%{old_prefix}%' + """ + ) + + +def _replace_text_prefix(table_name: str, column_name: str, old_prefix: str, new_prefix: str) -> None: + op.execute( + f""" + UPDATE {table_name} + SET {column_name} = replace({column_name}, '{old_prefix}', '{new_prefix}') + WHERE {column_name} IS NOT NULL + AND {column_name} LIKE '%{old_prefix}%' + """ + ) + + +def upgrade() -> None: + _replace_jsonb_prefix("cad_files", "resolved_material_assignments", _OLD_PREFIX, _NEW_PREFIX) + _replace_jsonb_prefix("cad_files", "manual_material_overrides", _OLD_PREFIX, _NEW_PREFIX) + _replace_jsonb_prefix("cad_files", "source_material_assignments", _OLD_PREFIX, _NEW_PREFIX) + + _replace_text_prefix("output_types", "material_override", _OLD_PREFIX, _NEW_PREFIX) + _replace_text_prefix("order_lines", "material_override", _OLD_PREFIX, _NEW_PREFIX) + + +def downgrade() -> None: + _replace_jsonb_prefix("cad_files", "resolved_material_assignments", _NEW_PREFIX, _OLD_PREFIX) + _replace_jsonb_prefix("cad_files", "manual_material_overrides", _NEW_PREFIX, _OLD_PREFIX) + _replace_jsonb_prefix("cad_files", "source_material_assignments", _NEW_PREFIX, _OLD_PREFIX) + + _replace_text_prefix("output_types", "material_override", _NEW_PREFIX, _OLD_PREFIX) + _replace_text_prefix("order_lines", "material_override", _NEW_PREFIX, _OLD_PREFIX) diff --git a/backend/alembic/versions/069_render_template_workflow_input_schema.py b/backend/alembic/versions/069_render_template_workflow_input_schema.py new file mode 100644 
index 0000000..2cd8a9c --- /dev/null +++ b/backend/alembic/versions/069_render_template_workflow_input_schema.py @@ -0,0 +1,31 @@ +"""Add workflow input schema to render templates. + +Revision ID: 069 +Revises: 068 +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + + +revision = "069" +down_revision = "068" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.add_column( + "render_templates", + sa.Column( + "workflow_input_schema", + postgresql.JSONB(astext_type=sa.Text()), + nullable=False, + server_default=sa.text("'[]'::jsonb"), + ), + ) + op.alter_column("render_templates", "workflow_input_schema", server_default=None) + + +def downgrade() -> None: + op.drop_column("render_templates", "workflow_input_schema") diff --git a/backend/app/api/routers/admin.py b/backend/app/api/routers/admin.py index ee9d40e..92171a5 100644 --- a/backend/app/api/routers/admin.py +++ b/backend/app/api/routers/admin.py @@ -3,10 +3,12 @@ import uuid from datetime import datetime, timedelta from typing import Any, Optional from fastapi import APIRouter, Depends, HTTPException, status +from sqlalchemy.orm.attributes import flag_modified from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy import select, update as sql_update, func, case, distinct, and_, extract -from pydantic import BaseModel +from pydantic import BaseModel, ValidationError from app.database import get_db +from app.core.render_paths import resolve_result_path, result_path_to_storage_key from app.models.user import User from app.models.system_setting import SystemSetting from app.models.cad_file import CadFile, ProcessingStatus @@ -27,7 +29,7 @@ SETTINGS_DEFAULTS: dict[str, str] = { "blender_eevee_samples": "64", "thumbnail_format": "jpg", "blender_smooth_angle": "30", - "cycles_device": "auto", + "cycles_device": "gpu", "render_backend": "celery", "blender_max_concurrent_renders": "3", "product_thumbnail_priority": 
'["latest_render","cad_thumbnail"]', @@ -63,7 +65,7 @@ class SettingsOut(BaseModel): blender_eevee_samples: int = 64 thumbnail_format: str = "jpg" blender_smooth_angle: int = 30 - cycles_device: str = "auto" + cycles_device: str = "gpu" render_backend: str = "celery" blender_max_concurrent_renders: int = 3 product_thumbnail_priority: str = '["latest_render","cad_thumbnail"]' @@ -225,9 +227,9 @@ def _settings_to_out(raw: dict[str, str]) -> SettingsOut: smtp_password=raw.get("smtp_password", ""), smtp_from_address=raw.get("smtp_from_address", ""), scene_linear_deflection=float(raw.get("scene_linear_deflection", "0.1")), - scene_angular_deflection=float(raw.get("scene_angular_deflection", "0.5")), + scene_angular_deflection=float(raw.get("scene_angular_deflection", "0.1")), render_linear_deflection=float(raw.get("render_linear_deflection", "0.03")), - render_angular_deflection=float(raw.get("render_angular_deflection", "0.2")), + render_angular_deflection=float(raw.get("render_angular_deflection", "0.05")), gltf_scale_factor=float(raw.get("gltf_scale_factor", "0.001")), gltf_smooth_normals=raw.get("gltf_smooth_normals", "true") == "true", viewer_max_distance=float(raw.get("viewer_max_distance", "50")), @@ -680,7 +682,10 @@ async def seed_workflows( ): """Create the standard workflow definitions if they do not already exist.""" from app.domains.rendering.models import WorkflowDefinition - from app.domains.rendering.workflow_config_utils import build_preset_workflow_config + from app.domains.rendering.workflow_config_utils import ( + build_preset_workflow_config, + build_workflow_blueprint_config, + ) STANDARD_WORKFLOWS = [ { @@ -697,6 +702,13 @@ async def seed_workflows( {"render_engine": "eevee", "samples": 64, "resolution": [1920, 1080]}, ), }, + { + "name": "Still Image — Graph", + "config": build_preset_workflow_config( + "still_graph", + {"render_engine": "cycles", "samples": 256, "resolution": [1920, 1080]}, + ), + }, { "name": "Turntable Animation", "config": 
build_preset_workflow_config( @@ -711,6 +723,18 @@ async def seed_workflows( {"render_engine": "cycles", "samples": 128, "angles": [0, 45, 90]}, ), }, + { + "name": "CAD Intake Blueprint", + "config": build_workflow_blueprint_config("cad_intake"), + }, + { + "name": "Order Rendering Blueprint", + "config": build_workflow_blueprint_config("order_rendering"), + }, + { + "name": "Still Graph Blueprint", + "config": build_workflow_blueprint_config("still_graph_reference"), + }, ] existing_result = await db.execute(select(WorkflowDefinition)) @@ -730,6 +754,57 @@ async def seed_workflows( return {"created": created, "message": f"Created {created} workflow definition(s)"} +@router.post("/settings/backfill-workflows", status_code=status.HTTP_200_OK) +async def backfill_workflows( + admin: User = Depends(require_global_admin), + db: AsyncSession = Depends(get_db), +): + """Rewrite persisted legacy workflow configs into canonical DAG form.""" + from app.domains.rendering.models import WorkflowDefinition + from app.domains.rendering.workflow_config_utils import ( + canonicalize_workflow_config, + workflow_config_requires_canonicalization, + ) + from app.domains.rendering.workflow_schema import WorkflowConfig + + result = await db.execute(select(WorkflowDefinition).order_by(WorkflowDefinition.created_at)) + workflows = result.scalars().all() + + updated: list[dict[str, str]] = [] + invalid: list[dict[str, str]] = [] + + for workflow in workflows: + if not workflow_config_requires_canonicalization(workflow.config): + continue + + try: + normalized = canonicalize_workflow_config(workflow.config) + WorkflowConfig.model_validate(normalized) + except (ValidationError, ValueError) as exc: + invalid.append( + { + "id": str(workflow.id), + "name": workflow.name, + "error": str(exc), + } + ) + continue + + workflow.config = normalized + flag_modified(workflow, "config") + updated.append({"id": str(workflow.id), "name": workflow.name}) + + await db.commit() + + return { + "scanned": 
len(workflows), + "updated": len(updated), + "invalid": invalid, + "workflows": updated, + "message": f"Canonicalized {len(updated)} workflow definition(s)", + } + + @router.get("/settings/renderer-status") async def renderer_status( admin: User = Depends(require_global_admin), @@ -756,13 +831,10 @@ async def import_existing_media_assets( created = 0 skipped = 0 - from app.config import settings as _app_settings - def _normalize_key(path: str) -> str: - """Strip UPLOAD_DIR prefix to store relative storage keys.""" - key = str(path) - prefix = str(_app_settings.upload_dir).rstrip("/") + "/" - return key[len(prefix):] if key.startswith(prefix) else key + """Normalize mixed legacy/canonical paths to a stable relative storage key.""" + key = result_path_to_storage_key(path) + return key or str(path) # 1. CadFiles with thumbnail_path await db.execute(text("SET LOCAL app.current_tenant_id = 'bypass'")) @@ -843,7 +915,6 @@ async def purge_render_media( """ import logging from pathlib import Path - from app.config import settings from app.core.storage import get_storage from app.domains.media.models import MediaAsset, MediaAssetType @@ -865,8 +936,8 @@ async def purge_render_media( # Delete backing file key = asset.storage_key try: - candidate = Path(key) if Path(key).is_absolute() else Path(settings.upload_dir) / key - if candidate.exists(): + candidate = resolve_result_path(key) + if candidate is not None and candidate.exists(): freed_bytes += candidate.stat().st_size candidate.unlink() deleted_files += 1 diff --git a/backend/app/api/routers/cad.py b/backend/app/api/routers/cad.py index c248689..39e7a38 100644 --- a/backend/app/api/routers/cad.py +++ b/backend/app/api/routers/cad.py @@ -13,6 +13,9 @@ from sqlalchemy import select from sqlalchemy.orm import selectinload from app.database import get_db +from app.core.render_paths import resolve_result_path +from app.config import settings +from app.domains.media.models import MediaAsset, MediaAssetType from 
app.models.cad_file import CadFile, ProcessingStatus from app.models.order import Order from app.models.order_item import OrderItem @@ -191,6 +194,38 @@ async def _get_cad_file(cad_id: uuid.UUID, db: AsyncSession) -> CadFile: return cad +async def _resolve_gltf_path(cad: CadFile, db: AsyncSession) -> Path | None: + """Resolve the best available GLTF/GLB path for a CAD file. + + Prefer the legacy cad_files.gltf_path for compatibility, but fall back to + the canonical media_assets.gltf_geometry record written by the newer export + pipeline. + """ + if cad.gltf_path: + legacy_path = resolve_result_path(cad.gltf_path) or Path(cad.gltf_path) + if legacy_path.exists(): + return legacy_path + + asset_result = await db.execute( + select(MediaAsset) + .where( + MediaAsset.cad_file_id == cad.id, + MediaAsset.asset_type == MediaAssetType.gltf_geometry, + MediaAsset.is_archived == False, # noqa: E712 + ) + .order_by(MediaAsset.created_at.desc()) + ) + asset = asset_result.scalars().first() + if asset and asset.storage_key: + asset_path = resolve_result_path(asset.storage_key) + if asset_path is None: + asset_path = Path(settings.upload_dir) / asset.storage_key.lstrip("/") + if asset_path.exists(): + return asset_path + + return None + + @router.get("/{id}/thumbnail") async def get_thumbnail( id: uuid.UUID, @@ -228,20 +263,13 @@ async def get_model( ): """Serve the glTF file for a CAD file.""" cad = await _get_cad_file(id, db) - - if not cad.gltf_path: + gltf_path = await _resolve_gltf_path(cad, db) + if gltf_path is None: raise HTTPException( status_code=404, detail="glTF model not yet generated for this CAD file", ) - gltf_path = Path(cad.gltf_path) - if not gltf_path.exists(): - raise HTTPException( - status_code=404, - detail="glTF file missing from storage", - ) - # glTF files may be either .gltf (JSON) or .glb (binary) suffix = gltf_path.suffix.lower() if suffix == ".glb": diff --git a/backend/app/api/routers/orders.py b/backend/app/api/routers/orders.py index 
18028ce..288573b 100644 --- a/backend/app/api/routers/orders.py +++ b/backend/app/api/routers/orders.py @@ -30,6 +30,7 @@ from app.schemas.order_line import OrderLineCreate, OrderLineOut from app.schemas.product import ProductOut from app.schemas.output_type import OutputTypeOut from app.services.order_service import generate_order_number +from app.core.render_paths import resolve_result_path, result_path_to_public_url from app.utils.auth import get_current_user, require_admin_or_pm, require_pm_or_above router = APIRouter(prefix="/orders", tags=["orders"]) @@ -41,13 +42,7 @@ def _is_privileged(user: User) -> bool: def _result_path_to_url(result_path: str) -> str | None: """Convert an internal result_path to a servable static URL.""" - if "/renders/" in result_path: - idx = result_path.index("/renders/") - return result_path[idx:] - if "/thumbnails/" in result_path: - idx = result_path.index("/thumbnails/") - return result_path[idx:] - return None + return result_path_to_public_url(result_path, require_exists=True) def _build_line_out(line: OrderLine) -> OrderLineOut: @@ -1544,15 +1539,6 @@ async def download_renders( if not lines: raise HTTPException(404, detail="No completed renders found for this order") - from app.config import settings as app_settings - - def _resolve_path(p: str) -> str: - """Translate container-relative paths to backend filesystem paths.""" - # Flamenco worker mounts the uploads volume at /shared, backend at /app/uploads - if p.startswith("/shared/"): - return app_settings.upload_dir + p[len("/shared"):] - return p - buf = io.BytesIO() # Track names used to avoid duplicates name_counts: dict[str, int] = {} @@ -1561,8 +1547,8 @@ async def download_renders( for line in lines: if not line.result_path: continue - fs_path = _resolve_path(line.result_path) - if not os.path.isfile(fs_path): + resolved_path = resolve_result_path(line.result_path) + if resolved_path is None or not resolved_path.is_file(): continue # Build a meaningful filename 
product_name = (line.product.name or line.product.pim_id or "product") if line.product else "product" @@ -1587,7 +1573,7 @@ async def download_renders( name_counts[base_name] = 0 archive_name = base_name - zf.write(fs_path, archive_name) + zf.write(resolved_path, archive_name) if not zf.infolist(): raise HTTPException(404, detail="No render files found on disk") diff --git a/backend/app/api/routers/output_types.py b/backend/app/api/routers/output_types.py index a0524e1..7d744fa 100644 --- a/backend/app/api/routers/output_types.py +++ b/backend/app/api/routers/output_types.py @@ -12,6 +12,7 @@ from app.models.order_line import OrderLine from app.models.output_type import ( OUTPUT_TYPE_ARTIFACT_KINDS, OUTPUT_TYPE_WORKFLOW_FAMILIES, + OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES, OutputType, VALID_RENDER_BACKENDS, ) @@ -21,12 +22,19 @@ from app.models.user import User from app.domains.rendering.models import WorkflowDefinition from app.domains.rendering.output_type_contracts import ( apply_invocation_overrides_to_render_settings, + build_output_type_contract_catalog, + build_output_type_invocation_profile, + derive_supported_artifact_kinds_from_workflow_config, infer_output_type_artifact_kind, infer_workflow_family_from_config, + InvalidInvocationOverridesError, merge_output_type_invocation_overrides, normalize_invocation_overrides, + resolve_output_type_invocation_overrides, + validate_and_normalize_invocation_overrides, validate_output_type_contract, ) +from app.domains.rendering.schemas import OutputTypeContractCatalogOut, OutputTypeInvocationProfileOut router = APIRouter(prefix="/output-types", tags=["output-types"]) @@ -34,6 +42,34 @@ router = APIRouter(prefix="/output-types", tags=["output-types"]) def _ot_to_out(ot: OutputType) -> OutputTypeOut: """Convert an OutputType ORM instance to OutputTypeOut with pricing convenience fields.""" out = OutputTypeOut.model_validate(ot) + resolved_invocation_overrides = resolve_output_type_invocation_overrides( + ot.render_settings, + 
getattr(ot, "invocation_overrides", None), + artifact_kind=ot.artifact_kind, + is_animation=ot.is_animation, + ) + out.invocation_overrides = resolved_invocation_overrides + out.render_settings = apply_invocation_overrides_to_render_settings( + ot.render_settings, + resolved_invocation_overrides, + ) + out.invocation_profile = OutputTypeInvocationProfileOut.model_validate( + build_output_type_invocation_profile( + renderer=ot.renderer, + render_backend=ot.render_backend, + workflow_family=ot.workflow_family, + artifact_kind=ot.artifact_kind, + output_format=ot.output_format, + is_animation=ot.is_animation, + workflow_definition_id=ot.workflow_definition_id, + workflow_rollout_mode=getattr(ot, "workflow_rollout_mode", "legacy_only"), + transparent_bg=ot.transparent_bg, + cycles_device=ot.cycles_device, + material_override=ot.material_override, + render_settings=ot.render_settings, + invocation_overrides=getattr(ot, "invocation_overrides", None), + ) + ) if ot.pricing_tier: out.pricing_tier_name = f"{ot.pricing_tier.category_key}/{ot.pricing_tier.quality_level}" out.price_per_item = float(ot.pricing_tier.price_per_item) @@ -62,6 +98,7 @@ async def _validate_output_type_workflow_link( *, workflow_definition_id: uuid.UUID | None, workflow_family: str, + artifact_kind: str, ) -> None: if workflow_definition_id is None: return @@ -86,6 +123,17 @@ async def _validate_output_type_workflow_link( ), ) + supported_artifact_kinds = derive_supported_artifact_kinds_from_workflow_config(workflow_definition.config) + if artifact_kind not in supported_artifact_kinds: + supported = ", ".join(supported_artifact_kinds) if supported_artifact_kinds else "none" + raise HTTPException( + 400, + detail=( + f"Workflow artifact mismatch: output type expects '{artifact_kind}', " + f"but workflow '{workflow_definition.name}' supports [{supported}]" + ), + ) + def _ensure_output_type_contract_is_valid( *, @@ -105,6 +153,23 @@ def _ensure_output_type_contract_is_valid( raise HTTPException(400, 
detail=str(exc)) from exc +def _normalize_explicit_invocation_overrides( + raw: dict | None, + *, + artifact_kind: str, + is_animation: bool, +) -> dict: + try: + return validate_and_normalize_invocation_overrides( + raw, + artifact_kind=artifact_kind, + is_animation=is_animation, + reject_unknown_keys=True, + ) + except InvalidInvocationOverridesError as exc: + raise HTTPException(400, detail=str(exc)) from exc + + @router.get("", response_model=list[OutputTypeOut]) async def list_output_types( include_inactive: bool = Query(False), @@ -133,6 +198,13 @@ async def list_output_types( return await _enrich_workflow_names(db, items) +@router.get("/contract-catalog", response_model=OutputTypeContractCatalogOut) +async def get_output_type_contract_catalog( + user: User = Depends(get_current_user), +): + return OutputTypeContractCatalogOut.model_validate(build_output_type_contract_catalog()) + + @router.post("", response_model=OutputTypeOut, status_code=status.HTTP_201_CREATED) async def create_output_type( body: OutputTypeCreate, @@ -146,25 +218,39 @@ async def create_output_type( 400, detail=f"Invalid workflow_family. Choose: {', '.join(sorted(OUTPUT_TYPE_WORKFLOW_FAMILIES))}", ) + if body.workflow_rollout_mode not in OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES: + raise HTTPException( + 400, + detail=f"Invalid workflow_rollout_mode. 
Choose: {', '.join(sorted(OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES))}", + ) existing = await db.execute(select(OutputType).where(OutputType.name == body.name)) if existing.scalar_one_or_none(): raise HTTPException(409, detail=f"Output type '{body.name}' already exists") data = body.model_dump() - explicit_invocation = normalize_invocation_overrides(body.invocation_overrides) - if not explicit_invocation: - explicit_invocation = normalize_invocation_overrides(body.render_settings) - data["invocation_overrides"] = explicit_invocation - data["render_settings"] = apply_invocation_overrides_to_render_settings( - body.render_settings, - explicit_invocation, - ) data["artifact_kind"] = data.get("artifact_kind") or infer_output_type_artifact_kind( body.output_format, body.is_animation, body.workflow_family, ) + explicit_invocation = _normalize_explicit_invocation_overrides( + body.invocation_overrides, + artifact_kind=data["artifact_kind"], + is_animation=body.is_animation, + ) + if not explicit_invocation: + explicit_invocation = normalize_invocation_overrides(body.render_settings) + data["invocation_overrides"] = resolve_output_type_invocation_overrides( + body.render_settings, + explicit_invocation, + artifact_kind=data["artifact_kind"], + is_animation=body.is_animation, + ) + data["render_settings"] = apply_invocation_overrides_to_render_settings( + body.render_settings, + data["invocation_overrides"], + ) if data["artifact_kind"] not in OUTPUT_TYPE_ARTIFACT_KINDS: raise HTTPException( 400, @@ -180,7 +266,10 @@ async def create_output_type( db, workflow_definition_id=body.workflow_definition_id, workflow_family=body.workflow_family, + artifact_kind=data["artifact_kind"], ) + if body.workflow_definition_id is None: + data["workflow_rollout_mode"] = "legacy_only" ot = OutputType(**data) db.add(ot) @@ -214,6 +303,11 @@ async def update_output_type( 400, detail=f"Invalid workflow_family. 
Choose: {', '.join(sorted(OUTPUT_TYPE_WORKFLOW_FAMILIES))}", ) + if "workflow_rollout_mode" in data and data["workflow_rollout_mode"] not in OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES: + raise HTTPException( + 400, + detail=f"Invalid workflow_rollout_mode. Choose: {', '.join(sorted(OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES))}", + ) candidate_workflow_family = data.get("workflow_family", ot.workflow_family) candidate_workflow_definition_id = data.get("workflow_definition_id", ot.workflow_definition_id) @@ -226,16 +320,25 @@ async def update_output_type( if render_settings_supplied or invocation_supplied: candidate_render_settings = data.get("render_settings", ot.render_settings) if invocation_supplied: - candidate_invocation_overrides = normalize_invocation_overrides(data.get("invocation_overrides")) + candidate_invocation_overrides = _normalize_explicit_invocation_overrides( + data.get("invocation_overrides"), + artifact_kind=candidate_artifact_kind, + is_animation=candidate_is_animation, + ) else: candidate_invocation_overrides = merge_output_type_invocation_overrides( candidate_render_settings, None, ) - data["invocation_overrides"] = candidate_invocation_overrides - data["render_settings"] = apply_invocation_overrides_to_render_settings( + data["invocation_overrides"] = resolve_output_type_invocation_overrides( candidate_render_settings, candidate_invocation_overrides, + artifact_kind=candidate_artifact_kind, + is_animation=candidate_is_animation, + ) + data["render_settings"] = apply_invocation_overrides_to_render_settings( + candidate_render_settings, + data["invocation_overrides"], ) should_recompute_artifact_kind = ( @@ -263,12 +366,26 @@ async def update_output_type( output_format=candidate_output_format, is_animation=candidate_is_animation, ) + if render_settings_supplied or invocation_supplied or should_recompute_artifact_kind: + data["invocation_overrides"] = resolve_output_type_invocation_overrides( + data.get("render_settings", ot.render_settings), + 
data.get("invocation_overrides", ot.invocation_overrides), + artifact_kind=candidate_artifact_kind, + is_animation=candidate_is_animation, + ) + data["render_settings"] = apply_invocation_overrides_to_render_settings( + data.get("render_settings", ot.render_settings), + data["invocation_overrides"], + ) await _validate_output_type_workflow_link( db, workflow_definition_id=candidate_workflow_definition_id, workflow_family=candidate_workflow_family, + artifact_kind=candidate_artifact_kind, ) + if candidate_workflow_definition_id is None: + data["workflow_rollout_mode"] = "legacy_only" for field_name, value in data.items(): setattr(ot, field_name, value) diff --git a/backend/app/api/routers/products.py b/backend/app/api/routers/products.py index cbb2ad3..fd00a68 100644 --- a/backend/app/api/routers/products.py +++ b/backend/app/api/routers/products.py @@ -16,6 +16,11 @@ from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import selectinload, joinedload from app.config import settings +from app.core.render_paths import ( + resolve_result_path, + resolve_public_asset_url, + result_path_to_public_url, +) from app.database import get_db from app.models.cad_file import CadFile, ProcessingStatus from app.models.material import Material @@ -829,24 +834,12 @@ VIDEO_EXTENSIONS = {".mp4", ".webm", ".avi", ".mov"} def _result_path_to_url(result_path: str) -> str | None: """Convert an internal result_path to a servable static URL.""" - # Flamenco / shared renders: /shared/renders/X/file.jpg → /renders/X/file.jpg - if "/renders/" in result_path: - idx = result_path.index("/renders/") - return result_path[idx:] - # Celery renders stored as thumbnails: /app/uploads/thumbnails/X.png → /thumbnails/X.png - if "/thumbnails/" in result_path: - idx = result_path.index("/thumbnails/") - return result_path[idx:] - return None + return result_path_to_public_url(result_path, require_exists=False) def _resolve_disk_path(url: str) -> Path | None: """Given a servable URL like 
/renders/X/file.jpg, resolve to disk path.""" - if url.startswith("/renders/"): - return Path(settings.upload_dir) / "renders" / url[len("/renders/"):] - if url.startswith("/thumbnails/"): - return Path(settings.upload_dir) / "thumbnails" / url[len("/thumbnails/"):] - return None + return resolve_public_asset_url(url) @router.get("/{product_id}/renders") @@ -983,9 +976,8 @@ async def download_product_renders( raise HTTPException(404, detail="No completed renders found for the selected lines") def _resolve_path(p: str) -> str: - if p.startswith("/shared/"): - return settings.upload_dir + p[len("/shared"):] - return p + resolved = resolve_result_path(p) + return str(resolved) if resolved is not None else p def _safe(s: str) -> str: return re.sub(r"[^\w\-.]", "_", s).strip("_") @@ -1147,4 +1139,3 @@ async def delete_render_position( raise HTTPException(404, detail="Render position not found") await db.delete(pos) await db.commit() - diff --git a/backend/app/api/routers/render_templates.py b/backend/app/api/routers/render_templates.py index e40e7c2..e7a2940 100644 --- a/backend/app/api/routers/render_templates.py +++ b/backend/app/api/routers/render_templates.py @@ -1,17 +1,20 @@ """Render Templates API — CRUD + .blend file upload/download + material library.""" +import json import uuid import shutil from datetime import datetime from pathlib import Path +from typing import Any from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form, status from fastapi.responses import FileResponse from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy import select, update as sql_update, delete as sql_delete -from pydantic import BaseModel +from pydantic import BaseModel, TypeAdapter, ValidationError from app.database import get_db from app.config import settings as app_settings +from app.domains.rendering.workflow_node_registry import WorkflowNodeFieldDefinition from app.models.user import User from app.models.render_template import RenderTemplate from 
app.models.output_type import OutputType @@ -46,6 +49,7 @@ class RenderTemplateOut(BaseModel): lighting_only: bool shadow_catcher_enabled: bool camera_orbit: bool + workflow_input_schema: list[WorkflowNodeFieldDefinition] is_active: bool created_at: str updated_at: str @@ -62,6 +66,7 @@ class RenderTemplateUpdate(BaseModel): lighting_only: bool | None = None shadow_catcher_enabled: bool | None = None camera_orbit: bool | None = None + workflow_input_schema: list[WorkflowNodeFieldDefinition] | None = None is_active: bool | None = None @@ -72,6 +77,29 @@ class MaterialLibraryInfo(BaseModel): path: str | None = None +_workflow_input_schema_adapter = TypeAdapter(list[WorkflowNodeFieldDefinition]) + + +def _normalize_workflow_input_schema(schema: Any) -> list[dict[str, Any]]: + if schema in (None, "", "null"): + return [] + try: + validated = _workflow_input_schema_adapter.validate_python(schema) + except ValidationError as exc: + raise HTTPException(status_code=422, detail={"workflow_input_schema": exc.errors()}) from exc + return [field.model_dump(mode="json") for field in validated] + + +def _parse_form_workflow_input_schema(raw_schema: str | None) -> list[dict[str, Any]]: + if raw_schema in (None, "", "null"): + return [] + try: + payload = json.loads(raw_schema) + except json.JSONDecodeError as exc: + raise HTTPException(status_code=422, detail="workflow_input_schema must be valid JSON") from exc + return _normalize_workflow_input_schema(payload) + + def _to_out(t: RenderTemplate) -> dict: ot_name = None if t.output_type: @@ -94,6 +122,7 @@ def _to_out(t: RenderTemplate) -> dict: "lighting_only": t.lighting_only, "shadow_catcher_enabled": t.shadow_catcher_enabled, "camera_orbit": t.camera_orbit, + "workflow_input_schema": t.workflow_input_schema or [], "is_active": t.is_active, "created_at": t.created_at.isoformat() if t.created_at else "", "updated_at": t.updated_at.isoformat() if t.updated_at else "", @@ -126,6 +155,7 @@ async def create_render_template( 
lighting_only: bool = Form(False), shadow_catcher_enabled: bool = Form(False), camera_orbit: bool = Form(True), + workflow_input_schema: str | None = Form(None), user: User = Depends(require_admin_or_pm), db: AsyncSession = Depends(get_db), ): @@ -182,6 +212,7 @@ async def create_render_template( lighting_only=lighting_only, shadow_catcher_enabled=shadow_catcher_enabled, camera_orbit=camera_orbit, + workflow_input_schema=_parse_form_workflow_input_schema(workflow_input_schema), ) db.add(tmpl) await db.flush() @@ -224,6 +255,8 @@ async def update_render_template( # Normalise empty strings to None for nullable fields if "category_key" in updates and updates["category_key"] in ("", "null"): updates["category_key"] = None + if "workflow_input_schema" in updates: + updates["workflow_input_schema"] = _normalize_workflow_input_schema(updates["workflow_input_schema"]) # Handle M2M output_type_ids new_ot_ids: list[str] | None = updates.pop("output_type_ids", None) diff --git a/backend/app/api/routers/worker.py b/backend/app/api/routers/worker.py index 2184bfe..c069a37 100644 --- a/backend/app/api/routers/worker.py +++ b/backend/app/api/routers/worker.py @@ -519,6 +519,12 @@ async def trigger_gpu_probe(current_user: User = Depends(require_global_admin)): return {"task_id": str(result.id), "queued": True} +@router.post("/gpu-probe", status_code=http_status.HTTP_202_ACCEPTED) +async def trigger_gpu_probe_legacy_alias(current_user: User = Depends(require_global_admin)): + """Backward-compatible alias used by the current admin frontend.""" + return await trigger_gpu_probe(current_user) + + @router.get("/probe/gpu/result") async def get_gpu_probe_result( current_user: User = Depends(require_global_admin), @@ -535,6 +541,15 @@ async def get_gpu_probe_result( return json.loads(setting.value) +@router.get("/gpu-probe") +async def get_gpu_probe_result_legacy_alias( + current_user: User = Depends(require_global_admin), + db: AsyncSession = Depends(get_db), +): + """Backward-compatible 
alias used by the current admin frontend.""" + return await get_gpu_probe_result(current_user, db) + + # --------------------------------------------------------------------------- # Render health check # --------------------------------------------------------------------------- @@ -733,4 +748,3 @@ async def update_worker_config( enabled=cfg.enabled, updated_at=cfg.updated_at.isoformat(), ) - diff --git a/backend/app/config.py b/backend/app/config.py index 501477d..f0627d7 100644 --- a/backend/app/config.py +++ b/backend/app/config.py @@ -75,6 +75,7 @@ class Settings(BaseSettings): # Redis / Celery redis_url: str = "redis://localhost:6379/0" + workflow_shadow_render_queue: str = "asset_pipeline_light" @model_validator(mode="after") def normalize_runtime_hosts(self) -> "Settings": diff --git a/backend/app/core/config_service.py b/backend/app/core/config_service.py index bb2a4bb..8f58a2a 100644 --- a/backend/app/core/config_service.py +++ b/backend/app/core/config_service.py @@ -39,7 +39,7 @@ class RenderConfig(BaseModel): blender_eevee_samples: int = 64 thumbnail_format: str = "jpg" blender_smooth_angle: int = 30 - cycles_device: str = "auto" + cycles_device: str = "gpu" render_backend: str = "celery" product_thumbnail_priority: list[str] = Field( default_factory=lambda: ["latest_render", "cad_thumbnail"] diff --git a/backend/app/core/render_paths.py b/backend/app/core/render_paths.py new file mode 100644 index 0000000..d89c949 --- /dev/null +++ b/backend/app/core/render_paths.py @@ -0,0 +1,194 @@ +from __future__ import annotations + +import os +from pathlib import Path + +from app.config import settings + +SHARED_DIR_MODE = 0o2775 + + +def _managed_directory_chain(path: Path) -> list[Path]: + """Return upload-root-relative directories that should share writable perms.""" + resolved_path = path.resolve(strict=False) + upload_root = Path(settings.upload_dir).resolve(strict=False) + + if resolved_path != upload_root and upload_root not in resolved_path.parents: + 
return [path] + + chain: list[Path] = [upload_root] + current = upload_root + try: + relative_parts = resolved_path.relative_to(upload_root).parts + except ValueError: + return [path] + + for part in relative_parts: + current = current / part + chain.append(current) + return chain + + +def _normalize_directory_mode(path: Path, *, mode: int = SHARED_DIR_MODE) -> None: + try: + current_mode = path.stat().st_mode & 0o7777 + except OSError: + return + + desired_mode = mode + if current_mode == desired_mode: + return + + try: + os.chmod(path, desired_mode) + except OSError: + # Best-effort only: callers still get the path, but existing root-owned + # trees can be repaired when the process has sufficient permissions. + return + + +def ensure_group_writable_dir(path: str | Path, *, mode: int = SHARED_DIR_MODE) -> Path: + """Create a directory and normalize upload-tree permissions for shared workers.""" + dir_path = Path(path) + for candidate in _managed_directory_chain(dir_path): + candidate.mkdir(parents=True, exist_ok=True) + _normalize_directory_mode(candidate, mode=mode) + return dir_path + + +def resolve_public_asset_url(url: str | None) -> Path | None: + """Resolve a public static asset URL like /renders/... to a local disk path.""" + if not url: + return None + + normalized = url.replace("\\", "/") + if normalized.startswith("/renders/"): + candidate = Path(settings.upload_dir) / "renders" / normalized[len("/renders/"):] + elif normalized.startswith("/thumbnails/"): + candidate = Path(settings.upload_dir) / "thumbnails" / normalized[len("/thumbnails/"):] + else: + return None + + return candidate + + +def resolve_result_path(result_path: str | None) -> Path | None: + """Resolve stored result_path variants to a local disk path. + + Supports canonical /app/uploads/... paths, legacy /shared/... paths, public + URLs, and bare storage keys such as renders//file.png. 
+ """ + if not result_path: + return None + + normalized = result_path.replace("\\", "/") + + for marker in ("/uploads/", "/shared/"): + if marker in normalized: + relative = normalized.split(marker, 1)[1].lstrip("/") + return Path(settings.upload_dir) / relative + + public_candidate = resolve_public_asset_url(normalized) + if public_candidate is not None: + return public_candidate + + stripped = normalized.lstrip("/") + if stripped.startswith(("renders/", "thumbnails/", "exports/", "usd/", "step_files/")): + return Path(settings.upload_dir) / stripped + + if Path(normalized).is_absolute(): + return Path(normalized) + + return None + + +def result_path_to_storage_key(result_path: str | None) -> str | None: + """Normalize stored paths to a canonical relative storage key when possible.""" + if not result_path: + return None + + normalized = result_path.replace("\\", "/") + disk_path = resolve_result_path(result_path) + if disk_path is not None: + try: + return disk_path.relative_to(Path(settings.upload_dir)).as_posix() + except ValueError: + pass + + public_candidate = normalized.lstrip("/") + if public_candidate.startswith(("renders/", "thumbnails/", "exports/", "usd/", "step_files/")): + return public_candidate + + return normalized + + +def result_path_to_public_url( + result_path: str | None, + *, + require_exists: bool = False, +) -> str | None: + """Convert internal result paths to a servable public URL. + + Returns only /renders/... or /thumbnails/... URLs. Non-public internal paths + like step_files/renders stay hidden from API/UI callers. 
+ """ + if not result_path: + return None + + disk_path = resolve_result_path(result_path) + if require_exists: + if disk_path is None or not disk_path.is_file(): + return None + + normalized = result_path.replace("\\", "/") + for marker in ("/renders/", "/thumbnails/"): + if marker in normalized: + idx = normalized.index(marker) + public_url = normalized[idx:] + candidate = resolve_public_asset_url(public_url) + if require_exists and (candidate is None or not candidate.is_file()): + return None + return public_url + + if disk_path is None: + return None + + try: + relative = disk_path.relative_to(Path(settings.upload_dir)) + except ValueError: + return None + + relative_str = relative.as_posix() + if relative_str.startswith(("renders/", "thumbnails/")): + if require_exists and not disk_path.is_file(): + return None + return f"/{relative_str}" + + return None + + +def build_order_line_step_render_path( + step_path: str | Path, + order_line_id: str, + filename: str, + *, + ensure_exists: bool = False, +) -> Path: + """Build a unique per-order-line render-worker artifact path beside the STEP file.""" + artifact_dir = Path(step_path).parent / "renders" / str(order_line_id) + if ensure_exists: + ensure_group_writable_dir(artifact_dir) + return artifact_dir / filename + + +def build_order_line_export_path( + order_line_id: str, + filename: str, + *, + ensure_exists: bool = False, +) -> Path: + """Build a unique per-order-line export artifact path under the shared upload root.""" + artifact_dir = Path(settings.upload_dir) / "exports" / str(order_line_id) + if ensure_exists: + ensure_group_writable_dir(artifact_dir) + return artifact_dir / filename diff --git a/backend/app/database.py b/backend/app/database.py index d269ce9..a8a082d 100644 --- a/backend/app/database.py +++ b/backend/app/database.py @@ -1,13 +1,11 @@ from __future__ import annotations -from typing import TYPE_CHECKING, AsyncGenerator, Optional +from typing import AsyncGenerator, Optional +from 
starlette.requests import Request from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, async_sessionmaker from sqlalchemy.orm import DeclarativeBase from sqlalchemy import text from app.config import settings -if TYPE_CHECKING: - from starlette.requests import Request - engine = create_async_engine( settings.database_url, echo=False, @@ -27,22 +25,21 @@ class Base(DeclarativeBase): pass -async def get_db(request: "Request | None" = None) -> AsyncGenerator[AsyncSession, None]: +async def get_db(request: Request) -> AsyncGenerator[AsyncSession, None]: async with AsyncSessionLocal() as session: # Auto-apply RLS context if TenantContextMiddleware populated request.state - if request is not None: - tenant_id = getattr(request.state, "tenant_id", None) - role = getattr(request.state, "role", None) - if tenant_id: - # global_admin and legacy admin bypass RLS to see all tenants - _bypass_roles = {"global_admin", "admin"} - if role in _bypass_roles: - await session.execute(text("SET LOCAL app.current_tenant_id = 'bypass'")) - else: - await session.execute( - text("SET LOCAL app.current_tenant_id = :tid"), - {"tid": tenant_id}, - ) + tenant_id = getattr(request.state, "tenant_id", None) + role = getattr(request.state, "role", None) + if tenant_id: + # global_admin and legacy admin bypass RLS to see all tenants + _bypass_roles = {"global_admin", "admin"} + if role in _bypass_roles: + await session.execute(text("SET LOCAL app.current_tenant_id = 'bypass'")) + else: + await session.execute( + text("SET LOCAL app.current_tenant_id = :tid"), + {"tid": tenant_id}, + ) try: yield session finally: diff --git a/backend/app/domains/materials/library_paths.py b/backend/app/domains/materials/library_paths.py new file mode 100644 index 0000000..2e386d6 --- /dev/null +++ b/backend/app/domains/materials/library_paths.py @@ -0,0 +1,51 @@ +from __future__ import annotations + +from pathlib import Path +from typing import Any + +from app.config import settings + + +def 
asset_library_dir() -> Path: + return Path(settings.upload_dir) / "asset-libraries" + + +def list_asset_library_blends() -> list[Path]: + directory = asset_library_dir() + if not directory.is_dir(): + return [] + + return sorted( + (path for path in directory.glob("*.blend") if path.is_file()), + key=lambda path: (path.stat().st_mtime, path.name), + reverse=True, + ) + + +def resolve_asset_library_blend_path( + *, + blend_file_path: str | None = None, + asset_library_id: Any | None = None, +) -> str | None: + """Resolve the best available .blend path for an asset library. + + Resolution order: + 1. explicit configured path, when it exists + 2. canonical uploads/asset-libraries/.blend path + 3. newest available .blend under uploads/asset-libraries + """ + if blend_file_path: + configured = Path(blend_file_path) + if configured.is_file(): + return str(configured) + + if asset_library_id: + candidate = asset_library_dir() / f"{asset_library_id}.blend" + if candidate.is_file(): + return str(candidate) + + available = list_asset_library_blends() + if available: + return str(available[0]) + + return None diff --git a/backend/app/domains/materials/tasks.py b/backend/app/domains/materials/tasks.py index 97ecd77..30a7f88 100644 --- a/backend/app/domains/materials/tasks.py +++ b/backend/app/domains/materials/tasks.py @@ -8,6 +8,7 @@ import subprocess import uuid from pathlib import Path +from app.domains.materials.library_paths import resolve_asset_library_blend_path from app.tasks.celery_app import celery_app logger = logging.getLogger(__name__) @@ -43,7 +44,20 @@ def refresh_asset_library_catalog(self, asset_library_id: str) -> None: if not lib: logger.warning("AssetLibrary %s not found", asset_library_id) return - blend_path = lib.blend_file_path + resolved_path = resolve_asset_library_blend_path( + blend_file_path=lib.blend_file_path, + asset_library_id=lib.id, + ) + if resolved_path and resolved_path != lib.blend_file_path: + logger.warning( + "AssetLibrary %s path 
repaired from %s to %s before catalog refresh", + asset_library_id, + lib.blend_file_path, + resolved_path, + ) + lib.blend_file_path = resolved_path + db.commit() + blend_path = resolved_path or lib.blend_file_path engine.dispose() if not blend_path or not Path(blend_path).exists(): diff --git a/backend/app/domains/media/router.py b/backend/app/domains/media/router.py index 14dec35..394625a 100644 --- a/backend/app/domains/media/router.py +++ b/backend/app/domains/media/router.py @@ -10,6 +10,7 @@ from sqlalchemy import select, func from sqlalchemy.ext.asyncio import AsyncSession from app.database import get_db +from app.core.render_paths import resolve_result_path from app.domains.auth.models import User from app.domains.media.models import MediaAsset, MediaAssetType from app.domains.media.schemas import MediaAssetOut, MediaAssetBrowseItem, MediaAssetBrowseResponse @@ -19,6 +20,10 @@ from app.utils.auth import get_current_user router = APIRouter(prefix="/api/media", tags=["media"], redirect_slashes=False) +def _resolve_asset_candidate(key: str): + return resolve_result_path(key) + + async def _resolve_thumbnails_bulk(db: AsyncSession, assets: list) -> None: """Resolve thumbnail_url for assets using the same priority as product pages. 
@@ -275,15 +280,8 @@ async def thumbnail_asset( raise HTTPException(404, "Not a previewable asset") key = asset.storage_key - from app.config import settings - candidate = Path(key) if Path(key).is_absolute() else Path(settings.upload_dir) / key - if not candidate.exists() and "/shared/renders/" in key: - parts = key.split("/") - if len(parts) >= 2: - remapped = Path(settings.upload_dir) / "renders" / parts[-2] / parts[-1] - if remapped.exists(): - candidate = remapped - if candidate.exists(): + candidate = _resolve_asset_candidate(key) + if candidate is not None and candidate.exists(): return FileResponse( str(candidate), media_type=mime, headers={"Cache-Control": "max-age=86400, public"}, @@ -314,22 +312,8 @@ async def download_asset( mime = asset.mime_type or "application/octet-stream" # Local file path (absolute or relative to UPLOAD_DIR) - from app.config import settings - candidate = Path(key) - if not candidate.is_absolute(): - candidate = Path(settings.upload_dir) / key - # Legacy path remapping: /shared/renders/{uuid}/{file} → UPLOAD_DIR/renders/{uuid}/{file} - if not candidate.exists() and "/shared/renders/" in key: - import logging - parts = key.split("/") - if len(parts) >= 2: - remapped = Path(settings.upload_dir) / "renders" / parts[-2] / parts[-1] - if remapped.exists(): - logging.getLogger(__name__).warning( - "Remapped legacy path %s → %s", key, remapped - ) - candidate = remapped - if candidate.exists(): + candidate = _resolve_asset_candidate(key) + if candidate is not None and candidate.exists(): ext = candidate.suffix.lstrip(".") fname = f"{asset.asset_type.value}_{asset_id}.{ext or 'bin'}" return FileResponse( @@ -395,11 +379,8 @@ async def zip_download( fname = base try: # Check absolute path first (local filesystem) - candidate = Path(key) - if not candidate.is_absolute(): - from app.config import settings - candidate = Path(settings.upload_dir) / key - if candidate.exists(): + candidate = _resolve_asset_candidate(key) + if candidate is not 
None and candidate.exists(): data = candidate.read_bytes() else: data = storage.download_bytes(key) @@ -440,7 +421,7 @@ async def batch_delete_assets( ): """Permanently delete multiple MediaAsset records.""" from app.utils.auth import require_global_admin - require_global_admin(_user) + await require_global_admin(_user) deleted = 0 for aid in asset_ids: @@ -461,23 +442,15 @@ async def cleanup_orphaned_assets( """ import logging from pathlib import Path - from app.config import settings from app.core.storage import get_storage logger = logging.getLogger(__name__) storage = get_storage() def _file_exists(key: str) -> bool: - candidate = Path(key) if Path(key).is_absolute() else Path(settings.upload_dir) / key - if candidate.exists(): + candidate = _resolve_asset_candidate(key) + if candidate is not None and candidate.exists(): return True - # Legacy path remapping - if "/shared/renders/" in key: - parts = key.split("/") - if len(parts) >= 2: - remapped = Path(settings.upload_dir) / "renders" / parts[-2] / parts[-1] - if remapped.exists(): - return True # Check MinIO try: storage.download_bytes(key) diff --git a/backend/app/domains/notifications/service.py b/backend/app/domains/notifications/service.py index 2bc3c0f..a7e9ba5 100644 --- a/backend/app/domains/notifications/service.py +++ b/backend/app/domains/notifications/service.py @@ -5,7 +5,7 @@ to create notification rows in the audit_log table. 
""" import logging import uuid -from datetime import datetime +from datetime import datetime, timezone from sqlalchemy import create_engine, select from sqlalchemy.orm import Session @@ -23,6 +23,11 @@ CHANNEL_ALERT = "alert" # admin-only infrastructure issues _engine = None +def _utcnow_naive() -> datetime: + """Return UTC as a naive datetime for legacy TIMESTAMP WITHOUT TIME ZONE columns.""" + return datetime.now(timezone.utc).replace(tzinfo=None) + + def _get_engine(): global _engine if _engine is None: @@ -53,7 +58,7 @@ async def emit_notification( details=details, notification=True, channel=channel, - timestamp=datetime.utcnow(), + timestamp=_utcnow_naive(), ) db.add(entry) await db.commit() @@ -85,7 +90,7 @@ def emit_notification_sync( details=details, notification=True, channel=channel, - timestamp=datetime.utcnow(), + timestamp=_utcnow_naive(), ) session.add(entry) session.commit() @@ -149,7 +154,7 @@ def emit_batch_render_notification_sync(order_id: str) -> None: }, notification=True, channel=CHANNEL_NOTIFICATION, - timestamp=datetime.utcnow(), + timestamp=_utcnow_naive(), ) session.add(entry) session.commit() diff --git a/backend/app/domains/orders/service.py b/backend/app/domains/orders/service.py index c3ccf87..6e2afb9 100644 --- a/backend/app/domains/orders/service.py +++ b/backend/app/domains/orders/service.py @@ -1,5 +1,5 @@ """Order service — order number generation and business logic.""" -from datetime import datetime +from datetime import datetime, timezone from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy import select, func, create_engine, update as sql_update from sqlalchemy.orm import Session @@ -9,9 +9,14 @@ import logging logger = logging.getLogger(__name__) +def _utcnow_naive() -> datetime: + """Return UTC as a naive datetime for legacy TIMESTAMP WITHOUT TIME ZONE columns.""" + return datetime.now(timezone.utc).replace(tzinfo=None) + + async def generate_order_number(db: AsyncSession) -> str: """Generate next sequential order 
number: SA-2026-XXXXX.""" - year = datetime.utcnow().year + year = datetime.now(timezone.utc).year prefix = f"SA-{year}-" # Use MAX to find the highest existing sequence number this year. @@ -68,7 +73,7 @@ def check_order_completion(order_id: str) -> bool: return False # Auto-advance to completed - now = datetime.utcnow() + now = _utcnow_naive() session.execute( sql_update(Order) .where(Order.id == order_id) diff --git a/backend/app/domains/pipeline/tasks/export_glb.py b/backend/app/domains/pipeline/tasks/export_glb.py index a29d370..22fa428 100644 --- a/backend/app/domains/pipeline/tasks/export_glb.py +++ b/backend/app/domains/pipeline/tasks/export_glb.py @@ -13,8 +13,25 @@ from app.core.pipeline_logger import PipelineLogger logger = logging.getLogger(__name__) +def _usd_cache_hit_refresh_reason(cad_file, usd_asset, usd_render_path) -> str | None: + """Reuse the runtime freshness checks before accepting a USD cache hit.""" + from app.domains.rendering.workflow_runtime_services import _usd_master_refresh_reason + + return _usd_master_refresh_reason( + cad_file, + usd_asset=usd_asset, + usd_render_path=usd_render_path, + ) + + @celery_app.task(bind=True, name="app.tasks.step_tasks.generate_gltf_geometry_task", queue="asset_pipeline", max_retries=1) -def generate_gltf_geometry_task(self, cad_file_id: str): +def generate_gltf_geometry_task( + self, + cad_file_id: str, + workflow_run_id: str | None = None, + workflow_node_id: str | None = None, + **_: object, +): """Export a geometry GLB directly from STEP via OCC (no STL intermediary). Pipeline: @@ -94,10 +111,10 @@ def generate_gltf_geometry_task(self, cad_file_id: str): _current_hash = _compute_step_hash(str(step_path_str)) _cache_hit_asset_id = None - # Composite cache key includes deflection settings so changing them invalidates cache - # v3: removed BRepBuilderAPI_Transform, writer handles mm→m from STEP unit metadata + # Composite cache key includes deflection settings so changing them invalidates cache. 
+ # v5: occurrence-aware part-key stamping for repeated leaf meshes changed. effective_cache_key = ( - f"v3:{_current_hash}:{linear_deflection}:{angular_deflection}:{tessellation_engine}" + f"v5:{_current_hash}:{linear_deflection}:{angular_deflection}:{tessellation_engine}" if _current_hash else None ) @@ -112,6 +129,9 @@ def generate_gltf_geometry_task(self, cad_file_id: str): if stored_key == effective_cache_key: _asset_disk_path = _Path(app_settings.upload_dir) / existing_geo.storage_key if _asset_disk_path.exists(): + if cad_file.gltf_path != str(_asset_disk_path): + cad_file.gltf_path = str(_asset_disk_path) + session.commit() logger.info("[CACHE] cache key match — skipping geometry GLB tessellation for %s", cad_file_id) pl.step_done("export_glb_geometry", result={"cached": True, "asset_id": str(existing_geo.id)}) _cache_hit_asset_id = str(existing_geo.id) @@ -133,6 +153,20 @@ def generate_gltf_geometry_task(self, cad_file_id: str): generate_usd_master_task.delay(cad_file_id) except Exception: logger.debug("Could not queue generate_usd_master_task from cache-hit path (non-fatal)") + try: + from app.domains.rendering.tasks import _update_workflow_run_status + + _update_workflow_run_status( + cad_file_id, + "completed", + workflow_run_id=workflow_run_id, + workflow_node_id=workflow_node_id, + ) + except Exception: + logger.exception( + "Failed to update workflow state for cached GLB export %s", + cad_file_id, + ) return {"cached": True, "asset_id": _cache_hit_asset_id} step = _Path(step_path_str) @@ -219,6 +253,9 @@ def generate_gltf_geometry_task(self, cad_file_id: str): existing.render_config = {"cache_key": effective_cache_key} if product_id: existing.product_id = _uuid.UUID(product_id) + cad_file = _sess.get(CadFile, _uuid.UUID(cad_file_id)) + if cad_file is not None: + cad_file.gltf_path = str(output_path) _sess.commit() asset_id = str(existing.id) else: @@ -232,12 +269,26 @@ def generate_gltf_geometry_task(self, cad_file_id: str): 
render_config={"cache_key": effective_cache_key}, ) _sess.add(asset) + cad_file = _sess.get(CadFile, _uuid.UUID(cad_file_id)) + if cad_file is not None: + cad_file.gltf_path = str(output_path) _sess.commit() asset_id = str(asset.id) _eng2.dispose() pl.step_done("export_glb_geometry", result={"glb_path": str(output_path), "asset_id": asset_id}) logger.info("generate_gltf_geometry_task: MediaAsset %s created for cad %s", asset_id, cad_file_id) + try: + from app.domains.rendering.tasks import _update_workflow_run_status + + _update_workflow_run_status( + cad_file_id, + "completed", + workflow_run_id=workflow_run_id, + workflow_node_id=workflow_node_id, + ) + except Exception: + logger.exception("Failed to update workflow state for GLB export %s", cad_file_id) # Auto-chain USD master export so the canonical scene is always up to date try: @@ -346,6 +397,33 @@ def generate_usd_master_task(self, cad_file_id: str) -> dict: angular_deflection = float(sys_settings.get("render_angular_deflection", "0.05")) sharp_threshold = float(sys_settings.get("sharp_edge_threshold", "20.0")) + scripts_dir = _Path(_os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts")) + script_path = scripts_dir / "export_step_to_usd.py" + materials_helper_path = scripts_dir / "_blender_materials.py" + + if not script_path.exists(): + err = f"export_step_to_usd.py not found at {script_path}" + pl.step_error("usd_master", err, None) + raise RuntimeError(err) + + # Cache must include the active render-script revision. Otherwise + # material resolution fixes never invalidate previously generated USD masters. 
+ script_fingerprint = "unknown" + try: + import hashlib as _hashlib_script + + _script_hash = _hashlib_script.sha256() + for candidate in (script_path, materials_helper_path): + if not candidate.exists(): + continue + _script_hash.update(candidate.read_bytes()) + script_fingerprint = _script_hash.hexdigest()[:12] + except Exception as exc: + logger.warning( + "[USD_MASTER] failed to fingerprint render scripts, falling back to legacy cache key: %s", + exc, + ) + # Hash-based cache check: skip tessellation if file and settings haven't changed from app.domains.products.cache_service import compute_step_hash as _compute_step_hash_usd _current_hash_usd = _compute_step_hash_usd(str(step_path)) @@ -357,7 +435,7 @@ def generate_usd_master_task(self, cad_file_id: str) -> dict: _json.dumps(material_map, sort_keys=True).encode() ).hexdigest()[:12] if material_map else "none" effective_cache_key = ( - f"{_current_hash_usd}:{linear_deflection}:{angular_deflection}:{sharp_threshold}:{_mat_hash}" + f"{_current_hash_usd}:{linear_deflection}:{angular_deflection}:{sharp_threshold}:{_mat_hash}:{script_fingerprint}" if _current_hash_usd else None ) @@ -372,9 +450,21 @@ def generate_usd_master_task(self, cad_file_id: str) -> dict: if stored_key == effective_cache_key: _usd_disk_path = _Path(app_settings.upload_dir) / existing_usd.storage_key if _usd_disk_path.exists(): - logger.info("[CACHE] cache key match — skipping USD master tessellation for %s", cad_file_id) - pl.step_done("usd_master", result={"cached": True, "asset_id": str(existing_usd.id)}) - _cache_hit_asset_id = str(existing_usd.id) + refresh_reason = _usd_cache_hit_refresh_reason( + cad_file, + existing_usd, + _usd_disk_path, + ) + if refresh_reason is None: + logger.info("[CACHE] cache key match — skipping USD master tessellation for %s", cad_file_id) + pl.step_done("usd_master", result={"cached": True, "asset_id": str(existing_usd.id)}) + _cache_hit_asset_id = str(existing_usd.id) + else: + logger.info( + "[CACHE] USD 
cache key matched for %s but asset is stale (%s) — rebuilding", + cad_file_id, + refresh_reason, + ) else: logger.info("[CACHE] cache key match but USD asset missing on disk — re-running tessellation for %s", cad_file_id) else: @@ -396,13 +486,6 @@ def generate_usd_master_task(self, cad_file_id: str) -> dict: raise RuntimeError(err) output_path = step_path.parent / f"{step_path.stem}_master.usd" - scripts_dir = _Path(_os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts")) - script_path = scripts_dir / "export_step_to_usd.py" - - if not script_path.exists(): - err = f"export_step_to_usd.py not found at {script_path}" - pl.step_error("usd_master", err, None) - raise RuntimeError(err) cmd = [ _sys.executable, str(script_path), diff --git a/backend/app/domains/pipeline/tasks/extract_metadata.py b/backend/app/domains/pipeline/tasks/extract_metadata.py index fa53489..0a2e910 100644 --- a/backend/app/domains/pipeline/tasks/extract_metadata.py +++ b/backend/app/domains/pipeline/tasks/extract_metadata.py @@ -31,7 +31,13 @@ def _bbox_from_step_cadquery(step_path: str) -> dict | None: @celery_app.task(bind=True, name="app.tasks.step_tasks.process_step_file", queue="step_processing") -def process_step_file(self, cad_file_id: str): +def process_step_file( + self, + cad_file_id: str, + workflow_run_id: str | None = None, + workflow_node_id: str | None = None, + **_: object, +): """Process a STEP file: extract objects, generate thumbnail, convert to glTF. 
After processing completes, auto-populate cad_part_materials from Excel @@ -122,10 +128,24 @@ def process_step_file(self, cad_file_id: str): r.delete(lock_key) # always release on completion or unhandled error pl.step_done("process_step_file") + try: + from app.domains.rendering.tasks import _update_workflow_run_status - # Queue thumbnail rendering on the dedicated single-concurrency worker - from app.domains.pipeline.tasks.render_thumbnail import render_step_thumbnail - render_step_thumbnail.delay(cad_file_id) + _update_workflow_run_status( + cad_file_id, + "completed", + workflow_run_id=workflow_run_id, + workflow_node_id=workflow_node_id, + ) + except Exception: + logger.exception("Failed to update workflow state for process_step_file %s", cad_file_id) + + # Legacy flow still auto-queues thumbnail generation here. + # Graph-mode workflows dispatch explicit thumbnail save/render nodes instead. + if workflow_run_id is None: + from app.domains.pipeline.tasks.render_thumbnail import render_step_thumbnail + + render_step_thumbnail.delay(cad_file_id) def _auto_populate_materials_for_cad(cad_file_id: str, tenant_id: str | None = None) -> None: diff --git a/backend/app/domains/pipeline/tasks/render_order_line.py b/backend/app/domains/pipeline/tasks/render_order_line.py index f909deb..fa7304d 100644 --- a/backend/app/domains/pipeline/tasks/render_order_line.py +++ b/backend/app/domains/pipeline/tasks/render_order_line.py @@ -8,6 +8,7 @@ import logging from datetime import datetime from app.tasks.celery_app import celery_app +from app.core.render_paths import ensure_group_writable_dir from app.core.task_logs import log_task_event from app.core.pipeline_logger import PipelineLogger @@ -149,7 +150,7 @@ def render_order_line_task(self, order_line_id: str): product_name = render_invocation.product_name ot_name = render_invocation.output_type_name output_path = render_invocation.output_path - _Path(output_path).parent.mkdir(parents=True, exist_ok=True) + 
ensure_group_writable_dir(_Path(output_path).parent) render_width = render_invocation.width render_height = render_invocation.height render_engine = render_invocation.engine diff --git a/backend/app/domains/pipeline/tasks/render_thumbnail.py b/backend/app/domains/pipeline/tasks/render_thumbnail.py index 633c548..df5da95 100644 --- a/backend/app/domains/pipeline/tasks/render_thumbnail.py +++ b/backend/app/domains/pipeline/tasks/render_thumbnail.py @@ -19,6 +19,247 @@ logger = logging.getLogger(__name__) _THUMBNAIL_SAMPLE_CAP = 64 +def _resolve_thumbnail_render_context(session, cad) -> dict[str, object]: + """Reuse workflow material/USD resolution for CAD thumbnails when possible.""" + context: dict[str, object] = {} + if not cad: + return context + + parsed_objects = cad.parsed_objects if isinstance(cad.parsed_objects, dict) else {} + raw_part_names = parsed_objects.get("objects") if isinstance(parsed_objects, dict) else None + if isinstance(raw_part_names, list): + part_names_ordered = [ + str(part_name).strip() + for part_name in raw_part_names + if isinstance(part_name, str) and part_name.strip() + ] + if part_names_ordered: + context["part_names_ordered"] = part_names_ordered + + try: + from sqlalchemy import select + + from app.core.render_paths import resolve_result_path + from app.domains.media.models import MediaAsset, MediaAssetType + from app.domains.products.models import Product + from app.domains.rendering.workflow_runtime_services import ( + _build_effective_material_lookup, + _usd_master_refresh_reason, + ) + from app.services.material_service import resolve_material_map + from app.services.template_service import get_material_library_path_for_session + + product = session.execute( + select(Product) + .where(Product.cad_file_id == cad.id) + .order_by(Product.is_active.desc(), Product.updated_at.desc(), Product.created_at.desc()) + .limit(1) + ).scalar_one_or_none() + + material_library_path = get_material_library_path_for_session(session) + 
materials_source = product.cad_part_materials or [] if product else [] + raw_material_map = _build_effective_material_lookup(cad, materials_source) + if material_library_path and raw_material_map: + material_map = resolve_material_map(raw_material_map) + if material_map: + context["material_library_path"] = material_library_path + context["material_map"] = material_map + + usd_asset = session.execute( + select(MediaAsset) + .where( + MediaAsset.cad_file_id == cad.id, + MediaAsset.asset_type == MediaAssetType.usd_master, + ) + .order_by(MediaAsset.created_at.desc()) + .limit(1) + ).scalar_one_or_none() + if usd_asset: + usd_path = resolve_result_path(usd_asset.storage_key) + refresh_reason = _usd_master_refresh_reason( + cad, + usd_asset=usd_asset, + usd_render_path=usd_path, + ) + if refresh_reason is None and usd_path and usd_path.exists(): + context["usd_path"] = usd_path + except Exception: + logger.exception("Failed to resolve thumbnail render context for cad %s", getattr(cad, "id", None)) + + return context + + +def _render_thumbnail_core( + *, + cad_file_id: str, + workflow_run_id: str | None = None, + workflow_node_id: str | None = None, + renderer: str | None = None, + render_engine: str | None = None, + samples: int | None = None, + width: int | None = None, + height: int | None = None, + transparent_bg: bool | None = None, + include_postprocess: bool, + queue_legacy_glb_follow_up: bool, +) -> None: + """Render a CAD thumbnail with optional legacy post-processing.""" + pl = PipelineLogger(task_id=None) + pl.step_start("render_step_thumbnail", {"cad_file_id": cad_file_id}) + logger.info("Rendering thumbnail for CAD file: %s", cad_file_id) + + from app.core.tenant_context import resolve_tenant_id_for_cad + + tenant_id = resolve_tenant_id_for_cad(cad_file_id) + + try: + from app.models.cad_file import CadFile + from app.domains.products.cache_service import compute_step_hash + + with _pipeline_session(tenant_id) as session: + cad = session.get(CadFile, 
cad_file_id) + if cad and cad.stored_path and not cad.step_file_hash: + cad.step_file_hash = compute_step_hash(cad.stored_path) + session.commit() + logger.info("Saved step_file_hash for %s: %s…", cad_file_id, cad.step_file_hash[:12]) + except Exception: + logger.warning("step_file_hash computation failed for %s (non-fatal)", cad_file_id) + + render_context: dict[str, object] = {} + try: + from app.models.cad_file import CadFile + + with _pipeline_session(tenant_id) as session: + cad = session.get(CadFile, cad_file_id) + render_context = _resolve_thumbnail_render_context(session, cad) + except Exception: + logger.warning("thumbnail render context resolution failed for %s; using fallback render path", cad_file_id) + + try: + from app.services.step_processor import regenerate_cad_thumbnail + + pl.info("render_step_thumbnail", "Calling regenerate_cad_thumbnail") + with _capped_thumbnail_samples(): + success = regenerate_cad_thumbnail( + cad_file_id, + part_colors={}, + renderer=renderer, + render_engine=render_engine, + samples=samples, + width=width, + height=height, + transparent_bg=transparent_bg, + **render_context, + ) + if not success: + raise RuntimeError("regenerate_cad_thumbnail returned False") + except Exception as exc: + pl.step_error("render_step_thumbnail", f"Thumbnail render failed: {exc}", exc) + logger.error("Thumbnail render failed for %s: %s", cad_file_id, exc) + raise + + resolved_tenant_id: str | None = None + if include_postprocess: + try: + from app.models.cad_file import CadFile + from app.domains.rendering.workflow_runtime_services import resolve_cad_bbox + + with _pipeline_session(tenant_id) as session: + cad = session.get(CadFile, cad_file_id) + if not cad: + logger.warning("CadFile %s not found in post-render phase", cad_file_id) + else: + step_path = cad.stored_path + attrs = cad.mesh_attributes or {} + + if step_path and not attrs.get("dimensions_mm"): + step_file = Path(step_path) + glb_path = step_file.parent / 
f"{step_file.stem}_thumbnail.glb" + bbox_data = resolve_cad_bbox(step_path, glb_path=str(glb_path)).bbox_data + if bbox_data: + cad.mesh_attributes = {**attrs, **bbox_data} + attrs = cad.mesh_attributes + dims = bbox_data["dimensions_mm"] + logger.info( + "bbox for %s: %s×%s×%s mm", + cad_file_id, + dims["x"], + dims["y"], + dims["z"], + ) + + if step_path and "sharp_edge_pairs" not in attrs: + try: + from app.services.step_processor import extract_mesh_edge_data + + edge_data = extract_mesh_edge_data(step_path) + if edge_data: + cad.mesh_attributes = {**attrs, **edge_data} + n_pairs = len(edge_data.get("sharp_edge_pairs", [])) + logger.info( + "Sharp edge data extracted for %s: %s sharp edges", + cad_file_id, + n_pairs, + ) + except Exception: + logger.exception( + "Sharp edge extraction failed for %s (non-fatal)", + cad_file_id, + ) + + session.commit() + resolved_tenant_id = str(cad.tenant_id) if cad.tenant_id else None + except Exception: + logger.exception("Post-render processing failed for %s (non-fatal)", cad_file_id) + + try: + from app.domains.pipeline.tasks.extract_metadata import _auto_populate_materials_for_cad + + _auto_populate_materials_for_cad(cad_file_id, tenant_id=tenant_id) + except Exception: + logger.exception( + "Auto material population failed for cad_file %s (non-fatal)", + cad_file_id, + ) + + try: + if resolved_tenant_id: + from app.core.websocket import publish_event_sync + + publish_event_sync( + resolved_tenant_id, + { + "type": "cad_processing_complete", + "cad_file_id": cad_file_id, + "status": "completed", + }, + ) + except Exception: + logger.debug("WebSocket publish for CAD complete skipped (non-fatal)") + + if queue_legacy_glb_follow_up: + try: + from app.domains.pipeline.tasks.export_glb import generate_gltf_geometry_task + + generate_gltf_geometry_task.delay(cad_file_id) + pl.info("render_step_thumbnail", f"Queued generate_gltf_geometry_task for {cad_file_id}") + except Exception: + logger.debug("Could not queue 
generate_gltf_geometry_task (non-fatal)") + + pl.step_done("render_step_thumbnail") + try: + from app.domains.rendering.tasks import _update_workflow_run_status + + _update_workflow_run_status( + cad_file_id, + "completed", + workflow_run_id=workflow_run_id, + workflow_node_id=workflow_node_id, + ) + except Exception: + logger.exception("Failed to update workflow state for thumbnail render %s", cad_file_id) + + @contextmanager def _capped_thumbnail_samples(): """Temporarily cap render samples for thumbnail renders. @@ -73,123 +314,88 @@ def _pipeline_session(tenant_id: str | None = None): @celery_app.task(bind=True, name="app.tasks.step_tasks.render_step_thumbnail", queue="asset_pipeline") -def render_step_thumbnail(self, cad_file_id: str): +def render_step_thumbnail( + self, + cad_file_id: str, + workflow_run_id: str | None = None, + workflow_node_id: str | None = None, + renderer: str | None = None, + render_engine: str | None = None, + samples: int | None = None, + width: int | None = None, + height: int | None = None, + transparent_bg: bool | None = None, + **_: object, +): """Render the thumbnail for a freshly-processed STEP file. Runs on the dedicated asset_pipeline queue (concurrency=1) so the blender-renderer service is never overwhelmed by concurrent requests. On success, also auto-populates materials and marks the CadFile as completed. 
""" - pl = PipelineLogger(task_id=self.request.id) - pl.step_start("render_step_thumbnail", {"cad_file_id": cad_file_id}) - logger.info(f"Rendering thumbnail for CAD file: {cad_file_id}") - - from app.core.tenant_context import resolve_tenant_id_for_cad - _tenant_id = resolve_tenant_id_for_cad(cad_file_id) - - # ── Pre-render: compute hash ────────────────────────────────────────── try: - from app.models.cad_file import CadFile - from app.domains.products.cache_service import compute_step_hash - - with _pipeline_session(_tenant_id) as session: - cad = session.get(CadFile, cad_file_id) - if cad and cad.stored_path and not cad.step_file_hash: - cad.step_file_hash = compute_step_hash(cad.stored_path) - session.commit() - logger.info(f"Saved step_file_hash for {cad_file_id}: {cad.step_file_hash[:12]}…") - except Exception: - logger.warning(f"step_file_hash computation failed for {cad_file_id} (non-fatal)") - - # ── Render thumbnail (with capped samples for 512x512) ────────────── - try: - from app.services.step_processor import regenerate_cad_thumbnail - pl.info("render_step_thumbnail", "Calling regenerate_cad_thumbnail") - with _capped_thumbnail_samples(): - success = regenerate_cad_thumbnail(cad_file_id, part_colors={}) - if not success: - raise RuntimeError("regenerate_cad_thumbnail returned False") + _render_thumbnail_core( + cad_file_id=cad_file_id, + workflow_run_id=workflow_run_id, + workflow_node_id=workflow_node_id, + renderer=renderer, + render_engine=render_engine, + samples=samples, + width=width, + height=height, + transparent_bg=transparent_bg, + include_postprocess=True, + queue_legacy_glb_follow_up=workflow_run_id is None, + ) except Exception as exc: - pl.step_error("render_step_thumbnail", f"Thumbnail render failed: {exc}", exc) - logger.error(f"Thumbnail render failed for {cad_file_id}: {exc}") raise self.retry(exc=exc, countdown=30, max_retries=2) - # ── Post-render: bbox + sharp edges + materials (single session) ────── + 
+@celery_app.task(bind=True, name="app.tasks.step_tasks.render_graph_thumbnail", queue="asset_pipeline") +def render_graph_thumbnail( + self, + cad_file_id: str, + workflow_run_id: str | None = None, + workflow_node_id: str | None = None, + renderer: str | None = None, + render_engine: str | None = None, + samples: int | None = None, + width: int | None = None, + height: int | None = None, + transparent_bg: bool | None = None, + **_: object, +): + """Render a CAD thumbnail for graph workflows without legacy follow-up side effects.""" try: - from app.models.cad_file import CadFile - from app.domains.rendering.workflow_runtime_services import resolve_cad_bbox - - with _pipeline_session(_tenant_id) as session: - cad = session.get(CadFile, cad_file_id) - if not cad: - logger.warning(f"CadFile {cad_file_id} not found in post-render phase") - else: - step_path = cad.stored_path - attrs = cad.mesh_attributes or {} - - # Bounding box extraction - if step_path and not attrs.get("dimensions_mm"): - _step = Path(step_path) - _glb = _step.parent / f"{_step.stem}_thumbnail.glb" - bbox_data = resolve_cad_bbox(step_path, glb_path=str(_glb)).bbox_data - if bbox_data: - cad.mesh_attributes = {**attrs, **bbox_data} - attrs = cad.mesh_attributes - dims = bbox_data["dimensions_mm"] - logger.info(f"bbox for {cad_file_id}: {dims['x']}×{dims['y']}×{dims['z']} mm") - - # Sharp edge extraction (PCurve-based, runs on render-worker with OCP) - if step_path and "sharp_edge_pairs" not in attrs: - try: - from app.services.step_processor import extract_mesh_edge_data - edge_data = extract_mesh_edge_data(step_path) - if edge_data: - cad.mesh_attributes = {**attrs, **edge_data} - n_pairs = len(edge_data.get("sharp_edge_pairs", [])) - logger.info(f"Sharp edge data extracted for {cad_file_id}: {n_pairs} sharp edges") - except Exception: - logger.exception(f"Sharp edge extraction failed for {cad_file_id} (non-fatal)") - - session.commit() - - # WebSocket broadcast - _tid = str(cad.tenant_id) if 
cad.tenant_id else None - except Exception: - logger.exception(f"Post-render processing failed for {cad_file_id} (non-fatal)") - _tid = None - - # Auto-populate materials - try: - from app.domains.pipeline.tasks.extract_metadata import _auto_populate_materials_for_cad - _auto_populate_materials_for_cad(cad_file_id, tenant_id=_tenant_id) - except Exception: - logger.exception(f"Auto material population failed for cad_file {cad_file_id} (non-fatal)") - - # Broadcast WebSocket event - try: - if _tid: - from app.core.websocket import publish_event_sync - publish_event_sync(_tid, { - "type": "cad_processing_complete", - "cad_file_id": cad_file_id, - "status": "completed", - }) - except Exception: - logger.debug("WebSocket publish for CAD complete skipped (non-fatal)") - - # Auto-generate geometry GLB - try: - from app.domains.pipeline.tasks.export_glb import generate_gltf_geometry_task - generate_gltf_geometry_task.delay(cad_file_id) - pl.info("render_step_thumbnail", f"Queued generate_gltf_geometry_task for {cad_file_id}") - except Exception: - logger.debug("Could not queue generate_gltf_geometry_task (non-fatal)") - - pl.step_done("render_step_thumbnail") + _render_thumbnail_core( + cad_file_id=cad_file_id, + workflow_run_id=workflow_run_id, + workflow_node_id=workflow_node_id, + renderer=renderer, + render_engine=render_engine, + samples=samples, + width=width, + height=height, + transparent_bg=transparent_bg, + include_postprocess=False, + queue_legacy_glb_follow_up=False, + ) + except Exception as exc: + raise self.retry(exc=exc, countdown=30, max_retries=2) @celery_app.task(bind=True, name="app.tasks.step_tasks.regenerate_thumbnail", queue="asset_pipeline") -def regenerate_thumbnail(self, cad_file_id: str, part_colors: dict): +def regenerate_thumbnail( + self, + cad_file_id: str, + part_colors: dict, + renderer: str | None = None, + render_engine: str | None = None, + samples: int | None = None, + width: int | None = None, + height: int | None = None, + 
transparent_bg: bool | None = None, +): """Regenerate thumbnail with per-part colours.""" pl = PipelineLogger(task_id=self.request.id) pl.step_start("regenerate_thumbnail", {"cad_file_id": cad_file_id}) @@ -200,11 +406,40 @@ def regenerate_thumbnail(self, cad_file_id: str, part_colors: dict): _tenant_id = resolve_tenant_id_for_cad(cad_file_id) try: - from app.services.step_processor import regenerate_cad_thumbnail + from app.services.step_processor import MissingCadResourceError, regenerate_cad_thumbnail + + render_context: dict[str, object] = {} + try: + from app.models.cad_file import CadFile + + with _pipeline_session(_tenant_id) as session: + cad = session.get(CadFile, cad_file_id) + render_context = _resolve_thumbnail_render_context(session, cad) + except Exception: + logger.warning( + "thumbnail render context resolution failed for %s during regeneration; using fallback render path", + cad_file_id, + ) + with _capped_thumbnail_samples(): - success = regenerate_cad_thumbnail(cad_file_id, part_colors) + success = regenerate_cad_thumbnail( + cad_file_id, + part_colors, + renderer=renderer, + render_engine=render_engine, + samples=samples, + width=width, + height=height, + transparent_bg=transparent_bg, + **render_context, + ) if not success: raise RuntimeError("regenerate_cad_thumbnail returned False") + except MissingCadResourceError as exc: + pl.warning("regenerate_thumbnail", f"Skipping stale thumbnail regeneration: {exc}") + logger.warning("Skipping thumbnail regeneration for %s: %s", cad_file_id, exc) + pl.step_done("regenerate_thumbnail") + return except Exception as exc: pl.step_error("regenerate_thumbnail", f"Thumbnail regeneration failed: {exc}", exc) logger.error(f"Thumbnail regeneration failed for {cad_file_id}: {exc}") diff --git a/backend/app/domains/rendering/dispatch_service.py b/backend/app/domains/rendering/dispatch_service.py index 9b79e70..4821dc9 100644 --- a/backend/app/domains/rendering/dispatch_service.py +++ 
b/backend/app/domains/rendering/dispatch_service.py @@ -16,6 +16,8 @@ import logging logger = logging.getLogger(__name__) +_WORKFLOW_ROLLOUT_MODES = {"legacy_only", "shadow", "graph"} + def _build_rollout_signal( *, @@ -39,6 +41,13 @@ def _build_rollout_signal( } +def _normalize_workflow_rollout_mode(value: str | None) -> str: + normalized = (value or "legacy_only").strip().lower() + if normalized in _WORKFLOW_ROLLOUT_MODES: + return normalized + return "legacy_only" + + def dispatch_render_with_workflow(order_line_id: str) -> dict: """Dispatch a render for the given order line. @@ -54,12 +63,19 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict: from app.config import settings from app.domains.orders.models import OrderLine from app.domains.rendering.models import OutputType, WorkflowDefinition + from app.domains.rendering.output_type_contracts import ( + derive_supported_artifact_kinds_from_workflow_config, + ) from app.domains.rendering.workflow_config_utils import ( canonicalize_workflow_config, extract_runtime_workflow, get_workflow_execution_mode, ) - from app.domains.rendering.workflow_executor import prepare_workflow_context + from app.domains.rendering.workflow_executor import ( + WorkflowTaskSubmissionError, + prepare_workflow_context, + submit_prepared_workflow_tasks, + ) from app.domains.rendering.workflow_graph_runtime import ( execute_graph_workflow, find_unsupported_graph_nodes, @@ -150,7 +166,41 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict: ) return legacy_result - execution_mode = get_workflow_execution_mode(canonical_config, default="legacy") + supported_artifact_kinds = derive_supported_artifact_kinds_from_workflow_config(canonical_config) + output_type_artifact_kind = getattr(output_type, "artifact_kind", None) + if output_type_artifact_kind and output_type_artifact_kind not in supported_artifact_kinds: + supported = ", ".join(supported_artifact_kinds) if supported_artifact_kinds else "none" + logger.warning( + 
"order_line %s: workflow_definition_id %s is incompatible with output_type %s artifact_kind %s; " + "falling back to legacy dispatch", + order_line_id, + wf_def.id, + output_type.id, + output_type_artifact_kind, + ) + legacy_result = _legacy_dispatch(order_line_id) + legacy_result.update( + _build_rollout_signal( + gate_status="workflow_contract_mismatch", + ready=False, + reasons=[ + "Linked workflow does not produce the artifact kind required by the output type; legacy dispatch remains authoritative.", + f"Expected artifact kind: {output_type_artifact_kind}. Supported by workflow: [{supported}].", + ], + workflow_def_id=wf_def.id, + output_type_id=output_type.id, + ) + ) + return legacy_result + + configured_execution_mode = get_workflow_execution_mode(canonical_config, default="legacy") + workflow_rollout_mode = _normalize_workflow_rollout_mode( + getattr(output_type, "workflow_rollout_mode", None) + ) + legacy_runtime_gate_status = "workflow_legacy_runtime" + legacy_runtime_reasons = [ + "Workflow definition is active, but execution still uses the legacy runtime path." 
+ ] def _prepare_graph_context(target_mode: str): workflow_context = prepare_workflow_context( @@ -175,7 +225,38 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict: session.commit() return run - if execution_mode == "graph": + if configured_execution_mode in {"graph", "shadow"} and workflow_rollout_mode == "legacy_only": + logger.info( + "order_line %s: workflow_definition_id %s is graph-capable but output_type %s is pinned to legacy_only rollout", + order_line_id, + wf_def.id, + output_type.id, + ) + legacy_result = _legacy_dispatch(order_line_id) + legacy_result["workflow_rollout_mode"] = workflow_rollout_mode + legacy_result["configured_execution_mode"] = configured_execution_mode + legacy_result.update( + _build_rollout_signal( + gate_status="rollout_legacy_only", + ready=False, + reasons=[ + "Output type rollout mode is pinned to legacy_only; legacy dispatch remains authoritative.", + f"Linked workflow stays attached in configured execution mode '{configured_execution_mode}' until rollout is promoted.", + ], + workflow_def_id=wf_def.id, + output_type_id=output_type.id, + ) + ) + return legacy_result + + if workflow_rollout_mode in {"graph", "shadow"} and configured_execution_mode not in {"graph", "shadow"}: + legacy_runtime_gate_status = "rollout_requires_graph_workflow" + legacy_runtime_reasons = [ + f"Output type rollout mode '{workflow_rollout_mode}' requires a workflow configured for graph or shadow execution.", + f"Linked workflow is still configured for '{configured_execution_mode}', so legacy runtime remains authoritative.", + ] + + if configured_execution_mode in {"graph", "shadow"} and workflow_rollout_mode == "graph": try: workflow_context = _prepare_graph_context("graph") except Exception as exc: @@ -225,13 +306,44 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict: return legacy_result try: - dispatch_result = execute_graph_workflow(session, workflow_context) + dispatch_result = execute_graph_workflow( + session, + 
workflow_context, + dispatch_tasks=False, + ) session.commit() + submit_prepared_workflow_tasks(dispatch_result) except Exception as exc: session.rollback() session.add(run) mark_workflow_run_failed(run, str(exc)) session.commit() + if isinstance(exc, WorkflowTaskSubmissionError) and exc.submitted_task_ids: + logger.exception( + "order_line %s: graph workflow submission partially failed after %d task(s); " + "not falling back to legacy to avoid duplicate renders", + order_line_id, + len(exc.submitted_task_ids), + ) + return { + "backend": "workflow_graph", + "execution_mode": "graph", + "workflow_run_id": str(run.id), + "workflow_rollout_mode": workflow_rollout_mode, + "configured_execution_mode": configured_execution_mode, + "submission_status": "partial_failure", + "submitted_task_ids": exc.submitted_task_ids, + **_build_rollout_signal( + gate_status="graph_submission_failed", + ready=False, + reasons=[ + "Graph workflow task submission failed after some tasks were already queued.", + f"Submission error: {exc}.", + ], + workflow_def_id=wf_def.id, + output_type_id=output_type.id, + ), + } logger.exception( "order_line %s: graph workflow execution via definition %s failed, falling back to legacy dispatch", order_line_id, @@ -257,6 +369,8 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict: "workflow_run_id": str(run.id), "celery_task_id": dispatch_result.task_ids[0] if dispatch_result.task_ids else None, "task_ids": dispatch_result.task_ids, + "workflow_rollout_mode": workflow_rollout_mode, + "configured_execution_mode": configured_execution_mode, } result.update( _build_rollout_signal( @@ -267,10 +381,10 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict: workflow_def_id=wf_def.id, output_type_id=output_type.id, ) - ) + ) return result - if execution_mode == "shadow": + if configured_execution_mode in {"graph", "shadow"} and workflow_rollout_mode == "shadow": legacy_result = _legacy_dispatch(order_line_id) try: @@ -330,13 +444,43 @@ def 
dispatch_render_with_workflow(order_line_id: str) -> dict: return legacy_result try: - dispatch_result = execute_graph_workflow(session, workflow_context) + dispatch_result = execute_graph_workflow( + session, + workflow_context, + dispatch_tasks=False, + ) session.commit() + submit_prepared_workflow_tasks(dispatch_result) except Exception as exc: session.rollback() session.add(run) mark_workflow_run_failed(run, str(exc)) session.commit() + if isinstance(exc, WorkflowTaskSubmissionError) and exc.submitted_task_ids: + logger.exception( + "order_line %s: shadow workflow submission partially failed after %d task(s); " + "legacy dispatch remains authoritative", + order_line_id, + len(exc.submitted_task_ids), + ) + legacy_result["execution_mode"] = "shadow" + legacy_result["shadow_status"] = "partial_failure" + legacy_result["shadow_error"] = str(exc) + legacy_result["shadow_workflow_run_id"] = str(run.id) + legacy_result["shadow_submitted_task_ids"] = exc.submitted_task_ids + legacy_result.update( + _build_rollout_signal( + gate_status="shadow_submission_failed", + ready=False, + reasons=[ + "Shadow workflow task submission failed after some tasks were already queued.", + f"Submission error: {exc}.", + ], + workflow_def_id=wf_def.id, + output_type_id=output_type.id, + ) + ) + return legacy_result logger.exception( "order_line %s: shadow workflow execution via definition %s failed; legacy dispatch remains authoritative", order_line_id, @@ -364,6 +508,8 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict: legacy_result["shadow_status"] = "dispatched" legacy_result["shadow_workflow_run_id"] = str(run.id) legacy_result["shadow_task_ids"] = dispatch_result.task_ids + legacy_result["workflow_rollout_mode"] = workflow_rollout_mode + legacy_result["configured_execution_mode"] = configured_execution_mode legacy_result.update( _build_rollout_signal( gate_status="pending_shadow_verdict", @@ -375,7 +521,7 @@ def dispatch_render_with_workflow(order_line_id: str) -> 
dict: workflow_def_id=wf_def.id, output_type_id=output_type.id, ) - ) + ) return legacy_result workflow_type, params = extract_runtime_workflow(canonical_config) @@ -519,12 +665,14 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict: "execution_mode": "legacy", "workflow_run_id": str(run.id), "celery_task_id": celery_task_id, + "workflow_rollout_mode": workflow_rollout_mode, + "configured_execution_mode": configured_execution_mode, } result.update( _build_rollout_signal( - gate_status="workflow_legacy_runtime", + gate_status=legacy_runtime_gate_status, ready=False, - reasons=["Workflow definition is active, but execution still uses the legacy runtime path."], + reasons=legacy_runtime_reasons, workflow_def_id=wf_def.id, output_type_id=output_type.id, ) diff --git a/backend/app/domains/rendering/models.py b/backend/app/domains/rendering/models.py index d791611..1e6f5e9 100644 --- a/backend/app/domains/rendering/models.py +++ b/backend/app/domains/rendering/models.py @@ -1,5 +1,6 @@ import uuid from datetime import datetime +from typing import Any from sqlalchemy import String, DateTime, Boolean, Text, Integer, Float, ForeignKey, Table, Column from sqlalchemy.orm import Mapped, mapped_column, relationship from sqlalchemy.dialects.postgresql import UUID, JSONB @@ -15,6 +16,17 @@ render_template_output_types = Table( ) VALID_RENDER_BACKENDS = {"celery"} +OUTPUT_TYPE_WORKFLOW_FAMILIES = {"cad_file", "order_line"} +OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES = {"legacy_only", "shadow", "graph"} +OUTPUT_TYPE_ARTIFACT_KINDS = { + "still_image", + "turntable_video", + "model_export", + "thumbnail_image", + "blend_asset", + "package", + "custom", +} class OutputType(Base): @@ -23,14 +35,21 @@ class OutputType(Base): id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) name: Mapped[str] = mapped_column(String(200), unique=True, nullable=False) description: Mapped[str | None] = mapped_column(Text, nullable=True) - renderer: 
Mapped[str] = mapped_column(String(50), nullable=False, default="threejs") + renderer: Mapped[str] = mapped_column(String(50), nullable=False, default="blender") render_settings: Mapped[dict] = mapped_column(JSONB, nullable=False, default=dict) output_format: Mapped[str] = mapped_column(String(20), nullable=False, default="png") sort_order: Mapped[int] = mapped_column(Integer, nullable=False, default=0) compatible_categories: Mapped[list] = mapped_column(JSONB, default=list, server_default="[]") - render_backend: Mapped[str] = mapped_column(String(20), nullable=False, default="auto", server_default="auto") + render_backend: Mapped[str] = mapped_column(String(20), nullable=False, default="celery", server_default="auto") is_animation: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false") transparent_bg: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false") + workflow_family: Mapped[str] = mapped_column( + String(20), nullable=False, default="order_line", server_default="order_line" + ) + artifact_kind: Mapped[str] = mapped_column( + String(50), nullable=False, default="still_image", server_default="still_image" + ) + invocation_overrides: Mapped[dict] = mapped_column(JSONB, nullable=False, default=dict, server_default="{}") cycles_device: Mapped[str | None] = mapped_column(String(10), nullable=True, default=None) pricing_tier_id: Mapped[int | None] = mapped_column( Integer, ForeignKey("pricing_tiers.id", ondelete="SET NULL"), nullable=True, index=True @@ -49,6 +68,9 @@ class OutputType(Base): workflow_definition_id: Mapped[uuid.UUID | None] = mapped_column( UUID(as_uuid=True), ForeignKey("workflow_definitions.id", ondelete="SET NULL"), nullable=True ) + workflow_rollout_mode: Mapped[str] = mapped_column( + String(20), nullable=False, default="legacy_only", server_default="legacy_only" + ) order_lines: Mapped[list["OrderLine"]] = relationship("OrderLine", back_populates="output_type") 
pricing_tier: Mapped["PricingTier | None"] = relationship("PricingTier", back_populates="output_types") @@ -70,6 +92,12 @@ class RenderTemplate(Base): lighting_only: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false") shadow_catcher_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, server_default="false") camera_orbit: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, server_default="true") + workflow_input_schema: Mapped[list[dict[str, Any]]] = mapped_column( + JSONB, + nullable=False, + default=list, + server_default="[]", + ) is_active: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, server_default="true") tenant_id: Mapped[uuid.UUID | None] = mapped_column( UUID(as_uuid=True), ForeignKey("tenants.id"), nullable=True, index=True diff --git a/backend/app/domains/rendering/output_type_contracts.py b/backend/app/domains/rendering/output_type_contracts.py index 52397b9..db15fe6 100644 --- a/backend/app/domains/rendering/output_type_contracts.py +++ b/backend/app/domains/rendering/output_type_contracts.py @@ -3,6 +3,12 @@ from __future__ import annotations from collections.abc import Mapping from typing import Any, Literal +from app.core.process_steps import StepName +from app.domains.rendering.models import ( + OUTPUT_TYPE_ARTIFACT_KINDS, + OUTPUT_TYPE_WORKFLOW_FAMILIES, + OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES, +) from app.domains.rendering.workflow_config_utils import canonicalize_workflow_config from app.domains.rendering.workflow_node_registry import get_node_definition @@ -22,6 +28,11 @@ OutputTypeArtifactKind = Literal[ _MODEL_EXPORT_FORMATS = {"gltf", "glb", "stl", "obj", "usd", "usdz"} _VIDEO_FORMATS = {"mp4", "webm", "mov"} _IMAGE_FORMATS = {"png", "jpg", "jpeg", "webp"} +_BLEND_FORMATS = {"blend"} +_OUTPUT_FORMATS_BY_FAMILY: dict[OutputTypeWorkflowFamily, set[str]] = { + "cad_file": {*_IMAGE_FORMATS, *_MODEL_EXPORT_FORMATS}, + "order_line": 
{*_IMAGE_FORMATS, *_VIDEO_FORMATS, *_BLEND_FORMATS}, +} _ARTIFACT_KINDS_BY_FAMILY: dict[OutputTypeWorkflowFamily, set[OutputTypeArtifactKind]] = { "cad_file": {"thumbnail_image", "model_export", "package", "custom"}, "order_line": {"still_image", "turntable_video", "blend_asset", "package", "custom"}, @@ -42,6 +53,83 @@ INVOCATION_OVERRIDE_KEYS = ( "denoising_quality", "denoising_use_gpu", ) +_STATIC_RENDER_OVERRIDE_KEYS = ( + "width", + "height", + "engine", + "samples", + "bg_color", + "noise_threshold", + "denoiser", + "denoising_input_passes", + "denoising_prefilter", + "denoising_quality", + "denoising_use_gpu", +) +_ANIMATION_OVERRIDE_KEYS = ( + "frame_count", + "fps", + "turntable_axis", +) +_TURNABLE_AXES = {"world_x", "world_y", "world_z"} +_WORKFLOW_FAMILY_DISPLAY_ORDER: tuple[OutputTypeWorkflowFamily, ...] = ("order_line", "cad_file") +_WORKFLOW_ROLLOUT_DISPLAY_ORDER: tuple[str, ...] = ("legacy_only", "shadow", "graph") +_ARTIFACT_KIND_DISPLAY_ORDER: tuple[OutputTypeArtifactKind, ...] = ( + "still_image", + "turntable_video", + "model_export", + "thumbnail_image", + "blend_asset", + "package", + "custom", +) +_OUTPUT_FORMAT_DISPLAY_ORDER: tuple[str, ...] = ( + "png", + "jpg", + "jpeg", + "webp", + "mp4", + "webm", + "mov", + "gltf", + "glb", + "stl", + "obj", + "usd", + "usdz", + "blend", +) +_DEFAULT_OUTPUT_FORMAT_BY_ARTIFACT_KIND: dict[OutputTypeArtifactKind, str] = { + "still_image": "png", + "turntable_video": "mp4", + "model_export": "gltf", + "thumbnail_image": "png", + "blend_asset": "blend", + "package": "png", + "custom": "png", +} +_OUTPUT_TYPE_PROFILE_KEYS: tuple[str, ...] = ( + "transparent_bg", + "cycles_device", + "material_override", +) +_TEMPLATE_RUNTIME_KEYS: tuple[str, ...] = ( + "target_collection", + "lighting_only", + "shadow_catcher", + "camera_orbit", + "template_inputs", +) +_WORKFLOW_NODE_PARAMETER_OWNERSHIP_STEPS: tuple[StepName, ...] 
= ( + StepName.RESOLVE_TEMPLATE, + StepName.BLENDER_STILL, + StepName.BLENDER_TURNTABLE, + StepName.EXPORT_BLEND, +) + + +class InvalidInvocationOverridesError(ValueError): + pass def list_allowed_artifact_kinds_for_family( @@ -55,6 +143,79 @@ def list_allowed_artifact_kinds_for_family( return tuple(sorted(allowed)) +def list_allowed_output_formats_for_family(workflow_family: str) -> tuple[str, ...]: + normalized_family = (workflow_family or "order_line").strip().lower() + if normalized_family == "cad_file": + allowed = _OUTPUT_FORMATS_BY_FAMILY["cad_file"] + else: + allowed = _OUTPUT_FORMATS_BY_FAMILY["order_line"] + return tuple(sorted(allowed)) + + +def build_output_type_contract_catalog() -> dict[str, Any]: + workflow_families = [ + family for family in _WORKFLOW_FAMILY_DISPLAY_ORDER if family in OUTPUT_TYPE_WORKFLOW_FAMILIES + ] + workflow_rollout_modes = [ + mode for mode in _WORKFLOW_ROLLOUT_DISPLAY_ORDER if mode in OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES + ] + artifact_kinds = [ + artifact_kind + for artifact_kind in _ARTIFACT_KIND_DISPLAY_ORDER + if artifact_kind in OUTPUT_TYPE_ARTIFACT_KINDS + ] + + allowed_artifact_kinds_by_family = { + family: [ + artifact_kind + for artifact_kind in artifact_kinds + if artifact_kind in list_allowed_artifact_kinds_for_family(family) + ] + for family in workflow_families + } + allowed_output_formats_by_family = { + family: [ + output_format + for output_format in _OUTPUT_FORMAT_DISPLAY_ORDER + if output_format in list_allowed_output_formats_for_family(family) + ] + for family in workflow_families + } + allowed_invocation_override_keys_by_artifact_kind = { + artifact_kind: list( + list_allowed_invocation_override_keys_for_artifact_kind( + artifact_kind, + is_animation=artifact_kind == "turntable_video", + ) + ) + for artifact_kind in artifact_kinds + } + default_output_format_by_artifact_kind = { + artifact_kind: _DEFAULT_OUTPUT_FORMAT_BY_ARTIFACT_KIND[artifact_kind] + for artifact_kind in artifact_kinds + } + 
workflow_node_keys_by_step = { + step.value: [field.key for field in definition.fields] + for step in _WORKFLOW_NODE_PARAMETER_OWNERSHIP_STEPS + if (definition := get_node_definition(step.value)) is not None + } + + return { + "workflow_families": workflow_families, + "workflow_rollout_modes": workflow_rollout_modes, + "artifact_kinds": artifact_kinds, + "allowed_artifact_kinds_by_family": allowed_artifact_kinds_by_family, + "allowed_output_formats_by_family": allowed_output_formats_by_family, + "allowed_invocation_override_keys_by_artifact_kind": allowed_invocation_override_keys_by_artifact_kind, + "default_output_format_by_artifact_kind": default_output_format_by_artifact_kind, + "parameter_ownership": { + "output_type_profile_keys": list(_OUTPUT_TYPE_PROFILE_KEYS), + "template_runtime_keys": list(_TEMPLATE_RUNTIME_KEYS), + "workflow_node_keys_by_step": workflow_node_keys_by_step, + }, + } + + def infer_output_type_artifact_kind( output_format: str | None, is_animation: bool, @@ -65,6 +226,8 @@ def infer_output_type_artifact_kind( if is_animation or normalized_format in _VIDEO_FORMATS: return "turntable_video" + if normalized_format in _BLEND_FORMATS: + return "blend_asset" if normalized_format in _MODEL_EXPORT_FORMATS: return "model_export" if normalized_family == "cad_file" and normalized_format in _IMAGE_FORMATS: @@ -91,6 +254,14 @@ def validate_output_type_contract( f"'{workflow_family}'. Allowed: {allowed}" ) + allowed_output_formats = list_allowed_output_formats_for_family(normalized_family) + if normalized_format and normalized_format not in allowed_output_formats: + allowed = ", ".join(allowed_output_formats) + raise ValueError( + f"Output format '{output_format}' is not allowed for workflow_family " + f"'{workflow_family}'. 
Allowed: {allowed}" + ) + if normalized_family == "cad_file" and is_animation: raise ValueError("CAD-file workflows do not support animated output types") @@ -114,6 +285,20 @@ def validate_output_type_contract( f"({', '.join(sorted(_MODEL_EXPORT_FORMATS))})" ) + if normalized_artifact == "blend_asset": + if is_animation: + raise ValueError("Artifact kind 'blend_asset' does not support is_animation=true") + if normalized_format and normalized_format not in _BLEND_FORMATS: + raise ValueError( + "Artifact kind 'blend_asset' requires a blend output_format " + f"({', '.join(sorted(_BLEND_FORMATS))})" + ) + + if normalized_format in _BLEND_FORMATS and normalized_artifact != "blend_asset": + raise ValueError( + f"Output format '{output_format}' requires artifact kind 'blend_asset'" + ) + def infer_workflow_family_from_config(config: dict) -> ResolvedWorkflowFamily | None: normalized = canonicalize_workflow_config(config) @@ -121,6 +306,7 @@ def infer_workflow_family_from_config(config: dict) -> ResolvedWorkflowFamily | definition.family for node in normalized.get("nodes", []) if (definition := get_node_definition(node.get("step"))) is not None + if definition.family in {"cad_file", "order_line"} } if not families: return None @@ -129,14 +315,329 @@ def infer_workflow_family_from_config(config: dict) -> ResolvedWorkflowFamily | return next(iter(families)) +def derive_workflow_terminal_node_ids(config: dict[str, Any]) -> tuple[str, ...]: + normalized = canonicalize_workflow_config(config) + nodes = normalized.get("nodes", []) + if not nodes: + return () + + node_ids = { + str(node.get("id")) + for node in nodes + if node.get("id") not in (None, "") + } + upstream_ids = { + str(edge.get("from")) + for edge in normalized.get("edges", []) + if edge.get("from") not in (None, "") + } + return tuple(sorted(node_id for node_id in node_ids if node_id not in upstream_ids)) + + +def derive_supported_artifact_kinds_from_workflow_config( + config: dict[str, Any], +) -> 
tuple[OutputTypeArtifactKind, ...]: + try: + normalized = canonicalize_workflow_config(config) + except Exception: + return () + nodes = normalized.get("nodes", []) + if not nodes: + return () + + nodes_by_id = { + str(node.get("id")): node + for node in nodes + if node.get("id") not in (None, "") + } + incoming_by_target: dict[str, set[str]] = {} + for edge in normalized.get("edges", []): + source = edge.get("from") + target = edge.get("to") + if source in (None, "") or target in (None, ""): + continue + incoming_by_target.setdefault(str(target), set()).add(str(source)) + + cache: dict[str, set[str]] = {} + + def _collect_upstream_steps(node_id: str) -> set[str]: + cached = cache.get(node_id) + if cached is not None: + return set(cached) + + steps: set[str] = set() + node = nodes_by_id.get(node_id) + if node is not None and node.get("step"): + steps.add(str(node["step"])) + + for upstream_id in incoming_by_target.get(node_id, set()): + steps.update(_collect_upstream_steps(upstream_id)) + + cache[node_id] = set(steps) + return steps + + def _derive_node_artifact_kinds(node_id: str) -> set[OutputTypeArtifactKind]: + node = nodes_by_id.get(node_id) + if node is None: + return set() + + step = str(node.get("step") or "") + if step in {StepName.BLENDER_STILL.value}: + return {"still_image"} + if step in {StepName.BLENDER_TURNTABLE.value}: + return {"turntable_video"} + if step in {StepName.EXPORT_BLEND.value}: + return {"blend_asset"} + if step in { + StepName.OCC_GLB_EXPORT.value, + StepName.STL_CACHE_GENERATE.value, + }: + return {"model_export"} + if step == StepName.THUMBNAIL_SAVE.value: + return {"thumbnail_image"} + if step != StepName.OUTPUT_SAVE.value: + return set() + + upstream_steps = _collect_upstream_steps(node_id) + has_still = StepName.BLENDER_STILL.value in upstream_steps + has_turntable = StepName.BLENDER_TURNTABLE.value in upstream_steps + + if has_still and has_turntable: + return set() + if has_turntable: + return {"turntable_video"} + if has_still: 
+ return {"still_image"} + return set() + + supported: set[OutputTypeArtifactKind] = set() + for terminal_id in derive_workflow_terminal_node_ids(normalized): + supported.update(_derive_node_artifact_kinds(terminal_id)) + return tuple(sorted(supported)) + + +def workflow_supports_artifact_kind( + config: dict[str, Any], + artifact_kind: str, +) -> bool: + normalized_artifact = (artifact_kind or "").strip().lower() + if not normalized_artifact: + return False + return normalized_artifact in derive_supported_artifact_kinds_from_workflow_config(config) + + +def list_allowed_invocation_override_keys_for_artifact_kind( + artifact_kind: str, + *, + is_animation: bool = False, +) -> tuple[str, ...]: + normalized_artifact = (artifact_kind or "").strip().lower() + if normalized_artifact in {"still_image", "thumbnail_image"}: + return _STATIC_RENDER_OVERRIDE_KEYS + if normalized_artifact == "turntable_video": + return _STATIC_RENDER_OVERRIDE_KEYS + _ANIMATION_OVERRIDE_KEYS + if normalized_artifact in {"model_export", "blend_asset"}: + return () + if normalized_artifact in {"package", "custom"}: + return INVOCATION_OVERRIDE_KEYS + if is_animation: + return _STATIC_RENDER_OVERRIDE_KEYS + _ANIMATION_OVERRIDE_KEYS + return _STATIC_RENDER_OVERRIDE_KEYS + + +def _normalize_positive_int_override(key: str, value: Any) -> int: + if isinstance(value, bool): + raise InvalidInvocationOverridesError(f"Invocation override '{key}' must be a positive integer") + try: + normalized = int(str(value).strip()) if isinstance(value, str) else int(value) + except (TypeError, ValueError) as exc: + raise InvalidInvocationOverridesError( + f"Invocation override '{key}' must be a positive integer" + ) from exc + if normalized <= 0: + raise InvalidInvocationOverridesError(f"Invocation override '{key}' must be greater than zero") + return normalized + + +def _normalize_string_override(key: str, value: Any) -> str: + if not isinstance(value, str): + raise InvalidInvocationOverridesError(f"Invocation 
override '{key}' must be a string") + normalized = value.strip() + if not normalized: + raise InvalidInvocationOverridesError(f"Invocation override '{key}' must not be blank") + return normalized + + +def _normalize_noise_threshold_override(value: Any) -> str: + if isinstance(value, bool): + raise InvalidInvocationOverridesError("Invocation override 'noise_threshold' must be a string or number") + if isinstance(value, (int, float)): + return str(value) + if isinstance(value, str) and value.strip(): + return value.strip() + raise InvalidInvocationOverridesError("Invocation override 'noise_threshold' must be a string or number") + + +def _normalize_gpu_toggle_override(value: Any) -> str: + if isinstance(value, bool): + return "1" if value else "0" + if isinstance(value, int) and value in {0, 1}: + return str(value) + if isinstance(value, str): + normalized = value.strip().lower() + if normalized in {"1", "true", "enabled", "yes"}: + return "1" + if normalized in {"0", "false", "disabled", "no"}: + return "0" + raise InvalidInvocationOverridesError( + "Invocation override 'denoising_use_gpu' must be one of: 1, 0, true, false" + ) + + +def _normalize_invocation_override_value(key: str, value: Any) -> int | str: + if key in {"width", "height", "samples", "frame_count", "fps"}: + return _normalize_positive_int_override(key, value) + if key == "turntable_axis": + normalized = _normalize_string_override(key, value).lower() + if normalized not in _TURNABLE_AXES: + raise InvalidInvocationOverridesError( + "Invocation override 'turntable_axis' must be one of: world_x, world_y, world_z" + ) + return normalized + if key == "noise_threshold": + return _normalize_noise_threshold_override(value) + if key == "denoising_use_gpu": + return _normalize_gpu_toggle_override(value) + return _normalize_string_override(key, value) + + +def validate_and_normalize_invocation_overrides( + raw: Mapping[str, Any] | None, + *, + artifact_kind: str | None = None, + is_animation: bool = False, + 
reject_unknown_keys: bool = False, +) -> dict[str, Any]: + if raw is None: + return {} + if not isinstance(raw, Mapping): + raise InvalidInvocationOverridesError("invocation_overrides must be an object") + + normalized: dict[str, Any] = {} + unknown_keys: list[str] = [] + for key, value in raw.items(): + key_name = str(key) + if key_name not in INVOCATION_OVERRIDE_KEYS: + if reject_unknown_keys: + unknown_keys.append(key_name) + continue + if value in (None, ""): + continue + normalized[key_name] = _normalize_invocation_override_value(key_name, value) + + if unknown_keys: + supported = ", ".join(INVOCATION_OVERRIDE_KEYS) + raise InvalidInvocationOverridesError( + f"Unsupported invocation override keys: {', '.join(sorted(unknown_keys))}. Supported: {supported}" + ) + + if artifact_kind is not None: + allowed_keys = set( + list_allowed_invocation_override_keys_for_artifact_kind( + artifact_kind, + is_animation=is_animation, + ) + ) + disallowed = sorted(key for key in normalized if key not in allowed_keys) + if disallowed: + raise InvalidInvocationOverridesError( + f"Invocation overrides not allowed for artifact kind '{artifact_kind}': {', '.join(disallowed)}" + ) + return normalized + + +def resolve_output_type_invocation_overrides( + render_settings: Mapping[str, Any] | None, + invocation_overrides: Mapping[str, Any] | None, + *, + artifact_kind: str, + is_animation: bool = False, +) -> dict[str, Any]: + merged = merge_output_type_invocation_overrides(render_settings, invocation_overrides) + allowed_keys = set( + list_allowed_invocation_override_keys_for_artifact_kind( + artifact_kind, + is_animation=is_animation, + ) + ) + return { + key: value + for key, value in merged.items() + if key in allowed_keys + } + + +def build_output_type_invocation_profile( + *, + renderer: str, + render_backend: str, + workflow_family: str, + artifact_kind: str, + output_format: str | None, + is_animation: bool, + workflow_definition_id: Any = None, + workflow_rollout_mode: str = 
"legacy_only", + transparent_bg: bool = False, + cycles_device: str | None = None, + material_override: str | None = None, + render_settings: Mapping[str, Any] | None = None, + invocation_overrides: Mapping[str, Any] | None = None, +) -> dict[str, Any]: + resolved_artifact_kind = artifact_kind or infer_output_type_artifact_kind( + output_format, + is_animation, + workflow_family, + ) + resolved_overrides = resolve_output_type_invocation_overrides( + render_settings, + invocation_overrides, + artifact_kind=resolved_artifact_kind, + is_animation=is_animation, + ) + return { + "renderer": renderer, + "render_backend": render_backend, + "workflow_family": workflow_family, + "artifact_kind": resolved_artifact_kind, + "output_format": (output_format or "").strip().lower(), + "is_animation": bool(is_animation), + "workflow_definition_id": workflow_definition_id, + "workflow_rollout_mode": workflow_rollout_mode, + "transparent_bg": bool(transparent_bg), + "cycles_device": cycles_device, + "material_override": material_override, + "allowed_override_keys": list( + list_allowed_invocation_override_keys_for_artifact_kind( + resolved_artifact_kind, + is_animation=is_animation, + ) + ), + "invocation_overrides": resolved_overrides, + } + + def normalize_invocation_overrides(raw: Mapping[str, Any] | None) -> dict[str, Any]: if not isinstance(raw, Mapping): return {} normalized: dict[str, Any] = {} for key in INVOCATION_OVERRIDE_KEYS: value = raw.get(key) - if value not in (None, ""): - normalized[key] = value + if value in (None, ""): + continue + try: + normalized[key] = _normalize_invocation_override_value(key, value) + except InvalidInvocationOverridesError: + continue return normalized diff --git a/backend/app/domains/rendering/schemas.py b/backend/app/domains/rendering/schemas.py index 2db6c2d..d99b757 100644 --- a/backend/app/domains/rendering/schemas.py +++ b/backend/app/domains/rendering/schemas.py @@ -1,22 +1,27 @@ import uuid from datetime import datetime -from pydantic 
import BaseModel +from pydantic import BaseModel, Field class OutputTypeCreate(BaseModel): name: str description: str | None = None - renderer: str = "threejs" + renderer: str = "blender" render_settings: dict = {} output_format: str = "png" sort_order: int = 0 is_active: bool = True compatible_categories: list[str] = [] - render_backend: str = "auto" + render_backend: str = "celery" is_animation: bool = False transparent_bg: bool = False pricing_tier_id: int | None = None cycles_device: str | None = None + workflow_family: str = "order_line" + artifact_kind: str | None = None + invocation_overrides: dict = {} + workflow_definition_id: uuid.UUID | None = None + workflow_rollout_mode: str = "legacy_only" material_override: str | None = None @@ -32,12 +37,43 @@ class OutputTypePatch(BaseModel): render_backend: str | None = None is_animation: bool | None = None transparent_bg: bool | None = None + workflow_family: str | None = None + artifact_kind: str | None = None + invocation_overrides: dict | None = None pricing_tier_id: int | None = None cycles_device: str | None = None workflow_definition_id: uuid.UUID | None = None + workflow_rollout_mode: str | None = None material_override: str | None = None +class OutputTypeInvocationProfileOut(BaseModel): + renderer: str + render_backend: str + workflow_family: str + artifact_kind: str + output_format: str + is_animation: bool + workflow_definition_id: uuid.UUID | None = None + workflow_rollout_mode: str = "legacy_only" + transparent_bg: bool + cycles_device: str | None = None + material_override: str | None = None + allowed_override_keys: list[str] = Field(default_factory=list) + invocation_overrides: dict = Field(default_factory=dict) + + +class OutputTypeContractCatalogOut(BaseModel): + workflow_families: list[str] = Field(default_factory=list) + workflow_rollout_modes: list[str] = Field(default_factory=list) + artifact_kinds: list[str] = Field(default_factory=list) + allowed_artifact_kinds_by_family: dict[str, 
list[str]] = Field(default_factory=dict) + allowed_output_formats_by_family: dict[str, list[str]] = Field(default_factory=dict) + allowed_invocation_override_keys_by_artifact_kind: dict[str, list[str]] = Field(default_factory=dict) + default_output_format_by_artifact_kind: dict[str, str] = Field(default_factory=dict) + parameter_ownership: dict[str, dict | list[str]] = Field(default_factory=dict) + + class OutputTypeOut(BaseModel): id: uuid.UUID name: str @@ -50,13 +86,18 @@ class OutputTypeOut(BaseModel): render_backend: str is_animation: bool transparent_bg: bool + workflow_family: str + artifact_kind: str + invocation_overrides: dict cycles_device: str | None = None pricing_tier_id: int | None = None pricing_tier_name: str | None = None price_per_item: float | None = None workflow_definition_id: uuid.UUID | None = None + workflow_rollout_mode: str workflow_name: str | None = None material_override: str | None = None + invocation_profile: OutputTypeInvocationProfileOut | None = None is_active: bool created_at: datetime updated_at: datetime @@ -159,11 +200,28 @@ class WorkflowDefinitionOut(BaseModel): name: str output_type_id: uuid.UUID | None config: dict + family: str | None = None + supported_artifact_kinds: list[str] = Field(default_factory=list) + rollout_summary: "WorkflowRolloutSummaryOut" = Field( + default_factory=lambda: WorkflowRolloutSummaryOut() + ) is_active: bool created_at: datetime model_config = {"from_attributes": True} +class WorkflowDraftPreflightRequest(BaseModel): + context_id: str + config: dict + workflow_id: uuid.UUID | None = None + + +class WorkflowDraftDispatchRequest(BaseModel): + context_id: str + config: dict + workflow_id: uuid.UUID | None = None + + class WorkflowNodeResultOut(BaseModel): id: uuid.UUID node_name: str @@ -190,6 +248,38 @@ class WorkflowRunOut(BaseModel): model_config = {"from_attributes": True} +class WorkflowRolloutLatestRunOut(BaseModel): + workflow_run_id: uuid.UUID + execution_mode: str + status: str + 
created_at: datetime + completed_at: datetime | None = None + + +class WorkflowRolloutLinkedOutputTypeOut(BaseModel): + id: uuid.UUID + name: str + is_active: bool + artifact_kind: str + workflow_rollout_mode: str + + +class WorkflowRolloutSummaryOut(BaseModel): + linked_output_type_count: int = 0 + active_output_type_count: int = 0 + linked_output_type_names: list[str] = Field(default_factory=list) + linked_output_types: list[WorkflowRolloutLinkedOutputTypeOut] = Field(default_factory=list) + rollout_modes: list[str] = Field(default_factory=list) + has_blocking_contracts: bool = False + blocking_reasons: list[str] = Field(default_factory=list) + latest_run: WorkflowRolloutLatestRunOut | None = None + latest_shadow_run: WorkflowRolloutLatestRunOut | None = None + latest_rollout_gate_verdict: str | None = None + latest_rollout_ready: bool | None = None + latest_rollout_status: str | None = None + latest_rollout_reasons: list[str] = Field(default_factory=list) + + class WorkflowComparisonArtifactOut(BaseModel): path: str | None storage_key: str | None @@ -208,8 +298,58 @@ class WorkflowRunComparisonOut(BaseModel): execution_mode: str status: str summary: str + rollout_gate_verdict: str + workflow_rollout_ready: bool + workflow_rollout_status: str + rollout_reasons: list[str] = [] + rollout_thresholds: dict[str, float] = Field(default_factory=dict) authoritative_output: WorkflowComparisonArtifactOut observer_output: WorkflowComparisonArtifactOut exact_match: bool | None dimensions_match: bool | None mean_pixel_delta: float | None + + +class WorkflowPreflightIssueOut(BaseModel): + severity: str + code: str + message: str + node_id: str | None = None + step: str | None = None + + +class WorkflowPreflightNodeOut(BaseModel): + node_id: str + step: str + label: str | None = None + execution_kind: str + supported: bool + status: str + issues: list[WorkflowPreflightIssueOut] = [] + + +class WorkflowPreflightOut(BaseModel): + workflow_id: uuid.UUID | None = None + context_id: 
str + context_kind: str | None = None + expected_context_kind: str + execution_mode: str + graph_dispatch_allowed: bool + summary: str + resolved_order_line_id: uuid.UUID | None = None + resolved_cad_file_id: uuid.UUID | None = None + unsupported_node_ids: list[str] = [] + issues: list[WorkflowPreflightIssueOut] = [] + nodes: list[WorkflowPreflightNodeOut] = [] + + +class WorkflowOrderLineContextOptionOut(BaseModel): + value: uuid.UUID + label: str + meta: str + + +class WorkflowOrderLineContextGroupOut(BaseModel): + order_id: uuid.UUID + order_label: str + options: list[WorkflowOrderLineContextOptionOut] = [] diff --git a/backend/app/domains/rendering/tasks.py b/backend/app/domains/rendering/tasks.py index fd82255..c9fc554 100644 --- a/backend/app/domains/rendering/tasks.py +++ b/backend/app/domains/rendering/tasks.py @@ -7,13 +7,106 @@ Phase A2: Initial implementation replacing the blender-renderer HTTP service. Phase B: This module will be expanded as part of the Domain-Driven restructure. 
""" import logging +import uuid from pathlib import Path +from app.core.render_paths import ( + build_order_line_export_path, + build_order_line_step_render_path, + ensure_group_writable_dir, +) from app.tasks.celery_app import celery_app from app.core.task_logs import log_task_event logger = logging.getLogger(__name__) +_RENDER_STILL_CONTROL_PARAM_KEYS = { + "workflow_run_id", + "workflow_node_id", + "publish_asset_enabled", + "observer_output_enabled", + "graph_authoritative_output_enabled", + "graph_output_node_ids", + "graph_notify_node_ids", + "emit_events", + "job_document_enabled", + "emit_legacy_notifications", + "output_name_suffix", +} + + +def _normalize_render_output_extension(value: object) -> str | None: + if value in (None, ""): + return None + normalized = str(value).strip().lower() + if normalized in {"jpeg", "jpg"}: + return "jpg" + if normalized in {"png", "webp", "mp4", "blend"}: + return normalized + return None + + +def _resolve_order_line_still_output_extension( + order_line_id: str, + params: dict | None = None, +) -> str: + override_extension = _normalize_render_output_extension((params or {}).get("output_format")) + if override_extension in {"png", "jpg", "webp"}: + return override_extension + + try: + from sqlalchemy import select + from sqlalchemy.orm import selectinload + + from app.core.db_utils import get_sync_session + from app.domains.orders.models import OrderLine + from app.domains.rendering.workflow_runtime_services import _resolve_render_output_extension + + with get_sync_session() as db: + line = db.execute( + select(OrderLine) + .options(selectinload(OrderLine.output_type)) + .where(OrderLine.id == order_line_id) + ).scalar_one_or_none() + if line is None: + return "png" + resolved = _resolve_render_output_extension(line) + return resolved if resolved in {"png", "jpg", "webp"} else "png" + except Exception as exc: + logger.warning( + "Failed to resolve still output extension for order_line %s: %s", + order_line_id, + exc, + ) 
+ return "png" + + +def _normalize_order_line_still_params(params: dict) -> dict: + """Map legacy workflow/editor params onto render_still kwargs.""" + normalized = dict(params) + normalized.pop("use_custom_render_settings", None) + + legacy_engine = normalized.pop("render_engine", None) + if legacy_engine is not None and normalized.get("engine") is None: + normalized["engine"] = legacy_engine + + resolution = normalized.pop("resolution", None) + if ( + isinstance(resolution, (list, tuple)) + and len(resolution) == 2 + ): + normalized.setdefault("width", int(resolution[0])) + normalized.setdefault("height", int(resolution[1])) + + usd_path = normalized.get("usd_path") + if isinstance(usd_path, str) and usd_path.strip(): + normalized["usd_path"] = Path(usd_path) + + for key in _RENDER_STILL_CONTROL_PARAM_KEYS: + normalized.pop(key, None) + + return normalized + def _update_workflow_run_status( order_line_id: str, @@ -25,82 +118,634 @@ def _update_workflow_run_status( ) -> None: """Update WorkflowRun / WorkflowNodeResult state after task completion.""" try: - import asyncio import uuid from datetime import datetime as _dt - async def _run(): - from app.database import AsyncSessionLocal - from app.domains.rendering.models import WorkflowNodeResult, WorkflowRun - from sqlalchemy import select as _sel + from sqlalchemy import select as _sel - async with AsyncSessionLocal() as db: - run = None - if workflow_run_id: - try: - resolved_run_id = uuid.UUID(str(workflow_run_id)) - except (TypeError, ValueError): - resolved_run_id = workflow_run_id - run_res = await db.execute( - _sel(WorkflowRun).where(WorkflowRun.id == resolved_run_id) + from app.core.db_utils import get_sync_session + from app.domains.rendering.models import WorkflowNodeResult, WorkflowRun + + with get_sync_session() as db: + run = None + if workflow_run_id: + try: + resolved_run_id = uuid.UUID(str(workflow_run_id)) + except (TypeError, ValueError): + resolved_run_id = workflow_run_id + run = db.execute( + 
_sel(WorkflowRun).where(WorkflowRun.id == resolved_run_id) + ).scalar_one_or_none() + else: + run = db.execute( + _sel(WorkflowRun) + .where(WorkflowRun.order_line_id == order_line_id) + .order_by(WorkflowRun.created_at.desc()) + .limit(1) + ).scalar_one_or_none() + + if run is None: + return + + if workflow_node_id: + node_result = db.execute( + _sel(WorkflowNodeResult).where( + WorkflowNodeResult.run_id == run.id, + WorkflowNodeResult.node_name == workflow_node_id, ) - run = run_res.scalar_one_or_none() - else: - res = await db.execute( - _sel(WorkflowRun) - .where(WorkflowRun.order_line_id == order_line_id) - .order_by(WorkflowRun.created_at.desc()) - .limit(1) - ) - run = res.scalar_one_or_none() - - if run is None: - return - - if workflow_node_id: - node_res = await db.execute( - _sel(WorkflowNodeResult).where( - WorkflowNodeResult.run_id == run.id, - WorkflowNodeResult.node_name == workflow_node_id, - ) - ) - node_result = node_res.scalar_one_or_none() - if node_result is not None: - metadata = dict(node_result.output or {}) - if error: - metadata["last_error"] = error[:2000] - node_result.status = status - node_result.log = error[:2000] if error else None - node_result.output = metadata - - node_results_res = await db.execute( - _sel(WorkflowNodeResult).where(WorkflowNodeResult.run_id == run.id) - ) - node_results = list(node_results_res.scalars().all()) - - if any(node.status == "failed" for node in node_results): - run.status = "failed" - run.completed_at = _dt.utcnow() + ).scalar_one_or_none() + if node_result is not None: + metadata = dict(node_result.output or {}) if error: - run.error_message = error[:2000] - elif any(node.status in {"pending", "queued", "running", "retrying"} for node in node_results): - run.status = "pending" - run.completed_at = None - if status != "failed": - run.error_message = None - else: - run.status = status - run.completed_at = _dt.utcnow() - if status != "failed": - run.error_message = None + metadata["last_error"] = 
error[:2000] + node_result.status = status + node_result.log = error[:2000] if error else None + node_result.output = metadata - await db.commit() + node_results = list( + db.execute( + _sel(WorkflowNodeResult).where(WorkflowNodeResult.run_id == run.id) + ).scalars().all() + ) - asyncio.get_event_loop().run_until_complete(_run()) + if any(node.status == "failed" for node in node_results): + run.status = "failed" + run.completed_at = _dt.utcnow() + if error: + run.error_message = error[:2000] + elif any(node.status in {"pending", "queued", "running", "retrying"} for node in node_results): + run.status = "pending" + run.completed_at = None + if status != "failed": + run.error_message = None + else: + run.status = status + run.completed_at = _dt.utcnow() + if status != "failed": + run.error_message = None except Exception as _exc: logger.warning("Failed to update WorkflowRun status for line %s: %s", order_line_id, _exc) +def _mark_workflow_node_running( + order_line_id: str, + *, + workflow_run_id: str | None = None, + workflow_node_id: str | None = None, + task_id: str | None = None, +) -> None: + if not workflow_node_id: + return + + try: + from app.core.db_utils import get_sync_session + from app.domains.rendering.models import WorkflowNodeResult + from sqlalchemy import select as _sel + + with get_sync_session() as db: + import uuid + + from app.domains.rendering.models import WorkflowRun + + run = None + if workflow_run_id: + try: + resolved_run_id = uuid.UUID(str(workflow_run_id)) + except (TypeError, ValueError): + resolved_run_id = workflow_run_id + run = db.execute( + _sel(WorkflowRun).where(WorkflowRun.id == resolved_run_id) + ).scalar_one_or_none() + else: + run = db.execute( + _sel(WorkflowRun) + .where(WorkflowRun.order_line_id == order_line_id) + .order_by(WorkflowRun.created_at.desc()) + .limit(1) + ).scalar_one_or_none() + if run is None: + return + + node_result = db.execute( + _sel(WorkflowNodeResult).where( + WorkflowNodeResult.run_id == run.id, + 
WorkflowNodeResult.node_name == workflow_node_id, + ) + ).scalar_one_or_none() + if node_result is None: + return + + metadata = dict(node_result.output or {}) + if task_id: + metadata["task_id"] = task_id + metadata["runtime_state"] = "running" + node_result.status = "running" + node_result.log = None + node_result.output = metadata + run.status = "pending" + run.completed_at = None + except Exception as _exc: + logger.warning( + "Failed to mark WorkflowNodeResult running for line %s node %s: %s", + order_line_id, + workflow_node_id, + _exc, + ) + + +def _emit_graph_render_notifications( + order_line_id: str, + *, + success: bool, + render_log: dict | None = None, +) -> None: + try: + from sqlalchemy import create_engine, select + from sqlalchemy.orm import Session, joinedload + + from app.config import settings as app_settings + from app.domains.orders.models import OrderLine + from app.domains.products.models import Product + from app.domains.rendering.workflow_runtime_services import ( + emit_order_line_render_notifications, + ) + + engine = create_engine(app_settings.database_url_sync) + try: + with Session(engine) as session: + line = session.execute( + select(OrderLine) + .where(OrderLine.id == order_line_id) + .options( + joinedload(OrderLine.product).joinedload(Product.cad_file), + joinedload(OrderLine.output_type), + ) + ).scalar_one_or_none() + if line is None: + return + + tenant_id = None + if line.product and line.product.cad_file and line.product.cad_file.tenant_id: + tenant_id = str(line.product.cad_file.tenant_id) + + emit_order_line_render_notifications( + success=success, + order_line_id=order_line_id, + tenant_id=tenant_id, + product_name=line.product.name if line.product else "unknown", + output_type_name=line.output_type.name if line.output_type else "unknown", + render_log=render_log if isinstance(render_log, dict) else None, + session=session, + line=line, + ) + finally: + engine.dispose() + except Exception as exc: + logger.exception( + 
"Failed to emit graph render notifications for order_line %s: %s", + order_line_id, + exc, + ) + + +def _update_graph_output_nodes( + *, + workflow_run_id: str | None, + output_node_ids: list[str], + status: str, + output_updates: dict | None = None, + error: str | None = None, +) -> None: + if workflow_run_id is None or not output_node_ids: + return + + import uuid + + from sqlalchemy import create_engine, select + from sqlalchemy.orm import Session + + from app.config import settings as app_settings + from app.domains.rendering.models import WorkflowNodeResult + + try: + resolved_run_id = uuid.UUID(str(workflow_run_id)) + except (TypeError, ValueError): + resolved_run_id = workflow_run_id + + engine = create_engine(app_settings.database_url_sync) + try: + with Session(engine) as session: + for node_id in output_node_ids: + node_result = session.execute( + select(WorkflowNodeResult).where( + WorkflowNodeResult.run_id == resolved_run_id, + WorkflowNodeResult.node_name == node_id, + ) + ).scalar_one_or_none() + if node_result is None: + continue + metadata = dict(node_result.output or {}) + if output_updates: + metadata.update(output_updates) + if error: + metadata["last_error"] = error[:2000] + node_result.status = status + node_result.log = error[:2000] if error else None + node_result.output = metadata + session.commit() + finally: + engine.dispose() + + +def _update_graph_notify_nodes( + *, + workflow_run_id: str | None, + notify_node_ids: list[str], + status: str, + output_updates: dict | None = None, + error: str | None = None, +) -> None: + if workflow_run_id is None or not notify_node_ids: + return + + import uuid + + from sqlalchemy import create_engine, select + from sqlalchemy.orm import Session + + from app.config import settings as app_settings + from app.domains.rendering.models import WorkflowNodeResult + + try: + resolved_run_id = uuid.UUID(str(workflow_run_id)) + except (TypeError, ValueError): + resolved_run_id = workflow_run_id + + engine = 
create_engine(app_settings.database_url_sync) + try: + with Session(engine) as session: + for node_id in notify_node_ids: + node_result = session.execute( + select(WorkflowNodeResult).where( + WorkflowNodeResult.run_id == resolved_run_id, + WorkflowNodeResult.node_name == node_id, + ) + ).scalar_one_or_none() + if node_result is None: + continue + metadata = dict(node_result.output or {}) + if output_updates: + metadata.update(output_updates) + if error: + metadata["last_error"] = error[:2000] + node_result.status = status + node_result.log = error[:2000] if error else None + node_result.output = metadata + session.commit() + finally: + engine.dispose() + + +def _finalize_graph_notify_nodes( + *, + workflow_run_id: str | None = None, + notify_node_ids: list[str] | None = None, + success: bool, + render_node_id: str | None = None, + error: str | None = None, +) -> None: + notify_node_ids = list(notify_node_ids or []) + if workflow_run_id is None or not notify_node_ids: + return + + if success: + _update_graph_notify_nodes( + workflow_run_id=workflow_run_id, + notify_node_ids=notify_node_ids, + status="completed", + output_updates={ + "notification_mode": "completed_via_render_task", + "completed_by_node_id": render_node_id, + "render_success": True, + }, + ) + else: + _update_graph_notify_nodes( + workflow_run_id=workflow_run_id, + notify_node_ids=notify_node_ids, + status="failed", + output_updates={ + "notification_mode": "failed_via_render_task", + "completed_by_node_id": render_node_id, + "render_success": False, + }, + error=error, + ) + + +def _finalize_graph_still_output( + order_line_id: str, + *, + success: bool, + output_path: str, + render_log: dict | None = None, + workflow_run_id: str | None = None, + output_node_ids: list[str] | None = None, + render_node_id: str | None = None, + error: str | None = None, +) -> None: + output_node_ids = list(output_node_ids or []) + if workflow_run_id is None or not output_node_ids: + return + + from sqlalchemy import 
create_engine, select + from sqlalchemy.orm import Session, joinedload + + from app.config import settings as app_settings + from app.domains.orders.models import OrderLine + from app.domains.products.models import Product + from app.domains.rendering.workflow_runtime_services import persist_order_line_output + + engine = create_engine(app_settings.database_url_sync) + try: + with Session(engine) as session: + line = session.execute( + select(OrderLine) + .where(OrderLine.id == order_line_id) + .options(joinedload(OrderLine.product).joinedload(Product.cad_file)) + ).scalar_one_or_none() + if line is None: + return + + persisted = persist_order_line_output( + session, + line, + success=success, + output_path=output_path, + render_log=render_log if isinstance(render_log, dict) else None, + workflow_run_id=workflow_run_id, + ) + finally: + engine.dispose() + + if success: + _update_graph_output_nodes( + workflow_run_id=workflow_run_id, + output_node_ids=output_node_ids, + status="completed", + output_updates={ + "publication_mode": "graph_authoritative", + "authoritative_result_path": persisted.result_path, + "persisted_result_path": persisted.result_path, + "asset_id": persisted.asset_id, + "storage_key": persisted.storage_key, + "asset_type": persisted.asset_type.value if persisted.asset_type is not None else None, + "completed_by_node_id": render_node_id, + }, + ) + else: + _update_graph_output_nodes( + workflow_run_id=workflow_run_id, + output_node_ids=output_node_ids, + status="failed", + output_updates={ + "publication_mode": "blocked_by_render_failure", + "authoritative_result_path": None, + "persisted_result_path": None, + "completed_by_node_id": render_node_id, + }, + error=error, + ) + + +def _finalize_graph_blend_output( + order_line_id: str, + *, + success: bool, + output_path: str, + render_log: dict | None = None, + workflow_run_id: str | None = None, + output_node_ids: list[str] | None = None, + render_node_id: str | None = None, + error: str | None = 
None, +) -> None: + output_node_ids = list(output_node_ids or []) + if workflow_run_id is None or not output_node_ids: + return + + from sqlalchemy import create_engine, select + from sqlalchemy.orm import Session, joinedload + + from app.config import settings as app_settings + from app.domains.media.models import MediaAssetType + from app.domains.orders.models import OrderLine + from app.domains.products.models import Product + from app.domains.rendering.workflow_runtime_services import persist_order_line_media_asset + + engine = create_engine(app_settings.database_url_sync) + try: + with Session(engine) as session: + line = session.execute( + select(OrderLine) + .where(OrderLine.id == order_line_id) + .options(joinedload(OrderLine.product).joinedload(Product.cad_file)) + ).scalar_one_or_none() + if line is None: + return + + persisted = persist_order_line_media_asset( + session, + line, + success=success, + output_path=output_path, + asset_type=MediaAssetType.blend_production, + render_log=render_log if isinstance(render_log, dict) else None, + workflow_run_id=workflow_run_id, + ) + finally: + engine.dispose() + + if success: + _update_graph_output_nodes( + workflow_run_id=workflow_run_id, + output_node_ids=output_node_ids, + status="completed", + output_updates={ + "publication_mode": "graph_authoritative", + "authoritative_result_path": persisted.result_path, + "persisted_result_path": persisted.result_path, + "asset_id": persisted.asset_id, + "storage_key": persisted.storage_key, + "asset_type": persisted.asset_type.value if persisted.asset_type is not None else None, + "completed_by_node_id": render_node_id, + }, + ) + else: + _update_graph_output_nodes( + workflow_run_id=workflow_run_id, + output_node_ids=output_node_ids, + status="failed", + output_updates={ + "publication_mode": "blocked_by_render_failure", + "authoritative_result_path": None, + "persisted_result_path": None, + "asset_id": None, + "storage_key": None, + "asset_type": None, + 
"completed_by_node_id": render_node_id, + }, + error=error, + ) + + +def _finalize_graph_turntable_output( + order_line_id: str, + *, + success: bool, + output_path: str, + render_log: dict | None = None, + workflow_run_id: str | None = None, + output_node_ids: list[str] | None = None, + render_node_id: str | None = None, + error: str | None = None, +) -> None: + _finalize_graph_still_output( + order_line_id, + success=success, + output_path=output_path, + render_log=render_log, + workflow_run_id=workflow_run_id, + output_node_ids=output_node_ids, + render_node_id=render_node_id, + error=error, + ) + + +def _finalize_observer_media_output( + order_line_id: str, + *, + asset_type: str, + success: bool, + output_path: str, + render_log: dict | None = None, + workflow_run_id: str | None = None, + output_node_ids: list[str] | None = None, + render_node_id: str | None = None, + error: str | None = None, +) -> None: + if workflow_run_id is None: + return + + from app.domains.media.models import MediaAssetType + + resolved_asset_type = MediaAssetType(asset_type) + output_node_ids = list(output_node_ids or []) + + if success: + _update_graph_output_nodes( + workflow_run_id=workflow_run_id, + output_node_ids=output_node_ids, + status="completed", + output_updates={ + "publication_mode": "shadow_observer_only", + "observer_result_path": output_path, + "persisted_result_path": output_path, + "asset_id": None, + "storage_key": None, + "asset_type": resolved_asset_type.value, + "completed_by_node_id": render_node_id, + }, + ) + else: + _update_graph_output_nodes( + workflow_run_id=workflow_run_id, + output_node_ids=output_node_ids, + status="failed", + output_updates={ + "publication_mode": "shadow_observer_failed", + "observer_result_path": None, + "persisted_result_path": None, + "asset_id": None, + "storage_key": None, + "asset_type": resolved_asset_type.value, + "completed_by_node_id": render_node_id, + }, + error=error, + ) + + +def _finalize_shadow_still_output( + 
order_line_id: str, + *, + success: bool, + output_path: str, + render_log: dict | None = None, + workflow_run_id: str | None = None, + output_node_ids: list[str] | None = None, + render_node_id: str | None = None, + error: str | None = None, +) -> None: + _finalize_observer_media_output( + order_line_id, + asset_type="still", + success=success, + output_path=output_path, + render_log=render_log, + workflow_run_id=workflow_run_id, + output_node_ids=output_node_ids, + render_node_id=render_node_id, + error=error, + ) + + +def _finalize_shadow_turntable_output( + order_line_id: str, + *, + success: bool, + output_path: str, + render_log: dict | None = None, + workflow_run_id: str | None = None, + output_node_ids: list[str] | None = None, + render_node_id: str | None = None, + error: str | None = None, +) -> None: + _finalize_observer_media_output( + order_line_id, + asset_type="turntable", + success=success, + output_path=output_path, + render_log=render_log, + workflow_run_id=workflow_run_id, + output_node_ids=output_node_ids, + render_node_id=render_node_id, + error=error, + ) + + +def _finalize_shadow_blend_output( + order_line_id: str, + *, + success: bool, + output_path: str, + render_log: dict | None = None, + workflow_run_id: str | None = None, + output_node_ids: list[str] | None = None, + render_node_id: str | None = None, + error: str | None = None, +) -> None: + _finalize_observer_media_output( + order_line_id, + asset_type="blend_production", + success=success, + output_path=output_path, + render_log=render_log, + workflow_run_id=workflow_run_id, + output_node_ids=output_node_ids, + render_node_id=render_node_id, + error=error, + ) + + @celery_app.task( bind=True, name="app.domains.rendering.tasks.render_still_task", @@ -112,9 +757,9 @@ def render_still_task( step_path: str, output_path: str, engine: str = "cycles", - samples: int = 256, + samples: int | None = None, smooth_angle: int = 30, - cycles_device: str = "auto", + cycles_device: str = "gpu", 
width: int = 512, height: int = 512, transparent_bg: bool = False, @@ -135,6 +780,7 @@ def render_still_task( denoising_quality: str = "", denoising_use_gpu: str = "", mesh_attributes: dict | None = None, + template_inputs: dict | None = None, ) -> dict: """Render a STEP file to a still PNG via Blender subprocess. @@ -171,6 +817,7 @@ def render_still_task( denoising_quality=denoising_quality, denoising_use_gpu=denoising_use_gpu, mesh_attributes=mesh_attributes or {}, + template_inputs=template_inputs, ) log_task_event(self.request.id, f"Completed successfully in {result.get('total_duration_s', 0):.1f}s", "done") logger.info( @@ -211,17 +858,20 @@ def render_still_task( ) def render_turntable_task( self, - step_path: str, - output_dir: str, + context_id_or_step_path: str, + output_dir: str | None = None, output_name: str = "turntable", engine: str = "cycles", + render_engine: str | None = None, samples: int = 64, smooth_angle: int = 30, - cycles_device: str = "auto", + cycles_device: str = "gpu", + transparent_bg: bool = False, width: int = 1920, height: int = 1080, frame_count: int = 120, fps: int = 30, + duration_s: float | None = None, turntable_degrees: float = 360.0, turntable_axis: str = "world_z", bg_color: str = "", @@ -236,12 +886,27 @@ def render_turntable_task( rotation_x: float = 0.0, rotation_y: float = 0.0, rotation_z: float = 0.0, + focal_length_mm: float | None = None, + sensor_width_mm: float | None = None, + material_override: str | None = None, + template_inputs: dict | None = None, + workflow_run_id: str | None = None, + workflow_node_id: str | None = None, + publish_asset_enabled: bool = True, + observer_output_enabled: bool = False, + graph_authoritative_output_enabled: bool = False, + graph_output_node_ids: list[str] | None = None, + graph_notify_node_ids: list[str] | None = None, + emit_legacy_notifications: bool = False, + emit_events: bool = True, + job_document_enabled: bool = True, + output_name_suffix: str | None = None, ) -> dict: 
"""Render a STEP file as a turntable animation (frames + FFmpeg composite). Returns render metadata dict on success. """ - log_task_event(self.request.id, f"Starting render_turntable_task: {Path(step_path).name}", "info") + del job_document_enabled import json import os import shutil @@ -249,29 +914,93 @@ def render_turntable_task( import sys from app.services.render_blender import find_blender + graph_output_node_ids = list(graph_output_node_ids or []) + graph_notify_node_ids = list(graph_notify_node_ids or []) + order_line_id: str | None = None + step_path = context_id_or_step_path + try: + uuid.UUID(str(context_id_or_step_path)) + except (TypeError, ValueError): + resolved_order_line_context = False + else: + resolved_order_line_context = True + + if resolved_order_line_context: + order_line_id = context_id_or_step_path + step_path, _cad_file_id = _resolve_step_path_for_order_line(order_line_id) + if not step_path: + raise RuntimeError(f"Cannot resolve STEP path for order_line {order_line_id}") + step = Path(step_path) + canonical_output_dir = build_order_line_step_render_path( + step, + order_line_id, + "placeholder.mp4", + ensure_exists=True, + ) + if output_dir and Path(output_dir) != canonical_output_dir.parent: + logger.warning( + "render_turntable_task overriding non-canonical output_dir=%s with %s for order_line=%s", + output_dir, + canonical_output_dir.parent, + order_line_id, + ) + output_dir = str(canonical_output_dir.parent) + elif output_dir is None: + raise RuntimeError("render_turntable_task requires output_dir when invoked with a STEP path") + else: + step = Path(step_path) + + if render_engine not in (None, ""): + engine = str(render_engine) + + if duration_s not in (None, ""): + try: + normalized_duration_s = float(duration_s) + except (TypeError, ValueError): + normalized_duration_s = None + if normalized_duration_s is not None and normalized_duration_s > 0 and fps > 0: + frame_count = max(1, int(round(normalized_duration_s * fps))) + + if 
output_name_suffix: + output_name = f"{output_name}_{output_name_suffix}" + + log_task_event(self.request.id, f"Starting render_turntable_task: {step.name}", "info") + if order_line_id: + _mark_workflow_node_running( + order_line_id, + workflow_run_id=workflow_run_id, + workflow_node_id=workflow_node_id, + task_id=self.request.id, + ) + blender_bin = find_blender() if not blender_bin: raise RuntimeError("Blender binary not found in render-worker container") - step = Path(step_path) out_dir = Path(output_dir) - out_dir.mkdir(parents=True, exist_ok=True) + ensure_group_writable_dir(out_dir) + output_mp4 = out_dir / f"{output_name}.mp4" + logger.info( + "render_turntable_task using output_mp4=%s for order_line=%s workflow_run_id=%s", + output_mp4, + order_line_id, + workflow_run_id, + ) scripts_dir = Path(os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts")) turntable_script = scripts_dir / "turntable_render.py" - # GLB generation via OCC — deflection from admin settings (scene_linear/angular_deflection) - from app.config import settings as app_settings - from sqlalchemy import create_engine as _create_engine, text as _text - from sqlalchemy.orm import Session as _Session - _db_engine = _create_engine(app_settings.database_url_sync) - with _Session(_db_engine) as _s: - _rows = _s.execute(_text("SELECT key, value FROM system_settings")).fetchall() - _sett = {r[0]: r[1] for r in _rows} - _db_engine.dispose() - linear_deflection = float(_sett.get("scene_linear_deflection", "0.1")) - angular_deflection = float(_sett.get("scene_angular_deflection", "0.1")) - glb_path = step.parent / f"{step.stem}_thumbnail.glb" + # Turntable output is a production render path, so use render-quality tessellation. 
+ from app.services.render_blender import build_tessellated_glb_path, resolve_tessellation_settings + + linear_deflection, angular_deflection, effective_tessellation_engine = resolve_tessellation_settings("render") + glb_path = build_tessellated_glb_path( + step, + "render", + effective_tessellation_engine, + linear_deflection, + angular_deflection, + ) if not glb_path.exists() or glb_path.stat().st_size == 0: occ_script = scripts_dir / "export_step_to_gltf.py" occ_cmd = [ @@ -280,17 +1009,26 @@ def render_turntable_task( "--output_path", str(glb_path), "--linear_deflection", str(linear_deflection), "--angular_deflection", str(angular_deflection), + "--tessellation_engine", effective_tessellation_engine, ] occ_result = subprocess.run(occ_cmd, capture_output=True, text=True, timeout=120) if occ_result.returncode != 0: raise RuntimeError( f"export_step_to_gltf.py failed:\n{occ_result.stderr[-500:]}" ) - logger.info("render_turntable_task: GLB generated: %s", glb_path.name) + logger.info( + "render_turntable_task: GLB generated: %s with render tessellation linear=%s angular=%s engine=%s", + glb_path.name, + linear_deflection, + angular_deflection, + effective_tessellation_engine, + ) # Build turntable render arguments - frames_dir = out_dir / "frames" - frames_dir.mkdir(exist_ok=True) + frames_dir = out_dir / f"_frames_{output_name}" + if frames_dir.exists(): + shutil.rmtree(frames_dir, ignore_errors=True) + ensure_group_writable_dir(frames_dir) cmd = [ blender_bin, "--background", @@ -298,20 +1036,38 @@ def render_turntable_task( "--", str(glb_path), str(frames_dir), - output_name, - str(width), str(height), - engine, str(samples), str(smooth_angle), cycles_device, - str(frame_count), str(fps), str(turntable_degrees), turntable_axis, + str(frame_count), + str(int(turntable_degrees)), + str(width), + str(height), + engine, + str(samples), + "{}", template_path or "", target_collection, material_library_path or "", json.dumps(material_map) if material_map else "{}", 
json.dumps(part_names_ordered) if part_names_ordered else "[]", "1" if lighting_only else "0", + cycles_device, "1" if shadow_catcher else "0", - "1" if camera_orbit else "0", - str(rotation_x), str(rotation_y), str(rotation_z), + str(rotation_x), + str(rotation_y), + str(rotation_z), + turntable_axis, + bg_color, + "1" if transparent_bg else "0", ] + if camera_orbit: + cmd += ["--camera-orbit"] + if focal_length_mm is not None: + cmd += ["--focal-length", str(focal_length_mm)] + if sensor_width_mm is not None: + cmd += ["--sensor-width", str(sensor_width_mm)] + if material_override: + cmd += ["--material-override", material_override] + if template_inputs: + cmd += ["--template-inputs", json.dumps(template_inputs)] try: result = subprocess.run( @@ -326,40 +1082,234 @@ def render_turntable_task( logger.error("render_turntable_task failed: %s", exc) try: from app.core.websocket import publish_event_sync - publish_event_sync(None, { - "type": "render.turntable.failed", - "step_path": Path(step_path).name, - "error": str(exc), - }) + if emit_events: + publish_event_sync(None, { + "type": "render.turntable.failed", + "step_path": step.name, + "error": str(exc), + }) except Exception: pass + if graph_authoritative_output_enabled and order_line_id: + _finalize_graph_turntable_output( + order_line_id, + success=False, + output_path=str(output_mp4), + render_log={"error": str(exc)}, + workflow_run_id=workflow_run_id, + output_node_ids=graph_output_node_ids, + render_node_id=workflow_node_id, + error=str(exc), + ) + elif observer_output_enabled and order_line_id: + _finalize_shadow_turntable_output( + order_line_id, + success=False, + output_path=str(output_mp4), + render_log={"error": str(exc)}, + workflow_run_id=workflow_run_id, + output_node_ids=graph_output_node_ids, + render_node_id=workflow_node_id, + error=str(exc), + ) + if order_line_id: + _update_workflow_run_status( + order_line_id, + "failed", + str(exc), + workflow_run_id=workflow_run_id, + 
workflow_node_id=workflow_node_id, + ) + if emit_legacy_notifications: + _emit_graph_render_notifications( + order_line_id, + success=False, + render_log={"error": str(exc)}, + ) + _finalize_graph_notify_nodes( + workflow_run_id=workflow_run_id, + notify_node_ids=graph_notify_node_ids, + success=False, + render_node_id=workflow_node_id, + error=str(exc), + ) raise self.retry(exc=exc, countdown=60) + frame_files = sorted(frames_dir.glob("frame_*.png")) + if not frame_files: + error_message = f"No frames rendered in {frames_dir}" + if result.stdout: + error_message = f"{error_message}\nSTDOUT:\n{result.stdout[-2000:]}" + if result.stderr: + error_message = f"{error_message}\nSTDERR:\n{result.stderr[-2000:]}" + if graph_authoritative_output_enabled and order_line_id: + _finalize_graph_turntable_output( + order_line_id, + success=False, + output_path=str(output_mp4), + render_log={"error": error_message}, + workflow_run_id=workflow_run_id, + output_node_ids=graph_output_node_ids, + render_node_id=workflow_node_id, + error=error_message, + ) + elif observer_output_enabled and order_line_id: + _finalize_shadow_turntable_output( + order_line_id, + success=False, + output_path=str(output_mp4), + render_log={"error": error_message}, + workflow_run_id=workflow_run_id, + output_node_ids=graph_output_node_ids, + render_node_id=workflow_node_id, + error=error_message, + ) + if order_line_id: + _update_workflow_run_status( + order_line_id, + "failed", + error_message, + workflow_run_id=workflow_run_id, + workflow_node_id=workflow_node_id, + ) + if emit_legacy_notifications: + _emit_graph_render_notifications( + order_line_id, + success=False, + render_log={"error": error_message}, + ) + _finalize_graph_notify_nodes( + workflow_run_id=workflow_run_id, + notify_node_ids=graph_notify_node_ids, + success=False, + render_node_id=workflow_node_id, + error=error_message, + ) + raise RuntimeError(error_message) + # FFmpeg composite: frames → MP4 with optional background - output_mp4 = 
out_dir / f"{output_name}.mp4" ffmpeg_cmd = _build_ffmpeg_cmd( - frames_dir, output_mp4, fps=fps, bg_color=bg_color + frames_dir, + output_mp4, + fps=fps, + bg_color=bg_color, + width=width, + height=height, ) try: subprocess.run(ffmpeg_cmd, check=True, capture_output=True, text=True, timeout=300) except subprocess.CalledProcessError as exc: - raise RuntimeError(f"FFmpeg composite failed: {exc.stderr[-500:]}") + error_message = f"FFmpeg composite failed: {exc.stderr[-500:]}" + if graph_authoritative_output_enabled and order_line_id: + _finalize_graph_turntable_output( + order_line_id, + success=False, + output_path=str(output_mp4), + render_log={"error": error_message}, + workflow_run_id=workflow_run_id, + output_node_ids=graph_output_node_ids, + render_node_id=workflow_node_id, + error=error_message, + ) + elif observer_output_enabled and order_line_id: + _finalize_shadow_turntable_output( + order_line_id, + success=False, + output_path=str(output_mp4), + render_log={"error": error_message}, + workflow_run_id=workflow_run_id, + output_node_ids=graph_output_node_ids, + render_node_id=workflow_node_id, + error=error_message, + ) + if order_line_id: + _update_workflow_run_status( + order_line_id, + "failed", + error_message, + workflow_run_id=workflow_run_id, + workflow_node_id=workflow_node_id, + ) + if emit_legacy_notifications: + _emit_graph_render_notifications( + order_line_id, + success=False, + render_log={"error": error_message}, + ) + _finalize_graph_notify_nodes( + workflow_run_id=workflow_run_id, + notify_node_ids=graph_notify_node_ids, + success=False, + render_node_id=workflow_node_id, + error=error_message, + ) + raise RuntimeError(error_message) log_task_event(self.request.id, "Completed successfully", "done") try: from app.core.websocket import publish_event_sync - publish_event_sync(None, { - "type": "render.turntable.completed", - "step_path": Path(step_path).name, - "output": Path(output_mp4).name, - }) + if emit_events: + publish_event_sync(None, 
{ + "type": "render.turntable.completed", + "step_path": step.name, + "output": output_mp4.name, + }) except Exception: pass - return { + result_payload = { "output_mp4": str(output_mp4), "frame_count": frame_count, "fps": fps, } + if graph_authoritative_output_enabled and order_line_id: + _finalize_graph_turntable_output( + order_line_id, + success=True, + output_path=str(output_mp4), + render_log=result_payload, + workflow_run_id=workflow_run_id, + output_node_ids=graph_output_node_ids, + render_node_id=workflow_node_id, + ) + elif observer_output_enabled and order_line_id: + _finalize_shadow_turntable_output( + order_line_id, + success=True, + output_path=str(output_mp4), + render_log=result_payload, + workflow_run_id=workflow_run_id, + output_node_ids=graph_output_node_ids, + render_node_id=workflow_node_id, + ) + elif publish_asset_enabled and order_line_id: + publish_asset.delay( + order_line_id, + "turntable", + str(output_mp4), + render_config=result_payload, + workflow_run_id=workflow_run_id, + ) + + if order_line_id: + _update_workflow_run_status( + order_line_id, + "completed", + workflow_run_id=workflow_run_id, + workflow_node_id=workflow_node_id, + ) + if emit_legacy_notifications: + _emit_graph_render_notifications( + order_line_id, + success=True, + render_log=result_payload, + ) + _finalize_graph_notify_nodes( + workflow_run_id=workflow_run_id, + notify_node_ids=graph_notify_node_ids, + success=True, + render_node_id=workflow_node_id, + ) + return result_payload @celery_app.task( @@ -374,76 +1324,78 @@ def publish_asset( workflow_run_id: str | None = None, ) -> str | None: """Create a MediaAsset record after a successful render.""" - import asyncio + from sqlalchemy import select + from sqlalchemy.orm import joinedload - async def _run() -> str | None: - from app.database import AsyncSessionLocal - from app.domains.media.models import MediaAsset, MediaAssetType - from app.domains.orders.models import OrderLine - from app.domains.products.models 
import Product - from sqlalchemy import select + from app.core.db_utils import get_sync_session + from app.domains.media.models import MediaAsset, MediaAssetType + from app.domains.orders.models import OrderLine + from app.domains.products.models import Product + from app.domains.rendering.workflow_runtime_services import ( + persist_order_line_media_asset, + persist_order_line_output, + ) - async with AsyncSessionLocal() as db: - res = await db.execute(select(OrderLine).where(OrderLine.id == order_line_id)) - line = res.scalar_one_or_none() - if not line: - return None - - # Resolve cad_file_id from the linked product - cad_file_id = None - if line.product_id: - prod_res = await db.execute(select(Product).where(Product.id == line.product_id)) - product = prod_res.scalar_one_or_none() - if product: - cad_file_id = product.cad_file_id - - asset = MediaAsset( - tenant_id=getattr(line, "tenant_id", None), - order_line_id=line.id, - product_id=line.product_id, - cad_file_id=cad_file_id, - workflow_run_id=workflow_run_id, - asset_type=MediaAssetType(asset_type), - storage_key=storage_key, - render_config=render_config, + with get_sync_session() as db: + line = db.execute( + select(OrderLine) + .where(OrderLine.id == order_line_id) + .options( + joinedload(OrderLine.product).joinedload(Product.cad_file), + joinedload(OrderLine.output_type), ) - db.add(asset) - await db.commit() - return str(asset.id) + ).scalar_one_or_none() + if not line: + return None - return asyncio.get_event_loop().run_until_complete(_run()) + resolved_asset_type = MediaAssetType(asset_type) + if resolved_asset_type in {MediaAssetType.still, MediaAssetType.turntable}: + persisted = persist_order_line_output( + db, + line, + success=True, + output_path=storage_key, + render_log=render_config if isinstance(render_config, dict) else None, + workflow_run_id=workflow_run_id, + ) + return persisted.asset_id + + persisted = persist_order_line_media_asset( + db, + line, + success=True, + 
output_path=storage_key, + asset_type=resolved_asset_type, + render_log=render_config if isinstance(render_config, dict) else None, + workflow_run_id=workflow_run_id, + ) + return persisted.asset_id def _resolve_step_path_for_order_line(order_line_id: str) -> tuple[str | None, str | None]: """Sync helper: resolves (step_path, cad_file_id) from an OrderLine via DB.""" - import asyncio + from sqlalchemy import select + from sqlalchemy.orm import selectinload - async def _inner() -> tuple[str | None, str | None]: - from app.database import AsyncSessionLocal - from app.domains.orders.models import OrderLine - from app.domains.products.models import Product - from app.models.cad_file import CadFile - from sqlalchemy import select - from sqlalchemy.orm import selectinload + from app.core.db_utils import get_sync_session + from app.domains.orders.models import OrderLine + from app.models.cad_file import CadFile - async with AsyncSessionLocal() as db: - res = await db.execute( - select(OrderLine) - .options(selectinload(OrderLine.product)) - .where(OrderLine.id == order_line_id) - ) - line = res.scalar_one_or_none() - if not line or not line.product or not line.product.cad_file_id: - return None, None - cad_res = await db.execute( - select(CadFile).where(CadFile.id == line.product.cad_file_id) - ) - cad = cad_res.scalar_one_or_none() - if not cad or not cad.stored_path: - return None, None - return cad.stored_path, str(line.product.cad_file_id) + with get_sync_session() as db: + line = db.execute( + select(OrderLine) + .options(selectinload(OrderLine.product)) + .where(OrderLine.id == order_line_id) + ).scalar_one_or_none() + if not line or not line.product or not line.product.cad_file_id: + return None, None - return asyncio.get_event_loop().run_until_complete(_inner()) + cad = db.execute( + select(CadFile).where(CadFile.id == line.product.cad_file_id) + ).scalar_one_or_none() + if not cad or not cad.stored_path: + return None, None + return cad.stored_path, 
str(line.product.cad_file_id) @celery_app.task( @@ -458,39 +1410,48 @@ def render_order_line_still_task(self, order_line_id: str, **params) -> dict: Wraps render_still_task logic but accepts order_line_id instead of step_path. On success, creates a MediaAsset record via publish_asset. """ - import asyncio from app.domains.rendering.job_document import RenderJobDocument, JobState from app.core.process_steps import StepName workflow_run_id = params.pop("workflow_run_id", None) workflow_node_id = params.pop("workflow_node_id", None) publish_asset_enabled = bool(params.pop("publish_asset_enabled", True)) + observer_output_enabled = bool(params.pop("observer_output_enabled", False)) + graph_authoritative_output_enabled = bool(params.pop("graph_authoritative_output_enabled", False)) + graph_output_node_ids = list(params.pop("graph_output_node_ids", []) or []) + graph_notify_node_ids = list(params.pop("graph_notify_node_ids", []) or []) emit_events = bool(params.pop("emit_events", True)) job_document_enabled = bool(params.pop("job_document_enabled", True)) + emit_legacy_notifications = bool(params.pop("emit_legacy_notifications", False)) output_name_suffix = params.pop("output_name_suffix", None) log_task_event(self.request.id, f"Starting render_order_line_still_task: order_line={order_line_id}", "info") + _mark_workflow_node_running( + order_line_id, + workflow_run_id=workflow_run_id, + workflow_node_id=workflow_node_id, + task_id=self.request.id, + ) # Initialise job document and store real Celery task ID job_doc = RenderJobDocument.new(order_line_id=order_line_id, celery_task_id=self.request.id) job_doc.set_state(JobState.RUNNING) def _save_job_doc(): - async def _run(): - from app.database import AsyncSessionLocal - from app.domains.orders.models import OrderLine + if not job_document_enabled: + return + try: from sqlalchemy import update as _upd - async with AsyncSessionLocal() as db: - await db.execute( + + from app.core.db_utils import get_sync_session + from 
app.domains.orders.models import OrderLine + + with get_sync_session() as db: + db.execute( _upd(OrderLine) .where(OrderLine.id == order_line_id) .values(render_job_doc=job_doc.to_dict()) ) - await db.commit() - if not job_document_enabled: - return - try: - asyncio.get_event_loop().run_until_complete(_run()) except Exception as _exc: logger.debug("_save_job_doc failed: %s", _exc) @@ -510,21 +1471,33 @@ def render_order_line_still_task(self, order_line_id: str, **params) -> dict: job_doc.finish_step(StepName.RESOLVE_STEP_PATH, output={"step_path": step_path_str}) step = Path(step_path_str) - output_dir = step.parent / "renders" - output_dir.mkdir(parents=True, exist_ok=True) - output_filename = f"line_{order_line_id}.png" + output_extension = _resolve_order_line_still_output_extension(order_line_id, params) + output_filename = f"line_{order_line_id}.{output_extension}" if output_name_suffix: - output_filename = f"line_{order_line_id}_{output_name_suffix}.png" - output_path = output_dir / output_filename + output_filename = f"line_{order_line_id}_{output_name_suffix}.{output_extension}" + output_path = build_order_line_step_render_path( + step, + order_line_id, + output_filename, + ensure_exists=True, + ) try: job_doc.begin_step(StepName.BLENDER_STILL) - from app.services.render_blender import render_still - result = render_still( - step_path=step, - output_path=output_path, - **params, + from app.services.step_processor import render_to_file + + render_params = _normalize_order_line_still_params(params) + success, result = render_to_file( + step_path=str(step), + output_path=str(output_path), + order_line_id=order_line_id, + **render_params, ) + if not success: + raise RuntimeError( + f"Failed to render still output for order_line {order_line_id}" + ) + result["output_path"] = str(output_path) job_doc.finish_step( StepName.BLENDER_STILL, output={"output_path": str(output_path), "duration_s": result.get("total_duration_s")}, @@ -536,7 +1509,27 @@ def 
render_order_line_still_task(self, order_line_id: str, **params) -> dict: }) _save_job_doc() - if publish_asset_enabled: + if graph_authoritative_output_enabled: + _finalize_graph_still_output( + order_line_id, + success=True, + output_path=str(output_path), + render_log=result, + workflow_run_id=workflow_run_id, + output_node_ids=graph_output_node_ids, + render_node_id=workflow_node_id, + ) + elif observer_output_enabled: + _finalize_shadow_still_output( + order_line_id, + success=True, + output_path=str(output_path), + render_log=result, + workflow_run_id=workflow_run_id, + output_node_ids=graph_output_node_ids, + render_node_id=workflow_node_id, + ) + elif publish_asset_enabled: publish_asset.delay( order_line_id, "still", @@ -558,6 +1551,18 @@ def render_order_line_still_task(self, order_line_id: str, **params) -> dict: }) except Exception: pass + if emit_legacy_notifications: + _emit_graph_render_notifications( + order_line_id, + success=True, + render_log=result, + ) + _finalize_graph_notify_nodes( + workflow_run_id=workflow_run_id, + notify_node_ids=graph_notify_node_ids, + success=True, + render_node_id=workflow_node_id, + ) _update_workflow_run_status( order_line_id, "completed", @@ -581,6 +1586,41 @@ def render_order_line_still_task(self, order_line_id: str, **params) -> dict: }) except Exception: pass + if graph_authoritative_output_enabled: + _finalize_graph_still_output( + order_line_id, + success=False, + output_path=str(output_path), + render_log={"error": str(exc)}, + workflow_run_id=workflow_run_id, + output_node_ids=graph_output_node_ids, + render_node_id=workflow_node_id, + error=str(exc), + ) + elif observer_output_enabled: + _finalize_shadow_still_output( + order_line_id, + success=False, + output_path=str(output_path), + render_log={"error": str(exc)}, + workflow_run_id=workflow_run_id, + output_node_ids=graph_output_node_ids, + render_node_id=workflow_node_id, + error=str(exc), + ) + if emit_legacy_notifications: + 
_emit_graph_render_notifications( + order_line_id, + success=False, + render_log={"error": str(exc)}, + ) + _finalize_graph_notify_nodes( + workflow_run_id=workflow_run_id, + notify_node_ids=graph_notify_node_ids, + success=False, + render_node_id=workflow_node_id, + error=str(exc), + ) _update_workflow_run_status( order_line_id, "failed", @@ -603,6 +1643,11 @@ def export_blend_for_order_line_task( workflow_run_id: str | None = None, workflow_node_id: str | None = None, publish_asset_enabled: bool = True, + observer_output_enabled: bool = False, + graph_authoritative_output_enabled: bool = False, + graph_output_node_ids: list[str] | None = None, + graph_notify_node_ids: list[str] | None = None, + emit_legacy_notifications: bool = False, output_name_suffix: str | None = None, **_kwargs, ) -> dict: @@ -615,13 +1660,30 @@ def export_blend_for_order_line_task( import os import subprocess + graph_output_node_ids = list(graph_output_node_ids or []) + graph_notify_node_ids = list(graph_notify_node_ids or []) + _mark_workflow_node_running( + order_line_id, + workflow_run_id=workflow_run_id, + workflow_node_id=workflow_node_id, + task_id=self.request.id, + ) step_path_str, cad_file_id = _resolve_step_path_for_order_line(order_line_id) if not step_path_str: raise RuntimeError(f"Cannot resolve STEP path for order_line {order_line_id}") step = Path(step_path_str) - # Use geometry GLB as input (generate if missing) - glb_path = step.parent / f"{step.stem}_geometry.glb" + # Blend export is production-facing, so use render-quality tessellation. 
+ from app.services.render_blender import build_tessellated_glb_path, resolve_tessellation_settings + + linear_deflection, angular_deflection, effective_tessellation_engine = resolve_tessellation_settings("render") + glb_path = build_tessellated_glb_path( + step, + "render", + effective_tessellation_engine, + linear_deflection, + angular_deflection, + ) if not glb_path.exists(): import subprocess as _sp import sys as _sys @@ -630,6 +1692,9 @@ def export_blend_for_order_line_task( _sys.executable, str(scripts_dir_tmp / "export_step_to_gltf.py"), "--step_path", str(step), "--output_path", str(glb_path), + "--linear_deflection", str(linear_deflection), + "--angular_deflection", str(angular_deflection), + "--tessellation_engine", effective_tessellation_engine, ] occ_res = _sp.run(occ_cmd, capture_output=True, text=True, timeout=120) if occ_res.returncode != 0: @@ -638,7 +1703,8 @@ def export_blend_for_order_line_task( output_name = f"{step.stem}_production.blend" if output_name_suffix: output_name = f"{step.stem}_production_{output_name_suffix}.blend" - output_path = step.parent / output_name + output_path = build_order_line_export_path(order_line_id, output_name, ensure_exists=True) + ensure_group_writable_dir(output_path.parent) scripts_dir = Path(os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts")) export_script = scripts_dir / "export_blend.py" @@ -647,30 +1713,56 @@ def export_blend_for_order_line_task( if not blender_bin: raise RuntimeError("Blender binary not found — cannot run export_blend task") - # Resolve asset library path and material map from DB + # Resolve asset library path and material map from DB. 
asset_lib_path = "" mat_map: dict = {} + blend_is_primary_output = False + engine = None try: from sqlalchemy import create_engine, select as sql_select - from sqlalchemy.orm import Session + from sqlalchemy.orm import Session, joinedload from app.config import settings as app_settings from app.domains.orders.models import OrderLine from app.domains.products.models import Product engine = create_engine(app_settings.database_url_sync) with Session(engine) as s: - line = s.execute(sql_select(OrderLine).where(OrderLine.id == order_line_id)).scalar_one_or_none() + line = s.execute( + sql_select(OrderLine) + .where(OrderLine.id == order_line_id) + .options( + joinedload(OrderLine.product).joinedload(Product.cad_file), + joinedload(OrderLine.output_type), + ) + ).scalar_one_or_none() if line: - product = s.execute(sql_select(Product).where(Product.id == line.product_id)).scalar_one_or_none() + product = line.product if product: mat_map = { m.get("part_name", ""): m.get("material", "") for m in (product.cad_part_materials or []) } + output_type = getattr(line, "output_type", None) + if output_type is not None: + blend_is_primary_output = ( + getattr(output_type, "artifact_kind", None) == "blend_asset" + or getattr(output_type, "output_format", None) == "blend" + ) except Exception as exc: logger.warning("export_blend_for_order_line_task: DB resolution error (non-fatal): %s", exc) try: + ignored_output_overrides = { + key: value + for key, value in _kwargs.items() + if key in {"output_path", "output_dir", "output_name"} + } + if ignored_output_overrides: + logger.warning( + "export_blend_for_order_line_task ignoring non-canonical output overrides for %s: %s", + order_line_id, + ignored_output_overrides, + ) cmd = [ blender_bin, "--background", "--python", str(export_script), @@ -680,12 +1772,75 @@ def export_blend_for_order_line_task( "--asset_library_blend", asset_lib_path, "--material_map", json.dumps(mat_map), ] + logger.info( + "export_blend_for_order_line_task 
exporting order_line=%s glb_path=%s output_path=%s workflow_run_id=%s", + order_line_id, + glb_path, + output_path, + workflow_run_id, + ) result = subprocess.run(cmd, capture_output=True, text=True, timeout=300) if result.returncode != 0: raise RuntimeError( f"export_blend.py exited {result.returncode}:\n{result.stderr[-500:]}" ) - if publish_asset_enabled: + result_payload = { + "blend_path": str(output_path), + "artifact_type": "blend_production", + } + if graph_authoritative_output_enabled: + _finalize_graph_blend_output( + order_line_id, + success=True, + output_path=str(output_path), + render_log=result_payload, + workflow_run_id=workflow_run_id, + output_node_ids=graph_output_node_ids, + render_node_id=workflow_node_id, + ) + elif observer_output_enabled: + _finalize_shadow_blend_output( + order_line_id, + success=True, + output_path=str(output_path), + render_log=result_payload, + workflow_run_id=workflow_run_id, + output_node_ids=graph_output_node_ids, + render_node_id=workflow_node_id, + ) + elif blend_is_primary_output: + from sqlalchemy import select as sql_select + from sqlalchemy.orm import Session, joinedload + + from app.domains.orders.models import OrderLine + from app.domains.products.models import Product + from app.domains.rendering.workflow_runtime_services import persist_order_line_output + + if engine is None: + from sqlalchemy import create_engine + from app.config import settings as app_settings + + engine = create_engine(app_settings.database_url_sync) + with Session(engine) as s: + line = s.execute( + sql_select(OrderLine) + .where(OrderLine.id == order_line_id) + .options( + joinedload(OrderLine.product).joinedload(Product.cad_file), + joinedload(OrderLine.output_type), + ) + ).scalar_one_or_none() + if line is None: + raise RuntimeError(f"Order line {order_line_id} not found during blend persistence") + persist_order_line_output( + s, + line, + success=True, + output_path=str(output_path), + render_log=result_payload, + 
workflow_run_id=workflow_run_id, + ) + elif publish_asset_enabled: publish_asset.delay( order_line_id, "blend_production", @@ -699,9 +1854,80 @@ def export_blend_for_order_line_task( workflow_run_id=workflow_run_id, workflow_node_id=workflow_node_id, ) - return {"blend_path": str(output_path)} + if emit_legacy_notifications: + _emit_graph_render_notifications( + order_line_id, + success=True, + render_log=result_payload, + ) + _finalize_graph_notify_nodes( + workflow_run_id=workflow_run_id, + notify_node_ids=graph_notify_node_ids, + success=True, + render_node_id=workflow_node_id, + ) + return result_payload except Exception as exc: logger.error("export_blend_for_order_line_task failed for %s: %s", order_line_id, exc) + if graph_authoritative_output_enabled: + _finalize_graph_blend_output( + order_line_id, + success=False, + output_path=str(output_path), + render_log={"error": str(exc)}, + workflow_run_id=workflow_run_id, + output_node_ids=graph_output_node_ids, + render_node_id=workflow_node_id, + error=str(exc), + ) + elif observer_output_enabled: + _finalize_shadow_blend_output( + order_line_id, + success=False, + output_path=str(output_path), + render_log={"error": str(exc)}, + workflow_run_id=workflow_run_id, + output_node_ids=graph_output_node_ids, + render_node_id=workflow_node_id, + error=str(exc), + ) + elif blend_is_primary_output: + try: + from sqlalchemy import select as sql_select + from sqlalchemy.orm import Session, joinedload + + from app.domains.orders.models import OrderLine + from app.domains.products.models import Product + from app.domains.rendering.workflow_runtime_services import persist_order_line_output + + if engine is None: + from sqlalchemy import create_engine + from app.config import settings as app_settings + + engine = create_engine(app_settings.database_url_sync) + with Session(engine) as s: + line = s.execute( + sql_select(OrderLine) + .where(OrderLine.id == order_line_id) + .options( + 
joinedload(OrderLine.product).joinedload(Product.cad_file), + joinedload(OrderLine.output_type), + ) + ).scalar_one_or_none() + if line is not None: + persist_order_line_output( + s, + line, + success=False, + output_path=str(output_path), + render_log={"error": str(exc)}, + workflow_run_id=workflow_run_id, + ) + except Exception: + logger.exception( + "export_blend_for_order_line_task: failed to persist primary blend failure for %s", + order_line_id, + ) _update_workflow_run_status( order_line_id, "failed", @@ -709,7 +1935,23 @@ def export_blend_for_order_line_task( workflow_run_id=workflow_run_id, workflow_node_id=workflow_node_id, ) + if emit_legacy_notifications: + _emit_graph_render_notifications( + order_line_id, + success=False, + render_log={"error": str(exc)}, + ) + _finalize_graph_notify_nodes( + workflow_run_id=workflow_run_id, + notify_node_ids=graph_notify_node_ids, + success=False, + render_node_id=workflow_node_id, + error=str(exc), + ) raise self.retry(exc=exc, countdown=30) + finally: + if engine is not None: + engine.dispose() @celery_app.task( @@ -792,33 +2034,21 @@ def apply_asset_library_materials_task(self, order_line_id: str, asset_library_i def _build_ffmpeg_cmd( - frames_dir: Path, output_mp4: Path, fps: int = 30, bg_color: str = "" + frames_dir: Path, + output_mp4: Path, + fps: int = 30, + bg_color: str = "", + width: int = 1920, + height: int = 1080, ) -> list: """Build FFmpeg command for compositing turntable frames to MP4.""" - import shutil as _shutil - ffmpeg = _shutil.which("ffmpeg") or "ffmpeg" - frame_pattern = str(frames_dir / "%04d.png") + from app.services.render_blender import build_turntable_ffmpeg_cmd - if bg_color: - # Overlay transparent frames onto solid color background - r = int(bg_color[1:3], 16) if bg_color.startswith("#") else 255 - g = int(bg_color[3:5], 16) if bg_color.startswith("#") else 255 - b = int(bg_color[5:7], 16) if bg_color.startswith("#") else 255 - color_str = 
f"color=c=0x{r:02x}{g:02x}{b:02x}:s=1920x1080:r={fps}" - return [ - ffmpeg, "-y", - "-f", "lavfi", "-i", color_str, - "-framerate", str(fps), "-i", frame_pattern, - "-filter_complex", "[0:v][1:v]overlay=0:0", - "-c:v", "libx264", "-pix_fmt", "yuv420p", - "-movflags", "+faststart", - str(output_mp4), - ] - else: - return [ - ffmpeg, "-y", - "-framerate", str(fps), "-i", frame_pattern, - "-c:v", "libx264", "-pix_fmt", "yuv420p", - "-movflags", "+faststart", - str(output_mp4), - ] + return build_turntable_ffmpeg_cmd( + frames_dir, + output_mp4, + fps=fps, + bg_color=bg_color, + width=width, + height=height, + ) diff --git a/backend/app/domains/rendering/template_input_audit.py b/backend/app/domains/rendering/template_input_audit.py new file mode 100644 index 0000000..cc60399 --- /dev/null +++ b/backend/app/domains/rendering/template_input_audit.py @@ -0,0 +1,146 @@ +from __future__ import annotations + +import json +import re +from collections import defaultdict +from typing import Any, Iterable, Mapping + +_MARKER_PROP_NAMES = ( + "hartomat_template_input", + "hartomat.template_input", + "template_input", + "schaeffler_template_input", +) +_MARKER_KEY_PROP_NAMES = ( + "hartomat_template_input_key", + "hartomat.template_input_key", + "template_input_key", + "schaeffler_template_input_key", +) +_MARKER_VALUE_PROP_NAMES = ( + "hartomat_template_input_value", + "hartomat.template_input_value", + "template_input_value", + "schaeffler_template_input_value", +) +_NAME_PATTERNS = ( + re.compile(r"template_input__(?P[^_]+)__(?P[^_]+)", re.IGNORECASE), + re.compile(r"template-input:(?P[^=]+)=(?P.+)", re.IGNORECASE), + re.compile(r"ti::(?P[^:]+)::(?P.+)", re.IGNORECASE), +) + + +def _normalize_marker_token(value: Any) -> str | None: + if value is None: + return None + if isinstance(value, bool): + return "true" if value else "false" + text = str(value).strip() + return text or None + + +def _parse_marker_text(text: str) -> tuple[str, str] | None: + cleaned = text.strip() + if 
not cleaned: + return None + if cleaned.startswith("{"): + try: + payload = json.loads(cleaned) + except Exception: + payload = None + if isinstance(payload, dict): + key = _normalize_marker_token(payload.get("key")) + value = _normalize_marker_token(payload.get("value")) + if key and value: + return key, value + if "=" in cleaned: + key, value = cleaned.split("=", 1) + key = _normalize_marker_token(key) + value = _normalize_marker_token(value) + if key and value: + return key, value + return None + + +def extract_template_input_marker( + *, + name: str | None = None, + props: Mapping[str, Any] | None = None, +) -> tuple[str, str] | None: + raw_props = props or {} + + for prop_name in _MARKER_PROP_NAMES: + raw_value = raw_props.get(prop_name) + text = _normalize_marker_token(raw_value) + if not text: + continue + marker = _parse_marker_text(text) + if marker is not None: + return marker + + key = None + value = None + for prop_name in _MARKER_KEY_PROP_NAMES: + key = _normalize_marker_token(raw_props.get(prop_name)) + if key: + break + for prop_name in _MARKER_VALUE_PROP_NAMES: + value = _normalize_marker_token(raw_props.get(prop_name)) + if value: + break + if key and value: + return key, value + + candidate_name = (name or "").strip() + if candidate_name: + for pattern in _NAME_PATTERNS: + match = pattern.search(candidate_name) + if not match: + continue + marker_key = _normalize_marker_token(match.group("key")) + marker_value = _normalize_marker_token(match.group("value")) + if marker_key and marker_value: + return marker_key, marker_value + + return None + + +def suggest_workflow_input_schema( + markers: Iterable[tuple[str, str]], +) -> list[dict[str, Any]]: + values_by_key: dict[str, set[str]] = defaultdict(set) + for key, value in markers: + normalized_key = _normalize_marker_token(key) + normalized_value = _normalize_marker_token(value) + if not normalized_key or not normalized_value: + continue + values_by_key[normalized_key].add(normalized_value) + + 
schema: list[dict[str, Any]] = [] + for key in sorted(values_by_key): + options = sorted(values_by_key[key]) + if not options: + continue + label = key.replace("_", " ").strip().title() + if len(options) == 2 and set(options) == {"false", "true"}: + schema.append( + { + "key": key, + "label": label, + "type": "boolean", + "section": "Template Inputs", + "default": options[0] == "true", + } + ) + continue + schema.append( + { + "key": key, + "label": label, + "type": "select", + "section": "Template Inputs", + "default": options[0], + "options": [{"value": option, "label": option.replace("_", " ").title()} for option in options], + } + ) + return schema diff --git a/backend/app/domains/rendering/workflow_builder.py b/backend/app/domains/rendering/workflow_builder.py index 1672fbe..13b0a48 100644 --- a/backend/app/domains/rendering/workflow_builder.py +++ b/backend/app/domains/rendering/workflow_builder.py @@ -18,6 +18,7 @@ def dispatch_workflow( params = params or {} builders = { "still": _build_still, + "still_graph": _build_still, "turntable": _build_turntable, "multi_angle": _build_multi_angle, "still_with_exports": _build_still_with_exports, diff --git a/backend/app/domains/rendering/workflow_comparison_service.py b/backend/app/domains/rendering/workflow_comparison_service.py index 4256941..5a78fa5 100644 --- a/backend/app/domains/rendering/workflow_comparison_service.py +++ b/backend/app/domains/rendering/workflow_comparison_service.py @@ -17,7 +17,7 @@ from app.domains.orders.models import OrderLine from app.domains.rendering.models import WorkflowRun from app.domains.rendering.schemas import WorkflowComparisonArtifactOut, WorkflowRunComparisonOut -ROLLOUT_PASS_MAX_MEAN_PIXEL_DELTA = 0.0 +ROLLOUT_PASS_MAX_MEAN_PIXEL_DELTA = 1e-6 ROLLOUT_WARN_MAX_MEAN_PIXEL_DELTA = 0.02 @@ -217,6 +217,7 @@ def _find_shadow_file(order_line: OrderLine, workflow_run: WorkflowRun) -> str | upload_root = Path(settings.upload_dir) candidate_roots.append(upload_root / "renders" / 
str(order_line.id)) + candidate_roots.append(upload_root / "step_files" / "renders" / str(order_line.id)) candidate_roots.append(upload_root / "step_files" / "renders") seen_roots: set[Path] = set() @@ -258,6 +259,13 @@ async def build_workflow_run_comparison( authoritative_output = _build_artifact(authoritative_path) observer_output = _build_artifact(observer_path) + rollout_gate = evaluate_rollout_gate( + authoritative_output=authoritative_output, + observer_output=observer_output, + exact_match=None, + dimensions_match=None, + mean_pixel_delta=None, + ) if not authoritative_output.exists: status = "missing_authoritative" @@ -283,9 +291,9 @@ async def build_workflow_run_comparison( if exact_match: status = "matched" summary = "Observer output matches the authoritative legacy output byte-for-byte." - elif mean_pixel_delta == 0.0 and dimensions_match: + elif mean_pixel_delta is not None and mean_pixel_delta <= ROLLOUT_PASS_MAX_MEAN_PIXEL_DELTA and dimensions_match: status = "matched" - summary = "Observer output matches the authoritative legacy output visually, but file metadata differs." + summary = "Observer output matches the authoritative legacy output within the visual pass threshold." else: status = "different" if dimensions_match is False: @@ -294,6 +302,13 @@ async def build_workflow_run_comparison( summary = "Observer output differs from the authoritative output." else: summary = "Observer output differs from the authoritative output and could not be pixel-compared." 
+ rollout_gate = evaluate_rollout_gate( + authoritative_output=authoritative_output, + observer_output=observer_output, + exact_match=exact_match, + dimensions_match=dimensions_match, + mean_pixel_delta=mean_pixel_delta, + ) return WorkflowRunComparisonOut( workflow_run_id=workflow_run.id, @@ -302,6 +317,14 @@ async def build_workflow_run_comparison( execution_mode=workflow_run.execution_mode, status=status, summary=summary, + rollout_gate_verdict=str(rollout_gate["verdict"]), + workflow_rollout_ready=bool(rollout_gate["workflow_rollout_ready"]), + workflow_rollout_status=str(rollout_gate["workflow_rollout_status"]), + rollout_reasons=[str(reason) for reason in rollout_gate["reasons"]], + rollout_thresholds={ + str(key): float(value) + for key, value in dict(rollout_gate["thresholds"]).items() + }, authoritative_output=authoritative_output.to_schema(), observer_output=observer_output.to_schema(), exact_match=exact_match, diff --git a/backend/app/domains/rendering/workflow_config_utils.py b/backend/app/domains/rendering/workflow_config_utils.py index 87cfc93..e85f24f 100644 --- a/backend/app/domains/rendering/workflow_config_utils.py +++ b/backend/app/domains/rendering/workflow_config_utils.py @@ -21,6 +21,10 @@ _PRESET_TYPES = { _EXECUTION_MODES = {"legacy", "graph", "shadow"} _WORKFLOW_BLUEPRINTS = {"cad_intake", "order_rendering", "still_graph_reference"} _WORKFLOW_STARTERS = {"cad_file", "order_line"} +_WORKFLOW_STARTER_BLUEPRINTS = { + "starter_cad_intake": "cad_file", + "starter_order_rendering": "order_line", +} _NODE_TYPE_TO_STEP: dict[str, str] = { "inputNode": StepName.RESOLVE_STEP_PATH.value, @@ -72,7 +76,7 @@ def _extract_render_params_from_nodes(nodes: list[dict[str, Any]], step: StepNam def _build_order_line_still_graph_nodes(render_params: dict[str, Any]) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]: graph_render_params = deepcopy(render_params) - graph_render_params.setdefault("use_custom_render_settings", True) + 
graph_render_params.setdefault("use_custom_render_settings", False) nodes = [ _make_node("setup", StepName.ORDER_LINE_SETUP, 0, 160, label="Order Line Setup"), @@ -222,6 +226,7 @@ def build_preset_workflow_config( "ui": { "preset": preset_type, "execution_mode": "graph" if preset_type == "still_graph" else "legacy", + "family": "order_line", }, } @@ -235,6 +240,7 @@ def build_workflow_blueprint_config(blueprint: str) -> dict[str, Any]: _make_node("resolve_step", StepName.RESOLVE_STEP_PATH, 0, 180, label="Resolve STEP Path"), _make_node("extract_objects", StepName.OCC_OBJECT_EXTRACT, 220, 180, label="Extract STEP Objects"), _make_node("export_glb", StepName.OCC_GLB_EXPORT, 440, 180, label="Export GLB"), + _make_node("bbox", StepName.GLB_BBOX, 660, 120, label="Compute Bounding Box"), _make_node("stl_cache", StepName.STL_CACHE_GENERATE, 660, 300, label="Generate STL Cache"), _make_node( "blender_thumb", @@ -260,9 +266,11 @@ def build_workflow_blueprint_config(blueprint: str) -> dict[str, Any]: edges = [ {"from": "resolve_step", "to": "extract_objects"}, {"from": "extract_objects", "to": "export_glb"}, + {"from": "export_glb", "to": "bbox"}, {"from": "export_glb", "to": "stl_cache"}, {"from": "export_glb", "to": "blender_thumb"}, {"from": "export_glb", "to": "threejs_thumb"}, + {"from": "bbox", "to": "threejs_thumb"}, {"from": "blender_thumb", "to": "save_blender_thumb"}, {"from": "threejs_thumb", "to": "save_threejs_thumb"}, ] @@ -329,6 +337,7 @@ def build_workflow_blueprint_config(blueprint: str) -> dict[str, Any]: "ui": { "preset": "custom", "execution_mode": "graph" if blueprint == "still_graph_reference" else "legacy", + "family": "cad_file" if blueprint == "cad_intake" else "order_line", "blueprint": blueprint, }, } @@ -356,6 +365,7 @@ def build_starter_workflow_config(family: str = "order_line") -> dict[str, Any]: "ui": { "preset": "custom", "execution_mode": "legacy", + "family": family, "blueprint": blueprint, }, } @@ -385,6 +395,7 @@ def 
_build_legacy_custom_render_fallback_config(params: dict[str, Any] | None = "ui": { "preset": "custom", "execution_mode": "legacy", + "family": "order_line", "blueprint": "starter_order_rendering", }, } @@ -480,9 +491,16 @@ def canonicalize_workflow_config(raw: dict[str, Any]) -> dict[str, Any]: canonical["ui"].update(merged_ui) return canonical - if blueprint == "still_graph_reference": + if blueprint in _WORKFLOW_BLUEPRINTS: merged_ui = dict(normalized["ui"]) - canonical = build_workflow_blueprint_config("still_graph_reference") + canonical = build_workflow_blueprint_config(blueprint) + merged_ui.setdefault("execution_mode", canonical["ui"]["execution_mode"]) + canonical["ui"].update(merged_ui) + return canonical + + if blueprint in _WORKFLOW_STARTER_BLUEPRINTS: + merged_ui = dict(normalized["ui"]) + canonical = build_starter_workflow_config(_WORKFLOW_STARTER_BLUEPRINTS[blueprint]) merged_ui.setdefault("execution_mode", canonical["ui"]["execution_mode"]) canonical["ui"].update(merged_ui) return canonical diff --git a/backend/app/domains/rendering/workflow_executor.py b/backend/app/domains/rendering/workflow_executor.py index 9a9b137..f858366 100644 --- a/backend/app/domains/rendering/workflow_executor.py +++ b/backend/app/domains/rendering/workflow_executor.py @@ -25,7 +25,7 @@ from collections import deque from dataclasses import dataclass, field from typing import Literal -from app.domains.rendering.workflow_schema import WorkflowConfig, WorkflowNode +from app.domains.rendering.workflow_schema import WorkflowConfig, WorkflowEdge, WorkflowNode from app.core.process_steps import StepName logger = logging.getLogger(__name__) @@ -40,6 +40,17 @@ class WorkflowContext: execution_mode: WorkflowExecutionMode workflow_run_id: uuid.UUID | None = None ordered_nodes: list[WorkflowNode] = field(default_factory=list) + edges: list[WorkflowEdge] = field(default_factory=list) + + +@dataclass(slots=True) +class WorkflowTaskDispatchSpec: + node_id: str + task_name: str + args: 
list[str] + kwargs: dict + task_id: str + queue: str | None = None @dataclass(slots=True) @@ -48,6 +59,38 @@ class WorkflowDispatchResult: task_ids: list[str] node_task_ids: dict[str, str] skipped_node_ids: list[str] + task_specs: list[WorkflowTaskDispatchSpec] = field(default_factory=list) + + +class WorkflowTaskSubmissionError(RuntimeError): + def __init__(self, message: str, *, submitted_task_ids: list[str] | None = None) -> None: + super().__init__(message) + self.submitted_task_ids = list(submitted_task_ids or []) + + +def submit_prepared_workflow_tasks(dispatch_result: WorkflowDispatchResult) -> None: + """Submit pre-built Celery tasks after DB state has been committed.""" + from app.tasks.celery_app import celery_app + + submitted_task_ids: list[str] = [] + for spec in dispatch_result.task_specs: + task_options: dict[str, str] = {"task_id": spec.task_id} + if spec.queue: + task_options["queue"] = spec.queue + try: + celery_app.send_task( + spec.task_name, + args=spec.args, + kwargs=spec.kwargs, + **task_options, + ) + except Exception as exc: + raise WorkflowTaskSubmissionError( + f"Failed to submit workflow task for node '{spec.node_id}': {exc}", + submitted_task_ids=submitted_task_ids, + ) from exc + submitted_task_ids.append(spec.task_id) + # --------------------------------------------------------------------------- @@ -65,7 +108,7 @@ STEP_TASK_MAP: dict[StepName, str] = { StepName.STL_CACHE_GENERATE: "app.tasks.step_tasks.process_step_file", # ── Thumbnail generation ───────────────────────────────────────────── StepName.BLENDER_RENDER: "app.tasks.step_tasks.render_step_thumbnail", - StepName.THUMBNAIL_SAVE: "app.tasks.step_tasks.render_step_thumbnail", + StepName.THUMBNAIL_SAVE: "app.tasks.step_tasks.render_graph_thumbnail", # ── Order line stills & turntables ────────────────────────────────── StepName.BLENDER_STILL: "app.domains.rendering.tasks.render_order_line_still_task", StepName.BLENDER_TURNTABLE: 
"app.domains.rendering.tasks.render_turntable_task", @@ -98,6 +141,7 @@ def prepare_workflow_context( execution_mode=execution_mode, workflow_run_id=workflow_run_id, ordered_nodes=ordered_nodes, + edges=list(config.edges), ) diff --git a/backend/app/domains/rendering/workflow_graph_runtime.py b/backend/app/domains/rendering/workflow_graph_runtime.py index a51bf7b..aba13e7 100644 --- a/backend/app/domains/rendering/workflow_graph_runtime.py +++ b/backend/app/domains/rendering/workflow_graph_runtime.py @@ -12,12 +12,19 @@ from sqlalchemy import select from sqlalchemy.orm import Session, selectinload from app.config import settings +from app.core.render_paths import build_order_line_export_path, build_order_line_step_render_path from app.core.process_steps import StepName from app.domains.products.models import CadFile from app.domains.rendering.models import WorkflowNodeResult, WorkflowRun -from app.domains.rendering.workflow_executor import STEP_TASK_MAP, WorkflowContext, WorkflowDispatchResult +from app.domains.rendering.workflow_executor import ( + STEP_TASK_MAP, + WorkflowContext, + WorkflowDispatchResult, + WorkflowTaskDispatchSpec, +) from app.domains.rendering.workflow_node_registry import get_node_definition from app.domains.rendering.workflow_runtime_services import ( + _resolve_render_output_extension, AutoPopulateMaterialsResult, BBoxResolutionResult, MaterialResolutionResult, @@ -25,6 +32,7 @@ from app.domains.rendering.workflow_runtime_services import ( TemplateResolutionResult, auto_populate_materials_for_cad, build_order_line_render_invocation, + extract_template_input_overrides, prepare_order_line_render_context, resolve_cad_bbox, resolve_order_line_material_map, @@ -89,11 +97,13 @@ _STILL_TASK_KEYS = { "material_override", "render_engine", "resolution", + "template_inputs", } _TURNTABLE_TASK_KEYS = { "output_name", "engine", + "render_engine", "samples", "smooth_angle", "cycles_device", @@ -119,6 +129,8 @@ _TURNTABLE_TASK_KEYS = { "focal_length_mm", 
"sensor_width_mm", "material_override", + "template_inputs", + "duration_s", } _THUMBNAIL_TASK_KEYS = { @@ -144,13 +156,62 @@ _AUTHORITATIVE_RENDER_SETTING_KEYS = { "denoising_prefilter", "denoising_quality", "denoising_use_gpu", - "camera_orbit", "focal_length_mm", "sensor_width_mm", "bg_color", } +def _inspect_active_worker_queues(timeout: float = 1.0) -> set[str]: + from app.tasks.celery_app import celery_app + + try: + inspect_result = celery_app.control.inspect(timeout=timeout) + active_queues = inspect_result.active_queues() or {} + except Exception as exc: + logger.info("[WORKFLOW] Could not inspect active Celery queues: %s", exc) + return set() + + queue_names: set[str] = set() + for queues in active_queues.values(): + for queue in queues or []: + if not isinstance(queue, dict): + continue + name = queue.get("name") + if isinstance(name, str) and name.strip(): + queue_names.add(name.strip()) + return queue_names + + +def _resolve_shadow_render_queue( + *, + workflow_context: WorkflowContext, + node, + active_queue_names: set[str], +) -> str | None: + if workflow_context.execution_mode != "shadow": + return None + if node.step not in { + StepName.BLENDER_STILL, + StepName.BLENDER_TURNTABLE, + StepName.EXPORT_BLEND, + }: + return None + + preferred_queue = (settings.workflow_shadow_render_queue or "").strip() + if not preferred_queue or preferred_queue == "asset_pipeline": + return None + if preferred_queue in active_queue_names: + return preferred_queue + + logger.info( + "[WORKFLOW] Preferred shadow render queue %s unavailable for node %s; using default routing", + preferred_queue, + node.id, + ) + return None + + def _filter_graph_render_overrides(step: StepName, params: dict[str, Any]) -> dict[str, Any]: normalized = dict(params) use_custom_render_settings = bool(normalized.pop("use_custom_render_settings", False)) @@ -186,6 +247,8 @@ def find_unsupported_graph_nodes(workflow_context: WorkflowContext) -> list[str] def execute_graph_workflow( session: 
Session, workflow_context: WorkflowContext, + *, + dispatch_tasks: bool = True, ) -> WorkflowDispatchResult: if workflow_context.workflow_run_id is None: raise ValueError("workflow_context.workflow_run_id is required for graph execution") @@ -201,6 +264,12 @@ def execute_graph_workflow( task_ids: list[str] = [] node_task_ids: dict[str, str] = {} skipped_node_ids: list[str] = [] + task_specs: list[WorkflowTaskDispatchSpec] = [] + active_queue_names = ( + _inspect_active_worker_queues() + if workflow_context.execution_mode == "shadow" + else set() + ) for node in workflow_context.ordered_nodes: node_result = node_results.get(node.id) @@ -326,8 +395,6 @@ def execute_graph_workflow( skipped_node_ids.append(node.id) continue - from app.tasks.celery_app import celery_app - task_kwargs = _build_task_kwargs( session=session, workflow_context=workflow_context, @@ -335,12 +402,42 @@ def execute_graph_workflow( node=node, ) - result = celery_app.send_task( - task_name, - args=[workflow_context.context_id], - kwargs=task_kwargs, + target_queue = _resolve_shadow_render_queue( + workflow_context=workflow_context, + node=node, + active_queue_names=active_queue_names, ) - metadata["task_id"] = result.id + if dispatch_tasks: + from app.tasks.celery_app import celery_app + + if target_queue: + result = celery_app.send_task( + task_name, + args=[workflow_context.context_id], + kwargs=task_kwargs, + queue=target_queue, + ) + else: + result = celery_app.send_task( + task_name, + args=[workflow_context.context_id], + kwargs=task_kwargs, + ) + task_id = result.id + else: + task_id = str(uuid.uuid4()) + task_specs.append( + WorkflowTaskDispatchSpec( + node_id=node.id, + task_name=task_name, + args=[workflow_context.context_id], + kwargs=dict(task_kwargs), + task_id=task_id, + queue=target_queue, + ) + ) + metadata["task_id"] = task_id + metadata["task_queue"] = target_queue or "asset_pipeline" if definition is not None: metadata["execution_kind"] = definition.execution_kind 
metadata["attempt_count"] = 1 @@ -360,15 +457,15 @@ def execute_graph_workflow( node_result.duration_s = None state.node_outputs[node.id] = dict(metadata) session.flush() - task_ids.append(result.id) - node_task_ids[node.id] = result.id + task_ids.append(task_id) + node_task_ids[node.id] = task_id logger.info( "[WORKFLOW] Dispatched node %r (step=%s, mode=%s, run=%s) -> Celery task %s", node.id, node.step, workflow_context.execution_mode, workflow_context.workflow_run_id, - result.id, + task_id, ) continue @@ -397,6 +494,7 @@ def execute_graph_workflow( task_ids=task_ids, node_task_ids=node_task_ids, skipped_node_ids=skipped_node_ids, + task_specs=task_specs, ) @@ -466,8 +564,15 @@ def _serialize_template_result(result: TemplateResolutionResult) -> dict[str, An "material_map_count": len(result.material_map or {}), "use_materials": result.use_materials, "override_material": result.override_material, + "target_collection": result.target_collection, + "lighting_only": result.lighting_only, + "shadow_catcher": result.shadow_catcher, + "camera_orbit": result.camera_orbit, "category_key": result.category_key, "output_type_id": result.output_type_id, + "workflow_input_schema": result.workflow_input_schema, + "template_inputs": result.template_inputs, + "template_input_count": len(result.template_inputs or {}), } @@ -597,13 +702,17 @@ def _predict_task_output_metadata( order_line_id = str(state.setup.order_line.id) if node.step == StepName.BLENDER_STILL: - output_dir = step_path.parent / "renders" - output_filename = f"line_{order_line_id}.png" + output_extension = _resolve_render_output_extension(state.setup.order_line) + if output_extension not in {"png", "jpg", "webp"}: + output_extension = "png" + output_filename = f"line_{order_line_id}.{output_extension}" if output_name_suffix: - output_filename = f"line_{order_line_id}_{output_name_suffix}.png" + output_filename = f"line_{order_line_id}_{output_name_suffix}.{output_extension}" return { "artifact_role": 
"render_output", - "predicted_output_path": str(output_dir / output_filename), + "predicted_output_path": str( + build_order_line_step_render_path(step_path, order_line_id, output_filename) + ), "predicted_asset_type": "still", "publish_asset_enabled": bool(task_kwargs.get("publish_asset_enabled", True)), "graph_authoritative_output_enabled": bool( @@ -618,9 +727,10 @@ def _predict_task_output_metadata( output_filename = f"{step_path.stem}_production.blend" if output_name_suffix: output_filename = f"{step_path.stem}_production_{output_name_suffix}.blend" + predicted_output_path = str(build_order_line_export_path(order_line_id, output_filename)) return { "artifact_role": "blend_export", - "predicted_output_path": str(step_path.parent / output_filename), + "predicted_output_path": predicted_output_path, "predicted_asset_type": "blend_production", "publish_asset_enabled": bool(task_kwargs.get("publish_asset_enabled", True)), "graph_authoritative_output_enabled": bool( @@ -641,7 +751,9 @@ def _predict_task_output_metadata( if isinstance(output_dir, str) and output_dir.strip(): predicted_output_path = str(Path(output_dir) / f"{output_name}.mp4") else: - predicted_output_path = str(step_path.parent / "renders" / f"{output_name}.mp4") + predicted_output_path = str( + build_order_line_step_render_path(step_path, order_line_id, f"{output_name}.mp4") + ) return { "artifact_role": "turntable_output", "predicted_output_path": predicted_output_path, @@ -733,6 +845,30 @@ def _resolve_thumbnail_request( return None +def _normalize_turntable_task_kwargs(task_kwargs: dict[str, Any]) -> dict[str, Any]: + normalized = dict(task_kwargs) + raw_duration = normalized.get("duration_s") + if raw_duration in (None, ""): + return normalized + + try: + duration_s = float(raw_duration) + except (TypeError, ValueError): + return normalized + + try: + fps = int(float(normalized.get("fps", 0))) + except (TypeError, ValueError): + return normalized + + if duration_s <= 0 or fps <= 0: + return 
normalized + + normalized["duration_s"] = duration_s + normalized["frame_count"] = max(1, int(round(duration_s * fps))) + return normalized + + def _build_task_kwargs( *, session: Session, @@ -751,6 +887,7 @@ def _build_task_kwargs( template_context=state.template, position_context=resolve_render_position_context(session, state.setup.order_line), material_context=state.materials, + artifact_kind_override=_artifact_kind_override_for_step(node.step), ) render_defaults = render_invocation.task_defaults() @@ -774,6 +911,15 @@ def _build_task_kwargs( }.items() if key in _TURNTABLE_TASK_KEYS } + task_kwargs = _normalize_turntable_task_kwargs(task_kwargs) + if state.setup is not None and state.setup.is_ready and state.setup.cad_file is not None: + task_kwargs["output_dir"] = str( + build_order_line_step_render_path( + state.setup.cad_file.stored_path, + str(state.setup.order_line.id), + "turntable.mp4", + ).parent + ) elif node.step == StepName.THUMBNAIL_SAVE: thumbnail_request = _resolve_thumbnail_request(workflow_context, state, node.id) or {} task_kwargs = { @@ -787,7 +933,7 @@ def _build_task_kwargs( task_kwargs["workflow_run_id"] = str(workflow_context.workflow_run_id) task_kwargs["workflow_node_id"] = node.id - if workflow_context.execution_mode == "graph" and node.step in { + if workflow_context.execution_mode in {"graph", "shadow"} and node.step in { StepName.BLENDER_STILL, StepName.EXPORT_BLEND, StepName.BLENDER_TURNTABLE, @@ -798,19 +944,23 @@ def _build_task_kwargs( step=StepName.OUTPUT_SAVE, direction="downstream", ) - connected_notify_node_ids = _connected_node_ids_by_step( - workflow_context, - node_id=node.id, - step=StepName.NOTIFY, - direction="downstream", - ) if connected_output_node_ids: task_kwargs["publish_asset_enabled"] = False - task_kwargs["graph_authoritative_output_enabled"] = True task_kwargs["graph_output_node_ids"] = connected_output_node_ids - if connected_notify_node_ids: - task_kwargs["emit_legacy_notifications"] = True - 
task_kwargs["graph_notify_node_ids"] = connected_notify_node_ids + if workflow_context.execution_mode == "graph": + task_kwargs["graph_authoritative_output_enabled"] = True + else: + task_kwargs["observer_output_enabled"] = True + if workflow_context.execution_mode == "graph": + connected_notify_node_ids = _connected_node_ids_by_step( + workflow_context, + node_id=node.id, + step=StepName.NOTIFY, + direction="downstream", + ) + if connected_notify_node_ids: + task_kwargs["emit_legacy_notifications"] = True + task_kwargs["graph_notify_node_ids"] = connected_notify_node_ids if workflow_context.execution_mode == "shadow": task_kwargs["publish_asset_enabled"] = False task_kwargs["emit_events"] = False @@ -819,6 +969,16 @@ def _build_task_kwargs( return task_kwargs +def _artifact_kind_override_for_step(step: StepName) -> str | None: + if step == StepName.BLENDER_TURNTABLE: + return "turntable_video" + if step == StepName.BLENDER_STILL: + return "still_image" + if step == StepName.EXPORT_BLEND: + return "blend_asset" + return None + + def _execute_order_line_setup( *, session: Session, @@ -857,12 +1017,25 @@ def _execute_resolve_template( node_params: dict[str, Any], ) -> tuple[dict[str, Any], str, str | None]: del node - del workflow_context, node_params + del workflow_context if state.setup is None or not state.setup.is_ready: if state.setup is not None and state.setup.status == "skip": return _serialize_setup_result(state.setup), "skipped", state.setup.reason raise WorkflowGraphRuntimeError("resolve_template requires a ready order_line_setup result") - result = resolve_order_line_template_context(session, state.setup) + result = resolve_order_line_template_context( + session, + state.setup, + template_id_override=node_params.get("template_id_override"), + material_library_path_override=node_params.get("material_library_path"), + require_template=bool(node_params.get("require_template", False)), + disable_materials=bool(node_params.get("disable_materials", False)), + 
target_collection_override=node_params.get("target_collection"), + material_replace_mode=node_params.get("material_replace_mode"), + lighting_only_mode=node_params.get("lighting_only_mode"), + shadow_catcher_mode=node_params.get("shadow_catcher_mode"), + camera_orbit_mode=node_params.get("camera_orbit_mode"), + template_input_overrides=extract_template_input_overrides(node_params), + ) state.template = result return _serialize_template_result(result), "completed", None @@ -876,7 +1049,7 @@ def _execute_material_map_resolve( node_params: dict[str, Any], ) -> tuple[dict[str, Any], str, str | None]: del node - del session, workflow_context, node_params + del session, workflow_context if state.setup is None or not state.setup.is_ready: if state.setup is not None and state.setup.status == "skip": return _serialize_setup_result(state.setup), "skipped", state.setup.reason @@ -895,6 +1068,8 @@ def _execute_material_map_resolve( state.setup.materials_source, material_library=material_library, template=template, + material_override=node_params.get("material_override"), + disable_materials=bool(node_params.get("disable_materials", False)), ) state.materials = result return _serialize_material_result(result), "completed", None @@ -909,26 +1084,45 @@ def _execute_auto_populate_materials( node_params: dict[str, Any], ) -> tuple[dict[str, Any], str, str | None]: del node - del node_params if state.setup is None or state.setup.cad_file is None: if state.setup is not None and state.setup.status == "skip": return _serialize_setup_result(state.setup), "skipped", state.setup.reason raise WorkflowGraphRuntimeError("auto_populate_materials requires a resolved cad_file") shadow_mode = workflow_context.execution_mode == "shadow" + persist_updates = bool(node_params.get("persist_updates", not shadow_mode)) + if shadow_mode: + persist_updates = False + refresh_material_source = bool(node_params.get("refresh_material_source", True)) + include_populated_products = 
bool(node_params.get("include_populated_products", False)) if shadow_mode: result = auto_populate_materials_for_cad( session, str(state.setup.cad_file.id), persist_updates=False, + include_populated_products=include_populated_products, ) else: - result = auto_populate_materials_for_cad(session, str(state.setup.cad_file.id)) + result = auto_populate_materials_for_cad( + session, + str(state.setup.cad_file.id), + persist_updates=persist_updates, + include_populated_products=include_populated_products, + ) state.auto_populate = result - if not shadow_mode and state.setup.order_line is not None and state.setup.order_line.product is not None: + if ( + persist_updates + and refresh_material_source + and not shadow_mode + and state.setup.order_line is not None + and state.setup.order_line.product is not None + ): session.refresh(state.setup.order_line.product) state.setup.materials_source = state.setup.order_line.product.cad_part_materials or [] payload = _serialize_auto_populate_result(result) payload["shadow_mode"] = shadow_mode + payload["persist_updates"] = persist_updates + payload["refresh_material_source"] = refresh_material_source + payload["include_populated_products"] = include_populated_products return payload, "completed", None @@ -949,17 +1143,31 @@ def _execute_glb_bbox( step_path = state.setup.cad_file.stored_path glb_path = node_params.get("glb_path") - if glb_path is None and state.setup.glb_reuse_path is not None: + source_preference = str(node_params.get("source_preference") or "auto") + if glb_path is None and source_preference != "step_only" and state.setup.glb_reuse_path is not None: glb_path = str(state.setup.glb_reuse_path) - elif glb_path is None: + elif glb_path is None and source_preference != "step_only": step_file = Path(step_path) fallback_glb = step_file.parent / f"{step_file.stem}_thumbnail.glb" if fallback_glb.exists(): glb_path = str(fallback_glb) + if source_preference == "glb_only" and not glb_path: + payload = { + "bbox_data": None, + 
"has_bbox": False, + "source_kind": "none", + "step_path": step_path, + "glb_path": None, + "source_preference": source_preference, + } + return payload, "failed", "glb_only requested but no GLB artifact is available" + result = resolve_cad_bbox(step_path, glb_path=glb_path) state.bbox = result - return _serialize_bbox_result(result), "completed", None + payload = _serialize_bbox_result(result) + payload["source_preference"] = source_preference + return payload, "completed", None def _execute_resolve_step_path( @@ -1069,7 +1277,7 @@ def _execute_output_save( node, node_params: dict[str, Any], ) -> tuple[dict[str, Any], str, str | None]: - del session, node_params + del session if state.setup is None or state.setup.order_line is None: raise WorkflowGraphRuntimeError("output_save requires an order_line_setup result") @@ -1085,19 +1293,42 @@ def _execute_output_save( "shadow_mode": workflow_context.execution_mode == "shadow", } upstream_artifacts = _connected_upstream_artifacts(workflow_context, state, node.id) + expected_artifact_role = str(node_params.get("expected_artifact_role") or "").strip() or None + require_upstream_artifact = bool(node_params.get("require_upstream_artifact", False)) + if expected_artifact_role is not None: + upstream_artifacts = [ + artifact for artifact in upstream_artifacts if artifact.get("artifact_role") == expected_artifact_role + ] if workflow_context.execution_mode == "shadow": payload["publication_mode"] = "shadow_observer_only" elif any(artifact["publish_asset_enabled"] for artifact in upstream_artifacts): payload["publication_mode"] = "deferred_to_render_task" else: payload["publication_mode"] = "awaiting_graph_authoritative_save" + payload["expected_artifact_role"] = expected_artifact_role + payload["require_upstream_artifact"] = require_upstream_artifact if upstream_artifacts: payload["artifact_count"] = len(upstream_artifacts) payload["upstream_artifacts"] = upstream_artifacts + elif require_upstream_artifact: + 
payload["artifact_count"] = 0 + return payload, "failed", "No upstream render artifact is connected to this output node" if state.template is not None and state.template.template is not None: payload["template_name"] = state.template.template.name if state.materials is not None: payload["material_map_count"] = len(state.materials.material_map or {}) + + deferred_handoff_node_ids = [ + str(artifact.get("node_id")) + for artifact in upstream_artifacts + if artifact.get("task_id") + ] + if deferred_handoff_node_ids: + payload["handoff_state"] = "armed" + payload["handoff_node_ids"] = deferred_handoff_node_ids + payload["handoff_node_count"] = len(deferred_handoff_node_ids) + return payload, "pending", None + return payload, "completed", None @@ -1109,7 +1340,7 @@ def _execute_notify( node, node_params: dict[str, Any], ) -> tuple[dict[str, Any], str, str | None]: - del session, node_params + del session if state.setup is None or state.setup.order_line is None: raise WorkflowGraphRuntimeError("notify requires an order_line_setup result") @@ -1121,8 +1352,10 @@ def _execute_notify( payload: dict[str, Any] = { "order_line_id": str(state.setup.order_line.id), "shadow_mode": workflow_context.execution_mode == "shadow", - "channel": "audit_log", + "channel": str(node_params.get("channel") or "audit_log"), } + require_armed_render = bool(node_params.get("require_armed_render", False)) + payload["require_armed_render"] = require_armed_render if workflow_context.execution_mode == "shadow": payload["notification_mode"] = "shadow_suppressed" @@ -1136,12 +1369,15 @@ def _execute_notify( ] if not armed_node_ids: payload["notification_mode"] = "not_armed" + if require_armed_render: + return payload, "failed", "No graph render task is configured for notification handoff" return payload, "skipped", "No graph render task is configured for notification handoff" payload["notification_mode"] = "deferred_to_render_task" payload["armed_node_ids"] = armed_node_ids payload["armed_node_count"] 
= len(armed_node_ids) - return payload, "completed", None + payload["handoff_state"] = "armed" + return payload, "pending", None _BRIDGE_EXECUTORS = { diff --git a/backend/app/domains/rendering/workflow_node_registry.py b/backend/app/domains/rendering/workflow_node_registry.py index 304d505..b84405d 100644 --- a/backend/app/domains/rendering/workflow_node_registry.py +++ b/backend/app/domains/rendering/workflow_node_registry.py @@ -10,7 +10,17 @@ from app.core.process_steps import StepName StepCategory = Literal["input", "processing", "rendering", "output"] FieldType = Literal["number", "select", "boolean", "text"] ExecutionKind = Literal["native", "bridge"] -WorkflowNodeFamily = Literal["cad_file", "order_line"] +WorkflowNodeFamily = Literal["cad_file", "order_line", "shared"] +TextFormat = Literal[ + "plain", + "uuid", + "absolute_path", + "absolute_blend_path", + "absolute_glb_path", + "float_string", + "hex_color", + "safe_filename_suffix", +] class WorkflowNodeFieldOption(BaseModel): @@ -30,6 +40,9 @@ class WorkflowNodeFieldDefinition(BaseModel): step: float | None = None unit: str | None = None options: list[WorkflowNodeFieldOption] = [] + allow_blank: bool = True + max_length: int | None = None + text_format: TextFormat = "plain" class WorkflowNodeDefinition(BaseModel): @@ -65,6 +78,9 @@ def _field( step: float | None = None, unit: str | None = None, options: list[tuple[str | int | float | bool, str]] | None = None, + allow_blank: bool = True, + max_length: int | None = None, + text_format: TextFormat = "plain", ) -> WorkflowNodeFieldDefinition: return WorkflowNodeFieldDefinition( key=key, @@ -81,6 +97,9 @@ def _field( WorkflowNodeFieldOption(value=value, label=option_label) for value, option_label in (options or []) ], + allow_blank=allow_blank, + max_length=max_length, + text_format=text_format, ) @@ -169,7 +188,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [ "cad_file", "cad.export_glb", "processing", - "Convert STEP geometry into GLB for 
previews and downstream rendering.", + "Convert STEP geometry into GLB for previews and downstream rendering. Uses the system tessellation profile; this node does not expose per-node overrides yet.", node_type="processNode", icon="refresh-cw", execution_kind="bridge", @@ -181,10 +200,10 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [ _definition( StepName.GLB_BBOX, "Compute Bounding Box", - "order_line", + "shared", "geometry.compute_bbox", "processing", - "Compute the model bounding box from the exported GLB for framing decisions.", + "Compute the model bounding box from a prepared GLB artifact for framing decisions in either CAD-intake or order-line workflows.", node_type="processNode", icon="layers", execution_kind="bridge", @@ -196,10 +215,24 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [ description="Optional absolute path to a specific GLB file. Leave empty to reuse the prepared preview/export artifact automatically.", section="Inputs", default="", + text_format="absolute_glb_path", + ), + _field( + "source_preference", + "Source Preference", + "select", + description="Prefer a prepared GLB, force STEP fallback, or fail when no GLB artifact is available.", + section="Inputs", + default="auto", + options=[ + ("auto", "Auto"), + ("step_only", "STEP Only"), + ("glb_only", "GLB Only"), + ], ), ], - input_contract={"context": "order_line", "requires": ["glb_preview"]}, - output_contract={"context": "order_line", "provides": ["bbox"]}, + input_contract={"requires": ["glb_preview"]}, + output_contract={"provides": ["bbox"]}, artifact_roles_consumed=["glb_preview"], artifact_roles_produced=["bbox"], ), @@ -213,6 +246,25 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [ node_type="processNode", icon="layers", execution_kind="bridge", + defaults={"disable_materials": False, "material_override": ""}, + fields=[ + _field( + "disable_materials", + "Disable Materials", + "boolean", + description="Bypass template and alias-based material mapping for this 
node.", + section="Materials", + default=False, + ), + _field( + "material_override", + "Material Override", + "text", + description="Optional material name forced onto every detected part before rendering.", + section="Materials", + default="", + ), + ], input_contract={"context": "order_line", "requires": ["order_line_context", "cad_materials"]}, output_contract={"context": "order_line", "provides": ["material_assignments"]}, artifact_roles_consumed=["order_line_context", "cad_materials"], @@ -228,6 +280,37 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [ node_type="processNode", icon="layers", execution_kind="bridge", + defaults={ + "persist_updates": True, + "refresh_material_source": True, + "include_populated_products": False, + }, + fields=[ + _field( + "persist_updates", + "Persist Updates", + "boolean", + description="Write discovered part-material mappings back to product records in graph mode.", + section="Behavior", + default=True, + ), + _field( + "refresh_material_source", + "Refresh Material Source", + "boolean", + description="Reload product material mappings into the workflow context after persistence.", + section="Behavior", + default=True, + ), + _field( + "include_populated_products", + "Rewrite Populated Products", + "boolean", + description="Also rebuild material mappings for products that already have non-empty assignments.", + section="Behavior", + default=False, + ), + ], input_contract={"context": "order_line", "requires": ["cad_materials"]}, output_contract={"context": "order_line", "provides": ["material_catalog_updates"]}, artifact_roles_consumed=["cad_materials"], @@ -306,7 +389,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [ "cad_file", "media.save_thumbnail", "output", - "Persist the generated thumbnail back onto the CAD file record.", + "Persist the generated thumbnail back onto the CAD file record. 
Rendering settings are supplied by the connected upstream thumbnail request node.", node_type="outputNode", icon="download", execution_kind="bridge", @@ -360,6 +443,113 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [ node_type="processNode", icon="layers", execution_kind="bridge", + defaults={ + "template_id_override": "", + "material_library_path": "", + "require_template": False, + "disable_materials": False, + "target_collection": "", + "material_replace_mode": "inherit", + "lighting_only_mode": "inherit", + "shadow_catcher_mode": "inherit", + "camera_orbit_mode": "inherit", + }, + fields=[ + _field( + "template_id_override", + "Template ID Override", + "text", + description="Optional render-template UUID to force for this workflow node instead of category/output-type resolution.", + section="Template", + default="", + text_format="uuid", + ), + _field( + "require_template", + "Require Template", + "boolean", + description="Fail this node when no active render template can be resolved.", + section="Template", + default=False, + ), + _field( + "material_library_path", + "Material Library Path", + "text", + description="Optional absolute .blend path used instead of the active asset library.", + section="Materials", + default="", + text_format="absolute_blend_path", + ), + _field( + "disable_materials", + "Disable Materials", + "boolean", + description="Resolve the template but skip material-map generation for downstream nodes.", + section="Materials", + default=False, + ), + _field( + "target_collection", + "Target Collection Override", + "text", + description="Optional collection name override applied after template resolution. 
Leave blank to inherit from the template.", + section="Template Overrides", + default="", + ), + _field( + "material_replace_mode", + "Material Replace", + "select", + description="Override whether template material replacement is active for downstream nodes.", + section="Template Overrides", + default="inherit", + options=[ + ("inherit", "Inherit Template"), + ("enabled", "Force Enabled"), + ("disabled", "Force Disabled"), + ], + ), + _field( + "lighting_only_mode", + "Lighting Only", + "select", + description="Override the template lighting-only flag for downstream nodes.", + section="Template Overrides", + default="inherit", + options=[ + ("inherit", "Inherit Template"), + ("enabled", "Force Enabled"), + ("disabled", "Force Disabled"), + ], + ), + _field( + "shadow_catcher_mode", + "Shadow Catcher", + "select", + description="Override the template shadow-catcher flag for downstream nodes.", + section="Template Overrides", + default="inherit", + options=[ + ("inherit", "Inherit Template"), + ("enabled", "Force Enabled"), + ("disabled", "Force Disabled"), + ], + ), + _field( + "camera_orbit_mode", + "Camera Orbit", + "select", + description="Override whether turntable renders orbit the camera or rotate the object.", + section="Template Overrides", + default="inherit", + options=[ + ("inherit", "Inherit Template"), + ("enabled", "Force Camera Orbit"), + ("disabled", "Force Object Rotation"), + ], + ), + ], input_contract={"context": "order_line", "requires": ["order_line_context"]}, output_contract={ "context": "order_line", @@ -372,6 +562,8 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [ "use_materials", "override_material", "category_key", + "workflow_input_schema", + "template_inputs", ], }, artifact_roles_consumed=["order_line_context"], @@ -384,6 +576,8 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [ "use_materials", "override_material", "category_key", + "workflow_input_schema", + "template_inputs", ], ), _definition( @@ -420,7 +614,7 @@ 
_NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [ "select", description="Force CPU, GPU, or automatic device selection.", section="Render", - default="auto", + default="gpu", options=_CYCLES_DEVICE_OPTIONS, ), _field( @@ -451,6 +645,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [ description="Optional Cycles adaptive sampling threshold, for example 0.01.", section="Denoising", default="", + text_format="float_string", ), _field( "denoiser", @@ -606,7 +801,11 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [ defaults={ "use_custom_render_settings": False, "fps": 24, + "frame_count": 120, "duration_s": 5, + "turntable_degrees": 360, + "turntable_axis": "world_z", + "camera_orbit": True, "rotation_z": 0, }, fields=[ @@ -664,8 +863,20 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [ description="Optional hex color used during FFmpeg compositing, for example #FFFFFF.", section="Output", default="", + text_format="hex_color", ), _field("fps", "FPS", "number", section="Animation", default=24, min=1, max=120, step=1), + _field( + "frame_count", + "Frame Count", + "number", + description="Explicit total frame count for the rendered turntable clip.", + section="Animation", + default=120, + min=1, + max=7200, + step=1, + ), _field( "duration_s", "Duration", @@ -818,6 +1029,32 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [ node_type="outputNode", icon="download", execution_kind="bridge", + defaults={"expected_artifact_role": "", "require_upstream_artifact": False}, + fields=[ + _field( + "expected_artifact_role", + "Expected Artifact Role", + "select", + description="Restrict this node to a specific upstream render artifact type.", + section="Output", + default="", + options=[ + ("", "Any Connected Artifact"), + ("render_output", "Still Output"), + ("turntable_output", "Turntable Output"), + ("blend_export", "Blend Export"), + ("thumbnail_output", "Thumbnail Output"), + ], + ), + _field( + "require_upstream_artifact", + "Require Upstream 
Artifact", + "boolean", + description="Fail the node when no matching upstream artifact is connected.", + section="Output", + default=False, + ), + ], input_contract={ "context": "order_line", "requires": ["order_line_context"], @@ -833,7 +1070,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [ "order_line", "media.export_blend", "output", - "Persist the generated .blend file as a downloadable media asset.", + "Persist the generated .blend file as a downloadable media asset. Only the optional filename suffix is workflow-configurable today.", node_type="outputNode", icon="download", defaults={"output_name_suffix": ""}, @@ -845,6 +1082,8 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [ description="Optional suffix appended to the generated `.blend` filename.", section="Output", default="", + text_format="safe_filename_suffix", + max_length=64, ), ], execution_kind="bridge", @@ -859,7 +1098,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [ "cad_file", "cad.generate_stl_cache", "processing", - "Generate and cache STL derivatives next to the STEP source.", + "Compatibility node for legacy CAD flows. 
HartOMat graph execution uses direct OCC/GLB export instead, so this node intentionally performs no per-node-configurable cache generation.", node_type="convertNode", icon="refresh-cw", execution_kind="bridge", @@ -877,7 +1116,7 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [ "Emit a user-visible notification for workflow completion or failure.", node_type="outputNode", icon="bell", - defaults={"channel": "audit_log"}, + defaults={"channel": "audit_log", "require_armed_render": False}, fields=[ _field( "channel", @@ -888,6 +1127,14 @@ _NODE_DEFINITIONS: list[WorkflowNodeDefinition] = [ default="audit_log", options=[("audit_log", "Audit Log")], ), + _field( + "require_armed_render", + "Require Armed Render", + "boolean", + description="Fail this node when no upstream graph render task is configured to hand off notifications.", + section="Notification", + default=False, + ), ], execution_kind="bridge", input_contract={ diff --git a/backend/app/domains/rendering/workflow_router.py b/backend/app/domains/rendering/workflow_router.py index 778b167..11e744a 100644 --- a/backend/app/domains/rendering/workflow_router.py +++ b/backend/app/domains/rendering/workflow_router.py @@ -1,31 +1,64 @@ """Workflow definition CRUD API.""" +from pathlib import Path import uuid +from datetime import datetime from fastapi import APIRouter, Depends, HTTPException, Query from pydantic import BaseModel, ValidationError from sqlalchemy import select -from sqlalchemy.orm import selectinload +from sqlalchemy.orm import Session, selectinload +from sqlalchemy.orm.attributes import flag_modified from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import or_ +from app.core.process_steps import StepName from app.database import get_db from app.domains.auth.models import User -from app.utils.auth import get_current_user, require_global_admin, require_admin_or_pm, require_pm_or_above -from app.domains.rendering.models import WorkflowDefinition, WorkflowRun +from 
app.domains.orders.models import Order, OrderLine, OrderStatus +from app.domains.products.models import CadFile +from app.domains.rendering.models import ( + OutputType, + WorkflowDefinition, + WorkflowRun, +) from app.domains.rendering.schemas import ( WorkflowDefinitionCreate, WorkflowDefinitionUpdate, + WorkflowDraftDispatchRequest, WorkflowDefinitionOut, + WorkflowDraftPreflightRequest, + WorkflowPreflightIssueOut, + WorkflowPreflightNodeOut, + WorkflowPreflightOut, + WorkflowOrderLineContextGroupOut, + WorkflowOrderLineContextOptionOut, + WorkflowRolloutLinkedOutputTypeOut, + WorkflowRolloutLatestRunOut, + WorkflowRolloutSummaryOut, WorkflowRunComparisonOut, WorkflowRunOut, ) from app.domains.rendering.workflow_comparison_service import build_workflow_run_comparison -from app.domains.rendering.workflow_config_utils import canonicalize_workflow_config +from app.domains.rendering.workflow_config_utils import canonicalize_workflow_config, get_workflow_execution_mode +from app.domains.rendering.workflow_graph_runtime import find_unsupported_graph_nodes from app.domains.rendering.workflow_node_registry import ( StepCategory, WorkflowNodeDefinition, + get_node_definition, list_node_definitions, ) from app.domains.rendering.workflow_schema import WorkflowConfig +from app.domains.rendering.workflow_runtime_services import ( + prepare_order_line_render_context, + resolve_cad_bbox, + resolve_order_line_template_context, +) +from app.domains.rendering.output_type_contracts import ( + derive_supported_artifact_kinds_from_workflow_config, + infer_workflow_family_from_config, + validate_output_type_contract, +) +from app.utils.auth import require_global_admin, require_admin_or_pm, require_pm_or_above class PipelineStepOut(BaseModel): @@ -45,18 +78,654 @@ class NodeDefinitionsResponse(BaseModel): router = APIRouter(prefix="/api/workflows", tags=["workflows"]) +_ORDER_LINE_RUNTIME_STEPS = { + StepName.ORDER_LINE_SETUP, + StepName.RESOLVE_TEMPLATE, + 
StepName.MATERIAL_MAP_RESOLVE, + StepName.AUTO_POPULATE_MATERIALS, + StepName.GLB_BBOX, + StepName.BLENDER_STILL, + StepName.BLENDER_TURNTABLE, + StepName.OUTPUT_SAVE, + StepName.EXPORT_BLEND, + StepName.NOTIFY, +} -def _workflow_to_out(wf: WorkflowDefinition) -> WorkflowDefinitionOut: +_CAD_FILE_ENTRY_STEPS = { + StepName.RESOLVE_STEP_PATH, + StepName.OCC_OBJECT_EXTRACT, + StepName.OCC_GLB_EXPORT, + StepName.STL_CACHE_GENERATE, + StepName.BLENDER_RENDER, + StepName.THREEJS_RENDER, + StepName.THUMBNAIL_SAVE, +} + +_ORDER_LINE_SETUP_REQUIRED_STEPS = { + StepName.RESOLVE_TEMPLATE, + StepName.MATERIAL_MAP_RESOLVE, + StepName.AUTO_POPULATE_MATERIALS, + StepName.GLB_BBOX, + StepName.BLENDER_STILL, + StepName.BLENDER_TURNTABLE, + StepName.OUTPUT_SAVE, + StepName.EXPORT_BLEND, + StepName.NOTIFY, +} + +_RESOLVE_TEMPLATE_RECOMMENDED_STEPS = { + StepName.BLENDER_STILL, + StepName.BLENDER_TURNTABLE, + StepName.EXPORT_BLEND, + StepName.NOTIFY, +} + +_WORKFLOW_ROLLOUT_DISPLAY_ORDER = ("legacy_only", "shadow", "graph") + + +def _sort_rollout_modes(modes: set[str]) -> list[str]: + ordered = [mode for mode in _WORKFLOW_ROLLOUT_DISPLAY_ORDER if mode in modes] + extras = sorted(mode for mode in modes if mode not in _WORKFLOW_ROLLOUT_DISPLAY_ORDER) + return [*ordered, *extras] + + +def _build_latest_run_out(run: WorkflowRun | None) -> WorkflowRolloutLatestRunOut | None: + if run is None: + return None + return WorkflowRolloutLatestRunOut( + workflow_run_id=run.id, + execution_mode=run.execution_mode, + status=run.status, + created_at=run.created_at, + completed_at=run.completed_at, + ) + + +def _collect_output_type_contract_reasons( + *, + output_types: list[OutputType], + workflow_family: str | None, + supported_artifact_kinds: tuple[str, ...], +) -> list[str]: + reasons: list[str] = [] + for output_type in output_types: + try: + validate_output_type_contract( + workflow_family=output_type.workflow_family, + artifact_kind=output_type.artifact_kind, + 
output_format=output_type.output_format, + is_animation=output_type.is_animation, + ) + except ValueError as exc: + reasons.append(f"{output_type.name}: {exc}") + continue + + if workflow_family == "mixed": + reasons.append(f"{output_type.name}: mixed-family workflows cannot be promoted.") + continue + if workflow_family is not None and workflow_family != output_type.workflow_family: + reasons.append( + f"{output_type.name}: workflow family '{workflow_family}' does not match output type family '{output_type.workflow_family}'." + ) + if output_type.artifact_kind not in supported_artifact_kinds: + supported = ", ".join(supported_artifact_kinds) if supported_artifact_kinds else "none" + reasons.append( + f"{output_type.name}: workflow does not support artifact '{output_type.artifact_kind}' (supports {supported})." + ) + return reasons + + +async def _build_rollout_summary( + db: AsyncSession, + wf: WorkflowDefinition, + *, + workflow_family: str | None, + supported_artifact_kinds: tuple[str, ...], +) -> WorkflowRolloutSummaryOut: + output_type_filters = [OutputType.workflow_definition_id == wf.id] + if wf.output_type_id is not None: + output_type_filters.append(OutputType.id == wf.output_type_id) + + output_type_result = await db.execute( + select(OutputType) + .where(or_(*output_type_filters)) + .order_by(OutputType.is_active.desc(), OutputType.sort_order, OutputType.name) + ) + linked_output_types = list(output_type_result.scalars().unique().all()) + rollout_modes = _sort_rollout_modes( + { + (getattr(output_type, "workflow_rollout_mode", None) or "legacy_only").strip().lower() + for output_type in linked_output_types + } + ) + blocking_reasons = _collect_output_type_contract_reasons( + output_types=linked_output_types, + workflow_family=workflow_family, + supported_artifact_kinds=supported_artifact_kinds, + ) + + latest_run_result = await db.execute( + select(WorkflowRun) + .where(WorkflowRun.workflow_def_id == wf.id) + .order_by(WorkflowRun.created_at.desc()) + 
.limit(1) + ) + latest_run = latest_run_result.scalar_one_or_none() + + latest_shadow_run_result = await db.execute( + select(WorkflowRun) + .where( + WorkflowRun.workflow_def_id == wf.id, + WorkflowRun.execution_mode == "shadow", + ) + .order_by(WorkflowRun.created_at.desc()) + .limit(1) + ) + latest_shadow_run = latest_shadow_run_result.scalar_one_or_none() + latest_comparison = ( + await build_workflow_run_comparison(db, latest_shadow_run.id) + if latest_shadow_run is not None + else None + ) + + return WorkflowRolloutSummaryOut( + linked_output_type_count=len(linked_output_types), + active_output_type_count=sum(1 for output_type in linked_output_types if output_type.is_active), + linked_output_type_names=[output_type.name for output_type in linked_output_types], + linked_output_types=[ + WorkflowRolloutLinkedOutputTypeOut( + id=output_type.id, + name=output_type.name, + is_active=bool(output_type.is_active), + artifact_kind=(output_type.artifact_kind or "custom").strip().lower(), + workflow_rollout_mode=( + getattr(output_type, "workflow_rollout_mode", None) or "legacy_only" + ).strip().lower(), + ) + for output_type in linked_output_types + ], + rollout_modes=rollout_modes, + has_blocking_contracts=bool(blocking_reasons), + blocking_reasons=blocking_reasons, + latest_run=_build_latest_run_out(latest_run), + latest_shadow_run=_build_latest_run_out(latest_shadow_run), + latest_rollout_gate_verdict=( + latest_comparison.rollout_gate_verdict if latest_comparison is not None else None + ), + latest_rollout_ready=( + latest_comparison.workflow_rollout_ready if latest_comparison is not None else None + ), + latest_rollout_status=( + latest_comparison.workflow_rollout_status if latest_comparison is not None else None + ), + latest_rollout_reasons=( + latest_comparison.rollout_reasons if latest_comparison is not None else [] + ), + ) + + +async def _workflow_to_out(db: AsyncSession, wf: WorkflowDefinition) -> WorkflowDefinitionOut: + canonical_config = 
canonicalize_workflow_config(wf.config) + workflow_family = infer_workflow_family_from_config(canonical_config) + supported_artifact_kinds = tuple( + derive_supported_artifact_kinds_from_workflow_config(canonical_config) + ) return WorkflowDefinitionOut( id=wf.id, name=wf.name, output_type_id=wf.output_type_id, - config=canonicalize_workflow_config(wf.config), + config=canonical_config, + family=workflow_family, + supported_artifact_kinds=list(supported_artifact_kinds), + rollout_summary=await _build_rollout_summary( + db, + wf, + workflow_family=workflow_family, + supported_artifact_kinds=supported_artifact_kinds, + ), is_active=wf.is_active, created_at=wf.created_at, ) +def _format_order_line_context_label(order: Order, line: OrderLine) -> tuple[str, str]: + product_label = "Unnamed product" + if line.product is not None: + product_label = ( + (getattr(line.product, "name", None) or "").strip() + or (getattr(line.product, "pim_id", None) or "").strip() + or product_label + ) + + output_label = ( + (getattr(line.output_type, "name", None) or "").strip() + if line.output_type is not None + else "Tracking only" + ) + detail_bits = [output_label or "Tracking only"] + + image_number = (line.gewuenschte_bildnummer or "").strip() + if image_number: + detail_bits.append(f"Image {image_number}") + + render_position_name = ( + (getattr(line.render_position, "name", None) or "").strip() + if getattr(line, "render_position", None) is not None + else "" + ) + if render_position_name: + detail_bits.append(render_position_name) + + return ( + f"{product_label} · {' · '.join(detail_bits)}", + f"{order.order_number} · {line.render_status or 'pending'}", + ) + + +def _issue( + severity: str, + code: str, + message: str, + *, + node_id: str | None = None, + step: str | None = None, +) -> WorkflowPreflightIssueOut: + return WorkflowPreflightIssueOut( + severity=severity, + code=code, + message=message, + node_id=node_id, + step=step, + ) + + +def _node_status(issues: 
list[WorkflowPreflightIssueOut], *, supported: bool) -> str: + if not supported: + return "unsupported" + if any(issue.severity == "error" for issue in issues): + return "error" + if any(issue.severity == "warning" for issue in issues): + return "warning" + return "ready" + + +def _infer_expected_context_kind(ordered_nodes: list) -> str: + if any(node.step in _ORDER_LINE_RUNTIME_STEPS for node in ordered_nodes): + return "order_line" + return "cad_file" + + +def _build_workflow_preflight( + session: Session, + wf: WorkflowDefinition, + *, + context_id: str, +) -> WorkflowPreflightOut: + return _build_workflow_preflight_for_config( + session, + workflow_id=wf.id, + workflow_config=wf.config, + context_id=context_id, + ) + + +def _build_workflow_preflight_for_config( + session: Session, + *, + workflow_id: uuid.UUID | None, + workflow_config: dict, + context_id: str, +) -> WorkflowPreflightOut: + from pydantic import ValidationError as _ValidationError + from app.domains.rendering.workflow_executor import ( + prepare_workflow_context, + submit_prepared_workflow_tasks, + ) + + normalized_config = canonicalize_workflow_config(workflow_config) + try: + workflow_context = prepare_workflow_context( + normalized_config, + context_id=context_id, + execution_mode="graph", + ) + except _ValidationError as exc: + raise HTTPException(status_code=422, detail=f"Invalid workflow config: {exc.errors()}") + except ValueError as exc: + raise HTTPException(status_code=422, detail=str(exc)) + + execution_mode = get_workflow_execution_mode(normalized_config, default="legacy") + unsupported_node_ids = find_unsupported_graph_nodes(workflow_context) + expected_context_kind = _infer_expected_context_kind(workflow_context.ordered_nodes) + issues: list[WorkflowPreflightIssueOut] = [] + + parsed_context_id: uuid.UUID | None = None + try: + parsed_context_id = uuid.UUID(context_id) + except ValueError: + issues.append(_issue("error", "invalid_context_id", "Context ID must be a valid UUID.")) + 
+ order_line: OrderLine | None = None + cad_file: CadFile | None = None + context_kind: str | None = None + resolved_order_line_id: uuid.UUID | None = None + resolved_cad_file_id: uuid.UUID | None = None + setup = None + template_resolution = None + bbox_resolution = None + + if parsed_context_id is not None: + order_line = session.get(OrderLine, parsed_context_id) + if order_line is not None: + context_kind = "order_line" + resolved_order_line_id = order_line.id + else: + cad_file = session.get(CadFile, parsed_context_id) + if cad_file is not None: + context_kind = "cad_file" + resolved_cad_file_id = cad_file.id + + if context_kind is None and parsed_context_id is not None: + issues.append( + _issue( + "error", + "context_not_found", + "Context ID did not match an existing order line or CAD file.", + ) + ) + + if context_kind is not None and context_kind != expected_context_kind: + issues.append( + _issue( + "error", + "context_kind_mismatch", + f"Workflow expects a {expected_context_kind} context, but the supplied ID resolves to {context_kind}.", + ) + ) + + if context_kind == "order_line" and order_line is not None: + setup = prepare_order_line_render_context(session, str(order_line.id), persist_state=False) + if setup.order_line is not None: + resolved_order_line_id = setup.order_line.id + if setup.cad_file is not None: + resolved_cad_file_id = setup.cad_file.id + + if setup.status == "missing": + issues.append(_issue("error", "order_line_missing", "Order line could not be loaded.")) + elif setup.status == "failed": + issues.append( + _issue( + "error", + "order_line_not_renderable", + f"Order line is not renderable: {setup.reason or 'unknown reason'}.", + ) + ) + elif setup.status == "skip": + issues.append( + _issue( + "error", + "order_line_skipped", + f"Order line would be skipped by the legacy setup step: {setup.reason or 'skip'}.", + ) + ) + + if setup.is_ready: + template_resolution = resolve_order_line_template_context(session, setup) + if any(node.step 
== StepName.GLB_BBOX for node in workflow_context.ordered_nodes): + glb_path = str(setup.glb_reuse_path) if setup.glb_reuse_path is not None else None + if glb_path is None and setup.cad_file is not None: + step_file = Path(setup.cad_file.stored_path) + fallback_glb = step_file.parent / f"{step_file.stem}_thumbnail.glb" + if fallback_glb.exists(): + glb_path = str(fallback_glb) + bbox_resolution = resolve_cad_bbox( + setup.cad_file.stored_path, + glb_path=glb_path, + ) + elif context_kind == "cad_file" and cad_file is not None: + resolved_cad_file_id = cad_file.id + step_path = Path(cad_file.stored_path) + if not cad_file.stored_path: + issues.append(_issue("error", "cad_file_missing_path", "CAD file has no stored STEP path.")) + elif not step_path.exists(): + issues.append( + _issue( + "error", + "cad_file_step_missing", + f"STEP source path does not exist: {cad_file.stored_path}", + ) + ) + + node_indices = {node.id: index for index, node in enumerate(workflow_context.ordered_nodes)} + nodes_out: list[WorkflowPreflightNodeOut] = [] + blocking_issue_found = any(issue.severity == "error" for issue in issues) + + for node in workflow_context.ordered_nodes: + definition = get_node_definition(node.step) + supported = node.id not in unsupported_node_ids + node_issues: list[WorkflowPreflightIssueOut] = [] + + if not supported: + node_issues.append( + _issue( + "error", + "unsupported_node", + f"Graph runtime has no executable implementation for step '{node.step.value}'.", + node_id=node.id, + step=node.step.value, + ) + ) + + if expected_context_kind == "order_line" and node.step in _CAD_FILE_ENTRY_STEPS: + node_issues.append( + _issue( + "error", + "cad_file_only_node", + "This node currently requires a direct cad_file entry context and cannot run inside an order-line graph.", + node_id=node.id, + step=node.step.value, + ) + ) + + if node.step in _ORDER_LINE_SETUP_REQUIRED_STEPS: + setup_indices = [ + node_indices[candidate.id] + for candidate in 
workflow_context.ordered_nodes + if candidate.step == StepName.ORDER_LINE_SETUP + ] + has_prior_setup = any(index < node_indices[node.id] for index in setup_indices) + if node.step != StepName.ORDER_LINE_SETUP and not has_prior_setup: + node_issues.append( + _issue( + "error", + "missing_order_line_setup", + "This node requires an earlier order_line_setup node in the graph.", + node_id=node.id, + step=node.step.value, + ) + ) + + if context_kind != "order_line": + node_issues.append( + _issue( + "error", + "invalid_context_kind", + "This node requires an order_line context.", + node_id=node.id, + step=node.step.value, + ) + ) + elif setup is None or not setup.is_ready: + reason = setup.reason if setup is not None else "order_line_setup_not_executed" + node_issues.append( + _issue( + "error", + "setup_not_ready", + f"Order-line setup is not ready for this node: {reason}.", + node_id=node.id, + step=node.step.value, + ) + ) + + if node.step in _RESOLVE_TEMPLATE_RECOMMENDED_STEPS: + template_indices = [ + node_indices[candidate.id] + for candidate in workflow_context.ordered_nodes + if candidate.step == StepName.RESOLVE_TEMPLATE + ] + has_prior_template = any(index < node_indices[node.id] for index in template_indices) + if not has_prior_template: + node_issues.append( + _issue( + "warning", + "missing_resolve_template", + "No earlier resolve_template node found. Render defaults may drift from legacy behavior.", + node_id=node.id, + step=node.step.value, + ) + ) + + if node.step == StepName.RESOLVE_TEMPLATE and template_resolution is not None and template_resolution.template is None: + node_issues.append( + _issue( + "warning", + "template_missing", + "No render template matched this order line. 
The graph would fall back to factory settings.", + node_id=node.id, + step=node.step.value, + ) + ) + + if node.step == StepName.GLB_BBOX and bbox_resolution is not None and not bbox_resolution.has_bbox: + node_issues.append( + _issue( + "warning", + "bbox_unresolved", + "Bounding box data could not be derived from the available GLB or STEP source.", + node_id=node.id, + step=node.step.value, + ) + ) + + if node.step in _CAD_FILE_ENTRY_STEPS and context_kind == "cad_file" and cad_file is not None: + if not cad_file.stored_path: + node_issues.append( + _issue( + "error", + "cad_file_missing_path", + "CAD file has no stored STEP path.", + node_id=node.id, + step=node.step.value, + ) + ) + elif not Path(cad_file.stored_path).exists(): + node_issues.append( + _issue( + "error", + "cad_file_step_missing", + f"STEP source path does not exist: {cad_file.stored_path}", + node_id=node.id, + step=node.step.value, + ) + ) + + status = _node_status(node_issues, supported=supported) + if status in {"error", "unsupported"}: + blocking_issue_found = True + + nodes_out.append( + WorkflowPreflightNodeOut( + node_id=node.id, + step=node.step.value, + label=node.ui.label if node.ui is not None else None, + execution_kind=definition.execution_kind if definition is not None else "bridge", + supported=supported, + status=status, + issues=node_issues, + ) + ) + + graph_dispatch_allowed = not blocking_issue_found + warning_count = sum(1 for node in nodes_out if node.status == "warning") + sum( + 1 for issue in issues if issue.severity == "warning" + ) + + if graph_dispatch_allowed: + summary = ( + "Preflight passed with warnings." + if warning_count > 0 + else "Graph runtime is ready for this context." + ) + else: + summary = "Preflight found blocking issues that would prevent a safe graph dispatch." 
+ + return WorkflowPreflightOut( + workflow_id=workflow_id, + context_id=context_id, + context_kind=context_kind, + expected_context_kind=expected_context_kind, + execution_mode=execution_mode, + graph_dispatch_allowed=graph_dispatch_allowed, + summary=summary, + resolved_order_line_id=resolved_order_line_id, + resolved_cad_file_id=resolved_cad_file_id, + unsupported_node_ids=unsupported_node_ids, + issues=issues, + nodes=nodes_out, + ) + + +@router.get("/{workflow_id}/preflight", response_model=WorkflowPreflightOut) +async def preflight_workflow( + workflow_id: uuid.UUID, + context_id: str = Query( + ..., + description=( + "UUID of the entity to validate against the graph runtime. " + "For order-line workflows this is an order_line_id; " + "for STEP/thumbnail workflows this is a cad_file_id." + ), + ), + _user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + result = await db.execute( + select(WorkflowDefinition).where(WorkflowDefinition.id == workflow_id) + ) + wf = result.scalar_one_or_none() + if not wf: + raise HTTPException(status_code=404, detail="Workflow definition not found") + if not wf.config: + raise HTTPException(status_code=400, detail="Workflow has no config") + + return await db.run_sync( + lambda sync_session: _build_workflow_preflight( + sync_session, + wf, + context_id=context_id, + ) + ) + + +@router.post("/preflight", response_model=WorkflowPreflightOut) +async def preflight_workflow_draft( + body: WorkflowDraftPreflightRequest, + _user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + if not body.config: + raise HTTPException(status_code=400, detail="Workflow has no config") + + return await db.run_sync( + lambda sync_session: _build_workflow_preflight_for_config( + sync_session, + workflow_id=body.workflow_id, + workflow_config=body.config, + context_id=body.context_id, + ) + ) + + @router.get("/node-definitions", response_model=NodeDefinitionsResponse) async def 
get_node_definitions( _user: User = Depends(require_admin_or_pm), @@ -64,6 +733,56 @@ async def get_node_definitions( return NodeDefinitionsResponse(definitions=list_node_definitions()) +@router.get("/contexts/order-lines", response_model=list[WorkflowOrderLineContextGroupOut]) +async def list_workflow_order_line_contexts( + limit: int = Query(50, ge=1, le=200), + user: User = Depends(require_admin_or_pm), + db: AsyncSession = Depends(get_db), +): + """Return a lightweight order-line picker model for the workflow editor.""" + order_query = ( + select(Order) + .where(Order.lines.any()) + .options( + selectinload(Order.lines).selectinload(OrderLine.product), + selectinload(Order.lines).selectinload(OrderLine.output_type), + selectinload(Order.lines).selectinload(OrderLine.render_position), + ) + .order_by(Order.updated_at.desc()) + .limit(limit) + ) + + if user.role.value not in {"global_admin", "tenant_admin", "admin", "project_manager"}: + order_query = order_query.where(Order.created_by == user.id) + + result = await db.execute(order_query) + orders = result.scalars().all() + + groups: list[WorkflowOrderLineContextGroupOut] = [] + for order in orders: + options: list[WorkflowOrderLineContextOptionOut] = [] + for line in order.lines: + label, meta = _format_order_line_context_label(order, line) + options.append( + WorkflowOrderLineContextOptionOut( + value=line.id, + label=label, + meta=meta, + ) + ) + + if options: + groups.append( + WorkflowOrderLineContextGroupOut( + order_id=order.id, + order_label=order.order_number, + options=options, + ) + ) + + return groups + + @router.get("/pipeline-steps", response_model=PipelineStepsResponse) async def get_pipeline_steps( _user: User = Depends(require_admin_or_pm), @@ -88,7 +807,7 @@ async def list_workflows( result = await db.execute( select(WorkflowDefinition).order_by(WorkflowDefinition.created_at) ) - return [_workflow_to_out(wf) for wf in result.scalars().all()] + return [await _workflow_to_out(db, wf) for wf in 
result.scalars().all()] @router.get("/{workflow_id}", response_model=WorkflowDefinitionOut) @@ -103,7 +822,7 @@ async def get_workflow( wf = result.scalar_one_or_none() if not wf: raise HTTPException(status_code=404, detail="Workflow definition not found") - return _workflow_to_out(wf) + return await _workflow_to_out(db, wf) @router.post("", response_model=WorkflowDefinitionOut, status_code=201) @@ -128,7 +847,7 @@ async def create_workflow( db.add(wf) await db.commit() await db.refresh(wf) - return _workflow_to_out(wf) + return await _workflow_to_out(db, wf) @router.put("/{workflow_id}", response_model=WorkflowDefinitionOut) @@ -155,12 +874,13 @@ async def update_workflow( detail = exc.errors() if isinstance(exc, ValidationError) else str(exc) raise HTTPException(status_code=422, detail=f"Invalid workflow config: {detail}") wf.config = normalized_config + flag_modified(wf, "config") if body.is_active is not None: wf.is_active = body.is_active await db.commit() await db.refresh(wf) - return _workflow_to_out(wf) + return await _workflow_to_out(db, wf) @router.delete("/{workflow_id}", status_code=204) @@ -221,6 +941,139 @@ class WorkflowDispatchResponse(BaseModel): task_ids: list[str] +async def _dispatch_workflow_for_config( + db: AsyncSession, + *, + workflow_id: uuid.UUID | None, + workflow_config: dict, + context_id: str, +) -> WorkflowDispatchResponse: + from app.domains.rendering.workflow_executor import prepare_workflow_context + from app.domains.rendering.workflow_graph_runtime import execute_graph_workflow + from app.domains.rendering.workflow_run_service import ( + create_workflow_run, + mark_workflow_run_failed, + ) + + try: + normalized_config = canonicalize_workflow_config(workflow_config) + workflow_context = prepare_workflow_context( + normalized_config, + context_id=context_id, + execution_mode="graph", + ) + except ValidationError as exc: + raise HTTPException(status_code=422, detail=f"Invalid workflow config: {exc.errors()}") + except ValueError as 
exc: + raise HTTPException(status_code=422, detail=str(exc)) + + resolved_order_line_id: uuid.UUID | None = None + if _infer_expected_context_kind(workflow_context.ordered_nodes) == "order_line": + try: + parsed_context_id = uuid.UUID(context_id) + except ValueError as exc: + raise HTTPException(status_code=422, detail="Context ID must be a valid UUID.") from exc + + order_line_result = await db.execute( + select(OrderLine.id).where(OrderLine.id == parsed_context_id) + ) + resolved_order_line_id = order_line_result.scalar_one_or_none() + if resolved_order_line_id is None: + raise HTTPException(status_code=404, detail="Order line context not found") + + order_result = await db.execute( + select(Order) + .join(OrderLine, OrderLine.order_id == Order.id) + .where(OrderLine.id == resolved_order_line_id) + ) + order = order_result.scalar_one_or_none() + if order is not None and order.status in (OrderStatus.submitted, OrderStatus.completed): + now = datetime.utcnow() + order.status = OrderStatus.processing + order.processing_started_at = now + order.completed_at = None + order.updated_at = now + + run_id = await db.run_sync( + lambda sync_session: create_workflow_run( + sync_session, + workflow_def_id=workflow_id, + order_line_id=resolved_order_line_id, + workflow_context=workflow_context, + ).id + ) + await db.commit() + + try: + dispatch_result = await db.run_sync( + lambda sync_session: execute_graph_workflow( + sync_session, + workflow_context, + dispatch_tasks=False, + ) + ) + except Exception as exc: + failed_result = await db.execute( + select(WorkflowRun) + .where(WorkflowRun.id == run_id) + .options(selectinload(WorkflowRun.node_results)) + ) + failed_run = failed_result.scalar_one() + mark_workflow_run_failed(failed_run, str(exc)) + await db.commit() + raise + await db.commit() + try: + submit_prepared_workflow_tasks(dispatch_result) + except Exception as exc: + failed_result = await db.execute( + select(WorkflowRun) + .where(WorkflowRun.id == run_id) + 
.options(selectinload(WorkflowRun.node_results)) + ) + failed_run = failed_result.scalar_one() + mark_workflow_run_failed(failed_run, str(exc)) + await db.commit() + raise + + refreshed_result = await db.execute( + select(WorkflowRun) + .where(WorkflowRun.id == run_id) + .options(selectinload(WorkflowRun.node_results)) + ) + refreshed_run = refreshed_result.scalar_one() + + return WorkflowDispatchResponse( + workflow_run=refreshed_run, + context_id=context_id, + execution_mode=workflow_context.execution_mode, + dispatched=len(dispatch_result.task_ids), + task_ids=dispatch_result.task_ids, + ) + + +@router.post("/dispatch", response_model=WorkflowDispatchResponse) +async def dispatch_workflow_draft( + request: WorkflowDraftDispatchRequest, + _user: User = Depends(require_pm_or_above), + db: AsyncSession = Depends(get_db), +): + workflow_id = request.workflow_id + if workflow_id is not None: + result = await db.execute( + select(WorkflowDefinition.id).where(WorkflowDefinition.id == workflow_id) + ) + if result.scalar_one_or_none() is None: + raise HTTPException(status_code=404, detail="Workflow definition not found") + + return await _dispatch_workflow_for_config( + db, + workflow_id=workflow_id, + workflow_config=request.config, + context_id=request.context_id, + ) + + @router.post("/{workflow_id}/dispatch", response_model=WorkflowDispatchResponse) async def dispatch_workflow_endpoint( workflow_id: uuid.UUID, @@ -241,14 +1094,6 @@ async def dispatch_workflow_endpoint( in topological (dependency) order. Returns the list of Celery task IDs so the caller can track progress. 
""" - from pydantic import ValidationError as _ValidationError - from app.domains.rendering.workflow_executor import prepare_workflow_context - from app.domains.rendering.workflow_graph_runtime import execute_graph_workflow - from app.domains.rendering.workflow_run_service import ( - create_workflow_run, - mark_workflow_run_failed, - ) - result = await db.execute( select(WorkflowDefinition).where(WorkflowDefinition.id == workflow_id) ) @@ -258,54 +1103,9 @@ async def dispatch_workflow_endpoint( if not wf.config: raise HTTPException(status_code=400, detail="Workflow has no config") - try: - workflow_context = prepare_workflow_context( - wf.config, - context_id=context_id, - execution_mode="graph", - ) - except _ValidationError as exc: - raise HTTPException(status_code=422, detail=f"Invalid workflow config: {exc.errors()}") - except ValueError as exc: - raise HTTPException(status_code=422, detail=str(exc)) - - run_id = await db.run_sync( - lambda sync_session: create_workflow_run( - sync_session, - workflow_def_id=wf.id, - order_line_id=None, - workflow_context=workflow_context, - ).id - ) - await db.commit() - - try: - dispatch_result = await db.run_sync( - lambda sync_session: execute_graph_workflow(sync_session, workflow_context) - ) - except Exception as exc: - failed_result = await db.execute( - select(WorkflowRun) - .where(WorkflowRun.id == run_id) - .options(selectinload(WorkflowRun.node_results)) - ) - failed_run = failed_result.scalar_one() - mark_workflow_run_failed(failed_run, str(exc)) - await db.commit() - raise - await db.commit() - - refreshed_result = await db.execute( - select(WorkflowRun) - .where(WorkflowRun.id == run_id) - .options(selectinload(WorkflowRun.node_results)) - ) - refreshed_run = refreshed_result.scalar_one() - - return WorkflowDispatchResponse( - workflow_run=refreshed_run, + return await _dispatch_workflow_for_config( + db, + workflow_id=wf.id, + workflow_config=wf.config, context_id=context_id, - 
execution_mode=workflow_context.execution_mode, - dispatched=len(dispatch_result.task_ids), - task_ids=dispatch_result.task_ids, ) diff --git a/backend/app/domains/rendering/workflow_runtime_services.py b/backend/app/domains/rendering/workflow_runtime_services.py index 89c9c53..9b49892 100644 --- a/backend/app/domains/rendering/workflow_runtime_services.py +++ b/backend/app/domains/rendering/workflow_runtime_services.py @@ -5,7 +5,7 @@ import re import shutil import uuid from dataclasses import dataclass, field -from datetime import datetime +from datetime import datetime, timezone from pathlib import Path from typing import Any, Callable, Literal @@ -13,7 +13,11 @@ from sqlalchemy import select, update as sql_update from sqlalchemy.orm import Session, joinedload from app.config import settings as app_settings -from app.core.render_paths import resolve_result_path, result_path_to_storage_key +from app.core.render_paths import ( + ensure_group_writable_dir, + resolve_result_path, + result_path_to_storage_key, +) from app.domains.media.models import MediaAsset, MediaAssetType from app.domains.orders.models import Order, OrderLine, OrderStatus from app.domains.products.models import CadFile, Product @@ -37,6 +41,199 @@ logger = logging.getLogger(__name__) EmitFn = Callable[..., None] | None SetupStatus = Literal["ready", "skip", "failed", "missing"] QueueThumbnailFn = Callable[[str, dict[str, str]], None] | None +TEMPLATE_INPUT_PARAM_PREFIX = "template_input__" +_PNG_SIGNATURE = b"\x89PNG\r\n\x1a\n" +_VOLATILE_PNG_CHUNK_TYPES = {b"tEXt", b"zTXt", b"iTXt", b"tIME"} + + +def _slugify_material_lookup_key(value: str) -> str: + return re.sub(r"[^a-z0-9]+", "_", value).strip("_") + + +def _build_authoritative_material_lookup(materials_source: list[dict[str, Any]]) -> dict[str, str]: + lookup: dict[str, str] = {} + for material in materials_source: + raw_part_name = material.get("part_name") + raw_material_name = material.get("material") + if not raw_part_name or not 
raw_material_name: + continue + + part_name = str(raw_part_name).lower().strip() + material_name = str(raw_material_name) + if not part_name: + continue + + lookup.setdefault(part_name, material_name) + + slug_key = _slugify_material_lookup_key(part_name) + if slug_key: + lookup.setdefault(slug_key, material_name) + + stripped = re.sub(r"(_af\d+(_\d+)?)+$", "", part_name, flags=re.IGNORECASE) + if stripped != part_name: + lookup.setdefault(stripped, material_name) + slug_stripped = _slugify_material_lookup_key(stripped) + if slug_stripped: + lookup.setdefault(slug_stripped, material_name) + return lookup + + +def _common_prefix_length(left: str, right: str) -> int: + limit = min(len(left), len(right)) + idx = 0 + while idx < limit and left[idx] == right[idx]: + idx += 1 + return idx + + +def _lookup_material_by_prefix(query: str, material_lookup: dict[str, str]) -> str | None: + if not query or not material_lookup: + return None + + contenders: list[tuple[int, str]] = [] + for key, material_name in material_lookup.items(): + if len(key) >= 5 and len(query) >= 5 and (query.startswith(key) or key.startswith(query)): + contenders.append((len(key), material_name)) + + if not contenders: + return None + + contenders.sort(reverse=True) + top_length = contenders[0][0] + close_materials = { + material_name + for key_length, material_name in contenders + if key_length >= top_length - 2 + } + return contenders[0][1] if len(close_materials) == 1 else None + + +def _lookup_material_by_common_prefix(query: str, material_lookup: dict[str, str]) -> str | None: + if not query or not material_lookup: + return None + + scored: list[tuple[float, int, int, str]] = [] + for key, material_name in material_lookup.items(): + prefix_length = _common_prefix_length(query, key) + if prefix_length < 12: + continue + ratio = prefix_length / max(len(query), len(key)) + if ratio < 0.68: + continue + scored.append((ratio, prefix_length, len(key), material_name)) + + if not scored: + return None + 
+ scored.sort(reverse=True) + top_ratio, top_prefix_length, _, top_material_name = scored[0] + close_materials = { + material_name + for ratio, prefix_length, _, material_name in scored + if ratio >= top_ratio - 0.02 and prefix_length >= top_prefix_length - 2 + } + return top_material_name if len(close_materials) == 1 else None + + +def _resolve_authoritative_material_name( + raw_name: str | None, + material_lookup: dict[str, str], + *fallback_names: str | None, +) -> str | None: + candidates = [raw_name, *fallback_names] + seen: set[str] = set() + + for candidate in candidates: + if not candidate: + continue + + normalized = str(candidate).lower().strip() + variants = [normalized] + + stripped = re.sub(r"(_af\d+(_\d+)?)+$", "", normalized, flags=re.IGNORECASE) + if stripped != normalized: + variants.append(stripped) + + no_instance = re.sub(r"_\d+$", "", stripped) + if no_instance and no_instance not in variants: + variants.append(no_instance) + + for variant in list(variants): + slug_variant = _slugify_material_lookup_key(variant) + if slug_variant and slug_variant not in variants: + variants.append(slug_variant) + + deduped_variants = [variant for variant in variants if variant and not (variant in seen or seen.add(variant))] + + for variant in deduped_variants: + material_name = material_lookup.get(variant) + if material_name: + return material_name + + for variant in deduped_variants: + material_name = _lookup_material_by_prefix(variant, material_lookup) + if material_name: + return material_name + + for variant in deduped_variants: + material_name = _lookup_material_by_common_prefix(variant, material_lookup) + if material_name: + return material_name + + return None + + +def _utcnow_naive() -> datetime: + """Return UTC as a naive datetime for legacy TIMESTAMP WITHOUT TIME ZONE columns.""" + return datetime.now(timezone.utc).replace(tzinfo=None) + + +def extract_template_input_overrides(params: dict[str, Any] | None) -> dict[str, Any]: + if not params: + return 
{} + + overrides: dict[str, Any] = {} + for key, value in params.items(): + if not isinstance(key, str) or not key.startswith(TEMPLATE_INPUT_PARAM_PREFIX): + continue + input_key = key[len(TEMPLATE_INPUT_PARAM_PREFIX):].strip() + if input_key: + overrides[input_key] = value + return overrides + + +def _normalize_template_input_schema(template: RenderTemplate | None) -> list[dict[str, Any]]: + raw_schema = getattr(template, "workflow_input_schema", None) if template is not None else None + if not isinstance(raw_schema, list): + return [] + + normalized: list[dict[str, Any]] = [] + for raw_field in raw_schema: + if not isinstance(raw_field, dict): + continue + key = str(raw_field.get("key") or "").strip() + if not key: + continue + normalized.append(dict(raw_field)) + return normalized + + +def _resolve_template_input_values( + schema: list[dict[str, Any]], + overrides: dict[str, Any] | None, +) -> dict[str, Any]: + raw_overrides = overrides or {} + resolved: dict[str, Any] = {} + for field in schema: + key = str(field.get("key") or "").strip() + if not key: + continue + if key in raw_overrides: + resolved[key] = raw_overrides[key] + continue + if "default" in field: + resolved[key] = field.get("default") + return resolved @dataclass(slots=True) @@ -75,8 +272,14 @@ class TemplateResolutionResult: material_map: dict[str, str] | None use_materials: bool override_material: str | None + target_collection: str + lighting_only: bool + shadow_catcher: bool + camera_orbit: bool category_key: str | None output_type_id: str | None + workflow_input_schema: list[dict[str, Any]] = field(default_factory=list) + template_inputs: dict[str, Any] = field(default_factory=dict) @dataclass(slots=True) @@ -159,6 +362,7 @@ class OrderLineRenderInvocation: sensor_width_mm: float | None = None usd_path: str | None = None material_override: str | None = None + template_inputs: dict[str, Any] = field(default_factory=dict) def task_defaults(self) -> dict[str, Any]: payload: dict[str, Any] = { 
@@ -196,9 +400,10 @@ class OrderLineRenderInvocation: "sensor_width_mm": self.sensor_width_mm, "usd_path": self.usd_path, "material_override": self.material_override, + "template_inputs": self.template_inputs, } for key, value in optional_values.items(): - if value not in (None, ""): + if value not in (None, "", {}, [], ()): payload[key] = value return payload @@ -242,6 +447,7 @@ class OrderLineRenderInvocation: "focal_length_mm": self.focal_length_mm, "sensor_width_mm": self.sensor_width_mm, "material_override": self.material_override, + "template_inputs": self.template_inputs, } def as_turntable_renderer_kwargs( @@ -285,6 +491,7 @@ class OrderLineRenderInvocation: "focal_length_mm": self.focal_length_mm, "sensor_width_mm": self.sensor_width_mm, "material_override": self.material_override, + "template_inputs": self.template_inputs, } def as_cinematic_renderer_kwargs( @@ -324,6 +531,7 @@ class OrderLineRenderInvocation: "focal_length_mm": self.focal_length_mm, "sensor_width_mm": self.sensor_width_mm, "material_override": self.material_override, + "template_inputs": self.template_inputs, "log_callback": log_callback, } @@ -341,7 +549,61 @@ def _resolve_asset_path(storage_key: str | None) -> Path | None: return resolve_result_path(storage_key) -def _usd_master_refresh_reason(cad_file: CadFile) -> str | None: +def _usd_master_file_refresh_reason(usd_render_path: Path | None) -> str | None: + if usd_render_path is None: + return "missing USD master file" + if not usd_render_path.exists(): + return "missing USD master file" + + try: + usd_bytes = usd_render_path.read_bytes() + except OSError: + logger.exception("render_order_line: failed to inspect usd_master %s", usd_render_path) + return "unreadable USD master file" + + usd_bytes_lower = usd_bytes.lower() + if b"schaeffler:" in usd_bytes_lower: + return "legacy Schaeffler USD primvars" + if b"hartomat:" in usd_bytes_lower: + return None + + # Binary USD (`PXR-USDC`) stores HartOMat customData in a form that is not + # 
reliably discoverable via a raw byte grep. For those files we rely on the + # cache fingerprint plus the upstream resolved material metadata checks. + if usd_bytes.startswith(b"PXR-USDC") or b"\x00" in usd_bytes[:256]: + return None + + # Textual USD payloads without any HartOMat markers are legacy/stale in the + # current pipeline and should be refreshed before they are reused. + try: + usd_bytes.decode("utf-8") + except UnicodeDecodeError: + return None + return "missing HartOMat USD markers" + + +def _usd_master_cache_refresh_reason(usd_asset: MediaAsset | None) -> str | None: + if usd_asset is None: + return None + + render_config = usd_asset.render_config if isinstance(usd_asset.render_config, dict) else {} + cache_key = render_config.get("cache_key") + if not isinstance(cache_key, str) or not cache_key.strip(): + return "missing USD cache fingerprint" + + # New-format keys append the render-script fingerprint as a sixth colon-delimited segment. + if len(cache_key.split(":")) < 6: + return "legacy USD cache fingerprint" + + return None + + +def _usd_master_refresh_reason( + cad_file: CadFile, + *, + usd_asset: MediaAsset | None = None, + usd_render_path: Path | None = None, +) -> str | None: resolved = cad_file.resolved_material_assignments if not isinstance(resolved, dict) or not resolved: return "missing resolved material assignments" @@ -350,7 +612,7 @@ def _usd_master_refresh_reason(cad_file: CadFile) -> str | None: for meta in resolved.values(): if not isinstance(meta, dict): continue - canonical = meta.get("canonical_material") + canonical = meta.get("canonical_material") or meta.get("material") if isinstance(canonical, str) and canonical.strip(): canonical_materials.append(canonical.strip()) @@ -360,6 +622,14 @@ def _usd_master_refresh_reason(cad_file: CadFile) -> str | None: if any(material.upper().startswith("SCHAEFFLER_") for material in canonical_materials): return "legacy Schaeffler material metadata" + cache_reason = 
_usd_master_cache_refresh_reason(usd_asset) + if cache_reason is not None: + return cache_reason + + file_reason = _usd_master_file_refresh_reason(usd_render_path) + if file_reason is not None: + return file_reason + return None @@ -502,6 +772,27 @@ def _coerce_bool(value: Any) -> bool: return bool(value) +def _resolve_tristate_mode( + value: Any, + *, + field_name: str, + fallback: bool | None = None, +) -> bool | None: + if value in (None, "", "inherit"): + return fallback + if isinstance(value, bool): + return value + if isinstance(value, str): + normalized = value.strip().lower() + if normalized in {"enabled", "true", "1", "yes", "on"}: + return True + if normalized in {"disabled", "false", "0", "no", "off"}: + return False + raise ValueError( + f"{field_name} must be one of: inherit, enabled, disabled" + ) + + def _resolve_render_output_extension(line: OrderLine) -> str: output_type = line.output_type output_extension = "jpg" @@ -582,7 +873,7 @@ def build_order_line_render_invocation( denoising_quality = str(render_settings.get("denoising_quality", "")) denoising_use_gpu = str(render_settings.get("denoising_use_gpu", "")) transparent_bg = bool(output_type and output_type.transparent_bg) - cycles_device = (output_type.cycles_device or "auto") if output_type is not None else "auto" + cycles_device = (output_type.cycles_device or "gpu") if output_type is not None else "gpu" render_overrides = getattr(line, "render_overrides", None) if isinstance(render_overrides, dict): @@ -682,22 +973,14 @@ def build_order_line_render_invocation( part_colors=dict(setup.part_colors or {}), part_names_ordered=part_names_ordered, template_path=template_context.template.blend_file_path if template_context and template_context.template else None, - target_collection=( - template_context.template.target_collection - if template_context and template_context.template and template_context.template.target_collection - else "Product" - ), + 
target_collection=template_context.target_collection if template_context else "Product", material_library_path=( template_context.material_library if template_context and use_materials else None ), material_map=material_map, - lighting_only=bool(template_context.template.lighting_only) if template_context and template_context.template else False, - shadow_catcher=( - bool(template_context.template.shadow_catcher_enabled) - if template_context and template_context.template - else False - ), - camera_orbit=bool(template_context.template.camera_orbit) if template_context and template_context.template else True, + lighting_only=template_context.lighting_only if template_context else False, + shadow_catcher=template_context.shadow_catcher if template_context else False, + camera_orbit=template_context.camera_orbit if template_context else True, rotation_x=position.rotation_x, rotation_y=position.rotation_y, rotation_z=position.rotation_z, @@ -705,6 +988,7 @@ def build_order_line_render_invocation( sensor_width_mm=position.sensor_width_mm, usd_path=str(setup.usd_render_path) if setup.usd_render_path is not None else None, material_override=material_override, + template_inputs=dict(template_context.template_inputs) if template_context is not None else {}, ) @@ -727,10 +1011,49 @@ def _canonical_public_output_path(line: OrderLine, output_path: str) -> str: return str(upload_root / "renders" / str(line.id) / filename) +def _strip_volatile_png_metadata(output_path: Path) -> None: + if output_path.suffix.lower() != ".png" or not output_path.is_file(): + return + + raw_bytes = output_path.read_bytes() + if not raw_bytes.startswith(_PNG_SIGNATURE): + return + + cursor = len(_PNG_SIGNATURE) + kept_chunks: list[bytes] = [] + changed = False + + while cursor + 12 <= len(raw_bytes): + chunk_length = int.from_bytes(raw_bytes[cursor : cursor + 4], "big") + chunk_end = cursor + 12 + chunk_length + if chunk_end > len(raw_bytes): + return + + chunk_type = raw_bytes[cursor + 4 : cursor + 
8] + chunk_bytes = raw_bytes[cursor:chunk_end] + if chunk_type in _VOLATILE_PNG_CHUNK_TYPES: + changed = True + else: + kept_chunks.append(chunk_bytes) + + cursor = chunk_end + if chunk_type == b"IEND": + break + + if not changed: + return + + output_path.write_bytes(_PNG_SIGNATURE + b"".join(kept_chunks)) + + +def _normalize_output_artifact(output_path: str) -> None: + _strip_volatile_png_metadata(Path(output_path)) + + def _materialize_public_output(line: OrderLine, output_path: str) -> str: canonical_path = Path(_canonical_public_output_path(line, output_path)) source_path = Path(output_path) - canonical_path.parent.mkdir(parents=True, exist_ok=True) + ensure_group_writable_dir(canonical_path.parent) if source_path != canonical_path: shutil.copy2(source_path, canonical_path) return str(canonical_path) @@ -765,6 +1088,7 @@ def persist_order_line_media_asset( resolved_workflow_run_id = _resolve_existing_workflow_run_id(session, workflow_run_id) if success: + _normalize_output_artifact(output_path) storage_key = _normalize_storage_key(output_path) output_file = Path(output_path) existing_asset = session.execute( @@ -906,13 +1230,14 @@ def persist_order_line_output( ) -> OutputSaveResult: """Persist the render result for an order line and publish the media asset if needed.""" status: Literal["completed", "failed"] = "completed" if success else "failed" - completed_at = render_completed_at or datetime.utcnow() + completed_at = render_completed_at or _utcnow_naive() persisted_output_path = output_path line.render_status = status line.render_completed_at = completed_at line.render_log = render_log if success: + _normalize_output_artifact(output_path) persisted_output_path = _materialize_public_output(line, output_path) line.result_path = persisted_output_path if success else None session.flush() @@ -1084,7 +1409,7 @@ def prepare_order_line_render_context( reason="missing_cad_file", ) - render_start = datetime.utcnow() if persist_state else None + render_start = 
_utcnow_naive() if persist_state else None if persist_state: session.execute( sql_update(OrderLine) @@ -1111,7 +1436,12 @@ def prepare_order_line_render_context( .limit(1) ).scalar_one_or_none() if usd_asset: - refresh_reason = _usd_master_refresh_reason(cad_file) + usd_candidate_path = _resolve_asset_path(usd_asset.storage_key) + refresh_reason = _usd_master_refresh_reason( + cad_file, + usd_asset=usd_asset, + usd_render_path=usd_candidate_path, + ) if refresh_reason is not None: logger.warning( "render_order_line: ignoring stale usd_master for cad %s (%s)", @@ -1127,7 +1457,7 @@ def prepare_order_line_render_context( if _queue_usd_master_refresh(str(cad_file.id)): _emit(emit, order_line_id, "Queued USD master regeneration in background") else: - usd_render_path = _resolve_asset_path(usd_asset.storage_key) + usd_render_path = usd_candidate_path if usd_render_path: logger.info( "render_order_line: using usd_master %s for cad %s", @@ -1203,6 +1533,12 @@ def resolve_order_line_template_context( material_library_path_override: str | None = None, require_template: bool = False, disable_materials: bool = False, + target_collection_override: str | None = None, + material_replace_mode: str | None = None, + lighting_only_mode: str | None = None, + shadow_catcher_mode: str | None = None, + camera_orbit_mode: str | None = None, + template_input_overrides: dict[str, Any] | None = None, ) -> TemplateResolutionResult: """Resolve render template, material library, and material map for a prepared order line.""" if not setup.is_ready: @@ -1242,6 +1578,7 @@ def resolve_order_line_template_context( if isinstance(material_library_path_override, str) and material_library_path_override.strip() else get_material_library_path_for_session(session) ) + material_replace_override = _resolve_tristate_mode(material_replace_mode, field_name="material_replace_mode") material_resolution = resolve_order_line_material_map( line, cad_file, @@ -1250,8 +1587,36 @@ def 
resolve_order_line_template_context( template=template, emit=emit, disable_materials=disable_materials, + material_replace_enabled_override=material_replace_override, ) + resolved_target_collection = ( + target_collection_override.strip() + if isinstance(target_collection_override, str) and target_collection_override.strip() + else ( + template.target_collection + if template is not None and template.target_collection + else "Product" + ) + ) + resolved_lighting_only = _resolve_tristate_mode( + lighting_only_mode, + field_name="lighting_only_mode", + fallback=bool(template.lighting_only) if template is not None else False, + ) + resolved_shadow_catcher = _resolve_tristate_mode( + shadow_catcher_mode, + field_name="shadow_catcher_mode", + fallback=bool(template.shadow_catcher_enabled) if template is not None else False, + ) + resolved_camera_orbit = _resolve_tristate_mode( + camera_orbit_mode, + field_name="camera_orbit_mode", + fallback=bool(template.camera_orbit) if template is not None else True, + ) + workflow_input_schema = _normalize_template_input_schema(template) + template_inputs = _resolve_template_input_values(workflow_input_schema, template_input_overrides) + if template: _emit( emit, @@ -1267,6 +1632,8 @@ def resolve_order_line_template_context( template.blend_file_path, template.lighting_only, ) + if template_inputs: + logger.info("Render template inputs resolved for '%s': %s", template.name, sorted(template_inputs)) if not template: _emit(emit, str(line.id), "No render template found — using factory settings (Mode A)") logger.info( @@ -1281,8 +1648,14 @@ def resolve_order_line_template_context( material_map=material_resolution.material_map, use_materials=material_resolution.use_materials, override_material=material_resolution.override_material, + target_collection=resolved_target_collection, + lighting_only=resolved_lighting_only, + shadow_catcher=resolved_shadow_catcher, + camera_orbit=resolved_camera_orbit, category_key=category_key, 
output_type_id=output_type_id, + workflow_input_schema=workflow_input_schema, + template_inputs=template_inputs, ) @@ -1296,6 +1669,7 @@ def resolve_order_line_material_map( emit: EmitFn = None, material_override: str | None = None, disable_materials: bool = False, + material_replace_enabled_override: bool | None = None, ) -> MaterialResolutionResult: """Resolve the effective order-line material map with legacy precedence rules.""" if disable_materials: @@ -1311,11 +1685,15 @@ def resolve_order_line_material_map( raw_material_count = 0 raw_material_map = _build_effective_material_lookup(cad_file, materials_source) use_materials = bool(material_library and raw_material_map) - if template and not template.material_replace_enabled: + if material_replace_enabled_override is not None: + use_materials = bool(material_replace_enabled_override and material_library and raw_material_map) + elif template and not template.material_replace_enabled: use_materials = False if use_materials: raw_material_count = len(raw_material_map) material_map = resolve_material_map(raw_material_map) + if cad_file: + material_map = _overlay_scene_manifest_material_map(cad_file, material_map) line_override = getattr(line, "material_override", None) output_override = line.output_type.material_override if line.output_type else None @@ -1344,21 +1722,55 @@ def resolve_order_line_material_map( ) +def _overlay_scene_manifest_material_map( + cad_file: CadFile, + material_map: dict[str, str], +) -> dict[str, str]: + """Overlay authoritative scene-manifest materials onto a resolved material map. + + Low-level lookups still retain legacy/product source assignments so older + fallback paths keep working. The final order-line material map, however, + must prefer the scene manifest's effective assignments wherever the USD/CAD + pipeline has already established authoritative part identity. 
+ """ + if not material_map: + return material_map + + merged = dict(material_map) + manifest = build_scene_manifest(cad_file) + for part in manifest.get("parts", []): + if not isinstance(part, dict): + continue + effective_material = part.get("effective_material") + if not isinstance(effective_material, str) or not effective_material.strip(): + continue + + source_name = part.get("source_name") + part_key = part.get("part_key") + if isinstance(source_name, str) and source_name.strip(): + merged[source_name] = effective_material + if isinstance(part_key, str) and part_key.strip(): + merged[part_key] = effective_material + return merged + + def _build_effective_material_lookup( cad_file: CadFile | None, materials_source: list[dict[str, Any]], ) -> dict[str, str]: """Build a renderer-compatible material lookup from all available layers. - Authoritative scene-manifest assignments win when present, but we emit both - source-name and part-key keys so USD and GLB/STEP fallback paths resolve the - same effective material map. + Product/Excel CAD assignments stay authoritative for overlapping source-name + keys so legacy renders, thumbnails, and viewer previews keep parity with the + pre-USD pipeline. Scene-manifest assignments still fill gaps and emit part-key + aliases so USD and GLB/STEP fallback paths resolve the same effective map. 
""" raw_material_map: dict[str, str] = { str(material["part_name"]): str(material["material"]) for material in materials_source if material.get("part_name") and material.get("material") } + authoritative_lookup = _build_authoritative_material_lookup(materials_source) if not cad_file: return raw_material_map @@ -1372,10 +1784,16 @@ def _build_effective_material_lookup( continue source_name = part.get("source_name") part_key = part.get("part_key") - if source_name: - raw_material_map[str(source_name)] = str(effective_material) + authoritative_material = _resolve_authoritative_material_name( + str(source_name) if source_name else None, + authoritative_lookup, + str(part_key) if part_key else None, + ) + merged_material = authoritative_material or str(effective_material) + if source_name and str(source_name) not in raw_material_map: + raw_material_map[str(source_name)] = merged_material if part_key: - raw_material_map[str(part_key)] = str(effective_material) + raw_material_map.setdefault(str(part_key), merged_material) return raw_material_map diff --git a/backend/app/domains/rendering/workflow_schema.py b/backend/app/domains/rendering/workflow_schema.py index d62ddb9..32e2b46 100644 --- a/backend/app/domains/rendering/workflow_schema.py +++ b/backend/app/domains/rendering/workflow_schema.py @@ -18,6 +18,7 @@ Example config:: """ from collections import deque from typing import Any, Literal +from uuid import UUID from pydantic import BaseModel, Field, field_validator, model_validator @@ -29,6 +30,14 @@ from app.domains.rendering.workflow_node_registry import ( ) +_WORKFLOW_META_PARAM_KEYS = {"retry_policy", "failure_policy"} +_TEMPLATE_INPUT_PARAM_PREFIX = "template_input__" +_HEX_COLOR_LENGTHS = {7, 9} +_SAFE_FILENAME_SUFFIX_CHARS = set( + "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789._-" +) + + def _context_seed_artifacts(definition: WorkflowNodeDefinition) -> set[str]: if definition.family == "order_line": return {"order_line_record"} @@ -37,10 
+46,43 @@ def _context_seed_artifacts(definition: WorkflowNodeDefinition) -> set[str]: return set() +def _infer_concrete_workflow_family( + definitions: list[WorkflowNodeDefinition], +) -> Literal["cad_file", "order_line", "mixed"] | None: + concrete_families = { + definition.family + for definition in definitions + if definition.family in {"cad_file", "order_line"} + } + if not concrete_families: + return None + if len(concrete_families) > 1: + return "mixed" + return next(iter(concrete_families)) + + def _coerce_node_label(node: "WorkflowNode") -> str: return f"{node.id!r} ({node.step.value})" +def _require_node_definition(node: "WorkflowNode") -> WorkflowNodeDefinition: + definition = get_node_definition(node.step) + if definition is None: + raise ValueError( + f"node {_coerce_node_label(node)} is not registered in workflow_node_registry" + ) + return definition + + +def _is_dynamic_template_input_param(node: "WorkflowNode", key: str) -> bool: + return ( + node.step == StepName.RESOLVE_TEMPLATE + and isinstance(key, str) + and key.startswith(_TEMPLATE_INPUT_PARAM_PREFIX) + and key[len(_TEMPLATE_INPUT_PARAM_PREFIX):].strip() != "" + ) + + def _validate_param_value( *, node: "WorkflowNode", @@ -72,6 +114,105 @@ def _validate_param_value( if value not in valid_values: allowed_values = ", ".join(repr(option) for option in sorted(valid_values, key=repr)) raise ValueError(f"{field_label} must be one of: {allowed_values}") + return + + if field_definition.type == "text": + if not isinstance(value, str): + raise ValueError(f"{field_label} must be a string") + + stripped_value = value.strip() + if stripped_value == "": + if field_definition.allow_blank: + return + raise ValueError(f"{field_label} may not be blank") + + if field_definition.max_length is not None and len(value) > field_definition.max_length: + raise ValueError( + f"{field_label} must be at most {field_definition.max_length} characters" + ) + + if field_definition.text_format == "plain": + return + if 
field_definition.text_format == "uuid": + try: + UUID(stripped_value) + except ValueError as exc: + raise ValueError(f"{field_label} must be a valid UUID") from exc + return + if field_definition.text_format == "absolute_path": + if not stripped_value.startswith("/"): + raise ValueError(f"{field_label} must be an absolute path") + return + if field_definition.text_format == "absolute_blend_path": + if not stripped_value.startswith("/"): + raise ValueError(f"{field_label} must be an absolute path") + if not stripped_value.lower().endswith(".blend"): + raise ValueError(f"{field_label} must point to a .blend file") + return + if field_definition.text_format == "absolute_glb_path": + if not stripped_value.startswith("/"): + raise ValueError(f"{field_label} must be an absolute path") + if not stripped_value.lower().endswith(".glb"): + raise ValueError(f"{field_label} must point to a .glb file") + return + if field_definition.text_format == "float_string": + try: + float(stripped_value) + except ValueError as exc: + raise ValueError(f"{field_label} must be a valid numeric string") from exc + return + if field_definition.text_format == "hex_color": + if len(stripped_value) not in _HEX_COLOR_LENGTHS or not stripped_value.startswith("#"): + raise ValueError(f"{field_label} must be a hex color like #FFFFFF or #FFFFFFFF") + color_digits = stripped_value[1:] + if any(character not in "0123456789abcdefABCDEF" for character in color_digits): + raise ValueError(f"{field_label} must be a hex color like #FFFFFF or #FFFFFFFF") + return + if field_definition.text_format == "safe_filename_suffix": + if any(character not in _SAFE_FILENAME_SUFFIX_CHARS for character in stripped_value): + raise ValueError( + f"{field_label} may only contain letters, numbers, '.', '-' or '_'" + ) + return + + raise ValueError( + f"{field_label} uses unsupported text format {field_definition.text_format!r}" + ) + + +def _validate_meta_param_value(*, node: "WorkflowNode", key: str, value: Any) -> None: + 
field_label = f"node {_coerce_node_label(node)} meta param {key!r}" + + if key == "retry_policy": + if not isinstance(value, dict): + raise ValueError(f"{field_label} must be an object") + unknown_keys = sorted(raw_key for raw_key in value if raw_key not in {"max_attempts"}) + if unknown_keys: + joined = ", ".join(repr(raw_key) for raw_key in unknown_keys) + raise ValueError(f"{field_label} uses unknown key(s): {joined}") + max_attempts = value.get("max_attempts", 1) + if isinstance(max_attempts, bool) or not isinstance(max_attempts, int): + raise ValueError(f"{field_label} field 'max_attempts' must be an integer") + if max_attempts < 1 or max_attempts > 5: + raise ValueError(f"{field_label} field 'max_attempts' must be between 1 and 5") + return + + if key == "failure_policy": + if not isinstance(value, dict): + raise ValueError(f"{field_label} must be an object") + allowed_keys = {"halt_workflow", "fallback_to_legacy"} + unknown_keys = sorted(raw_key for raw_key in value if raw_key not in allowed_keys) + if unknown_keys: + joined = ", ".join(repr(raw_key) for raw_key in unknown_keys) + raise ValueError(f"{field_label} uses unknown key(s): {joined}") + for bool_key in allowed_keys: + if bool_key not in value: + continue + if not isinstance(value[bool_key], bool): + raise ValueError(f"{field_label} field {bool_key!r} must be a boolean") + return + + raise ValueError(f"{field_label} is not supported") class WorkflowPosition(BaseModel): @@ -149,18 +290,25 @@ class WorkflowConfig(BaseModel): @model_validator(mode="after") def node_params_match_registry(self) -> "WorkflowConfig": for node in self.nodes: - definition = get_node_definition(node.step) - if definition is None: - continue + definition = _require_node_definition(node) field_definitions = {field.key: field for field in definition.fields} - allowed_keys = {field.key for field in definition.fields} - unknown_keys = sorted(key for key in node.params if key not in allowed_keys) + allowed_keys = {field.key for 
field in definition.fields} | _WORKFLOW_META_PARAM_KEYS + unknown_keys = sorted( + key + for key in node.params + if key not in allowed_keys and not _is_dynamic_template_input_param(node, key) + ) if unknown_keys: joined = ", ".join(repr(key) for key in unknown_keys) raise ValueError( f"node {node.id!r} ({node.step.value}) uses unknown param key(s): {joined}" ) for key, value in node.params.items(): + if _is_dynamic_template_input_param(node, key): + continue + if key in _WORKFLOW_META_PARAM_KEYS: + _validate_meta_param_value(node=node, key=key, value=value) + continue field_definition = field_definitions.get(key) if field_definition is None: continue @@ -173,20 +321,19 @@ class WorkflowConfig(BaseModel): @model_validator(mode="after") def ui_family_matches_node_families(self) -> "WorkflowConfig": - families = { - definition.family - for node in self.nodes - if (definition := get_node_definition(node.step)) is not None - } + definitions = [_require_node_definition(node) for node in self.nodes] + families = {definition.family for definition in definitions} + inferred_family = _infer_concrete_workflow_family(definitions) if not families: return self - inferred_family = "mixed" if len(families) > 1 else next(iter(families)) execution_mode = self.ui.execution_mode if self.ui is not None else "legacy" if execution_mode in {"graph", "shadow"} and inferred_family == "mixed": raise ValueError( "workflow ui.execution_mode must stay single-family for graph/shadow execution" ) + if inferred_family is None: + return self if self.ui is None or self.ui.family is None: return self if self.ui.family != inferred_family: @@ -220,9 +367,7 @@ class WorkflowConfig(BaseModel): node_id = queue.popleft() processed += 1 node = node_by_id[node_id] - definition = get_node_definition(node.step) - if definition is None: - continue + definition = _require_node_definition(node) node_inputs = available_artifacts[node_id] | _context_seed_artifacts(definition) required = 
set(definition.input_contract.get("requires", [])) diff --git a/backend/app/main.py b/backend/app/main.py index f9623d6..88c313c 100644 --- a/backend/app/main.py +++ b/backend/app/main.py @@ -6,6 +6,7 @@ from fastapi.staticfiles import StaticFiles from pathlib import Path from app.config import settings +from app.core.render_paths import ensure_group_writable_dir from app.database import engine, Base from app.core.websocket import manager as ws_manager from app.core.middleware import TenantContextMiddleware @@ -33,7 +34,7 @@ from app.api.routers.chat import router as chat_router async def lifespan(app: FastAPI): # Create upload directories for subdir in ("step_files", "excel_files", "thumbnails", "renders", "blend-templates"): - Path(settings.upload_dir, subdir).mkdir(parents=True, exist_ok=True) + ensure_group_writable_dir(Path(settings.upload_dir, subdir)) # Start WebSocket Redis subscriber await ws_manager.start_redis_subscriber() yield @@ -59,7 +60,7 @@ app.add_middleware(TenantContextMiddleware) # Mount static files for thumbnails (dir created in lifespan; skip if not writable) thumbnails_dir = Path(settings.upload_dir) / "thumbnails" try: - thumbnails_dir.mkdir(parents=True, exist_ok=True) + ensure_group_writable_dir(thumbnails_dir) app.mount("/thumbnails", StaticFiles(directory=str(thumbnails_dir)), name="thumbnails") except (PermissionError, OSError): pass # Running outside Docker without upload dir — thumbnails won't be served statically @@ -67,7 +68,7 @@ except (PermissionError, OSError): # Mount static files for renders renders_dir = Path(settings.upload_dir) / "renders" try: - renders_dir.mkdir(parents=True, exist_ok=True) + ensure_group_writable_dir(renders_dir) app.mount("/renders", StaticFiles(directory=str(renders_dir)), name="renders") except (PermissionError, OSError): pass diff --git a/backend/app/models/output_type.py b/backend/app/models/output_type.py index c8c26f0..88bf945 100644 --- a/backend/app/models/output_type.py +++ 
b/backend/app/models/output_type.py @@ -2,6 +2,7 @@ from app.domains.rendering.models import ( OUTPUT_TYPE_ARTIFACT_KINDS, OUTPUT_TYPE_WORKFLOW_FAMILIES, + OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES, OutputType, VALID_RENDER_BACKENDS, ) @@ -9,5 +10,6 @@ __all__ = [ "OutputType", "VALID_RENDER_BACKENDS", "OUTPUT_TYPE_WORKFLOW_FAMILIES", + "OUTPUT_TYPE_WORKFLOW_ROLLOUT_MODES", "OUTPUT_TYPE_ARTIFACT_KINDS", ] diff --git a/backend/app/services/chat_service.py b/backend/app/services/chat_service.py index e543653..6e81e99 100644 --- a/backend/app/services/chat_service.py +++ b/backend/app/services/chat_service.py @@ -13,6 +13,7 @@ from sqlalchemy import text from sqlalchemy.ext.asyncio import AsyncSession from app.config import settings +from app.core.render_paths import result_path_to_public_url logger = logging.getLogger(__name__) @@ -774,12 +775,7 @@ async def _tool_find_product_renders( renders = [] for r in rows: path = r["result_path"] or "" - # Convert internal path to servable URL - url = None - if "/renders/" in path: - url = path[path.index("/renders/"):] - elif "/thumbnails/" in path: - url = path[path.index("/thumbnails/"):] + url = result_path_to_public_url(path, require_exists=True) # Effective material override (line overrides output type) material = r["line_material_override"] or r["ot_material_override"] or None diff --git a/backend/app/services/part_key_service.py b/backend/app/services/part_key_service.py index 3cf7a78..de76282 100644 --- a/backend/app/services/part_key_service.py +++ b/backend/app/services/part_key_service.py @@ -20,6 +20,9 @@ import re # ── Part key generation ─────────────────────────────────────────────────────── _AF_RE = re.compile(r'_AF\d+$', re.IGNORECASE) +_AF_VARIANT_RE = re.compile(r"_AF\d+(_ASM)?_?$", re.IGNORECASE) +_LEGACY_MATERIAL_PREFIX = "SCHAEFFLER_" +_CURRENT_MATERIAL_PREFIX = "HARTOMAT_" def generate_part_key( @@ -53,6 +56,95 @@ def generate_part_key( return key +def normalize_material_name(material_name: str | None) -> str 
| None: + """Normalize persisted legacy material names to the current HartOMat prefix.""" + if not isinstance(material_name, str): + return None + + value = material_name.strip() + if not value: + return None + + if value.upper().startswith(_LEGACY_MATERIAL_PREFIX): + return f"{_CURRENT_MATERIAL_PREFIX}{value[len(_LEGACY_MATERIAL_PREFIX):]}" + return value + + +def _normalize_semantic_source_name(raw_name: str) -> str: + """Collapse exporter-only suffixes back to their semantic OCC source name.""" + name = (raw_name or "").strip() + name = re.sub(r"\.\d{3}$", "", name) + + previous = None + while previous != name: + previous = name + name = _AF_VARIANT_RE.sub("", name) + return name + + +def _slugify_semantic_source_name(raw_name: str) -> str: + base = _normalize_semantic_source_name(raw_name) + base = re.sub(r"([a-z])([A-Z])", r"\1_\2", base) + return re.sub(r"[^a-z0-9]+", "_", base.lower()).strip("_")[:50] + + +def _derive_semantic_alias_key(part_key: str, source_name: str) -> str | None: + """Return the semantic alias for deduplicated instance keys, if any.""" + alias_key = _slugify_semantic_source_name(source_name) + if not alias_key or alias_key == part_key: + return None + if re.fullmatch( + rf"{re.escape(alias_key)}(?:_[2-9]\d*|_af\d+(?:_asm)?)", + part_key, + flags=re.IGNORECASE, + ) is None: + return None + return alias_key + + +def _alias_priority(part_key: str, source_name: str) -> tuple[int, int, int]: + match = re.fullmatch(r".+_(\d+)$", part_key) + suffix_number = int(match.group(1)) if match else 1_000_000 + return (suffix_number, len(source_name or ""), len(part_key)) + + +def _iter_lookup_keys(part_key: str, fallback_part_keys: tuple[str, ...] 
= ()) -> tuple[str, ...]: + ordered_keys: list[str] = [] + for key in (part_key, *fallback_part_keys): + if key and key not in ordered_keys: + ordered_keys.append(key) + return tuple(ordered_keys) + + +def _build_part_entry( + *, + part_key: str, + source_name: str, + prim_path: str | None, + manual: dict, + resolved: dict, + source: dict, + fallback_part_keys: tuple[str, ...] = (), +) -> dict: + effective_material, provenance = _resolve_material( + part_key, + source_name, + manual, + resolved, + source, + fallback_part_keys=fallback_part_keys, + ) + is_unassigned = effective_material is None + return { + "part_key": part_key, + "source_name": source_name, + "prim_path": prim_path, + "effective_material": effective_material, + "assignment_provenance": provenance, + "is_unassigned": is_unassigned, + } + + # ── Scene manifest building ─────────────────────────────────────────────────── def build_scene_manifest(cad_file, usd_asset=None) -> dict: @@ -65,7 +157,8 @@ def build_scene_manifest(cad_file, usd_asset=None) -> dict: Material assignment priority per part: 1. `manual_material_overrides[part_key]` — provenance "manual" - 2. `resolved_material_assignments[part_key]["material"]` — provenance "auto" + 2. `resolved_material_assignments[part_key]["canonical_material"]` (or legacy + `["material"]`) — provenance "auto" 3. substring match in `source_material_assignments` against source_name — provenance "source" 4. 
None, is_unassigned=True — provenance "default" """ @@ -80,25 +173,51 @@ def build_scene_manifest(cad_file, usd_asset=None) -> dict: if resolved: # Build from resolved assignments (USD pipeline has run) + alias_candidates: dict[str, tuple[tuple[int, int, int], dict]] = {} for part_key, meta in resolved.items(): source_name = meta.get("source_name", "") if isinstance(meta, dict) else "" prim_path = meta.get("prim_path") if isinstance(meta, dict) else None - effective_material, provenance = _resolve_material( - part_key, source_name, manual, resolved, source + part_entry = _build_part_entry( + part_key=part_key, + source_name=source_name, + prim_path=prim_path, + manual=manual, + resolved=resolved, + source=source, ) - is_unassigned = effective_material is None + parts.append(part_entry) + if part_entry["is_unassigned"]: + unassigned_parts.append(part_key) - parts.append({ - "part_key": part_key, + alias_key = _derive_semantic_alias_key(part_key, source_name) + if alias_key is None or alias_key in resolved: + continue + + candidate = { + "part_key": alias_key, "source_name": source_name, "prim_path": prim_path, - "effective_material": effective_material, - "assignment_provenance": provenance, - "is_unassigned": is_unassigned, - }) - if is_unassigned: - unassigned_parts.append(part_key) + "fallback_part_keys": (part_key,), + } + candidate_priority = _alias_priority(part_key, source_name) + current = alias_candidates.get(alias_key) + if current is None or candidate_priority < current[0]: + alias_candidates[alias_key] = (candidate_priority, candidate) + + for alias_key, (_, candidate) in alias_candidates.items(): + alias_entry = _build_part_entry( + part_key=candidate["part_key"], + source_name=candidate["source_name"], + prim_path=candidate["prim_path"], + manual=manual, + resolved=resolved, + source=source, + fallback_part_keys=candidate["fallback_part_keys"], + ) + parts.append(alias_entry) + if alias_entry["is_unassigned"]: + unassigned_parts.append(alias_key) elif 
cad_file.parsed_objects: # Fall back to parsed_objects from STEP extraction @@ -149,23 +268,30 @@ def _resolve_material( manual: dict, resolved: dict, source: dict, + fallback_part_keys: tuple[str, ...] = (), ) -> tuple[str | None, str]: """Return (material_name, provenance) for one part using priority order.""" + lookup_keys = _iter_lookup_keys(part_key, fallback_part_keys) + # 1. Manual override - if part_key in manual and manual[part_key]: - return str(manual[part_key]), "manual" + for lookup_key in lookup_keys: + if lookup_key in manual and manual[lookup_key]: + return normalize_material_name(str(manual[lookup_key])), "manual" # 2. Auto-resolved from USD pipeline - meta = resolved.get(part_key) - if isinstance(meta, dict) and meta.get("material"): - return str(meta["material"]), "auto" + for lookup_key in lookup_keys: + meta = resolved.get(lookup_key) + if isinstance(meta, dict): + canonical = normalize_material_name(meta.get("canonical_material") or meta.get("material")) + if canonical: + return canonical, "auto" # 3. Substring match in source_material_assignments against source_name sn_lower = source_name.lower() for src_key, src_mat in source.items(): if src_key.lower() in sn_lower or sn_lower in src_key.lower(): if src_mat: - return str(src_mat), "source" + return normalize_material_name(str(src_mat)), "source" # 4. Unassigned return None, "default" diff --git a/backend/app/services/render_blender.py b/backend/app/services/render_blender.py index 613f640..7821020 100644 --- a/backend/app/services/render_blender.py +++ b/backend/app/services/render_blender.py @@ -4,6 +4,7 @@ Used by the render-worker Celery container (which has BLENDER_BIN set and cadquery installed). The backend and standard workers fall back to the Pillow placeholder when this service is unavailable. 
""" +import hashlib import json import logging import os @@ -12,16 +13,175 @@ import signal import subprocess from pathlib import Path +from app.core.render_paths import ensure_group_writable_dir + logger = logging.getLogger(__name__) -def _glb_from_step(step_path: Path, glb_path: Path, tessellation_engine: str = "occ") -> None: +def resolve_tessellation_settings( + profile: str = "render", + tessellation_engine: str | None = None, +) -> tuple[float, float, str]: + """Resolve tessellation settings from system settings for a given profile.""" + profile_key = "scene" if profile == "scene" else "render" + defaults = { + "scene": (0.1, 0.1), + "render": (0.03, 0.05), + } + default_linear, default_angular = defaults[profile_key] + + try: + from app.services.step_processor import _get_all_settings + + settings = _get_all_settings() + linear_deflection = float( + settings.get(f"{profile_key}_linear_deflection", str(default_linear)) + ) + angular_deflection = float( + settings.get(f"{profile_key}_angular_deflection", str(default_angular)) + ) + effective_engine = ( + tessellation_engine + or settings.get("tessellation_engine", "occ") + or "occ" + ) + return linear_deflection, angular_deflection, effective_engine + except Exception as exc: + logger.warning( + "Could not resolve %s tessellation settings: %s; using defaults", + profile_key, + exc, + ) + return default_linear, default_angular, tessellation_engine or "occ" + + +def build_tessellated_glb_path( + step_path: Path, + profile: str, + tessellation_engine: str, + linear_deflection: float, + angular_deflection: float, +) -> Path: + """Build a settings-sensitive GLB path to avoid stale mesh reuse.""" + signature = hashlib.sha1( + f"{profile}:{tessellation_engine}:{linear_deflection:.6f}:{angular_deflection:.6f}".encode( + "utf-8" + ) + ).hexdigest()[:10] + return step_path.parent / f"{step_path.stem}_{profile}_{signature}.glb" + + +def _stringify_optional_arg(value: object) -> str: + if value in (None, ""): + return "" 
+ return str(value) + + +def _resolve_render_samples(engine: str, samples: int | None) -> int: + if samples is not None: + return int(samples) + + effective_engine = (engine or "cycles").lower() + setting_key = ( + "blender_eevee_samples" + if effective_engine == "eevee" + else "blender_cycles_samples" + ) + try: + from app.services.step_processor import _get_all_settings + + settings = _get_all_settings() + return int(settings[setting_key]) + except Exception as exc: + logger.warning( + "Could not resolve Blender samples from settings for engine=%s: %s; " + "using legacy fallback", + effective_engine, + exc, + ) + return 64 if effective_engine == "eevee" else 256 + + +def build_turntable_ffmpeg_cmd( + frames_dir: Path, + output_path: Path, + *, + fps: int = 30, + bg_color: str = "", + width: int = 1920, + height: int = 1080, + ffmpeg_bin: str | None = None, +) -> list[str]: + """Build the canonical FFmpeg command for turntable MP4 composition. + + Legacy and graph/shadow paths must share this logic so template-backed + turntable outputs do not drift due to encoding differences. 
+ """ + ffmpeg = ffmpeg_bin or shutil.which("ffmpeg") or "ffmpeg" + if any(frames_dir.glob("frame_*.png")): + frame_pattern = str(frames_dir / "frame_%04d.png") + else: + frame_pattern = str(frames_dir / "%04d.png") + + if bg_color: + hex_color = bg_color.lstrip("#") or "ffffff" + return [ + ffmpeg, + "-y", + "-framerate", + str(fps), + "-i", + frame_pattern, + "-f", + "lavfi", + "-i", + f"color=c=0x{hex_color}:size={width}x{height}:rate={fps}", + "-filter_complex", + "[1:v][0:v]overlay=0:0:shortest=1", + "-vcodec", + "libx264", + "-pix_fmt", + "yuv420p", + "-crf", + "18", + "-movflags", + "+faststart", + str(output_path), + ] + + return [ + ffmpeg, + "-y", + "-framerate", + str(fps), + "-i", + frame_pattern, + "-vcodec", + "libx264", + "-pix_fmt", + "yuv420p", + "-crf", + "18", + "-movflags", + "+faststart", + str(output_path), + ] + + +def _glb_from_step( + step_path: Path, + glb_path: Path, + tessellation_engine: str = "occ", + tessellation_profile: str = "render", +) -> None: """Convert STEP → GLB via OCC or GMSH (export_step_to_gltf.py, no Blender needed).""" import subprocess import sys as _sys - linear_deflection = 0.3 - angular_deflection = 0.5 + linear_deflection, angular_deflection, effective_engine = resolve_tessellation_settings( + tessellation_profile, + tessellation_engine, + ) scripts_dir = Path(os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts")) script_path = scripts_dir / "export_step_to_gltf.py" @@ -32,7 +192,7 @@ def _glb_from_step(step_path: Path, glb_path: Path, tessellation_engine: str = " "--output_path", str(glb_path), "--linear_deflection", str(linear_deflection), "--angular_deflection", str(angular_deflection), - "--tessellation_engine", tessellation_engine, + "--tessellation_engine", effective_engine, ] result = subprocess.run(cmd, capture_output=True, text=True, timeout=120) for line in result.stdout.splitlines(): @@ -44,7 +204,15 @@ def _glb_from_step(step_path: Path, glb_path: Path, tessellation_engine: str = " 
f"export_step_to_gltf.py failed (exit {result.returncode}).\n" f"STDERR: {result.stderr[-1000:]}" ) - logger.info("GLB converted: %s (%d KB)", glb_path.name, glb_path.stat().st_size // 1024) + logger.info( + "GLB converted: %s (%d KB) with %s tessellation linear=%s angular=%s engine=%s", + glb_path.name, + glb_path.stat().st_size // 1024, + tessellation_profile, + linear_deflection, + angular_deflection, + effective_engine, + ) def find_blender() -> str: @@ -67,9 +235,9 @@ def render_still( width: int = 512, height: int = 512, engine: str = "cycles", - samples: int = 256, + samples: int | None = None, smooth_angle: int = 30, - cycles_device: str = "auto", + cycles_device: str = "gpu", transparent_bg: bool = False, part_colors: dict | None = None, template_path: str | None = None, @@ -92,9 +260,12 @@ def render_still( log_callback: "Callable[[str], None] | None" = None, usd_path: "Path | None" = None, tessellation_engine: str = "occ", + tessellation_profile: str = "render", focal_length_mm: float | None = None, sensor_width_mm: float | None = None, material_override: str | None = None, + template_inputs: dict | None = None, + **ignored_control_kwargs, ) -> dict: """Convert STEP → GLB (OCC or GMSH) → PNG (Blender subprocess). @@ -120,8 +291,18 @@ def render_still( t0 = time.monotonic() + if ignored_control_kwargs: + logger.debug( + "render_still ignoring unsupported control kwargs: %s", + sorted(ignored_control_kwargs.keys()), + ) + + if isinstance(usd_path, str) and usd_path.strip(): + usd_path = Path(usd_path) + + actual_samples = _resolve_render_samples(engine, samples) + # 1. 
GLB conversion (OCC) — skipped when usd_path is provided - glb_path = step_path.parent / f"{step_path.stem}_thumbnail.glb" use_usd = bool(usd_path and usd_path.exists()) t_glb = time.monotonic() @@ -129,15 +310,39 @@ def render_still( logger.info("[render_blender] using USD path: %s", usd_path) glb_size_bytes = 0 else: + linear_deflection, angular_deflection, effective_engine = resolve_tessellation_settings( + tessellation_profile, + tessellation_engine, + ) + glb_path = build_tessellated_glb_path( + step_path, + tessellation_profile, + effective_engine, + linear_deflection, + angular_deflection, + ) if not glb_path.exists() or glb_path.stat().st_size == 0: - _glb_from_step(step_path, glb_path, tessellation_engine) + _glb_from_step( + step_path, + glb_path, + tessellation_engine=effective_engine, + tessellation_profile=tessellation_profile, + ) else: - logger.info("GLB local hit: %s (%d KB)", glb_path.name, glb_path.stat().st_size // 1024) + logger.info( + "GLB local hit: %s (%d KB) profile=%s linear=%s angular=%s engine=%s", + glb_path.name, + glb_path.stat().st_size // 1024, + tessellation_profile, + linear_deflection, + angular_deflection, + effective_engine, + ) glb_size_bytes = glb_path.stat().st_size if glb_path.exists() else 0 glb_duration_s = round(time.monotonic() - t_glb, 2) # 2. 
Blender render - output_path.parent.mkdir(parents=True, exist_ok=True) + ensure_group_writable_dir(output_path.parent) env = dict(os.environ) if engine == "eevee": @@ -149,6 +354,7 @@ def render_still( }) else: env["EGL_PLATFORM"] = "surfaceless" + env["BLENDER_DEFAULT_SAMPLES"] = str(actual_samples) def _build_cmd(eng: str) -> list: # Pass "" as glb_path when using USD — blender_render.py reads --usd-path instead @@ -161,7 +367,7 @@ def render_still( glb_arg, str(output_path), str(width), str(height), - eng, str(samples), str(smooth_angle), + eng, str(actual_samples), str(smooth_angle), cycles_device, "1" if transparent_bg else "0", template_path or "", @@ -172,9 +378,9 @@ def render_still( "1" if lighting_only else "0", "1" if shadow_catcher else "0", str(rotation_x), str(rotation_y), str(rotation_z), - noise_threshold or "", denoiser or "", - denoising_input_passes or "", denoising_prefilter or "", - denoising_quality or "", denoising_use_gpu or "", + _stringify_optional_arg(noise_threshold), _stringify_optional_arg(denoiser), + _stringify_optional_arg(denoising_input_passes), _stringify_optional_arg(denoising_prefilter), + _stringify_optional_arg(denoising_quality), _stringify_optional_arg(denoising_use_gpu), ] if use_usd: cmd += ["--usd-path", str(usd_path)] @@ -188,6 +394,8 @@ def render_still( cmd += ["--sensor-width", str(sensor_width_mm)] if material_override: cmd += ["--material-override", material_override] + if template_inputs: + cmd += ["--template-inputs", json.dumps(template_inputs)] return cmd def _run(eng: str) -> tuple[int, list[str], list[str]]: @@ -305,7 +513,7 @@ def render_turntable_to_file( engine: str = "cycles", samples: int = 128, smooth_angle: int = 30, - cycles_device: str = "auto", + cycles_device: str = "gpu", transparent_bg: bool = False, bg_color: str = "", turntable_axis: str = "world_z", @@ -323,9 +531,11 @@ def render_turntable_to_file( camera_orbit: bool = True, usd_path: "Path | None" = None, tessellation_engine: str = "occ", + 
tessellation_profile: str = "render", focal_length_mm: float | None = None, sensor_width_mm: float | None = None, material_override: str | None = None, + template_inputs: dict | None = None, ) -> dict: """Render a turntable animation: STEP → STL → N frames (Blender) → mp4 (ffmpeg). @@ -357,25 +567,48 @@ def render_turntable_to_file( t0 = time.monotonic() # 1. GLB conversion (OCC) — skipped when usd_path is provided - glb_path = step_path.parent / f"{step_path.stem}_thumbnail.glb" use_usd = bool(usd_path and usd_path.exists()) t_glb = time.monotonic() if use_usd: logger.info("[render_blender] turntable using USD path: %s", usd_path) else: + linear_deflection, angular_deflection, effective_engine = resolve_tessellation_settings( + tessellation_profile, + tessellation_engine, + ) + glb_path = build_tessellated_glb_path( + step_path, + tessellation_profile, + effective_engine, + linear_deflection, + angular_deflection, + ) if not glb_path.exists() or glb_path.stat().st_size == 0: - _glb_from_step(step_path, glb_path, tessellation_engine) + _glb_from_step( + step_path, + glb_path, + tessellation_engine=effective_engine, + tessellation_profile=tessellation_profile, + ) else: - logger.info("GLB local hit: %s (%d KB)", glb_path.name, glb_path.stat().st_size // 1024) + logger.info( + "GLB local hit: %s (%d KB) profile=%s linear=%s angular=%s engine=%s", + glb_path.name, + glb_path.stat().st_size // 1024, + tessellation_profile, + linear_deflection, + angular_deflection, + effective_engine, + ) glb_duration_s = round(time.monotonic() - t_glb, 2) # 2. 
Render frames with Blender frames_dir = output_path.parent / f"_frames_{output_path.stem}" if frames_dir.exists(): _shutil.rmtree(frames_dir, ignore_errors=True) - frames_dir.mkdir(parents=True, exist_ok=True) - output_path.parent.mkdir(parents=True, exist_ok=True) + ensure_group_writable_dir(frames_dir) + ensure_group_writable_dir(output_path.parent) env = dict(os.environ) env["EGL_PLATFORM"] = "surfaceless" @@ -416,6 +649,8 @@ def render_turntable_to_file( cmd += ["--sensor-width", str(sensor_width_mm)] if material_override: cmd += ["--material-override", material_override] + if template_inputs: + cmd += ["--template-inputs", json.dumps(template_inputs)] log_lines: list[str] = [] @@ -458,34 +693,15 @@ def render_turntable_to_file( # 3. Compose frames → mp4 with ffmpeg t_ffmpeg = time.monotonic() - ffmpeg_cmd = [ - ffmpeg_bin, - "-y", - "-framerate", str(fps), - "-i", str(frames_dir / "frame_%04d.png"), - "-vcodec", "libx264", - "-pix_fmt", "yuv420p", - "-crf", "18", - "-movflags", "+faststart", - str(output_path), - ] - - # If bg_color is set and transparent_bg is True, overlay frames on solid bg - if bg_color and transparent_bg: - hex_color = bg_color.lstrip("#") - r, g, b = int(hex_color[0:2], 16), int(hex_color[2:4], 16), int(hex_color[4:6], 16) - ffmpeg_cmd = [ - ffmpeg_bin, "-y", - "-framerate", str(fps), - "-i", str(frames_dir / "frame_%04d.png"), - "-f", "lavfi", "-i", f"color=c=0x{hex_color}:size={width}x{height}:rate={fps}", - "-filter_complex", "[1:v][0:v]overlay=0:0:shortest=1", - "-vcodec", "libx264", - "-pix_fmt", "yuv420p", - "-crf", "18", - "-movflags", "+faststart", - str(output_path), - ] + ffmpeg_cmd = build_turntable_ffmpeg_cmd( + frames_dir, + output_path, + fps=fps, + bg_color=bg_color if transparent_bg else "", + width=width, + height=height, + ffmpeg_bin=ffmpeg_bin, + ) ffmpeg_proc = subprocess.run( ffmpeg_cmd, capture_output=True, text=True, timeout=300 @@ -530,7 +746,7 @@ def render_cinematic_to_file( engine: str = "cycles", samples: int 
= 128, smooth_angle: int = 30, - cycles_device: str = "auto", + cycles_device: str = "gpu", transparent_bg: bool = False, part_colors: dict | None = None, template_path: str | None = None, @@ -545,9 +761,11 @@ def render_cinematic_to_file( rotation_z: float = 0.0, usd_path: "Path | None" = None, tessellation_engine: str = "occ", + tessellation_profile: str = "render", focal_length_mm: float | None = None, sensor_width_mm: float | None = None, material_override: str | None = None, + template_inputs: dict | None = None, log_callback: "Callable[[str], None] | None" = None, ) -> dict: """Render a cinematic highlight animation: STEP -> GLB/USD -> 480 frames @ 24fps (Blender) -> mp4 (ffmpeg). @@ -587,25 +805,48 @@ def render_cinematic_to_file( t0 = time.monotonic() # 1. GLB conversion (OCC) — skipped when usd_path is provided - glb_path = step_path.parent / f"{step_path.stem}_thumbnail.glb" use_usd = bool(usd_path and usd_path.exists()) t_glb = time.monotonic() if use_usd: logger.info("[render_blender] cinematic using USD path: %s", usd_path) else: + linear_deflection, angular_deflection, effective_engine = resolve_tessellation_settings( + tessellation_profile, + tessellation_engine, + ) + glb_path = build_tessellated_glb_path( + step_path, + tessellation_profile, + effective_engine, + linear_deflection, + angular_deflection, + ) if not glb_path.exists() or glb_path.stat().st_size == 0: - _glb_from_step(step_path, glb_path, tessellation_engine) + _glb_from_step( + step_path, + glb_path, + tessellation_engine=effective_engine, + tessellation_profile=tessellation_profile, + ) else: - logger.info("GLB local hit: %s (%d KB)", glb_path.name, glb_path.stat().st_size // 1024) + logger.info( + "GLB local hit: %s (%d KB) profile=%s linear=%s angular=%s engine=%s", + glb_path.name, + glb_path.stat().st_size // 1024, + tessellation_profile, + linear_deflection, + angular_deflection, + effective_engine, + ) glb_duration_s = round(time.monotonic() - t_glb, 2) # 2. 
Render frames with Blender frames_dir = output_path.parent / f"_frames_{output_path.stem}" if frames_dir.exists(): _shutil.rmtree(frames_dir, ignore_errors=True) - frames_dir.mkdir(parents=True, exist_ok=True) - output_path.parent.mkdir(parents=True, exist_ok=True) + ensure_group_writable_dir(frames_dir) + ensure_group_writable_dir(output_path.parent) env = dict(os.environ) env["EGL_PLATFORM"] = "surfaceless" @@ -645,6 +886,8 @@ def render_cinematic_to_file( cmd += ["--sensor-width", str(sensor_width_mm)] if material_override: cmd += ["--material-override", material_override] + if template_inputs: + cmd += ["--template-inputs", json.dumps(template_inputs)] log_lines: list[str] = [] diff --git a/backend/app/services/step_processor.py b/backend/app/services/step_processor.py index 4d1ed5a..974cb24 100644 --- a/backend/app/services/step_processor.py +++ b/backend/app/services/step_processor.py @@ -10,7 +10,9 @@ import logging import uuid from dataclasses import dataclass, field from pathlib import Path -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any + +from app.core.render_paths import ensure_group_writable_dir if TYPE_CHECKING: from app.models.cad_file import CadFile @@ -18,6 +20,10 @@ if TYPE_CHECKING: logger = logging.getLogger(__name__) +class MissingCadResourceError(FileNotFoundError): + """Terminal CAD resource error that should not be retried by Celery tasks.""" + + def build_part_colors( cad_parsed_objects: list[str], cad_part_materials: list[dict], @@ -1023,8 +1029,12 @@ def _get_all_settings() -> dict[str, str]: "blender_eevee_samples": "64", "thumbnail_format": "jpg", "blender_smooth_angle": "30", - "cycles_device": "auto", + "cycles_device": "gpu", "tessellation_engine": "occ", + "scene_linear_deflection": "0.1", + "scene_angular_deflection": "0.1", + "render_linear_deflection": "0.03", + "render_angular_deflection": "0.05", } try: from app.config import settings as app_settings @@ -1046,6 +1056,23 @@ def _generate_thumbnail( 
cad_file_id: str, upload_dir: str, part_colors: dict[str, str] | None = None, + *, + renderer: str | None = None, + render_engine: str | None = None, + samples: int | None = None, + width: int | None = None, + height: int | None = None, + transparent_bg: bool | None = None, + target_collection: str = "Product", + material_library_path: str | None = None, + material_map: dict[str, str] | None = None, + part_names_ordered: list[str] | None = None, + lighting_only: bool = False, + shadow_catcher: bool = False, + usd_path: Path | None = None, + focal_length_mm: float | None = None, + sensor_width_mm: float | None = None, + material_override: str | None = None, ) -> tuple[Path | None, dict]: """Generate thumbnail using the configured renderer. @@ -1054,12 +1081,20 @@ def _generate_thumbnail( """ import time out_dir = Path(upload_dir) / "thumbnails" - out_dir.mkdir(parents=True, exist_ok=True) + ensure_group_writable_dir(out_dir) settings = _get_all_settings() - renderer = settings["thumbnail_renderer"] - fmt = settings["thumbnail_format"] # "jpg" or "png" + requested_renderer = renderer or settings["thumbnail_renderer"] + active_renderer = requested_renderer + fmt = settings["thumbnail_format"] # "jpg" or "png" ext = "jpg" if fmt == "jpg" else "png" + if requested_renderer == "threejs": + # The historical Three.js thumbnail renderer was removed from the backend. + # Keep the workflow node executable by falling back to the maintained Blender path + # while preserving the requested renderer in the render log for observability. 
+ active_renderer = "blender" + fmt = "png" + ext = "png" # Clean up any existing thumbnail for this cad_file_id (either extension) for old_ext in ("png", "jpg"): @@ -1073,28 +1108,39 @@ def _generate_thumbnail( # Build the base render_log with the settings snapshot render_log: dict = { - "renderer": renderer, + "renderer": requested_renderer, "format": fmt, "started_at": time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()), } - if renderer == "blender": - engine = settings["blender_engine"] + if active_renderer == "blender": + engine = render_engine or settings["blender_engine"] + resolved_samples = int(samples) if samples is not None else int(settings[f"blender_{engine}_samples"]) + resolved_width = int(width) if width is not None else 512 + resolved_height = int(height) if height is not None else 512 + resolved_transparent_bg = bool(transparent_bg) if transparent_bg is not None else False render_log.update({ "engine": engine, - "samples": int(settings[f"blender_{engine}_samples"]), + "samples": resolved_samples, "smooth_angle": int(settings["blender_smooth_angle"]), "cycles_device": settings["cycles_device"], - "width": 512, - "height": 512, + "width": resolved_width, + "height": resolved_height, + "transparent_bg": resolved_transparent_bg, }) - logger.info(f"Thumbnail renderer={renderer}, format={fmt}") + if requested_renderer != active_renderer: + render_log["renderer_backend"] = active_renderer + render_log["renderer_fallback_reason"] = "threejs_renderer_removed_using_blender_compat" + logger.info(f"Thumbnail renderer={requested_renderer}, format={fmt}") rendered_png: Path | None = None service_data: dict = {} - if renderer == "blender": - engine = settings["blender_engine"] - samples = int(settings[f"blender_{engine}_samples"]) + if active_renderer == "blender": + engine = render_engine or settings["blender_engine"] + resolved_samples = int(samples) if samples is not None else int(settings[f"blender_{engine}_samples"]) + resolved_width = int(width) if width is 
not None else 512 + resolved_height = int(height) if height is not None else 512 + resolved_transparent_bg = bool(transparent_bg) if transparent_bg is not None else False from app.services.render_blender import is_blender_available, render_still if is_blender_available(): @@ -1102,11 +1148,25 @@ def _generate_thumbnail( service_data = render_still( step_path=step_path, output_path=tmp_png, + width=resolved_width, + height=resolved_height, engine=engine, - samples=samples, + samples=resolved_samples, smooth_angle=int(settings["blender_smooth_angle"]), cycles_device=settings["cycles_device"], + transparent_bg=resolved_transparent_bg, + target_collection=target_collection, + material_library_path=material_library_path, + material_map=material_map, + part_names_ordered=part_names_ordered, + lighting_only=lighting_only, + shadow_catcher=shadow_catcher, tessellation_engine=settings["tessellation_engine"], + usd_path=usd_path, + focal_length_mm=focal_length_mm, + sensor_width_mm=sensor_width_mm, + material_override=material_override, + tessellation_profile="scene", ) rendered_png = tmp_png if tmp_png.exists() else None except Exception as exc: @@ -1133,8 +1193,7 @@ def _generate_thumbnail( def _finalise_image(src: Path, dst: Path) -> Path | None: - """Move src image to dst. When dst has a .webp suffix, convert via Pillow - (quality=90, method=4) for 50-70 % smaller files. 
Otherwise output PNG.""" + """Move src image to dst, converting the PNG intermediate when needed.""" if dst.suffix.lower() == ".webp": try: from PIL import Image @@ -1148,13 +1207,52 @@ def _finalise_image(src: Path, dst: Path) -> Path | None: out = dst.with_suffix(".png") src.rename(out) return out + if dst.suffix.lower() in {".jpg", ".jpeg"}: + try: + from PIL import Image + + img = Image.open(str(src)) + if img.mode in {"RGBA", "LA"} or (img.mode == "P" and "transparency" in img.info): + background = Image.new("RGBA", img.size, (255, 255, 255, 255)) + img = Image.alpha_composite(background, img.convert("RGBA")).convert("RGB") + else: + img = img.convert("RGB") + out = dst.with_suffix(".jpg") + img.save(str(out), "JPEG", quality=95, subsampling=0) + src.unlink(missing_ok=True) + return out + except Exception: + logger.warning("JPEG conversion failed — falling back to PNG") + out = dst.with_suffix(".png") + src.rename(out) + return out out = dst.with_suffix(".png") src.rename(out) return out -def regenerate_cad_thumbnail(cad_file_id: str, part_colors: dict[str, str]) -> bool: +def regenerate_cad_thumbnail( + cad_file_id: str, + part_colors: dict[str, str], + *, + renderer: str | None = None, + render_engine: str | None = None, + samples: int | None = None, + width: int | None = None, + height: int | None = None, + transparent_bg: bool | None = None, + target_collection: str = "Product", + material_library_path: str | None = None, + material_map: dict[str, str] | None = None, + part_names_ordered: list[str] | None = None, + lighting_only: bool = False, + shadow_catcher: bool = False, + usd_path: Path | None = None, + focal_length_mm: float | None = None, + sensor_width_mm: float | None = None, + material_override: str | None = None, +) -> bool: """ Regenerate a thumbnail with per-part colours for an existing CAD file. 
@@ -1170,13 +1268,18 @@ def regenerate_cad_thumbnail(cad_file_id: str, part_colors: dict[str, str]) -> b with Session(db_engine) as session: cad_file = session.get(CadFile, uuid.UUID(cad_file_id)) if not cad_file: - logger.error(f"CAD file not found: {cad_file_id}") - return False + message = f"CAD file not found: {cad_file_id}" + logger.warning(message) + raise MissingCadResourceError(message) step_path = Path(cad_file.stored_path) if not step_path.exists(): - logger.error(f"STEP file not found: {step_path}") - return False + message = f"STEP file not found: {step_path}" + logger.warning(message) + cad_file.processing_status = ProcessingStatus.failed + cad_file.error_message = message[:2000] + session.commit() + raise MissingCadResourceError(message) # Mark as processing so the activity page shows it as active cad_file.processing_status = ProcessingStatus.processing @@ -1184,7 +1287,26 @@ def regenerate_cad_thumbnail(cad_file_id: str, part_colors: dict[str, str]) -> b try: thumb_path, render_log = _generate_thumbnail( - step_path, cad_file_id, app_settings.upload_dir, part_colors=part_colors + step_path, + cad_file_id, + app_settings.upload_dir, + part_colors=part_colors, + renderer=renderer, + render_engine=render_engine, + samples=samples, + width=width, + height=height, + transparent_bg=transparent_bg, + target_collection=target_collection, + material_library_path=material_library_path, + material_map=material_map, + part_names_ordered=part_names_ordered, + lighting_only=lighting_only, + shadow_catcher=shadow_catcher, + usd_path=usd_path, + focal_length_mm=focal_length_mm, + sensor_width_mm=sensor_width_mm, + material_override=material_override, ) if thumb_path: cad_file.thumbnail_path = str(thumb_path) @@ -1207,6 +1329,7 @@ def render_to_file( part_colors: dict[str, str] | None = None, width: int | None = None, height: int | None = None, + smooth_angle: int | None = None, transparent_bg: bool = False, engine: str | None = None, samples: int | None = None, @@ 
-1234,6 +1357,7 @@ def render_to_file( focal_length_mm: float | None = None, sensor_width_mm: float | None = None, material_override: str | None = None, + template_inputs: dict[str, Any] | None = None, ) -> tuple[bool, dict]: """Render a STEP file to a specific output path using current system settings. @@ -1246,6 +1370,7 @@ def render_to_file( part_colors: Optional {part_name: hex_color} map. width: Optional render width (overrides system default). height: Optional render height (overrides system default). + smooth_angle: Optional auto-smooth angle override in degrees. transparent_bg: If True and renderer=blender+PNG, render with transparent background. engine: Optional per-OT engine override ("cycles" | "eevee"), or None for system default. samples: Optional per-OT samples override, or None for system default. @@ -1262,7 +1387,7 @@ def render_to_file( step = Path(step_path) out = Path(output_path) - out.parent.mkdir(parents=True, exist_ok=True) + ensure_group_writable_dir(out.parent) settings = _get_all_settings() renderer = settings["thumbnail_renderer"] @@ -1284,19 +1409,20 @@ def render_to_file( if renderer == "blender": actual_engine = engine or settings["blender_engine"] - actual_samples = samples or int(settings[f"blender_{actual_engine}_samples"]) + actual_samples = int(samples) if samples is not None else int(settings[f"blender_{actual_engine}_samples"]) actual_cycles_device = cycles_device or settings["cycles_device"] + actual_smooth_angle = smooth_angle if smooth_angle is not None else int(settings["blender_smooth_angle"]) w = width or 512 h = height or 512 render_log.update({ "engine": actual_engine, "samples": actual_samples, - "smooth_angle": int(settings["blender_smooth_angle"]), + "smooth_angle": actual_smooth_angle, "cycles_device": actual_cycles_device, "width": w, "height": h, }) extra = { "engine": actual_engine, "samples": actual_samples, - "smooth_angle": int(settings["blender_smooth_angle"]), + "smooth_angle": actual_smooth_angle, 
"cycles_device": actual_cycles_device, "width": w, "height": h, "transparent_bg": transparent_bg, @@ -1314,6 +1440,9 @@ def render_to_file( render_log["lighting_only"] = True if shadow_catcher: render_log["shadow_catcher"] = True + if template_inputs: + extra["template_inputs"] = template_inputs + render_log["template_inputs"] = template_inputs if material_library_path and material_map: extra["material_library_path"] = material_library_path extra["material_map"] = material_map @@ -1349,7 +1478,7 @@ def render_to_file( output_path=tmp_png, engine=actual_engine, samples=actual_samples, - smooth_angle=int(settings["blender_smooth_angle"]), + smooth_angle=actual_smooth_angle, cycles_device=actual_cycles_device, width=w, height=h, transparent_bg=transparent_bg, @@ -1373,6 +1502,7 @@ def render_to_file( focal_length_mm=focal_length_mm, sensor_width_mm=sensor_width_mm, material_override=material_override, + template_inputs=template_inputs, ) rendered_png = tmp_png if tmp_png.exists() else None except Exception as exc: @@ -1400,7 +1530,7 @@ def render_to_file( def _convert_to_gltf(step_path: Path, cad_file_id: str, upload_dir: str) -> Path | None: """Convert STEP to glTF for browser 3D viewer.""" out_dir = Path(upload_dir) / "gltf" - out_dir.mkdir(parents=True, exist_ok=True) + ensure_group_writable_dir(out_dir) out_path = out_dir / f"{cad_file_id}.gltf" try: diff --git a/backend/app/services/template_service.py b/backend/app/services/template_service.py index 4a2510d..121d560 100644 --- a/backend/app/services/template_service.py +++ b/backend/app/services/template_service.py @@ -15,6 +15,7 @@ import logging from sqlalchemy import create_engine, select, and_, exists from sqlalchemy.orm import Session +from app.domains.materials.library_paths import resolve_asset_library_blend_path from app.models.render_template import RenderTemplate from app.models.system_setting import SystemSetting from app.domains.rendering.models import render_template_output_types @@ -121,14 +122,27 
@@ def get_material_library_path_for_session(session: Session) -> str | None: row = session.execute( select(AssetLibrary).where(AssetLibrary.is_active == True).limit(1) # noqa: E712 ).scalar_one_or_none() - if row and row.blend_file_path: - return row.blend_file_path + if row: + resolved_path = resolve_asset_library_blend_path( + blend_file_path=row.blend_file_path, + asset_library_id=row.id, + ) + if resolved_path: + if row.blend_file_path and resolved_path != row.blend_file_path: + logger.warning( + "Active asset library %s points to missing file %s; using %s instead", + row.id, + row.blend_file_path, + resolved_path, + ) + return resolved_path row = session.execute( select(SystemSetting).where(SystemSetting.key == "material_library_path") ).scalar_one_or_none() if row and row.value and row.value.strip(): - return row.value.strip() + resolved_path = resolve_asset_library_blend_path(blend_file_path=row.value.strip()) + return resolved_path or row.value.strip() return None diff --git a/backend/app/tasks/celery_app.py b/backend/app/tasks/celery_app.py index bbeb299..85258f1 100644 --- a/backend/app/tasks/celery_app.py +++ b/backend/app/tasks/celery_app.py @@ -33,7 +33,16 @@ celery_app.conf.update( "app.domains.rendering.tasks.*": {"queue": "asset_pipeline"}, "app.tasks.beat_tasks.*": {"queue": "step_processing"}, "app.tasks.ai_tasks.*": {"queue": "ai_validation"}, - # Legacy task names (shim) — keep until old queued tasks drain + # Legacy task names (shim) — preserve the runtime queue split while + # old workflow configs and queued tasks still address app.tasks.step_tasks.*. 
+ "app.tasks.step_tasks.render_step_thumbnail": {"queue": "asset_pipeline"}, + "app.tasks.step_tasks.render_graph_thumbnail": {"queue": "asset_pipeline"}, + "app.tasks.step_tasks.regenerate_thumbnail": {"queue": "asset_pipeline"}, + "app.tasks.step_tasks.generate_gltf_geometry_task": {"queue": "asset_pipeline"}, + "app.tasks.step_tasks.generate_usd_master_task": {"queue": "asset_pipeline"}, + "app.tasks.step_tasks.reextract_rich_metadata_task": {"queue": "asset_pipeline"}, + "app.tasks.step_tasks.reextract_cad_metadata": {"queue": "asset_pipeline"}, + "app.tasks.step_tasks.render_order_line_task": {"queue": "asset_pipeline"}, "app.tasks.step_tasks.*": {"queue": "step_processing"}, }, beat_schedule={ diff --git a/backend/app/tasks/step_tasks.py b/backend/app/tasks/step_tasks.py index 79ec6e3..cfe5ea4 100644 --- a/backend/app/tasks/step_tasks.py +++ b/backend/app/tasks/step_tasks.py @@ -11,6 +11,7 @@ from app.domains.pipeline.tasks.extract_metadata import ( # noqa: F401 reextract_rich_metadata_task, ) from app.domains.pipeline.tasks.render_thumbnail import ( # noqa: F401 + render_graph_thumbnail, render_step_thumbnail, regenerate_thumbnail, ) diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 39dd378..58f8950 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -49,6 +49,7 @@ cad = [ [tool.pytest.ini_options] asyncio_mode = "auto" +cache_dir = "/tmp/pytest_cache" testpaths = ["tests"] markers = [ "integration: marks tests requiring running services", diff --git a/backend/start.sh b/backend/start.sh index c2e6edd..a4769f3 100644 --- a/backend/start.sh +++ b/backend/start.sh @@ -7,4 +7,9 @@ echo "Seeding templates and admin user..." python seed.py echo "Starting API server..." 
-exec uvicorn app.main:app --host 0.0.0.0 --port 8888 --reload +exec uvicorn app.main:app \ + --host 0.0.0.0 \ + --port 8888 \ + --reload \ + --reload-dir /app/app \ + --reload-dir /app/alembic diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py index 968082a..1b41a4f 100644 --- a/backend/tests/conftest.py +++ b/backend/tests/conftest.py @@ -113,32 +113,44 @@ def parsed_anschlagplatten(parsed_excel_all): # ── Test-DB (nutzt separate Test-Datenbank) ────────────────────────────────── -import os import uuid import pytest_asyncio from typing import AsyncGenerator from httpx import AsyncClient, ASGITransport +from sqlalchemy.engine import make_url from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker -TEST_DB_URL = os.environ.get( - "TEST_DATABASE_URL", - "postgresql+asyncpg://hartomat:hartomat@localhost:5432/hartomat_test" -) +from app.config import settings +from tests.db_test_utils import reset_public_schema_async, resolve_test_db_url + + +def _resolve_test_db_url() -> str: + return resolve_test_db_url(async_driver=True) + + +def _sync_settings_to_test_database() -> None: + resolved = make_url(resolve_test_db_url(async_driver=False)) + settings.postgres_host = resolved.host or settings.postgres_host + settings.postgres_port = int(resolved.port or settings.postgres_port) + settings.postgres_user = resolved.username or settings.postgres_user + settings.postgres_password = resolved.password or settings.postgres_password + settings.postgres_db = resolved.database or settings.postgres_db + + +_sync_settings_to_test_database() @pytest_asyncio.fixture async def test_engine(): from app.database import Base - from sqlalchemy import text import app.models # noqa - register all models - engine = create_async_engine(TEST_DB_URL, echo=False) + engine = create_async_engine(_resolve_test_db_url(), echo=False) async with engine.begin() as conn: + await reset_public_schema_async(conn) await conn.run_sync(Base.metadata.create_all) 
yield engine - # Use CASCADE to handle circular FK dependencies in drop async with engine.begin() as conn: - await conn.execute(text("DROP SCHEMA public CASCADE")) - await conn.execute(text("CREATE SCHEMA public")) + await reset_public_schema_async(conn) await engine.dispose() @@ -229,6 +241,7 @@ def mock_celery_tasks(monkeypatch): task_paths = [ "app.domains.materials.tasks.refresh_asset_library_catalog", "app.tasks.step_tasks.process_step_file", + "app.tasks.step_tasks.render_graph_thumbnail", "app.tasks.step_tasks.render_step_thumbnail", "app.domains.imports.tasks.validate_excel_import", "app.domains.rendering.tasks.render_still_task", diff --git a/backend/tests/db_test_utils.py b/backend/tests/db_test_utils.py new file mode 100644 index 0000000..b1ee3e0 --- /dev/null +++ b/backend/tests/db_test_utils.py @@ -0,0 +1,85 @@ +from __future__ import annotations + +from contextlib import contextmanager +import importlib +import os +from typing import Iterator + +from sqlalchemy import text +from sqlalchemy.engine import make_url +from sqlalchemy.orm import Session +from sqlalchemy import create_engine + +from app.database import Base + + +def resolve_test_db_url(*, async_driver: bool) -> str: + explicit_url = os.environ.get("TEST_DATABASE_URL") + if explicit_url: + db_url = explicit_url + else: + host = os.environ.get("TEST_POSTGRES_HOST") or os.environ.get("POSTGRES_HOST") or "localhost" + port = os.environ.get("TEST_POSTGRES_PORT") or os.environ.get("POSTGRES_PORT") or "5432" + user = os.environ.get("TEST_POSTGRES_USER") or os.environ.get("POSTGRES_USER") or "hartomat" + password = os.environ.get("TEST_POSTGRES_PASSWORD") or os.environ.get("POSTGRES_PASSWORD") or "hartomat" + default_db = f"{os.environ.get('POSTGRES_DB', 'hartomat')}_test" + database = os.environ.get("TEST_POSTGRES_DB") or os.environ.get("TEST_DB_NAME") or default_db + driver = "postgresql+asyncpg" if async_driver else "postgresql" + db_url = f"{driver}://{user}:{password}@{host}:{port}/{database}" 
+ + normalized_url = db_url if async_driver else db_url.replace("+asyncpg", "") + database_name = make_url(normalized_url).database or "" + if not database_name.endswith("_test"): + raise RuntimeError( + f"Refusing to run destructive test database setup against non-test database '{database_name}'." + ) + return normalized_url + + +def reset_public_schema_sync(connection) -> None: + connection.execute(text("DROP SCHEMA IF EXISTS public CASCADE")) + connection.execute(text("CREATE SCHEMA public")) + + +async def reset_public_schema_async(connection) -> None: + await connection.execute(text("DROP SCHEMA IF EXISTS public CASCADE")) + await connection.execute(text("CREATE SCHEMA public")) + + +def import_all_model_modules() -> None: + module_names = ( + "app.domains.tenants.models", + "app.domains.auth.models", + "app.domains.imports.models", + "app.domains.products.models", + "app.domains.orders.models", + "app.domains.notifications.models", + "app.domains.billing.models", + "app.domains.rendering.models", + "app.domains.materials.models", + "app.domains.media.models", + "app.domains.admin.models", + "app.models.system_setting", + "app.models.worker_config", + "app.models.chat", + ) + for module_name in module_names: + importlib.import_module(module_name) + + +@contextmanager +def sync_test_session() -> Iterator[Session]: + import_all_model_modules() + engine = create_engine(resolve_test_db_url(async_driver=False)) + with engine.begin() as conn: + reset_public_schema_sync(conn) + Base.metadata.create_all(conn) + + session = Session(engine) + try: + yield session + finally: + session.close() + with engine.begin() as conn: + reset_public_schema_sync(conn) + engine.dispose() diff --git a/backend/tests/domains/test_notifications_service.py b/backend/tests/domains/test_notifications_service.py index 43bbaf7..c307f0f 100644 --- a/backend/tests/domains/test_notifications_service.py +++ b/backend/tests/domains/test_notifications_service.py @@ -1,6 +1,10 @@ """Tests for 
notification config service.""" import pytest +from sqlalchemy import select + +from app.domains.notifications.models import AuditLog from app.domains.notifications.service import ( + emit_notification, upsert_notification_config, get_notification_configs, ) @@ -25,3 +29,25 @@ async def test_upsert_updates_existing(db, admin_user): cfg = next((c for c in configs if c.event_type == "order_submitted"), None) assert cfg is not None assert cfg.enabled is False + + +@pytest.mark.asyncio +async def test_emit_notification_persists_naive_utc_timestamp(db, admin_user): + """Notification writes must match the legacy naive Postgres timestamp columns.""" + await emit_notification( + db, + actor_user_id=admin_user.id, + target_user_id=admin_user.id, + action="order.submitted", + entity_type="order", + entity_id="order-123", + details={"order_number": "SA-2026-00001"}, + ) + + row = ( + await db.execute( + select(AuditLog).where(AuditLog.action == "order.submitted") + ) + ).scalar_one() + + assert row.timestamp.tzinfo is None diff --git a/backend/tests/domains/test_output_types_api.py b/backend/tests/domains/test_output_types_api.py index f374bd9..fd48330 100644 --- a/backend/tests/domains/test_output_types_api.py +++ b/backend/tests/domains/test_output_types_api.py @@ -34,9 +34,138 @@ async def test_create_output_type_infers_artifact_kind_from_format_and_animation payload = response.json() assert payload["workflow_family"] == "order_line" assert payload["artifact_kind"] == "turntable_video" + assert payload["workflow_rollout_mode"] == "legacy_only" assert payload["invocation_overrides"] == {} +@pytest.mark.asyncio +async def test_output_type_contract_catalog_exposes_backend_authored_rules( + client, + auth_headers, +): + response = await client.get( + "/api/output-types/contract-catalog", + headers=auth_headers, + ) + + assert response.status_code == 200, response.text + payload = response.json() + assert payload["workflow_families"] == ["order_line", "cad_file"] + assert 
payload["workflow_rollout_modes"] == ["legacy_only", "shadow", "graph"] + assert payload["artifact_kinds"] == [ + "still_image", + "turntable_video", + "model_export", + "thumbnail_image", + "blend_asset", + "package", + "custom", + ] + assert payload["allowed_artifact_kinds_by_family"]["cad_file"] == [ + "model_export", + "thumbnail_image", + "package", + "custom", + ] + assert payload["allowed_output_formats_by_family"]["order_line"] == [ + "png", + "jpg", + "jpeg", + "webp", + "mp4", + "webm", + "mov", + "blend", + ] + assert payload["allowed_invocation_override_keys_by_artifact_kind"]["turntable_video"] == [ + "width", + "height", + "engine", + "samples", + "bg_color", + "noise_threshold", + "denoiser", + "denoising_input_passes", + "denoising_prefilter", + "denoising_quality", + "denoising_use_gpu", + "frame_count", + "fps", + "turntable_axis", + ] + assert payload["default_output_format_by_artifact_kind"]["blend_asset"] == "blend" + assert payload["parameter_ownership"]["output_type_profile_keys"] == [ + "transparent_bg", + "cycles_device", + "material_override", + ] + assert payload["parameter_ownership"]["template_runtime_keys"] == [ + "target_collection", + "lighting_only", + "shadow_catcher", + "camera_orbit", + "template_inputs", + ] + assert payload["parameter_ownership"]["workflow_node_keys_by_step"]["resolve_template"] == [ + "template_id_override", + "require_template", + "material_library_path", + "disable_materials", + "target_collection", + "material_replace_mode", + "lighting_only_mode", + "shadow_catcher_mode", + "camera_orbit_mode", + ] + assert "target_collection" in payload["parameter_ownership"]["workflow_node_keys_by_step"]["blender_still"] + assert "camera_orbit" in payload["parameter_ownership"]["workflow_node_keys_by_step"]["blender_turntable"] + + +@pytest.mark.asyncio +async def test_create_output_type_infers_blend_asset_from_blend_format( + client, + auth_headers, +): + response = await client.post( + "/api/output-types", + json={ + 
"name": f"Blend {uuid.uuid4().hex[:8]}", + "renderer": "blender", + "output_format": "blend", + "render_backend": "celery", + "workflow_family": "order_line", + }, + headers=auth_headers, + ) + + assert response.status_code == 201, response.text + payload = response.json() + assert payload["workflow_family"] == "order_line" + assert payload["artifact_kind"] == "blend_asset" + + +@pytest.mark.asyncio +async def test_create_output_type_rejects_non_blend_artifact_for_blend_format( + client, + auth_headers, +): + response = await client.post( + "/api/output-types", + json={ + "name": f"Bad Blend {uuid.uuid4().hex[:8]}", + "renderer": "blender", + "output_format": "blend", + "render_backend": "celery", + "workflow_family": "order_line", + "artifact_kind": "still_image", + }, + headers=auth_headers, + ) + + assert response.status_code == 400, response.text + assert response.json()["detail"] == "Output format 'blend' requires artifact kind 'blend_asset'" + + @pytest.mark.asyncio async def test_create_output_type_rejects_workflow_family_mismatch( client, @@ -69,6 +198,52 @@ async def test_create_output_type_rejects_workflow_family_mismatch( assert "Workflow family mismatch" in response.json()["detail"] +@pytest.mark.asyncio +async def test_create_output_type_rejects_workflow_artifact_mismatch( + client, + db, + auth_headers, +): + workflow = WorkflowDefinition( + name=f"Blend Export {uuid.uuid4().hex[:8]}", + config={ + "version": 1, + "ui": {"preset": "custom", "execution_mode": "graph"}, + "nodes": [ + {"id": "setup", "step": "order_line_setup", "params": {}}, + {"id": "template", "step": "resolve_template", "params": {}}, + {"id": "blend", "step": "export_blend", "params": {}}, + ], + "edges": [ + {"from": "setup", "to": "template"}, + {"from": "template", "to": "blend"}, + ], + }, + is_active=True, + ) + db.add(workflow) + await db.commit() + await db.refresh(workflow) + + response = await client.post( + "/api/output-types", + json={ + "name": f"Still 
{uuid.uuid4().hex[:8]}", + "renderer": "blender", + "output_format": "png", + "render_backend": "celery", + "workflow_family": "order_line", + "artifact_kind": "still_image", + "workflow_definition_id": str(workflow.id), + }, + headers=auth_headers, + ) + + assert response.status_code == 400, response.text + assert "Workflow artifact mismatch" in response.json()["detail"] + assert "blend_asset" in response.json()["detail"] + + @pytest.mark.asyncio async def test_create_output_type_rejects_artifact_kind_incompatible_with_family( client, @@ -91,6 +266,53 @@ async def test_create_output_type_rejects_artifact_kind_incompatible_with_family assert "not allowed for workflow_family" in response.json()["detail"] +@pytest.mark.asyncio +async def test_create_output_type_rejects_output_format_incompatible_with_family( + client, + auth_headers, +): + response = await client.post( + "/api/output-types", + json={ + "name": f"Bad CAD Blend {uuid.uuid4().hex[:8]}", + "renderer": "blender", + "output_format": "blend", + "render_backend": "celery", + "workflow_family": "cad_file", + "artifact_kind": "custom", + }, + headers=auth_headers, + ) + + assert response.status_code == 400, response.text + assert "Output format 'blend' is not allowed for workflow_family 'cad_file'" in response.json()["detail"] + + +@pytest.mark.asyncio +async def test_create_output_type_preserves_legacy_safe_custom_png_output_type( + client, + auth_headers, +): + response = await client.post( + "/api/output-types", + json={ + "name": f"Legacy Custom Still {uuid.uuid4().hex[:8]}", + "renderer": "blender", + "output_format": "png", + "render_backend": "celery", + "workflow_family": "order_line", + "artifact_kind": "custom", + }, + headers=auth_headers, + ) + + assert response.status_code == 201, response.text + payload = response.json() + assert payload["workflow_family"] == "order_line" + assert payload["artifact_kind"] == "custom" + assert payload["output_format"] == "png" + + @pytest.mark.asyncio async def 
test_create_output_type_rejects_turntable_video_without_animation( client, @@ -159,6 +381,99 @@ async def test_update_output_type_rejects_mixed_family_workflow( assert response.json()["detail"] == "Output types cannot link mixed-family workflows" +@pytest.mark.asyncio +async def test_patch_output_type_rejects_workflow_artifact_mismatch( + client, + db, + auth_headers, +): + output_type_response = await client.post( + "/api/output-types", + json={ + "name": f"Still {uuid.uuid4().hex[:8]}", + "renderer": "blender", + "output_format": "png", + "render_backend": "celery", + "workflow_family": "order_line", + "artifact_kind": "still_image", + }, + headers=auth_headers, + ) + assert output_type_response.status_code == 201, output_type_response.text + output_type = output_type_response.json() + + workflow = WorkflowDefinition( + name=f"Blend Export {uuid.uuid4().hex[:8]}", + config={ + "version": 1, + "ui": {"preset": "custom", "execution_mode": "graph"}, + "nodes": [ + {"id": "setup", "step": "order_line_setup", "params": {}}, + {"id": "template", "step": "resolve_template", "params": {}}, + {"id": "blend", "step": "export_blend", "params": {}}, + ], + "edges": [ + {"from": "setup", "to": "template"}, + {"from": "template", "to": "blend"}, + ], + }, + is_active=True, + ) + db.add(workflow) + await db.commit() + await db.refresh(workflow) + + response = await client.patch( + f"/api/output-types/{output_type['id']}", + json={"workflow_definition_id": str(workflow.id)}, + headers=auth_headers, + ) + + assert response.status_code == 400, response.text + assert "Workflow artifact mismatch" in response.json()["detail"] + assert "blend_asset" in response.json()["detail"] + + +@pytest.mark.asyncio +async def test_patch_output_type_updates_workflow_rollout_mode( + client, + db, + auth_headers, +): + workflow = WorkflowDefinition( + name=f"Still Graph {uuid.uuid4().hex[:8]}", + config=build_preset_workflow_config("still_graph"), + is_active=True, + ) + db.add(workflow) + await 
db.commit() + await db.refresh(workflow) + + create_response = await client.post( + "/api/output-types", + json={ + "name": f"Rollout {uuid.uuid4().hex[:8]}", + "renderer": "blender", + "output_format": "png", + "render_backend": "celery", + "workflow_family": "order_line", + "workflow_definition_id": str(workflow.id), + }, + headers=auth_headers, + ) + assert create_response.status_code == 201, create_response.text + output_type = create_response.json() + + patch_response = await client.patch( + f"/api/output-types/{output_type['id']}", + json={"workflow_rollout_mode": "graph"}, + headers=auth_headers, + ) + + assert patch_response.status_code == 200, patch_response.text + assert patch_response.json()["workflow_rollout_mode"] == "graph" + + @pytest.mark.asyncio async def test_create_output_type_backfills_invocation_overrides_from_legacy_render_settings( client, @@ -189,6 +504,25 @@ async def test_create_output_type_backfills_invocation_overrides_from_legacy_ren "height": 900, "engine": "cycles", } + assert payload["invocation_profile"]["artifact_kind"] == "still_image" + assert payload["invocation_profile"]["allowed_override_keys"] == [ + "width", + "height", + "engine", + "samples", + "bg_color", + "noise_threshold", + "denoiser", + "denoising_input_passes", + "denoising_prefilter", + "denoising_quality", + "denoising_use_gpu", + ] + assert payload["invocation_profile"]["invocation_overrides"] == { + "width": 1600, + "height": 900, + "engine": "cycles", + } assert payload["render_settings"]["width"] == 1600 assert payload["render_settings"]["height"] == 900 assert payload["render_settings"]["engine"] == "cycles" @@ -235,6 +569,57 @@ async def test_patch_output_type_invocation_overrides_syncs_legacy_render_settin assert payload["render_settings"]["engine"] == "cycles" +@pytest.mark.asyncio +async def test_create_output_type_rejects_unknown_invocation_override_key( + client, + auth_headers, +): + response = await client.post( + "/api/output-types", + json={ + 
"name": f"Bad Override {uuid.uuid4().hex[:8]}", + "renderer": "blender", + "output_format": "png", + "render_backend": "celery", + "workflow_family": "order_line", + "invocation_overrides": { + "width": 1600, + "bogus": "value", + }, + }, + headers=auth_headers, + ) + + assert response.status_code == 400, response.text + assert "Unsupported invocation override keys" in response.json()["detail"] + + +@pytest.mark.asyncio +async def test_create_output_type_rejects_disallowed_invocation_override_for_blend_asset( + client, + auth_headers, +): + response = await client.post( + "/api/output-types", + json={ + "name": f"Blend Override {uuid.uuid4().hex[:8]}", + "renderer": "blender", + "output_format": "blend", + "render_backend": "celery", + "workflow_family": "order_line", + "invocation_overrides": { + "width": 1600, + }, + }, + headers=auth_headers, + ) + + assert response.status_code == 400, response.text + assert response.json()["detail"] == ( + "Invocation overrides not allowed for artifact kind 'blend_asset': width" + ) + + @pytest.mark.asyncio async def test_patch_output_type_recomputes_artifact_kind_when_switching_family( client, @@ -266,3 +651,35 @@ async def test_patch_output_type_recomputes_artifact_kind_when_switching_family( payload = response.json() assert payload["workflow_family"] == "cad_file" assert payload["artifact_kind"] == "thumbnail_image" + + +@pytest.mark.asyncio +async def test_patch_output_type_rejects_output_format_incompatible_with_family( + client, + auth_headers, +): + output_type_response = await client.post( + "/api/output-types", + json={ + "name": f"Still {uuid.uuid4().hex[:8]}", + "renderer": "blender", + "output_format": "png", + "render_backend": "celery", + "workflow_family": "order_line", + }, + headers=auth_headers, + ) + assert output_type_response.status_code == 201, output_type_response.text + output_type = output_type_response.json() + + response = await client.patch( + f"/api/output-types/{output_type['id']}", + json={ + 
"output_format": "gltf", + "artifact_kind": "custom", + }, + headers=auth_headers, + ) + + assert response.status_code == 400, response.text + assert "Output format 'gltf' is not allowed for workflow_family 'order_line'" in response.json()["detail"] diff --git a/backend/tests/domains/test_render_blender_samples.py b/backend/tests/domains/test_render_blender_samples.py new file mode 100644 index 0000000..d2bdbdd --- /dev/null +++ b/backend/tests/domains/test_render_blender_samples.py @@ -0,0 +1,574 @@ +from __future__ import annotations + +import importlib.util +import selectors +import sys +from pathlib import Path +from types import SimpleNamespace + +import pytest + + +def test_resolve_render_samples_uses_system_settings_when_omitted(monkeypatch): + from app.services.render_blender import _resolve_render_samples + + monkeypatch.setattr( + "app.services.step_processor._get_all_settings", + lambda: { + "blender_cycles_samples": "32", + "blender_eevee_samples": "12", + }, + ) + + assert _resolve_render_samples("cycles", None) == 32 + assert _resolve_render_samples("eevee", None) == 12 + assert _resolve_render_samples("cycles", 48) == 48 + + +def test_resolve_tessellation_settings_uses_profile_specific_values(monkeypatch): + from app.services.render_blender import resolve_tessellation_settings + + monkeypatch.setattr( + "app.services.step_processor._get_all_settings", + lambda: { + "tessellation_engine": "occ", + "scene_linear_deflection": "0.1", + "scene_angular_deflection": "0.1", + "render_linear_deflection": "0.03", + "render_angular_deflection": "0.05", + }, + ) + + assert resolve_tessellation_settings("scene") == (0.1, 0.1, "occ") + assert resolve_tessellation_settings("render") == (0.03, 0.05, "occ") + + +def test_render_still_passes_resolved_samples_to_blender_cli(tmp_path, monkeypatch): + from app.services.render_blender import build_tessellated_glb_path, render_still + + step_path = tmp_path / "bearing.step" + step_path.write_text("STEP", encoding="utf-8") 
+ glb_path = build_tessellated_glb_path(step_path, "render", "occ", 0.03, 0.05) + glb_path.parent.mkdir(parents=True, exist_ok=True) + glb_path.write_text("GLB", encoding="utf-8") + output_path = tmp_path / "render.png" + output_path.write_text("PNG", encoding="utf-8") + + scripts_dir = tmp_path / "render-scripts" + scripts_dir.mkdir() + (scripts_dir / "blender_render.py").write_text("# test stub\n", encoding="utf-8") + + captured: dict[str, object] = {} + + class _FakeProc: + def __init__(self) -> None: + self.stdout = object() + self.stderr = object() + self.pid = 1234 + self.returncode = 0 + + def wait(self, timeout: int | None = None) -> int: + del timeout + return self.returncode + + def wait(self, timeout: int | None = None) -> int: + del timeout + return self.returncode + + def wait(self, timeout: int = 10) -> int: + return self.returncode + + class _FakeSelector: + def register(self, *_args, **_kwargs) -> None: + return None + + def get_map(self) -> dict: + return {} + + def close(self) -> None: + return None + + def _fake_popen(cmd, stdout, stderr, text, env, start_new_session): + captured["cmd"] = cmd + captured["env"] = env + return _FakeProc() + + monkeypatch.setenv("RENDER_SCRIPTS_DIR", str(scripts_dir)) + monkeypatch.setattr("app.services.render_blender.find_blender", lambda: "/usr/bin/blender") + monkeypatch.setattr("app.services.render_blender.ensure_group_writable_dir", lambda _path: None) + monkeypatch.setattr("app.services.render_blender._resolve_render_samples", lambda engine, samples: 32) + monkeypatch.setattr("app.services.render_blender.subprocess.Popen", _fake_popen) + monkeypatch.setattr(selectors, "DefaultSelector", _FakeSelector) + + result = render_still( + step_path=step_path, + output_path=output_path, + engine="cycles", + samples=None, + width=640, + height=480, + ) + + assert captured["cmd"][10] == "32" + assert captured["env"]["BLENDER_DEFAULT_SAMPLES"] == "32" + assert result["engine_used"] == "cycles" + + +def 
test_render_still_passes_template_inputs_to_blender_cli(tmp_path, monkeypatch): + from app.services.render_blender import build_tessellated_glb_path, render_still + + step_path = tmp_path / "bearing.step" + step_path.write_text("STEP", encoding="utf-8") + glb_path = build_tessellated_glb_path(step_path, "render", "occ", 0.03, 0.05) + glb_path.parent.mkdir(parents=True, exist_ok=True) + glb_path.write_text("GLB", encoding="utf-8") + output_path = tmp_path / "render.png" + output_path.write_text("PNG", encoding="utf-8") + + scripts_dir = tmp_path / "render-scripts" + scripts_dir.mkdir() + (scripts_dir / "blender_render.py").write_text("# test stub\n", encoding="utf-8") + + captured: dict[str, object] = {} + + class _FakeProc: + def __init__(self) -> None: + self.stdout = object() + self.stderr = object() + self.pid = 1234 + self.returncode = 0 + + def wait(self, timeout: int = 10) -> int: + return self.returncode + + class _FakeSelector: + def register(self, *_args, **_kwargs) -> None: + return None + + def get_map(self) -> dict: + return {} + + def close(self) -> None: + return None + + def _fake_popen(cmd, stdout, stderr, text, env, start_new_session): + captured["cmd"] = cmd + return _FakeProc() + + monkeypatch.setenv("RENDER_SCRIPTS_DIR", str(scripts_dir)) + monkeypatch.setattr("app.services.render_blender.find_blender", lambda: "/usr/bin/blender") + monkeypatch.setattr("app.services.render_blender.ensure_group_writable_dir", lambda _path: None) + monkeypatch.setattr("app.services.render_blender._resolve_render_samples", lambda engine, samples: 32) + monkeypatch.setattr("app.services.render_blender.subprocess.Popen", _fake_popen) + monkeypatch.setattr(selectors, "DefaultSelector", _FakeSelector) + + render_still( + step_path=step_path, + output_path=output_path, + engine="cycles", + samples=None, + width=640, + height=480, + template_inputs={"studio_variant": "warm"}, + ) + + assert "--template-inputs" in captured["cmd"] + idx = 
captured["cmd"].index("--template-inputs") + assert captured["cmd"][idx + 1] == '{"studio_variant": "warm"}' + + +def test_render_still_uses_settings_sensitive_render_glb_path(tmp_path, monkeypatch): + from app.services.render_blender import build_tessellated_glb_path, render_still + + step_path = tmp_path / "bearing.step" + step_path.write_text("STEP", encoding="utf-8") + output_path = tmp_path / "render.png" + output_path.write_text("PNG", encoding="utf-8") + + scripts_dir = tmp_path / "render-scripts" + scripts_dir.mkdir() + (scripts_dir / "blender_render.py").write_text("# test stub\n", encoding="utf-8") + + captured: dict[str, object] = {} + + class _FakeProc: + def __init__(self) -> None: + self.stdout = object() + self.stderr = object() + self.pid = 1234 + self.returncode = 0 + + def wait(self, timeout: int = 10) -> int: + return self.returncode + + class _FakeSelector: + def register(self, *_args, **_kwargs) -> None: + return None + + def get_map(self) -> dict: + return {} + + def close(self) -> None: + return None + + def _fake_glb_from_step(step_path, glb_path, tessellation_engine="occ", tessellation_profile="render"): + captured["glb_path"] = glb_path + captured["tessellation_engine"] = tessellation_engine + captured["tessellation_profile"] = tessellation_profile + glb_path.write_text("GLB", encoding="utf-8") + + def _fake_popen(cmd, stdout, stderr, text, env, start_new_session): + captured["cmd"] = cmd + return _FakeProc() + + monkeypatch.setenv("RENDER_SCRIPTS_DIR", str(scripts_dir)) + monkeypatch.setattr("app.services.render_blender.find_blender", lambda: "/usr/bin/blender") + monkeypatch.setattr("app.services.render_blender.ensure_group_writable_dir", lambda _path: None) + monkeypatch.setattr("app.services.render_blender._resolve_render_samples", lambda engine, samples: 32) + monkeypatch.setattr( + "app.services.step_processor._get_all_settings", + lambda: { + "tessellation_engine": "occ", + "render_linear_deflection": "0.03", + 
"render_angular_deflection": "0.05", + "blender_cycles_samples": "32", + "blender_eevee_samples": "12", + }, + ) + monkeypatch.setattr("app.services.render_blender._glb_from_step", _fake_glb_from_step) + monkeypatch.setattr("app.services.render_blender.subprocess.Popen", _fake_popen) + monkeypatch.setattr(selectors, "DefaultSelector", _FakeSelector) + + render_still( + step_path=step_path, + output_path=output_path, + engine="cycles", + samples=None, + width=640, + height=480, + ) + + expected_glb_path = build_tessellated_glb_path(step_path, "render", "occ", 0.03, 0.05) + assert captured["glb_path"] == expected_glb_path + assert captured["tessellation_profile"] == "render" + assert captured["cmd"][5] == str(expected_glb_path) + + +def test_render_turntable_passes_template_inputs_to_blender_cli(tmp_path, monkeypatch): + from app.services.render_blender import build_tessellated_glb_path, render_turntable_to_file + + step_path = tmp_path / "bearing.step" + step_path.write_text("STEP", encoding="utf-8") + glb_path = build_tessellated_glb_path(step_path, "render", "occ", 0.03, 0.05) + glb_path.parent.mkdir(parents=True, exist_ok=True) + glb_path.write_text("GLB", encoding="utf-8") + output_path = tmp_path / "turntable.mp4" + output_path.parent.mkdir(parents=True, exist_ok=True) + + scripts_dir = tmp_path / "render-scripts" + scripts_dir.mkdir() + (scripts_dir / "turntable_render.py").write_text("# test stub\n", encoding="utf-8") + + captured: dict[str, object] = {} + + class _FakeProc: + def __init__(self) -> None: + self.pid = 1234 + self.returncode = 0 + + def communicate(self, timeout: int | None = None) -> tuple[str, str]: + frames_dir = Path(captured["cmd"][6]) + frames_dir.mkdir(parents=True, exist_ok=True) + (frames_dir / "frame_0001.png").write_text("PNG", encoding="utf-8") + return ("[turntable_render] ok\n", "") + + def _fake_popen(cmd, stdout, stderr, text, env, start_new_session): + captured["cmd"] = cmd + return _FakeProc() + + def _fake_ffmpeg(cmd, 
capture_output, text, timeout): + output_path.write_text("MP4", encoding="utf-8") + return SimpleNamespace(returncode=0, stdout="", stderr="") + + monkeypatch.setenv("RENDER_SCRIPTS_DIR", str(scripts_dir)) + monkeypatch.setattr("app.services.render_blender.find_blender", lambda: "/usr/bin/blender") + monkeypatch.setattr("app.services.render_blender.ensure_group_writable_dir", lambda _path: None) + monkeypatch.setattr("app.services.render_blender.subprocess.Popen", _fake_popen) + monkeypatch.setattr("app.services.render_blender.subprocess.run", _fake_ffmpeg) + monkeypatch.setattr("app.services.render_blender.build_turntable_ffmpeg_cmd", lambda *args, **kwargs: ["ffmpeg", str(output_path)]) + monkeypatch.setattr("app.services.render_blender.resolve_tessellation_settings", lambda *args, **kwargs: (0.03, 0.05, "occ")) + + render_turntable_to_file( + step_path=step_path, + output_path=output_path, + engine="cycles", + samples=32, + template_inputs={"studio_variant": "warm"}, + ) + + assert "--template-inputs" in captured["cmd"] + idx = captured["cmd"].index("--template-inputs") + assert captured["cmd"][idx + 1] == '{"studio_variant": "warm"}' + + +def test_render_cinematic_passes_template_inputs_to_blender_cli(tmp_path, monkeypatch): + from app.services.render_blender import build_tessellated_glb_path, render_cinematic_to_file + + step_path = tmp_path / "bearing.step" + step_path.write_text("STEP", encoding="utf-8") + glb_path = build_tessellated_glb_path(step_path, "render", "occ", 0.03, 0.05) + glb_path.parent.mkdir(parents=True, exist_ok=True) + glb_path.write_text("GLB", encoding="utf-8") + output_path = tmp_path / "cinematic.mp4" + output_path.parent.mkdir(parents=True, exist_ok=True) + + scripts_dir = tmp_path / "render-scripts" + scripts_dir.mkdir() + (scripts_dir / "cinematic_render.py").write_text("# test stub\n", encoding="utf-8") + + captured: dict[str, object] = {} + + class _FakeProc: + def __init__(self) -> None: + self.stdout = object() + self.stderr = 
object() + self.pid = 1234 + self.returncode = 0 + + def wait(self, timeout: int | None = None) -> int: + del timeout + return self.returncode + + class _FakeSelector: + def __init__(self) -> None: + self._registered: list[object] = [] + self._delivered = False + + def register(self, fileobj, _event, data): + self._registered.append((fileobj, data)) + + def unregister(self, fileobj): + self._registered = [item for item in self._registered if item[0] is not fileobj] + + def get_map(self) -> dict[int, object]: + return {idx: item for idx, item in enumerate(self._registered)} + + def select(self, timeout=None): + del timeout + if self._delivered: + for fileobj, _data in list(self._registered): + if hasattr(fileobj, "readline"): + fileobj.readline = lambda: "" + self._registered.clear() + return [] + self._delivered = True + events = [] + for fileobj, data in list(self._registered): + events.append((SimpleNamespace(fileobj=fileobj, data=data), None)) + return events + + def close(self): + return None + + class _FakeStream: + def __init__(self, lines: list[str]) -> None: + self._lines = list(lines) + + def readline(self) -> str: + if not self._lines: + return "" + return self._lines.pop(0) + + def _fake_popen(cmd, stdout, stderr, text, env, start_new_session): + captured["cmd"] = cmd + frames_dir = Path(cmd[6]) + frames_dir.mkdir(parents=True, exist_ok=True) + (frames_dir / "frame_0001.png").write_text("PNG", encoding="utf-8") + proc = _FakeProc() + proc.stdout = _FakeStream(["[cinematic_render] ok\n"]) + proc.stderr = _FakeStream([]) + return proc + + def _fake_ffmpeg(cmd, capture_output, text, timeout): + output_path.write_text("MP4", encoding="utf-8") + return SimpleNamespace(returncode=0, stdout="", stderr="") + + monkeypatch.setenv("RENDER_SCRIPTS_DIR", str(scripts_dir)) + monkeypatch.setattr("app.services.render_blender.find_blender", lambda: "/usr/bin/blender") + monkeypatch.setattr("app.services.render_blender.ensure_group_writable_dir", lambda _path: None) + 
monkeypatch.setattr("app.services.render_blender.subprocess.Popen", _fake_popen) + monkeypatch.setattr("app.services.render_blender.subprocess.run", _fake_ffmpeg) + monkeypatch.setattr("app.services.render_blender.build_turntable_ffmpeg_cmd", lambda *args, **kwargs: ["ffmpeg", str(output_path)]) + monkeypatch.setattr("app.services.render_blender.resolve_tessellation_settings", lambda *args, **kwargs: (0.03, 0.05, "occ")) + monkeypatch.setattr("selectors.DefaultSelector", _FakeSelector) + + render_cinematic_to_file( + step_path=step_path, + output_path=output_path, + engine="cycles", + samples=32, + template_inputs={"studio_variant": "warm"}, + ) + + assert "--template-inputs" in captured["cmd"] + idx = captured["cmd"].index("--template-inputs") + assert captured["cmd"][idx + 1] == '{"studio_variant": "warm"}' + + +def test_render_still_task_keeps_samples_unset_until_render_service(tmp_path, monkeypatch): + from app.domains.rendering.tasks import render_still_task + + step_path = tmp_path / "bearing.step" + step_path.write_text("STEP", encoding="utf-8") + output_path = tmp_path / "render.png" + captured: dict[str, object] = {} + + def _fake_render_still(**kwargs): + captured.update(kwargs) + return {"total_duration_s": 0.1} + + monkeypatch.setattr("app.domains.rendering.tasks.log_task_event", lambda *args, **kwargs: None) + monkeypatch.setattr("app.services.render_blender.render_still", _fake_render_still) + + task_self = SimpleNamespace( + request=SimpleNamespace(id="task-still"), + retry=lambda *, exc, countdown: (_ for _ in ()).throw(exc), + ) + + result = render_still_task.run.__func__(task_self, str(step_path), str(output_path)) + + assert captured["samples"] is None + assert result["total_duration_s"] == 0.1 + + +def test_blender_args_prefers_backend_default_samples_env(monkeypatch): + module_path = ( + Path(__file__).resolve().parents[2] + / "render-worker" + / "scripts" + / "_blender_args.py" + ) + if not module_path.exists(): + pytest.skip(f"{module_path} 
not present in this runtime") + spec = importlib.util.spec_from_file_location("test_blender_args_module", module_path) + assert spec is not None + assert spec.loader is not None + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + + monkeypatch.setenv("BLENDER_DEFAULT_SAMPLES", "32") + monkeypatch.setattr( + sys, + "argv", + [ + "blender_render.py", + "--", + "input.glb", + "output.png", + "512", + "512", + "cycles", + "", + ], + ) + + args = module.parse_args() + + assert args.samples == 32 + + +def test_blender_args_parses_template_inputs(monkeypatch): + module_path = ( + Path(__file__).resolve().parents[2] + / "render-worker" + / "scripts" + / "_blender_args.py" + ) + if not module_path.exists(): + pytest.skip(f"{module_path} not present in this runtime") + spec = importlib.util.spec_from_file_location("test_blender_args_module_template_inputs", module_path) + assert spec is not None + assert spec.loader is not None + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + + monkeypatch.setattr( + sys, + "argv", + [ + "blender_render.py", + "--", + "input.glb", + "output.png", + "512", + "512", + "cycles", + "64", + "30", + "auto", + "0", + "", + "Product", + "", + "{}", + "[]", + "0", + "0", + "0", + "0", + "", + "", + "", + "", + "", + "", + "--template-inputs", + '{"studio_variant":"warm"}', + ], + ) + + args = module.parse_args() + + assert args.template_inputs == {"studio_variant": "warm"} + + +def test_render_to_file_preserves_explicit_zero_samples(tmp_path, monkeypatch): + from app.services.step_processor import render_to_file + + step_path = tmp_path / "bearing.step" + step_path.write_text("STEP", encoding="utf-8") + output_path = tmp_path / "render.png" + captured: dict[str, object] = {} + + monkeypatch.setattr( + "app.services.step_processor._get_all_settings", + lambda: { + "thumbnail_renderer": "blender", + "thumbnail_format": "png", + "blender_engine": "cycles", + "blender_cycles_samples": 
"32", + "blender_eevee_samples": "12", + "cycles_device": "auto", + "blender_smooth_angle": "30", + "tessellation_engine": "occ", + }, + ) + monkeypatch.setattr("app.services.step_processor.ensure_group_writable_dir", lambda _path: None) + monkeypatch.setattr("app.services.render_blender.is_blender_available", lambda: True) + + def _fake_render_still(**kwargs): + captured.update(kwargs) + kwargs["output_path"].write_text("PNG", encoding="utf-8") + return {"total_duration_s": 0.1, "engine_used": kwargs["engine"]} + + monkeypatch.setattr("app.services.render_blender.render_still", _fake_render_still) + + success, render_log = render_to_file( + str(step_path), + str(output_path), + samples=0, + ) + + assert success is True + assert captured["samples"] == 0 + assert render_log["samples"] == 0 diff --git a/backend/tests/domains/test_rendering_publish_asset.py b/backend/tests/domains/test_rendering_publish_asset.py new file mode 100644 index 0000000..f51b705 --- /dev/null +++ b/backend/tests/domains/test_rendering_publish_asset.py @@ -0,0 +1,152 @@ +from __future__ import annotations + +import os +import uuid +from contextlib import contextmanager +from pathlib import Path + +import pytest +from sqlalchemy import select, text +from sqlalchemy.orm import Session + +from app.domains.auth.models import User, UserRole +from app.domains.media.models import MediaAsset, MediaAssetType +from app.domains.orders.models import Order, OrderLine, OrderStatus +from app.domains.products.models import CadFile, Product +from app.domains.rendering.models import OutputType + +from tests.db_test_utils import sync_test_session as sync_test_session_ctx + + +@pytest.fixture +def sync_session(): + with sync_test_session_ctx() as session: + yield session + + +def _seed_order_line(session: Session, tmp_path: Path) -> OrderLine: + step_path = tmp_path / "parts" / "bearing.step" + step_path.parent.mkdir(parents=True, exist_ok=True) + step_path.write_text("STEP", encoding="utf-8") + + user = User( + 
id=uuid.uuid4(), + email=f"publish-{uuid.uuid4().hex[:8]}@test.local", + password_hash="hash", + full_name="Publish Tester", + role=UserRole.admin, + is_active=True, + ) + cad_file = CadFile( + id=uuid.uuid4(), + original_name="bearing.step", + stored_path=str(step_path), + file_hash=f"hash-{uuid.uuid4().hex}", + ) + product = Product( + id=uuid.uuid4(), + pim_id="P-2000", + name="Bearing Publish", + category_key="bearings", + cad_file_id=cad_file.id, + cad_file=cad_file, + ) + output_type = OutputType( + id=uuid.uuid4(), + name="HQ Still", + renderer="blender", + output_format="png", + render_settings={"width": 1600, "height": 900}, + ) + order = Order( + id=uuid.uuid4(), + order_number=f"ORD-{uuid.uuid4().hex[:8]}", + status=OrderStatus.processing, + created_by=user.id, + ) + line = OrderLine( + id=uuid.uuid4(), + order_id=order.id, + product_id=product.id, + product=product, + output_type_id=output_type.id, + output_type=output_type, + render_status="processing", + ) + + session.add_all([user, cad_file, product, output_type, order, line]) + session.commit() + return line + + +def test_publish_asset_canonicalizes_still_outputs(sync_session, tmp_path, monkeypatch): + from app.config import settings + from app.domains.rendering.tasks import publish_asset + + upload_dir = tmp_path / "uploads" + monkeypatch.setattr(settings, "upload_dir", str(upload_dir)) + + line = _seed_order_line(sync_session, tmp_path) + source_output = tmp_path / "parts" / "renders" / "line.png" + source_output.parent.mkdir(parents=True, exist_ok=True) + source_output.write_bytes(b"png") + + @contextmanager + def _session_ctx(): + yield sync_session + + monkeypatch.setattr("app.core.db_utils.get_sync_session", _session_ctx) + + asset_id = publish_asset.run( + str(line.id), + "still", + str(source_output), + render_config={"renderer": "blender", "engine_used": "cycles"}, + ) + + sync_session.expire_all() + stored_line = sync_session.get(OrderLine, line.id) + stored_asset = sync_session.execute( + 
select(MediaAsset).where(MediaAsset.id == uuid.UUID(asset_id)) + ).scalar_one() + + assert stored_line.result_path == f"{upload_dir}/renders/{line.id}/Bearing_Publish_HQ_Still.png" + assert Path(stored_line.result_path).is_file() + assert stored_asset.storage_key == f"renders/{line.id}/Bearing_Publish_HQ_Still.png" + assert stored_asset.asset_type == MediaAssetType.still + + +def test_publish_asset_canonicalizes_blend_storage_key_without_touching_order_line(sync_session, tmp_path, monkeypatch): + from app.config import settings + from app.domains.rendering.tasks import publish_asset + + upload_dir = tmp_path / "uploads" + monkeypatch.setattr(settings, "upload_dir", str(upload_dir)) + + line = _seed_order_line(sync_session, tmp_path) + source_output = tmp_path / "parts" / "bearing_production.blend" + source_output.parent.mkdir(parents=True, exist_ok=True) + source_output.write_bytes(b"blend") + + @contextmanager + def _session_ctx(): + yield sync_session + + monkeypatch.setattr("app.core.db_utils.get_sync_session", _session_ctx) + + asset_id = publish_asset.run( + str(line.id), + "blend_production", + str(source_output), + render_config={"artifact_type": "blend_production"}, + ) + + sync_session.expire_all() + stored_line = sync_session.get(OrderLine, line.id) + stored_asset = sync_session.execute( + select(MediaAsset).where(MediaAsset.id == uuid.UUID(asset_id)) + ).scalar_one() + + assert stored_line.result_path is None + assert stored_asset.storage_key == str(source_output) + assert stored_asset.asset_type == MediaAssetType.blend_production diff --git a/backend/tests/domains/test_rendering_service.py b/backend/tests/domains/test_rendering_service.py index e932774..2de73c0 100644 --- a/backend/tests/domains/test_rendering_service.py +++ b/backend/tests/domains/test_rendering_service.py @@ -1,9 +1,13 @@ """Tests for rendering domain — workflow builder + task helpers.""" import uuid +from pathlib import Path +from types import SimpleNamespace from unittest.mock 
import MagicMock, patch import pytest +from app.core.render_paths import build_order_line_export_path, build_order_line_step_render_path + # --------------------------------------------------------------------------- # workflow_builder unit tests (no DB required) @@ -92,6 +96,54 @@ def test_generate_gltf_geometry_task_importable(): assert hasattr(generate_gltf_geometry_task, "delay") +def test_build_ffmpeg_cmd_prefers_prefixed_frame_sequence(tmp_path): + from app.domains.rendering.tasks import _build_ffmpeg_cmd + + frames_dir = tmp_path / "frames" + frames_dir.mkdir() + (frames_dir / "frame_0001.png").write_text("png", encoding="utf-8") + + cmd = _build_ffmpeg_cmd(frames_dir, tmp_path / "turntable.mp4") + + assert any("frame_%04d.png" in part for part in cmd) + + +def test_build_ffmpeg_cmd_falls_back_to_legacy_frame_sequence(tmp_path): + from app.domains.rendering.tasks import _build_ffmpeg_cmd + + frames_dir = tmp_path / "frames" + frames_dir.mkdir() + + cmd = _build_ffmpeg_cmd(frames_dir, tmp_path / "turntable.mp4") + + assert any(part.endswith("%04d.png") for part in cmd) + assert not any("frame_%04d.png" in part for part in cmd) + + +def test_build_ffmpeg_cmd_limits_bg_color_overlay_to_frame_sequence(tmp_path): + from app.domains.rendering.tasks import _build_ffmpeg_cmd + + frames_dir = tmp_path / "frames" + frames_dir.mkdir() + (frames_dir / "frame_0001.png").write_text("png", encoding="utf-8") + + cmd = _build_ffmpeg_cmd( + frames_dir, + tmp_path / "turntable.mp4", + fps=24, + bg_color="#ffffff", + width=512, + height=512, + ) + + assert "-filter_complex" in cmd + filter_index = cmd.index("-filter_complex") + 1 + assert cmd[filter_index] == "[1:v][0:v]overlay=0:0:shortest=1" + assert "color=c=0xffffff:size=512x512:rate=24" in cmd + assert "-crf" in cmd + assert cmd[cmd.index("-crf") + 1] == "18" + + # --------------------------------------------------------------------------- # New order-line tasks are importable and correctly registered # 
--------------------------------------------------------------------------- @@ -105,3 +157,1290 @@ def test_render_order_line_still_task_importable(): def test_export_blend_for_order_line_task_importable(): from app.domains.rendering.tasks import export_blend_for_order_line_task assert export_blend_for_order_line_task.queue == "asset_pipeline" + + +def test_normalize_order_line_still_params_maps_legacy_editor_fields(): + from app.domains.rendering.tasks import _normalize_order_line_still_params + + normalized = _normalize_order_line_still_params( + { + "render_engine": "cycles", + "samples": 256, + "resolution": [1920, 1080], + "rotation_z": 45, + "usd_path": "/app/uploads/step_files/example_master.usd", + } + ) + + assert normalized["engine"] == "cycles" + assert "render_engine" not in normalized + assert normalized["width"] == 1920 + assert normalized["height"] == 1080 + assert "resolution" not in normalized + assert normalized["rotation_z"] == 45 + assert str(normalized["usd_path"]) == "/app/uploads/step_files/example_master.usd" + + +def test_normalize_order_line_still_params_drops_graph_only_override_flag(): + from app.domains.rendering.tasks import _normalize_order_line_still_params + + normalized = _normalize_order_line_still_params( + { + "use_custom_render_settings": True, + "width": 640, + "height": 640, + } + ) + + assert "use_custom_render_settings" not in normalized + assert normalized["width"] == 640 + assert normalized["height"] == 640 + + +def test_normalize_order_line_still_params_drops_graph_control_params(): + from app.domains.rendering.tasks import _normalize_order_line_still_params + + normalized = _normalize_order_line_still_params( + { + "width": 640, + "graph_notify_node_ids": ["notify"], + "graph_output_node_ids": ["output"], + "workflow_run_id": "run-1", + "workflow_node_id": "render", + "emit_legacy_notifications": True, + } + ) + + assert normalized == {"width": 640} + + +def 
test_resolve_order_line_still_output_extension_uses_output_type_contract(monkeypatch): + from app.domains.rendering.tasks import _resolve_order_line_still_output_extension + + line = SimpleNamespace(output_type=SimpleNamespace(output_format="webp"), render_overrides=None) + db = MagicMock() + db.execute.return_value.scalar_one_or_none.return_value = line + + class _SessionCtx: + def __enter__(self): + return db + + def __exit__(self, exc_type, exc, tb): + return False + + monkeypatch.setattr("app.core.db_utils.get_sync_session", lambda: _SessionCtx()) + + assert _resolve_order_line_still_output_extension("line-1") == "webp" + + +def test_render_order_line_still_task_finalizes_non_png_outputs(tmp_path, monkeypatch): + from app.domains.rendering.tasks import render_order_line_still_task + + step_path = tmp_path / "cad" / "bearing.step" + step_path.parent.mkdir(parents=True, exist_ok=True) + step_path.write_text("STEP", encoding="utf-8") + task_self = SimpleNamespace(request=SimpleNamespace(id="task-webp-save")) + + render_calls: list[dict] = [] + + def _fake_render_to_file(step_path, output_path, **kwargs): + render_calls.append( + { + "step_path": step_path, + "output_path": output_path, + **kwargs, + } + ) + Path(output_path).parent.mkdir(parents=True, exist_ok=True) + Path(output_path).write_text("WEBP", encoding="utf-8") + return True, {"renderer": "blender", "engine_used": "cycles", "total_duration_s": 0.5} + + monkeypatch.setattr( + "app.domains.rendering.tasks._resolve_step_path_for_order_line", + lambda order_line_id: (str(step_path), "cad-1"), + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._resolve_order_line_still_output_extension", + lambda order_line_id, params=None: "webp", + ) + monkeypatch.setattr("app.services.step_processor.render_to_file", _fake_render_to_file) + monkeypatch.setattr("app.domains.rendering.tasks._update_workflow_run_status", lambda *args, **kwargs: None) + 
monkeypatch.setattr("app.domains.rendering.tasks._finalize_graph_still_output", lambda *args, **kwargs: None) + + result = render_order_line_still_task.run.__func__( + task_self, + "line-webp", + job_document_enabled=False, + emit_events=False, + publish_asset_enabled=False, + ) + + expected_output = build_order_line_step_render_path(step_path, "line-webp", "line_line-webp.webp") + assert render_calls == [ + { + "step_path": str(step_path), + "output_path": str(expected_output), + "order_line_id": "line-webp", + } + ] + assert result["output_path"] == str(expected_output) + + +def test_finalise_image_converts_png_to_jpg(tmp_path): + from PIL import Image + + from app.services.step_processor import _finalise_image + + src = tmp_path / "source.png" + dst = tmp_path / "final.jpg" + + Image.new("RGBA", (4, 4), (10, 20, 30, 255)).save(src, "PNG") + + result = _finalise_image(src, dst) + + assert result == dst + assert dst.exists() + assert not src.exists() + with Image.open(dst) as img: + assert img.format == "JPEG" + assert img.mode == "RGB" + + +def test_finalise_image_flattens_transparency_for_jpg(tmp_path): + from PIL import Image + + from app.services.step_processor import _finalise_image + + src = tmp_path / "source.png" + dst = tmp_path / "final.jpg" + + Image.new("RGBA", (2, 2), (0, 0, 0, 0)).save(src, "PNG") + + result = _finalise_image(src, dst) + + assert result == dst + with Image.open(dst) as img: + assert img.format == "JPEG" + assert img.getpixel((0, 0)) == (255, 255, 255) + assert img.getpixel((1, 1)) == (255, 255, 255) + + +def test_render_order_line_still_task_uses_graph_authoritative_output_handoff(tmp_path, monkeypatch): + from app.domains.rendering.tasks import publish_asset, render_order_line_still_task + + step_path = tmp_path / "cad" / "bearing.step" + step_path.parent.mkdir(parents=True, exist_ok=True) + step_path.write_text("STEP", encoding="utf-8") + task_self = SimpleNamespace(request=SimpleNamespace(id="task-graph-save")) + + finalize_calls: 
list[dict] = [] + notify_finalize_calls: list[dict] = [] + notify_emit_calls: list[dict] = [] + publish_calls: list[tuple] = [] + render_calls: list[dict] = [] + + def _fake_render_to_file(step_path, output_path, **kwargs): + render_calls.append( + { + "step_path": step_path, + "output_path": output_path, + **kwargs, + } + ) + Path(output_path).parent.mkdir(parents=True, exist_ok=True) + Path(output_path).write_text("PNG", encoding="utf-8") + return True, {"renderer": "blender", "engine_used": "cycles", "total_duration_s": 0.5} + + monkeypatch.setattr( + "app.domains.rendering.tasks._resolve_step_path_for_order_line", + lambda order_line_id: (str(step_path), "cad-1"), + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._resolve_order_line_still_output_extension", + lambda order_line_id, params=None: "png", + ) + monkeypatch.setattr("app.services.step_processor.render_to_file", _fake_render_to_file) + monkeypatch.setattr( + "app.domains.rendering.tasks._finalize_graph_still_output", + lambda order_line_id, **kwargs: finalize_calls.append( + {"order_line_id": order_line_id, **kwargs} + ), + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._update_workflow_run_status", + lambda *args, **kwargs: None, + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._finalize_graph_notify_nodes", + lambda **kwargs: notify_finalize_calls.append(kwargs), + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._emit_graph_render_notifications", + lambda order_line_id, **kwargs: notify_emit_calls.append( + {"order_line_id": order_line_id, **kwargs} + ), + ) + monkeypatch.setattr( + publish_asset, + "delay", + lambda *args, **kwargs: publish_calls.append((args, kwargs)), + ) + + result = render_order_line_still_task.run.__func__( + task_self, + "line-1", + workflow_run_id="run-1", + workflow_node_id="render", + publish_asset_enabled=False, + graph_authoritative_output_enabled=True, + graph_output_node_ids=["output"], + graph_notify_node_ids=["notify"], + 
job_document_enabled=False, + emit_events=False, + emit_legacy_notifications=True, + width=640, + height=480, + ) + + expected_output = build_order_line_step_render_path(step_path, "line-1", "line_line-1.png") + assert result["renderer"] == "blender" + assert publish_calls == [] + assert render_calls == [ + { + "step_path": str(step_path), + "output_path": str(expected_output), + "order_line_id": "line-1", + "width": 640, + "height": 480, + } + ] + assert finalize_calls == [ + { + "order_line_id": "line-1", + "success": True, + "output_path": str(expected_output), + "render_log": result, + "workflow_run_id": "run-1", + "output_node_ids": ["output"], + "render_node_id": "render", + } + ] + assert notify_emit_calls == [ + { + "order_line_id": "line-1", + "success": True, + "render_log": result, + } + ] + assert notify_finalize_calls == [ + { + "workflow_run_id": "run-1", + "notify_node_ids": ["notify"], + "success": True, + "render_node_id": "render", + } + ] + + +def test_render_order_line_still_task_uses_shadow_observer_output_handoff(tmp_path, monkeypatch): + from app.domains.rendering.tasks import publish_asset, render_order_line_still_task + + step_path = tmp_path / "cad" / "bearing.step" + step_path.parent.mkdir(parents=True, exist_ok=True) + step_path.write_text("STEP", encoding="utf-8") + task_self = SimpleNamespace(request=SimpleNamespace(id="task-shadow-save")) + + observer_finalize_calls: list[dict] = [] + publish_calls: list[tuple] = [] + + def _fake_render_to_file(step_path, output_path, **kwargs): + Path(output_path).parent.mkdir(parents=True, exist_ok=True) + Path(output_path).write_text("PNG", encoding="utf-8") + return True, {"renderer": "blender", "engine_used": "cycles", "total_duration_s": 0.5} + + monkeypatch.setattr( + "app.domains.rendering.tasks._resolve_step_path_for_order_line", + lambda order_line_id: (str(step_path), "cad-1"), + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._resolve_order_line_still_output_extension", + lambda 
order_line_id, params=None: "png", + ) + monkeypatch.setattr("app.services.step_processor.render_to_file", _fake_render_to_file) + monkeypatch.setattr( + "app.domains.rendering.tasks._finalize_shadow_still_output", + lambda order_line_id, **kwargs: observer_finalize_calls.append( + {"order_line_id": order_line_id, **kwargs} + ), + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._update_workflow_run_status", + lambda *args, **kwargs: None, + ) + monkeypatch.setattr( + publish_asset, + "delay", + lambda *args, **kwargs: publish_calls.append((args, kwargs)), + ) + + result = render_order_line_still_task.run.__func__( + task_self, + "line-shadow", + workflow_run_id="run-shadow", + workflow_node_id="render", + publish_asset_enabled=False, + observer_output_enabled=True, + graph_output_node_ids=["output"], + job_document_enabled=False, + emit_events=False, + ) + + expected_output = build_order_line_step_render_path(step_path, "line-shadow", "line_line-shadow.png") + assert result["renderer"] == "blender" + assert publish_calls == [] + assert observer_finalize_calls == [ + { + "order_line_id": "line-shadow", + "success": True, + "output_path": str(expected_output), + "render_log": result, + "workflow_run_id": "run-shadow", + "output_node_ids": ["output"], + "render_node_id": "render", + } + ] + + +def test_render_order_line_still_task_publishes_asset_without_graph_authoritative_handoff( + tmp_path, + monkeypatch, +): + from app.domains.rendering.tasks import publish_asset, render_order_line_still_task + + step_path = tmp_path / "cad" / "bearing.step" + step_path.parent.mkdir(parents=True, exist_ok=True) + step_path.write_text("STEP", encoding="utf-8") + task_self = SimpleNamespace(request=SimpleNamespace(id="task-publish")) + + finalize_calls: list[dict] = [] + publish_calls: list[tuple] = [] + + def _fake_render_to_file(step_path, output_path, **kwargs): + Path(output_path).parent.mkdir(parents=True, exist_ok=True) + Path(output_path).write_text("PNG", 
encoding="utf-8") + return True, { + "renderer": "blender", + "engine_used": "cycles", + "total_duration_s": 0.5, + } + + monkeypatch.setattr( + "app.domains.rendering.tasks._resolve_step_path_for_order_line", + lambda order_line_id: (str(step_path), "cad-1"), + ) + monkeypatch.setattr("app.services.step_processor.render_to_file", _fake_render_to_file) + monkeypatch.setattr( + "app.domains.rendering.tasks._finalize_graph_still_output", + lambda *args, **kwargs: finalize_calls.append(kwargs), + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._update_workflow_run_status", + lambda *args, **kwargs: None, + ) + monkeypatch.setattr( + publish_asset, + "delay", + lambda *args, **kwargs: publish_calls.append((args, kwargs)), + ) + + render_order_line_still_task.run.__func__( + task_self, + "line-2", + workflow_run_id="run-2", + workflow_node_id="render", + publish_asset_enabled=True, + graph_authoritative_output_enabled=False, + job_document_enabled=False, + emit_events=False, + width=640, + height=480, + ) + + expected_output = build_order_line_step_render_path(step_path, "line-2", "line_line-2.png") + assert finalize_calls == [] + assert publish_calls == [ + ( + ( + "line-2", + "still", + str(expected_output), + ), + { + "render_config": { + "renderer": "blender", + "engine_used": "cycles", + "total_duration_s": 0.5, + "output_path": str(expected_output), + }, + "workflow_run_id": "run-2", + }, + ) + ] + + +def test_export_blend_for_order_line_task_uses_graph_authoritative_output_handoff( + tmp_path, + monkeypatch, +): + from app.config import settings + from app.domains.rendering.tasks import export_blend_for_order_line_task, publish_asset + + step_path = tmp_path / "cad" / "bearing.step" + step_path.parent.mkdir(parents=True, exist_ok=True) + step_path.write_text("STEP", encoding="utf-8") + glb_path = step_path.parent / "bearing_render.glb" + glb_path.write_text("GLB", encoding="utf-8") + task_self = 
SimpleNamespace(request=SimpleNamespace(id="task-blend-graph")) + + finalize_calls: list[dict] = [] + notify_finalize_calls: list[dict] = [] + notify_emit_calls: list[dict] = [] + publish_calls: list[tuple] = [] + + monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads")) + monkeypatch.setattr( + "app.domains.rendering.tasks._resolve_step_path_for_order_line", + lambda order_line_id: (str(step_path), "cad-1"), + ) + monkeypatch.setattr( + "app.services.render_blender.find_blender", + lambda: "/usr/bin/blender", + ) + monkeypatch.setattr( + "app.services.render_blender.build_tessellated_glb_path", + lambda *args, **kwargs: glb_path, + ) + monkeypatch.setattr( + "subprocess.run", + lambda *args, **kwargs: SimpleNamespace(returncode=0, stderr=""), + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._finalize_graph_blend_output", + lambda order_line_id, **kwargs: finalize_calls.append( + {"order_line_id": order_line_id, **kwargs} + ), + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._update_workflow_run_status", + lambda *args, **kwargs: None, + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._finalize_graph_notify_nodes", + lambda **kwargs: notify_finalize_calls.append(kwargs), + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._emit_graph_render_notifications", + lambda order_line_id, **kwargs: notify_emit_calls.append( + {"order_line_id": order_line_id, **kwargs} + ), + ) + monkeypatch.setattr( + publish_asset, + "delay", + lambda *args, **kwargs: publish_calls.append((args, kwargs)), + ) + + result = export_blend_for_order_line_task.run.__func__( + task_self, + "line-3", + workflow_run_id="run-3", + workflow_node_id="blend", + publish_asset_enabled=False, + graph_authoritative_output_enabled=True, + graph_output_node_ids=["output"], + graph_notify_node_ids=["notify"], + emit_legacy_notifications=True, + ) + + expected_blend_path = build_order_line_export_path("line-3", "bearing_production.blend") + + assert result == { + 
"blend_path": str(expected_blend_path), + "artifact_type": "blend_production", + } + assert publish_calls == [] + assert finalize_calls == [ + { + "order_line_id": "line-3", + "success": True, + "output_path": str(expected_blend_path), + "render_log": result, + "workflow_run_id": "run-3", + "output_node_ids": ["output"], + "render_node_id": "blend", + } + ] + assert notify_emit_calls == [ + { + "order_line_id": "line-3", + "success": True, + "render_log": result, + } + ] + assert notify_finalize_calls == [ + { + "workflow_run_id": "run-3", + "notify_node_ids": ["notify"], + "success": True, + "render_node_id": "blend", + } + ] + + +def test_export_blend_for_order_line_task_publishes_without_graph_authoritative_handoff( + tmp_path, + monkeypatch, +): + from app.config import settings + from app.domains.rendering.tasks import export_blend_for_order_line_task, publish_asset + + step_path = tmp_path / "cad" / "bearing.step" + step_path.parent.mkdir(parents=True, exist_ok=True) + step_path.write_text("STEP", encoding="utf-8") + glb_path = step_path.parent / "bearing_render.glb" + glb_path.write_text("GLB", encoding="utf-8") + task_self = SimpleNamespace(request=SimpleNamespace(id="task-blend-publish")) + + finalize_calls: list[dict] = [] + publish_calls: list[tuple] = [] + + monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads")) + monkeypatch.setattr( + "app.domains.rendering.tasks._resolve_step_path_for_order_line", + lambda order_line_id: (str(step_path), "cad-1"), + ) + monkeypatch.setattr( + "app.services.render_blender.find_blender", + lambda: "/usr/bin/blender", + ) + monkeypatch.setattr( + "app.services.render_blender.build_tessellated_glb_path", + lambda *args, **kwargs: glb_path, + ) + monkeypatch.setattr( + "subprocess.run", + lambda *args, **kwargs: SimpleNamespace(returncode=0, stderr=""), + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._finalize_graph_blend_output", + lambda *args, **kwargs: finalize_calls.append(kwargs), + ) + 
monkeypatch.setattr( + "app.domains.rendering.tasks._update_workflow_run_status", + lambda *args, **kwargs: None, + ) + monkeypatch.setattr( + publish_asset, + "delay", + lambda *args, **kwargs: publish_calls.append((args, kwargs)), + ) + + export_blend_for_order_line_task.run.__func__( + task_self, + "line-4", + workflow_run_id="run-4", + workflow_node_id="blend", + publish_asset_enabled=True, + graph_authoritative_output_enabled=False, + ) + + expected_blend_path = build_order_line_export_path("line-4", "bearing_production.blend") + + assert finalize_calls == [] + assert publish_calls == [ + ( + ( + "line-4", + "blend_production", + str(expected_blend_path), + ), + { + "workflow_run_id": "run-4", + }, + ) + ] + + +def test_render_turntable_task_uses_graph_authoritative_output_handoff( + tmp_path, + monkeypatch, +): + from app.domains.rendering.tasks import publish_asset, render_turntable_task + + step_path = tmp_path / "cad" / "bearing.step" + step_path.parent.mkdir(parents=True, exist_ok=True) + step_path.write_text("STEP", encoding="utf-8") + task_self = SimpleNamespace(request=SimpleNamespace(id="task-turntable-graph")) + expected_output_mp4 = build_order_line_step_render_path(step_path, "line-5", "preview.mp4") + + finalize_calls: list[dict] = [] + notify_finalize_calls: list[dict] = [] + notify_emit_calls: list[dict] = [] + publish_calls: list[tuple] = [] + workflow_status_calls: list[tuple] = [] + blender_calls: list[list[str]] = [] + + def _fake_subprocess_run(cmd, *args, **kwargs): + cmd_text = " ".join(str(part) for part in cmd) + if "export_step_to_gltf.py" in cmd_text: + glb_path = step_path.parent / "bearing_thumbnail.glb" + glb_path.write_text("GLB", encoding="utf-8") + elif "turntable_render.py" in cmd_text: + blender_calls.append([str(part) for part in cmd]) + frames_dir = Path(cmd[cmd.index("--") + 2]) + frames_dir.mkdir(parents=True, exist_ok=True) + (frames_dir / "frame_0001.png").write_text("PNG", encoding="utf-8") + if "ffmpeg" in cmd_text: + 
output_mp4 = expected_output_mp4 + output_mp4.parent.mkdir(parents=True, exist_ok=True) + output_mp4.write_text("MP4", encoding="utf-8") + return SimpleNamespace(returncode=0, stdout="", stderr="") + + monkeypatch.setattr( + "app.domains.rendering.tasks._resolve_step_path_for_order_line", + lambda order_line_id: (str(step_path), "cad-1"), + ) + monkeypatch.setattr( + "app.services.render_blender.find_blender", + lambda: "/usr/bin/blender", + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._build_ffmpeg_cmd", + lambda *args, **kwargs: ["ffmpeg", str(expected_output_mp4)], + ) + monkeypatch.setattr( + "subprocess.run", + _fake_subprocess_run, + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._finalize_graph_turntable_output", + lambda order_line_id, **kwargs: finalize_calls.append( + {"order_line_id": order_line_id, **kwargs} + ), + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._update_workflow_run_status", + lambda *args, **kwargs: workflow_status_calls.append((args, kwargs)), + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._finalize_graph_notify_nodes", + lambda **kwargs: notify_finalize_calls.append(kwargs), + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._emit_graph_render_notifications", + lambda order_line_id, **kwargs: notify_emit_calls.append( + {"order_line_id": order_line_id, **kwargs} + ), + ) + monkeypatch.setattr( + publish_asset, + "delay", + lambda *args, **kwargs: publish_calls.append((args, kwargs)), + ) + + result = render_turntable_task.run.__func__( + task_self, + "line-5", + output_name="preview", + workflow_run_id="run-5", + workflow_node_id="turntable", + publish_asset_enabled=False, + graph_authoritative_output_enabled=True, + graph_output_node_ids=["output"], + graph_notify_node_ids=["notify"], + emit_legacy_notifications=True, + emit_events=False, + ) + + assert result == { + "output_mp4": str(expected_output_mp4), + "frame_count": 120, + "fps": 30, + } + assert publish_calls == [] + assert 
finalize_calls == [ + { + "order_line_id": "line-5", + "success": True, + "output_path": str(expected_output_mp4), + "render_log": result, + "workflow_run_id": "run-5", + "output_node_ids": ["output"], + "render_node_id": "turntable", + } + ] + assert workflow_status_calls == [ + ( + ("line-5", "completed"), + {"workflow_run_id": "run-5", "workflow_node_id": "turntable"}, + ) + ] + assert notify_emit_calls == [ + { + "order_line_id": "line-5", + "success": True, + "render_log": result, + } + ] + assert notify_finalize_calls == [ + { + "workflow_run_id": "run-5", + "notify_node_ids": ["notify"], + "success": True, + "render_node_id": "turntable", + } + ] + assert len(blender_calls) == 1 + args = blender_calls[0][blender_calls[0].index("--") + 1 :] + assert Path(args[0]).parent == step_path.parent + assert Path(args[0]).suffix == ".glb" + expected_frames_dir = expected_output_mp4.parent / "_frames_preview" + assert args[1:9] == [ + str(expected_frames_dir), + "120", + "360", + "1920", + "1080", + "cycles", + "64", + "{}", + ] + assert args[9:17] == [ + "", + "Product", + "", + "{}", + "[]", + "0", + "gpu", + "0", + ] + assert args[17:23] == [ + "0.0", + "0.0", + "0.0", + "world_z", + "", + "0", + ] + + +def test_render_turntable_task_uses_shadow_observer_output_handoff( + tmp_path, + monkeypatch, +): + from app.domains.rendering.tasks import publish_asset, render_turntable_task + + step_path = tmp_path / "cad" / "bearing.step" + step_path.parent.mkdir(parents=True, exist_ok=True) + step_path.write_text("STEP", encoding="utf-8") + task_self = SimpleNamespace(request=SimpleNamespace(id="task-turntable-shadow")) + expected_output_mp4 = build_order_line_step_render_path( + step_path, + "line-shadow", + "preview_shadow-abcd1234.mp4", + ) + + observer_finalize_calls: list[dict] = [] + publish_calls: list[tuple] = [] + + def _fake_subprocess_run(cmd, *args, **kwargs): + cmd_text = " ".join(str(part) for part in cmd) + if "export_step_to_gltf.py" in cmd_text: + glb_path = 
step_path.parent / "bearing_thumbnail.glb" + glb_path.write_text("GLB", encoding="utf-8") + elif "turntable_render.py" in cmd_text: + frames_dir = Path(cmd[cmd.index("--") + 2]) + frames_dir.mkdir(parents=True, exist_ok=True) + (frames_dir / "frame_0001.png").write_text("PNG", encoding="utf-8") + if "ffmpeg" in cmd_text: + output_mp4 = expected_output_mp4 + output_mp4.parent.mkdir(parents=True, exist_ok=True) + output_mp4.write_text("MP4", encoding="utf-8") + return SimpleNamespace(returncode=0, stdout="", stderr="") + + monkeypatch.setattr( + "app.domains.rendering.tasks._resolve_step_path_for_order_line", + lambda order_line_id: (str(step_path), "cad-1"), + ) + monkeypatch.setattr( + "app.services.render_blender.find_blender", + lambda: "/usr/bin/blender", + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._build_ffmpeg_cmd", + lambda *args, **kwargs: ["ffmpeg", str(expected_output_mp4)], + ) + monkeypatch.setattr("subprocess.run", _fake_subprocess_run) + monkeypatch.setattr( + "app.domains.rendering.tasks._finalize_shadow_turntable_output", + lambda order_line_id, **kwargs: observer_finalize_calls.append( + {"order_line_id": order_line_id, **kwargs} + ), + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._update_workflow_run_status", + lambda *args, **kwargs: None, + ) + monkeypatch.setattr( + publish_asset, + "delay", + lambda *args, **kwargs: publish_calls.append((args, kwargs)), + ) + + result = render_turntable_task.run.__func__( + task_self, + "line-shadow", + output_name="preview", + output_name_suffix="shadow-abcd1234", + workflow_run_id="run-shadow", + workflow_node_id="turntable", + publish_asset_enabled=False, + observer_output_enabled=True, + graph_output_node_ids=["output"], + emit_events=False, + ) + + assert result == { + "output_mp4": str(expected_output_mp4), + "frame_count": 120, + "fps": 30, + } + assert publish_calls == [] + assert observer_finalize_calls == [ + { + "order_line_id": "line-shadow", + "success": True, + 
"output_path": str(expected_output_mp4), + "render_log": result, + "workflow_run_id": "run-shadow", + "output_node_ids": ["output"], + "render_node_id": "turntable", + } + ] + + +def test_render_turntable_task_uses_isolated_frames_dir_per_output_name( + tmp_path, + monkeypatch, +): + from app.domains.rendering.tasks import render_turntable_task + + step_path = tmp_path / "cad" / "bearing.step" + step_path.parent.mkdir(parents=True, exist_ok=True) + step_path.write_text("STEP", encoding="utf-8") + task_self = SimpleNamespace(request=SimpleNamespace(id="task-turntable-isolated-frames")) + expected_output_mp4 = build_order_line_step_render_path( + step_path, + "line-shadow", + "preview_shadow-run123.mp4", + ) + expected_frames_dir = expected_output_mp4.parent / "_frames_preview_shadow-run123" + + blender_calls: list[list[str]] = [] + ffmpeg_calls: list[tuple[Path, Path, dict]] = [] + + def _fake_subprocess_run(cmd, *args, **kwargs): + cmd_text = " ".join(str(part) for part in cmd) + if "export_step_to_gltf.py" in cmd_text: + glb_path = step_path.parent / "bearing_thumbnail.glb" + glb_path.write_text("GLB", encoding="utf-8") + elif "turntable_render.py" in cmd_text: + blender_calls.append([str(part) for part in cmd]) + frames_dir = Path(cmd[cmd.index("--") + 2]) + frames_dir.mkdir(parents=True, exist_ok=True) + (frames_dir / "frame_0001.png").write_text("fresh", encoding="utf-8") + elif "ffmpeg" in cmd_text: + output_mp4 = expected_output_mp4 + output_mp4.parent.mkdir(parents=True, exist_ok=True) + output_mp4.write_text("MP4", encoding="utf-8") + return SimpleNamespace(returncode=0, stdout="", stderr="") + + def _fake_build_ffmpeg_cmd(frames_dir, output_mp4, **kwargs): + ffmpeg_calls.append((frames_dir, output_mp4, kwargs)) + return ["ffmpeg", str(output_mp4)] + + stale_frame_dir = expected_frames_dir + stale_frame_dir.mkdir(parents=True, exist_ok=True) + (stale_frame_dir / "frame_0001.png").write_text("stale", encoding="utf-8") + + monkeypatch.setattr( + 
"app.domains.rendering.tasks._resolve_step_path_for_order_line", + lambda order_line_id: (str(step_path), "cad-1"), + ) + monkeypatch.setattr( + "app.services.render_blender.find_blender", + lambda: "/usr/bin/blender", + ) + monkeypatch.setattr("app.domains.rendering.tasks._build_ffmpeg_cmd", _fake_build_ffmpeg_cmd) + monkeypatch.setattr("subprocess.run", _fake_subprocess_run) + monkeypatch.setattr( + "app.domains.rendering.tasks._finalize_shadow_turntable_output", + lambda *args, **kwargs: None, + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._update_workflow_run_status", + lambda *args, **kwargs: None, + ) + + render_turntable_task.run.__func__( + task_self, + "line-shadow", + output_name="preview", + output_name_suffix="shadow-run123", + workflow_run_id="run-shadow", + workflow_node_id="turntable", + publish_asset_enabled=False, + observer_output_enabled=True, + emit_events=False, + ) + + assert len(blender_calls) == 1 + args = blender_calls[0][blender_calls[0].index("--") + 1 :] + assert args[1] == str(stale_frame_dir) + assert stale_frame_dir.exists() + assert list(stale_frame_dir.iterdir()) == [stale_frame_dir / "frame_0001.png"] + assert (stale_frame_dir / "frame_0001.png").read_text(encoding="utf-8") == "fresh" + assert ffmpeg_calls == [ + ( + stale_frame_dir, + expected_output_mp4, + {"fps": 30, "bg_color": "", "width": 1920, "height": 1080}, + ) + ] + + +def test_render_turntable_task_preserves_legacy_step_path_signature( + tmp_path, + monkeypatch, +): + from app.domains.rendering.tasks import publish_asset, render_turntable_task + + step_path = tmp_path / "cad" / "bearing.step" + output_dir = step_path.parent / "legacy-renders" + step_path.parent.mkdir(parents=True, exist_ok=True) + step_path.write_text("STEP", encoding="utf-8") + task_self = SimpleNamespace(request=SimpleNamespace(id="task-turntable-legacy")) + + finalize_calls: list[dict] = [] + publish_calls: list[tuple] = [] + workflow_status_calls: list[tuple] = [] + blender_calls: 
list[list[str]] = [] + + def _fake_subprocess_run(cmd, *args, **kwargs): + cmd_text = " ".join(str(part) for part in cmd) + if "export_step_to_gltf.py" in cmd_text: + glb_path = step_path.parent / "bearing_thumbnail.glb" + glb_path.write_text("GLB", encoding="utf-8") + elif "turntable_render.py" in cmd_text: + blender_calls.append([str(part) for part in cmd]) + frames_dir = Path(cmd[cmd.index("--") + 2]) + frames_dir.mkdir(parents=True, exist_ok=True) + (frames_dir / "frame_0001.png").write_text("PNG", encoding="utf-8") + if "ffmpeg" in cmd_text: + output_mp4 = output_dir / "turntable.mp4" + output_mp4.parent.mkdir(parents=True, exist_ok=True) + output_mp4.write_text("MP4", encoding="utf-8") + return SimpleNamespace(returncode=0, stdout="", stderr="") + + monkeypatch.setattr( + "app.services.render_blender.find_blender", + lambda: "/usr/bin/blender", + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._build_ffmpeg_cmd", + lambda *args, **kwargs: ["ffmpeg", str(output_dir / "turntable.mp4")], + ) + monkeypatch.setattr( + "subprocess.run", + _fake_subprocess_run, + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._finalize_graph_turntable_output", + lambda *args, **kwargs: finalize_calls.append(kwargs), + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._update_workflow_run_status", + lambda *args, **kwargs: workflow_status_calls.append((args, kwargs)), + ) + monkeypatch.setattr( + publish_asset, + "delay", + lambda *args, **kwargs: publish_calls.append((args, kwargs)), + ) + + result = render_turntable_task.run.__func__( + task_self, + str(step_path), + str(output_dir), + emit_events=False, + ) + + assert result == { + "output_mp4": str(output_dir / "turntable.mp4"), + "frame_count": 120, + "fps": 30, + } + assert len(blender_calls) == 1 + assert "--camera-orbit" in blender_calls[0] + assert finalize_calls == [] + assert publish_calls == [] + assert workflow_status_calls == [] + + +def 
test_render_order_line_still_task_finalizes_notify_handoff_on_failure(tmp_path, monkeypatch): + from app.domains.rendering.tasks import render_order_line_still_task + + step_path = tmp_path / "cad" / "bearing.step" + step_path.parent.mkdir(parents=True, exist_ok=True) + step_path.write_text("STEP", encoding="utf-8") + task_self = SimpleNamespace( + request=SimpleNamespace(id="task-still-failure"), + retry=lambda *, exc, countdown: (_ for _ in ()).throw(exc), + ) + + notify_finalize_calls: list[dict] = [] + notify_emit_calls: list[dict] = [] + + monkeypatch.setattr( + "app.domains.rendering.tasks._resolve_step_path_for_order_line", + lambda order_line_id: (str(step_path), "cad-1"), + ) + monkeypatch.setattr( + "app.services.step_processor.render_to_file", + lambda **kwargs: (_ for _ in ()).throw(RuntimeError("still boom")), + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._finalize_graph_still_output", + lambda *args, **kwargs: None, + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._update_workflow_run_status", + lambda *args, **kwargs: None, + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._finalize_graph_notify_nodes", + lambda **kwargs: notify_finalize_calls.append(kwargs), + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._emit_graph_render_notifications", + lambda order_line_id, **kwargs: notify_emit_calls.append( + {"order_line_id": order_line_id, **kwargs} + ), + ) + + with pytest.raises(RuntimeError, match="still boom"): + render_order_line_still_task.run.__func__( + task_self, + "line-still-failure", + workflow_run_id="run-still-failure", + workflow_node_id="render", + graph_authoritative_output_enabled=True, + graph_output_node_ids=["output"], + graph_notify_node_ids=["notify"], + emit_legacy_notifications=True, + job_document_enabled=False, + emit_events=False, + ) + + assert notify_emit_calls == [ + { + "order_line_id": "line-still-failure", + "success": False, + "render_log": {"error": "still boom"}, + } + ] + assert 
notify_finalize_calls == [ + { + "workflow_run_id": "run-still-failure", + "notify_node_ids": ["notify"], + "success": False, + "render_node_id": "render", + "error": "still boom", + } + ] + + +def test_export_blend_for_order_line_task_finalizes_notify_handoff_on_failure( + tmp_path, + monkeypatch, +): + from app.config import settings + from app.domains.rendering.tasks import export_blend_for_order_line_task + + step_path = tmp_path / "cad" / "bearing.step" + step_path.parent.mkdir(parents=True, exist_ok=True) + step_path.write_text("STEP", encoding="utf-8") + glb_path = step_path.parent / "bearing_render.glb" + glb_path.write_text("GLB", encoding="utf-8") + task_self = SimpleNamespace( + request=SimpleNamespace(id="task-blend-failure"), + retry=lambda *, exc, countdown: (_ for _ in ()).throw(exc), + ) + + notify_finalize_calls: list[dict] = [] + notify_emit_calls: list[dict] = [] + + monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads")) + monkeypatch.setattr( + "app.domains.rendering.tasks._resolve_step_path_for_order_line", + lambda order_line_id: (str(step_path), "cad-1"), + ) + monkeypatch.setattr( + "app.services.render_blender.find_blender", + lambda: "/usr/bin/blender", + ) + monkeypatch.setattr( + "app.services.render_blender.build_tessellated_glb_path", + lambda *args, **kwargs: glb_path, + ) + monkeypatch.setattr( + "subprocess.run", + lambda *args, **kwargs: SimpleNamespace(returncode=1, stderr="blend boom"), + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._finalize_graph_blend_output", + lambda *args, **kwargs: None, + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._update_workflow_run_status", + lambda *args, **kwargs: None, + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._finalize_graph_notify_nodes", + lambda **kwargs: notify_finalize_calls.append(kwargs), + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._emit_graph_render_notifications", + lambda order_line_id, **kwargs: 
notify_emit_calls.append( + {"order_line_id": order_line_id, **kwargs} + ), + ) + + with pytest.raises(RuntimeError, match="export_blend.py exited 1"): + export_blend_for_order_line_task.run.__func__( + task_self, + "line-blend-failure", + workflow_run_id="run-blend-failure", + workflow_node_id="blend", + graph_authoritative_output_enabled=True, + graph_output_node_ids=["output"], + graph_notify_node_ids=["notify"], + emit_legacy_notifications=True, + ) + + assert notify_emit_calls == [ + { + "order_line_id": "line-blend-failure", + "success": False, + "render_log": {"error": "export_blend.py exited 1:\nblend boom"}, + } + ] + assert notify_finalize_calls == [ + { + "workflow_run_id": "run-blend-failure", + "notify_node_ids": ["notify"], + "success": False, + "render_node_id": "blend", + "error": "export_blend.py exited 1:\nblend boom", + } + ] + + +def test_render_turntable_task_finalizes_notify_handoff_on_failure( + tmp_path, + monkeypatch, +): + from app.domains.rendering.tasks import render_turntable_task + + step_path = tmp_path / "cad" / "bearing.step" + step_path.parent.mkdir(parents=True, exist_ok=True) + step_path.write_text("STEP", encoding="utf-8") + task_self = SimpleNamespace( + request=SimpleNamespace(id="task-turntable-failure"), + retry=lambda *, exc, countdown: (_ for _ in ()).throw(exc), + ) + + notify_finalize_calls: list[dict] = [] + notify_emit_calls: list[dict] = [] + + def _fake_subprocess_run(cmd, *args, **kwargs): + cmd_text = " ".join(str(part) for part in cmd) + if "export_step_to_gltf.py" in cmd_text: + glb_path = step_path.parent / "bearing_thumbnail.glb" + glb_path.write_text("GLB", encoding="utf-8") + return SimpleNamespace(returncode=0, stdout="", stderr="") + if "turntable_render.py" in cmd_text: + return SimpleNamespace(returncode=1, stdout="turntable boom", stderr="") + return SimpleNamespace(returncode=0, stdout="", stderr="") + + monkeypatch.setattr( + "app.domains.rendering.tasks._resolve_step_path_for_order_line", + lambda 
order_line_id: (str(step_path), "cad-1"), + ) + monkeypatch.setattr( + "app.services.render_blender.find_blender", + lambda: "/usr/bin/blender", + ) + monkeypatch.setattr( + "subprocess.run", + _fake_subprocess_run, + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._finalize_graph_turntable_output", + lambda *args, **kwargs: None, + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._update_workflow_run_status", + lambda *args, **kwargs: None, + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._finalize_graph_notify_nodes", + lambda **kwargs: notify_finalize_calls.append(kwargs), + ) + monkeypatch.setattr( + "app.domains.rendering.tasks._emit_graph_render_notifications", + lambda order_line_id, **kwargs: notify_emit_calls.append( + {"order_line_id": order_line_id, **kwargs} + ), + ) + + with pytest.raises(RuntimeError, match="Blender turntable exited 1"): + render_turntable_task.run.__func__( + task_self, + "line-turntable-failure", + workflow_run_id="run-turntable-failure", + workflow_node_id="turntable", + graph_authoritative_output_enabled=True, + graph_output_node_ids=["output"], + graph_notify_node_ids=["notify"], + emit_legacy_notifications=True, + emit_events=False, + ) + + assert notify_emit_calls == [ + { + "order_line_id": "line-turntable-failure", + "success": False, + "render_log": {"error": "Blender turntable exited 1:\nturntable boom"}, + } + ] + assert notify_finalize_calls == [ + { + "workflow_run_id": "run-turntable-failure", + "notify_node_ids": ["notify"], + "success": False, + "render_node_id": "turntable", + "error": "Blender turntable exited 1:\nturntable boom", + } + ] diff --git a/backend/tests/domains/test_template_input_audit.py b/backend/tests/domains/test_template_input_audit.py new file mode 100644 index 0000000..bb25b8b --- /dev/null +++ b/backend/tests/domains/test_template_input_audit.py @@ -0,0 +1,77 @@ +from app.domains.rendering.template_input_audit import ( + extract_template_input_marker, + 
suggest_workflow_input_schema, +) + + +def test_extract_template_input_marker_from_combined_property() -> None: + marker = extract_template_input_marker(props={"template_input": "studio_variant=warm"}) + assert marker == ("studio_variant", "warm") + + +def test_extract_template_input_marker_from_json_property() -> None: + marker = extract_template_input_marker( + props={"hartomat_template_input": '{"key":"lighting_profile","value":"shadow"}'} + ) + assert marker == ("lighting_profile", "shadow") + + +def test_extract_template_input_marker_from_split_properties() -> None: + marker = extract_template_input_marker( + props={"template_input_key": "alpha_mode", "template_input_value": "transparent"} + ) + assert marker == ("alpha_mode", "transparent") + + +def test_extract_template_input_marker_from_name_pattern() -> None: + marker = extract_template_input_marker(name="template-input:studio_variant=warm") + assert marker == ("studio_variant", "warm") + + +def test_suggest_workflow_input_schema_builds_select_fields() -> None: + schema = suggest_workflow_input_schema( + [ + ("studio_variant", "warm"), + ("studio_variant", "cool"), + ("alpha_mode", "transparent"), + ("alpha_mode", "opaque"), + ] + ) + + assert schema == [ + { + "default": "opaque", + "key": "alpha_mode", + "label": "Alpha Mode", + "options": [ + {"label": "Opaque", "value": "opaque"}, + {"label": "Transparent", "value": "transparent"}, + ], + "section": "Template Inputs", + "type": "select", + }, + { + "default": "cool", + "key": "studio_variant", + "label": "Studio Variant", + "options": [ + {"label": "Cool", "value": "cool"}, + {"label": "Warm", "value": "warm"}, + ], + "section": "Template Inputs", + "type": "select", + }, + ] + + +def test_suggest_workflow_input_schema_builds_boolean_field() -> None: + schema = suggest_workflow_input_schema([("shadow_pass", "true"), ("shadow_pass", "false")]) + assert schema == [ + { + "default": False, + "key": "shadow_pass", + "label": "Shadow Pass", + "section": 
"Template Inputs", + "type": "boolean", + } + ] diff --git a/backend/tests/domains/test_workflow_config_utils.py b/backend/tests/domains/test_workflow_config_utils.py index d500873..e01d4bb 100644 --- a/backend/tests/domains/test_workflow_config_utils.py +++ b/backend/tests/domains/test_workflow_config_utils.py @@ -40,8 +40,9 @@ def test_build_preset_workflow_config_creates_graph_still_variant(): assert config["ui"]["execution_mode"] == "graph" assert [node["step"] for node in config["nodes"]] == [ "order_line_setup", - "auto_populate_materials", "resolve_template", + "auto_populate_materials", + "glb_bbox", "material_map_resolve", "blender_still", "output_save", @@ -51,6 +52,7 @@ def test_build_preset_workflow_config_creates_graph_still_variant(): assert render_node["params"]["width"] == 1600 assert render_node["params"]["height"] == 900 assert render_node["params"]["samples"] == 128 + assert render_node["params"]["use_custom_render_settings"] is False def test_canonicalize_workflow_config_migrates_legacy_preset(): @@ -215,6 +217,111 @@ def test_canonicalize_workflow_config_defaults_execution_mode_for_canonical_conf assert canonical["ui"]["execution_mode"] == "legacy" +def test_canonicalize_workflow_config_rebuilds_canonical_still_graph_preset(): + canonical = canonicalize_workflow_config( + { + "version": 1, + "ui": {"preset": "still_graph", "execution_mode": "graph"}, + "nodes": [ + {"id": "setup", "step": "order_line_setup", "params": {}}, + {"id": "resolve_materials", "step": "material_map_resolve", "params": {}}, + {"id": "template", "step": "resolve_template", "params": {}}, + {"id": "render", "step": "blender_still", "params": {"width": 1280, "height": 720, "samples": 32}}, + {"id": "output", "step": "output_save", "params": {}}, + {"id": "notify", "step": "notify", "params": {}}, + ], + "edges": [ + {"from": "setup", "to": "resolve_materials"}, + {"from": "resolve_materials", "to": "template"}, + {"from": "template", "to": "render"}, + {"from": "render", 
"to": "output"}, + {"from": "render", "to": "notify"}, + ], + } + ) + + assert canonical["ui"]["preset"] == "still_graph" + assert canonical["ui"]["execution_mode"] == "graph" + assert [node["step"] for node in canonical["nodes"]] == [ + "order_line_setup", + "resolve_template", + "auto_populate_materials", + "glb_bbox", + "material_map_resolve", + "blender_still", + "output_save", + "notify", + ] + render_node = next(node for node in canonical["nodes"] if node["step"] == "blender_still") + assert render_node["params"]["width"] == 1280 + assert render_node["params"]["height"] == 720 + assert render_node["params"]["samples"] == 32 + assert render_node["params"]["use_custom_render_settings"] is False + + +def test_build_workflow_blueprint_config_cad_intake_supplies_bbox_to_threejs_thumbnail(): + config = build_workflow_blueprint_config("cad_intake") + + assert config["ui"]["family"] == "cad_file" + assert [node["step"] for node in config["nodes"]] == [ + "resolve_step_path", + "occ_object_extract", + "occ_glb_export", + "glb_bbox", + "stl_cache_generate", + "blender_render", + "threejs_render", + "thumbnail_save", + "thumbnail_save", + ] + assert {"from": "export_glb", "to": "bbox"} in config["edges"] + assert {"from": "bbox", "to": "threejs_thumb"} in config["edges"] + + +def test_canonicalize_workflow_config_rebuilds_reference_blueprints(): + canonical = canonicalize_workflow_config( + { + "version": 1, + "ui": {"preset": "custom", "execution_mode": "legacy", "blueprint": "order_rendering"}, + "nodes": [ + {"id": "setup", "step": "order_line_setup", "params": {}}, + ], + "edges": [], + } + ) + + assert canonical["ui"]["blueprint"] == "order_rendering" + assert canonical["ui"]["family"] == "order_line" + assert any(node["step"] == "blender_turntable" for node in canonical["nodes"]) + assert any(node["step"] == "export_blend" for node in canonical["nodes"]) + + +def test_canonicalize_workflow_config_rebuilds_starter_blueprints(): + canonical = 
canonicalize_workflow_config( + { + "version": 1, + "ui": {"preset": "custom", "execution_mode": "legacy", "blueprint": "starter_cad_intake"}, + "nodes": [], + "edges": [], + } + ) + + assert canonical["ui"]["blueprint"] == "starter_cad_intake" + assert canonical["ui"]["family"] == "cad_file" + assert canonical["nodes"] == [ + { + "id": "resolve_step", + "step": "resolve_step_path", + "params": {}, + "ui": { + "type": "inputNode", + "position": {"x": 120, "y": 140}, + "label": "Resolve STEP Path", + }, + } + ] + + def test_workflow_config_requires_canonicalization_for_legacy_payloads(): assert workflow_config_requires_canonicalization( { @@ -235,11 +342,13 @@ def test_build_workflow_blueprint_config_creates_cad_intake_family_graph(): assert config["version"] == 1 assert config["ui"]["preset"] == "custom" + assert config["ui"]["family"] == "cad_file" assert config["ui"]["blueprint"] == "cad_intake" assert [node["step"] for node in config["nodes"]] == [ "resolve_step_path", "occ_object_extract", "occ_glb_export", + "glb_bbox", "stl_cache_generate", "blender_render", "threejs_render", @@ -253,6 +362,7 @@ def test_build_workflow_blueprint_config_creates_order_rendering_family_graph(): assert config["version"] == 1 assert config["ui"]["preset"] == "custom" + assert config["ui"]["family"] == "order_line" assert config["ui"]["blueprint"] == "order_rendering" assert any(node["step"] == "blender_still" for node in config["nodes"]) assert any(node["step"] == "blender_turntable" for node in config["nodes"]) @@ -260,11 +370,34 @@ def test_build_workflow_blueprint_config_creates_order_rendering_family_graph(): assert sum(1 for node in config["nodes"] if node["step"] == "notify") == 3 +def test_build_workflow_blueprint_config_creates_still_graph_reference(): + config = build_workflow_blueprint_config("still_graph_reference") + + assert config["version"] == 1 + assert config["ui"]["preset"] == "custom" + assert config["ui"]["family"] == "order_line" + assert 
config["ui"]["blueprint"] == "still_graph_reference" + assert config["ui"]["execution_mode"] == "graph" + assert [node["step"] for node in config["nodes"]] == [ + "order_line_setup", + "resolve_template", + "auto_populate_materials", + "glb_bbox", + "material_map_resolve", + "blender_still", + "output_save", + "notify", + ] + render_node = next(node for node in config["nodes"] if node["step"] == "blender_still") + assert render_node["params"]["use_custom_render_settings"] is False + + def test_build_starter_workflow_config_creates_minimal_valid_custom_graph(): config = build_starter_workflow_config() assert config["version"] == 1 assert config["ui"]["preset"] == "custom" + assert config["ui"]["family"] == "order_line" assert config["ui"]["blueprint"] == "starter_order_rendering" assert config["nodes"] == [ { diff --git a/backend/tests/domains/test_workflow_dispatch_service.py b/backend/tests/domains/test_workflow_dispatch_service.py index 9fa8b9f..32d62da 100644 --- a/backend/tests/domains/test_workflow_dispatch_service.py +++ b/backend/tests/domains/test_workflow_dispatch_service.py @@ -7,6 +7,7 @@ from types import SimpleNamespace import pytest from PIL import Image, PngImagePlugin from sqlalchemy import select +from sqlalchemy.engine import make_url from sqlalchemy.orm import selectinload from app.config import settings @@ -18,15 +19,128 @@ from app.domains.rendering.workflow_comparison_service import ( _build_artifact, evaluate_rollout_gate, ) -from app.domains.rendering.workflow_config_utils import build_preset_workflow_config +from app.domains.rendering.workflow_config_utils import ( + build_preset_workflow_config, + build_workflow_blueprint_config, +) +from tests.db_test_utils import resolve_test_db_url def _use_test_database(monkeypatch) -> None: - monkeypatch.setattr(settings, "postgres_host", "postgres") - monkeypatch.setattr(settings, "postgres_port", 5432) - monkeypatch.setattr(settings, "postgres_user", "hartomat") - monkeypatch.setattr(settings, 
"postgres_password", "hartomat") - monkeypatch.setattr(settings, "postgres_db", "hartomat_test") + resolved = make_url(resolve_test_db_url(async_driver=False)) + monkeypatch.setattr(settings, "postgres_host", resolved.host or settings.postgres_host) + monkeypatch.setattr(settings, "postgres_port", int(resolved.port or settings.postgres_port)) + monkeypatch.setattr(settings, "postgres_user", resolved.username or settings.postgres_user) + monkeypatch.setattr(settings, "postgres_password", resolved.password or settings.postgres_password) + monkeypatch.setattr(settings, "postgres_db", resolved.database or settings.postgres_db) + + +def _build_valid_custom_still_graph( + *, + execution_mode: str = "graph", + width: int = 1024, + height: int = 768, + include_output: bool = False, + include_notify: bool = False, +) -> dict[str, object]: + nodes: list[dict[str, object]] = [ + {"id": "setup", "step": "order_line_setup", "params": {}}, + {"id": "template", "step": "resolve_template", "params": {}}, + {"id": "populate_materials", "step": "auto_populate_materials", "params": {}}, + {"id": "resolve_materials", "step": "material_map_resolve", "params": {}}, + {"id": "render", "step": "blender_still", "params": {"width": width, "height": height}}, + ] + edges: list[dict[str, str]] = [ + {"from": "setup", "to": "template"}, + {"from": "setup", "to": "populate_materials"}, + {"from": "template", "to": "resolve_materials"}, + {"from": "populate_materials", "to": "resolve_materials"}, + {"from": "template", "to": "render"}, + {"from": "resolve_materials", "to": "render"}, + ] + if include_output: + nodes.append({"id": "output", "step": "output_save", "params": {}}) + edges.append({"from": "render", "to": "output"}) + if include_notify: + nodes.append({"id": "notify", "step": "notify", "params": {}}) + edges.append({"from": "render", "to": "notify"}) + return { + "version": 1, + "ui": {"preset": "custom", "execution_mode": execution_mode}, + "nodes": nodes, + "edges": edges, + } + + 
+def _build_valid_custom_turntable_graph( + *, + execution_mode: str = "graph", + fps: int = 24, + frame_count: int = 96, + include_output: bool = False, + include_notify: bool = False, +) -> dict[str, object]: + duration_s = frame_count / fps + nodes: list[dict[str, object]] = [ + {"id": "setup", "step": "order_line_setup", "params": {}}, + {"id": "template", "step": "resolve_template", "params": {}}, + {"id": "populate_materials", "step": "auto_populate_materials", "params": {}}, + {"id": "bbox", "step": "glb_bbox", "params": {}}, + {"id": "resolve_materials", "step": "material_map_resolve", "params": {}}, + {"id": "turntable", "step": "blender_turntable", "params": {"fps": fps, "duration_s": duration_s}}, + ] + edges: list[dict[str, str]] = [ + {"from": "setup", "to": "template"}, + {"from": "setup", "to": "populate_materials"}, + {"from": "setup", "to": "bbox"}, + {"from": "template", "to": "resolve_materials"}, + {"from": "populate_materials", "to": "resolve_materials"}, + {"from": "bbox", "to": "turntable"}, + {"from": "template", "to": "turntable"}, + {"from": "resolve_materials", "to": "turntable"}, + ] + if include_output: + nodes.append({"id": "output", "step": "output_save", "params": {}}) + edges.append({"from": "turntable", "to": "output"}) + if include_notify: + nodes.append({"id": "notify", "step": "notify", "params": {}}) + edges.append({"from": "turntable", "to": "notify"}) + return { + "version": 1, + "ui": {"preset": "custom", "execution_mode": execution_mode}, + "nodes": nodes, + "edges": edges, + } + + +def _build_valid_custom_blend_graph(*, include_output: bool = False) -> dict[str, object]: + nodes: list[dict[str, object]] = [ + {"id": "setup", "step": "order_line_setup", "params": {}}, + {"id": "template", "step": "resolve_template", "params": {}}, + {"id": "blend", "step": "export_blend", "params": {}}, + ] + edges: list[dict[str, str]] = [ + {"from": "setup", "to": "template"}, + {"from": "template", "to": "blend"}, + ] + if 
include_output: + nodes.append({"id": "output", "step": "output_save", "params": {}}) + edges.append({"from": "blend", "to": "output"}) + return { + "version": 1, + "ui": {"preset": "custom", "execution_mode": "graph"}, + "nodes": nodes, + "edges": edges, + } + + +def _derive_rollout_mode_from_config(workflow_config: dict | None) -> str: + execution_mode = ((workflow_config or {}).get("ui") or {}).get("execution_mode") + if execution_mode == "graph": + return "graph" + if execution_mode == "shadow": + return "shadow" + return "legacy_only" async def _seed_order_line( @@ -61,6 +175,7 @@ async def _seed_order_line( db.add(workflow_definition) await db.flush() output_type.workflow_definition_id = workflow_definition.id + output_type.workflow_rollout_mode = _derive_rollout_mode_from_config(workflow_config) order_line = OrderLine( order_id=order.id, @@ -148,6 +263,54 @@ async def test_dispatch_render_with_workflow_falls_back_to_legacy_without_workfl assert runs == [] +@pytest.mark.asyncio +async def test_dispatch_render_with_workflow_falls_back_on_artifact_contract_mismatch( + db, + admin_user, + monkeypatch, +): + _use_test_database(monkeypatch) + seeded = await _seed_order_line( + db, + admin_user, + workflow_config={ + "version": 1, + "ui": {"preset": "custom", "execution_mode": "graph"}, + "nodes": [ + {"id": "setup", "step": "order_line_setup", "params": {}}, + {"id": "template", "step": "resolve_template", "params": {}}, + {"id": "blend", "step": "export_blend", "params": {}}, + ], + "edges": [ + {"from": "setup", "to": "template"}, + {"from": "template", "to": "blend"}, + ], + }, + ) + output_type = seeded["output_type"] + output_type.artifact_kind = "still_image" + await db.commit() + + monkeypatch.setattr( + "app.domains.rendering.dispatch_service._legacy_dispatch", + lambda order_line_id: {"backend": "legacy", "order_line_id": order_line_id}, + ) + + result = dispatch_render_with_workflow(str(seeded["order_line"].id)) + + await db.rollback() + + assert 
result["backend"] == "legacy" + assert result["order_line_id"] == str(seeded["order_line"].id) + assert result["rollout_gate_status"] == "workflow_contract_mismatch" + assert result["workflow_rollout_ready"] is False + assert result["output_type_rollout_ready"] is False + assert any("Expected artifact kind: still_image." in reason for reason in result["rollout_gate_reasons"]) + assert any("blend_asset" in reason for reason in result["rollout_gate_reasons"]) + runs = (await db.execute(select(WorkflowRun))).scalars().all() + assert runs == [] + + @pytest.mark.asyncio async def test_dispatch_render_with_workflow_creates_run_and_node_results_for_preset_dispatch( db, @@ -203,15 +366,11 @@ async def test_dispatch_render_with_workflow_falls_back_when_workflow_runtime_pr seeded = await _seed_order_line( db, admin_user, - workflow_config={ - "version": 1, - "nodes": [ - {"id": "render", "step": "blender_still", "params": {}}, - ], - "edges": [ - {"from": "missing", "to": "render"}, - ], - }, + workflow_config=build_preset_workflow_config("still", {"width": 640, "height": 640}), + ) + monkeypatch.setattr( + "app.domains.rendering.workflow_executor.prepare_workflow_context", + lambda *_args, **_kwargs: (_ for _ in ()).throw(RuntimeError("prep exploded")), ) monkeypatch.setattr( @@ -248,19 +407,7 @@ async def test_dispatch_render_with_workflow_graph_mode_dispatches_supported_cus workflow_definition = WorkflowDefinition( name=f"Graph Workflow {uuid.uuid4().hex[:8]}", output_type_id=order_line.output_type_id, - config={ - "version": 1, - "ui": {"preset": "custom", "execution_mode": "graph"}, - "nodes": [ - {"id": "setup", "step": "order_line_setup", "params": {}}, - {"id": "template", "step": "resolve_template", "params": {}}, - {"id": "render", "step": "blender_still", "params": {"width": 1024, "height": 768}}, - ], - "edges": [ - {"from": "setup", "to": "template"}, - {"from": "template", "to": "render"}, - ], - }, + 
config=_build_valid_custom_still_graph(execution_mode="graph"), is_active=True, ) db.add(workflow_definition) @@ -268,6 +415,7 @@ async def test_dispatch_render_with_workflow_graph_mode_dispatches_supported_cus output_type = await db.get(OutputType, order_line.output_type_id) assert output_type is not None output_type.workflow_definition_id = workflow_definition.id + output_type.workflow_rollout_mode = "graph" await db.commit() monkeypatch.setattr( @@ -315,21 +463,7 @@ async def test_dispatch_render_with_workflow_graph_mode_uses_output_save_as_auth workflow_definition = WorkflowDefinition( name=f"Graph Output Save {uuid.uuid4().hex[:8]}", output_type_id=order_line.output_type_id, - config={ - "version": 1, - "ui": {"preset": "custom", "execution_mode": "graph"}, - "nodes": [ - {"id": "setup", "step": "order_line_setup", "params": {}}, - {"id": "template", "step": "resolve_template", "params": {}}, - {"id": "render", "step": "blender_still", "params": {"width": 1024, "height": 768}}, - {"id": "output", "step": "output_save", "params": {}}, - ], - "edges": [ - {"from": "setup", "to": "template"}, - {"from": "template", "to": "render"}, - {"from": "render", "to": "output"}, - ], - }, + config=_build_valid_custom_still_graph(execution_mode="graph", include_output=True), is_active=True, ) db.add(workflow_definition) @@ -337,6 +471,7 @@ async def test_dispatch_render_with_workflow_graph_mode_uses_output_save_as_auth output_type = await db.get(OutputType, order_line.output_type_id) assert output_type is not None output_type.workflow_definition_id = workflow_definition.id + output_type.workflow_rollout_mode = "graph" await db.commit() calls: list[tuple[str, list[str], dict]] = [] @@ -367,8 +502,10 @@ async def test_dispatch_render_with_workflow_graph_mode_uses_output_save_as_auth assert calls[0][2]["publish_asset_enabled"] is False assert calls[0][2]["graph_authoritative_output_enabled"] is True assert calls[0][2]["graph_output_node_ids"] == ["output"] - assert 
node_results["output"].status == "completed" + assert node_results["output"].status == "pending" assert node_results["output"].output["publication_mode"] == "awaiting_graph_authoritative_save" + assert node_results["output"].output["handoff_state"] == "armed" + assert node_results["output"].output["handoff_node_ids"] == ["render"] @pytest.mark.asyncio @@ -395,6 +532,7 @@ async def test_dispatch_render_with_workflow_graph_mode_canonicalizes_legacy_pre output_type = await db.get(OutputType, order_line.output_type_id) assert output_type is not None output_type.workflow_definition_id = workflow_definition.id + output_type.workflow_rollout_mode = "graph" await db.commit() monkeypatch.setattr( @@ -421,7 +559,7 @@ async def test_dispatch_render_with_workflow_graph_mode_canonicalizes_legacy_pre assert node_results["setup"].status == "completed" assert node_results["template"].status == "completed" assert node_results["render"].status == "queued" - assert node_results["output"].status == "completed" + assert node_results["output"].status == "pending" @pytest.mark.asyncio @@ -436,21 +574,7 @@ async def test_dispatch_render_with_workflow_graph_mode_falls_back_to_legacy_on_ workflow_definition = WorkflowDefinition( name=f"Graph Workflow {uuid.uuid4().hex[:8]}", output_type_id=order_line.output_type_id, - config={ - "version": 1, - "ui": {"preset": "custom", "execution_mode": "graph"}, - "nodes": [ - { - "id": "setup", - "step": "order_line_setup", - "params": {"failure_policy": {"fallback_to_legacy": True}}, - }, - {"id": "render", "step": "blender_still", "params": {"width": 1024, "height": 768}}, - ], - "edges": [ - {"from": "setup", "to": "render"}, - ], - }, + config=_build_valid_custom_still_graph(execution_mode="graph"), is_active=True, ) db.add(workflow_definition) @@ -458,6 +582,7 @@ async def test_dispatch_render_with_workflow_graph_mode_falls_back_to_legacy_on_ output_type = await db.get(OutputType, order_line.output_type_id) assert output_type is not None 
output_type.workflow_definition_id = workflow_definition.id + output_type.workflow_rollout_mode = "graph" await db.commit() monkeypatch.setattr( @@ -490,6 +615,40 @@ async def test_dispatch_render_with_workflow_graph_mode_falls_back_to_legacy_on_ assert run.error_message == "graph dispatch exploded" +@pytest.mark.asyncio +async def test_dispatch_render_with_graph_capable_workflow_respects_legacy_only_rollout_mode( + db, + admin_user, + monkeypatch, +): + _use_test_database(monkeypatch) + seeded = await _seed_order_line( + db, + admin_user, + workflow_config=_build_valid_custom_still_graph(execution_mode="graph"), + ) + output_type = seeded["output_type"] + output_type.workflow_rollout_mode = "legacy_only" + await db.commit() + + monkeypatch.setattr( + "app.domains.rendering.dispatch_service._legacy_dispatch", + lambda order_line_id: {"backend": "legacy", "order_line_id": order_line_id}, + ) + + result = dispatch_render_with_workflow(str(seeded["order_line"].id)) + + await db.rollback() + + assert result["backend"] == "legacy" + assert result["order_line_id"] == str(seeded["order_line"].id) + assert result["workflow_rollout_mode"] == "legacy_only" + assert result["configured_execution_mode"] == "graph" + assert result["rollout_gate_status"] == "rollout_legacy_only" + assert result["workflow_rollout_ready"] is False + assert result["output_type_rollout_ready"] is False + + @pytest.mark.asyncio async def test_dispatch_render_with_workflow_shadow_mode_keeps_legacy_authoritative_and_dispatches_graph_observer( db, @@ -502,19 +661,7 @@ async def test_dispatch_render_with_workflow_shadow_mode_keeps_legacy_authoritat workflow_definition = WorkflowDefinition( name=f"Shadow Workflow {uuid.uuid4().hex[:8]}", output_type_id=order_line.output_type_id, - config={ - "version": 1, - "ui": {"preset": "custom", "execution_mode": "shadow"}, - "nodes": [ - {"id": "setup", "step": "order_line_setup", "params": {}}, - {"id": "template", "step": "resolve_template", "params": {}}, - {"id": 
"render", "step": "blender_still", "params": {"width": 1024, "height": 768}}, - ], - "edges": [ - {"from": "setup", "to": "template"}, - {"from": "template", "to": "render"}, - ], - }, + config=_build_valid_custom_still_graph(execution_mode="shadow"), is_active=True, ) db.add(workflow_definition) @@ -522,6 +669,7 @@ async def test_dispatch_render_with_workflow_shadow_mode_keeps_legacy_authoritat output_type = await db.get(OutputType, order_line.output_type_id) assert output_type is not None output_type.workflow_definition_id = workflow_definition.id + output_type.workflow_rollout_mode = "shadow" await db.commit() calls: list[tuple[str, list[str], dict]] = [] @@ -592,6 +740,7 @@ async def test_dispatch_render_with_workflow_shadow_mode_canonicalizes_legacy_pr output_type = await db.get(OutputType, order_line.output_type_id) assert output_type is not None output_type.workflow_definition_id = workflow_definition.id + output_type.workflow_rollout_mode = "shadow" await db.commit() calls: list[tuple[str, list[str], dict]] = [] @@ -610,21 +759,13 @@ async def test_dispatch_render_with_workflow_shadow_mode_canonicalizes_legacy_pr await db.rollback() - run_result = await db.execute( - select(WorkflowRun) - .where(WorkflowRun.id == uuid.UUID(result["shadow_workflow_run_id"])) - .options(selectinload(WorkflowRun.node_results)) - ) - run = run_result.scalar_one() - node_results = {node_result.node_name: node_result for node_result in run.node_results} - assert result["backend"] == "legacy" assert result["execution_mode"] == "shadow" - assert result["shadow_status"] == "dispatched" - assert result["shadow_task_ids"] == ["legacy-shadow-task-1"] - assert run.execution_mode == "shadow" - assert node_results["output"].status == "completed" - assert calls[0][2]["publish_asset_enabled"] is False + assert result["shadow_status"] == "skipped" + assert result["rollout_gate_status"] == "shadow_skipped" + assert "shadow_workflow_run_id" not in result + assert "material_assignments" in 
result["shadow_error"] + assert calls == [] @pytest.mark.asyncio @@ -639,17 +780,7 @@ async def test_dispatch_render_with_workflow_shadow_mode_ignores_graph_failures_ workflow_definition = WorkflowDefinition( name=f"Shadow Workflow {uuid.uuid4().hex[:8]}", output_type_id=order_line.output_type_id, - config={ - "version": 1, - "ui": {"preset": "custom", "execution_mode": "shadow"}, - "nodes": [ - {"id": "setup", "step": "order_line_setup", "params": {}}, - {"id": "render", "step": "blender_still", "params": {"width": 1024, "height": 768}}, - ], - "edges": [ - {"from": "setup", "to": "render"}, - ], - }, + config=_build_valid_custom_still_graph(execution_mode="shadow"), is_active=True, ) db.add(workflow_definition) @@ -657,6 +788,7 @@ async def test_dispatch_render_with_workflow_shadow_mode_ignores_graph_failures_ output_type = await db.get(OutputType, order_line.output_type_id) assert output_type is not None output_type.workflow_definition_id = workflow_definition.id + output_type.workflow_rollout_mode = "shadow" await db.commit() monkeypatch.setattr( @@ -730,6 +862,32 @@ def test_evaluate_rollout_gate_warns_on_small_visual_delta(tmp_path: Path): assert any("warn threshold" in reason for reason in gate["reasons"]) +def test_evaluate_rollout_gate_passes_near_zero_visual_delta(tmp_path: Path): + authoritative = tmp_path / "authoritative.png" + observer = tmp_path / "observer.png" + + Image.new("RGBA", (1024, 1024), color=(106, 106, 106, 255)).save(authoritative) + Image.new("RGBA", (1024, 1024), color=(106, 106, 106, 255)).save(observer) + + with Image.open(observer) as image: + image.putpixel((444, 137), (106, 106, 107, 255)) + image.putpixel((651, 142), (105, 106, 106, 255)) + image.save(observer) + + gate = evaluate_rollout_gate( + authoritative_output=_build_artifact(str(authoritative)), + observer_output=_build_artifact(str(observer)), + exact_match=False, + dimensions_match=True, + mean_pixel_delta=((1 + 1) / (1024 * 1024 * 4 * 255)), + ) + + assert 
gate["verdict"] == "pass" + assert gate["ready"] is True + assert gate["status"] == "ready_for_rollout" + assert any("pass threshold" in reason for reason in gate["reasons"]) + + def test_evaluate_rollout_gate_fails_on_missing_observer(tmp_path: Path): authoritative = tmp_path / "authoritative.png" Image.new("RGBA", (16, 16), color=(0, 128, 255, 255)).save(authoritative) @@ -796,7 +954,11 @@ def test_dispatch_render_with_workflow_unit_marks_shadow_dispatch_as_pending_rol workflow_def_id = uuid.uuid4() fake_line = SimpleNamespace( id=uuid.UUID(order_line_id), - output_type=SimpleNamespace(id=output_type_id, workflow_definition_id=workflow_def_id), + output_type=SimpleNamespace( + id=output_type_id, + workflow_definition_id=workflow_def_id, + workflow_rollout_mode="shadow", + ), ) fake_workflow_def = SimpleNamespace(id=workflow_def_id, config={"version": 1}, is_active=True) fake_run = SimpleNamespace(id=uuid.uuid4()) @@ -951,12 +1113,14 @@ async def test_workflow_dispatch_endpoint_returns_workflow_run_with_node_results assert node_results["setup"]["output"]["order_line_id"] == str(order_line.id) assert node_results["template"]["status"] == "completed" assert node_results["template"]["output"]["use_materials"] is False - assert node_results["output"]["status"] == "completed" + assert node_results["output"]["status"] == "pending" assert node_results["output"]["output"]["publication_mode"] == "awaiting_graph_authoritative_save" + assert node_results["output"]["output"]["handoff_state"] == "armed" + assert node_results["output"]["output"]["handoff_node_ids"] == ["render"] @pytest.mark.asyncio -async def test_workflow_dispatch_endpoint_arms_output_save_for_export_blend( +async def test_workflow_dispatch_endpoint_rejects_output_save_for_export_blend_only_graph( client, db, admin_user, @@ -968,18 +1132,7 @@ async def test_workflow_dispatch_endpoint_arms_output_save_for_export_blend( order_line = await _seed_renderable_order_line(db, admin_user, tmp_path) workflow_definition 
= WorkflowDefinition( name=f"Blend Output Workflow {uuid.uuid4().hex[:8]}", - config={ - "version": 1, - "nodes": [ - {"id": "setup", "step": "order_line_setup", "params": {}}, - {"id": "blend", "step": "export_blend", "params": {}}, - {"id": "output", "step": "output_save", "params": {}}, - ], - "edges": [ - {"from": "setup", "to": "blend"}, - {"from": "blend", "to": "output"}, - ], - }, + config=_build_valid_custom_blend_graph(include_output=True), is_active=True, ) db.add(workflow_definition) @@ -1000,35 +1153,9 @@ async def test_workflow_dispatch_endpoint_arms_output_save_for_export_blend( headers=auth_headers, ) - assert response.status_code == 200 - body = response.json() - - assert body["context_id"] == context_id - assert body["execution_mode"] == "graph" - assert body["dispatched"] == 1 - assert body["task_ids"] == ["task-1"] - assert calls == [ - ( - "app.domains.rendering.tasks.export_blend_for_order_line_task", - [context_id], - { - "workflow_run_id": body["workflow_run"]["id"], - "workflow_node_id": "blend", - "publish_asset_enabled": False, - "graph_authoritative_output_enabled": True, - "graph_output_node_ids": ["output"], - }, - ) - ] - - node_results = {node["node_name"]: node for node in body["workflow_run"]["node_results"]} - assert node_results["blend"]["status"] == "queued" - assert node_results["blend"]["output"]["predicted_asset_type"] == "blend_production" - assert node_results["blend"]["output"]["publish_asset_enabled"] is False - assert node_results["blend"]["output"]["graph_authoritative_output_enabled"] is True - assert node_results["blend"]["output"]["graph_output_node_ids"] == ["output"] - assert node_results["output"]["status"] == "completed" - assert node_results["output"]["output"]["publication_mode"] == "awaiting_graph_authoritative_save" + assert response.status_code == 422 + assert "output_save" in response.json()["detail"] + assert calls == [] @pytest.mark.asyncio @@ -1044,18 +1171,7 @@ async def 
test_workflow_dispatch_endpoint_arms_output_save_for_turntable( order_line = await _seed_renderable_order_line(db, admin_user, tmp_path) workflow_definition = WorkflowDefinition( name=f"Turntable Output Workflow {uuid.uuid4().hex[:8]}", - config={ - "version": 1, - "nodes": [ - {"id": "setup", "step": "order_line_setup", "params": {}}, - {"id": "turntable", "step": "blender_turntable", "params": {"fps": 24, "frame_count": 96}}, - {"id": "output", "step": "output_save", "params": {}}, - ], - "edges": [ - {"from": "setup", "to": "turntable"}, - {"from": "turntable", "to": "output"}, - ], - }, + config=_build_valid_custom_turntable_graph(include_output=True), is_active=True, ) db.add(workflow_definition) @@ -1091,7 +1207,6 @@ async def test_workflow_dispatch_endpoint_arms_output_save_for_turntable( assert calls[0][2]["graph_authoritative_output_enabled"] is True assert calls[0][2]["graph_output_node_ids"] == ["output"] assert calls[0][2]["fps"] == 24 - assert calls[0][2]["frame_count"] == 96 node_results = {node["node_name"]: node for node in body["workflow_run"]["node_results"]} assert node_results["turntable"]["status"] == "queued" @@ -1099,8 +1214,10 @@ async def test_workflow_dispatch_endpoint_arms_output_save_for_turntable( assert node_results["turntable"]["output"]["publish_asset_enabled"] is False assert node_results["turntable"]["output"]["graph_authoritative_output_enabled"] is True assert node_results["turntable"]["output"]["graph_output_node_ids"] == ["output"] - assert node_results["output"]["status"] == "completed" + assert node_results["output"]["status"] == "pending" assert node_results["output"]["output"]["publication_mode"] == "awaiting_graph_authoritative_save" + assert node_results["output"]["output"]["handoff_state"] == "armed" + assert node_results["output"]["output"]["handoff_node_ids"] == ["turntable"] @pytest.mark.asyncio @@ -1116,18 +1233,7 @@ async def test_workflow_dispatch_endpoint_arms_notify_handoff_for_render_node( order_line = await 
_seed_renderable_order_line(db, admin_user, tmp_path) workflow_definition = WorkflowDefinition( name=f"Notify Workflow {uuid.uuid4().hex[:8]}", - config={ - "version": 1, - "nodes": [ - {"id": "setup", "step": "order_line_setup", "params": {}}, - {"id": "render", "step": "blender_still", "params": {}}, - {"id": "notify", "step": "notify", "params": {}}, - ], - "edges": [ - {"from": "setup", "to": "render"}, - {"from": "render", "to": "notify"}, - ], - }, + config=_build_valid_custom_still_graph(include_notify=True), is_active=True, ) db.add(workflow_definition) @@ -1166,9 +1272,10 @@ async def test_workflow_dispatch_endpoint_arms_notify_handoff_for_render_node( node_results = {node["node_name"]: node for node in body["workflow_run"]["node_results"]} assert node_results["render"]["status"] == "queued" assert node_results["render"]["output"]["graph_notify_node_ids"] == ["notify"] - assert node_results["notify"]["status"] == "completed" + assert node_results["notify"]["status"] == "pending" assert node_results["notify"]["output"]["notification_mode"] == "deferred_to_render_task" assert node_results["notify"]["output"]["armed_node_ids"] == ["render"] + assert node_results["notify"]["output"]["handoff_state"] == "armed" @pytest.mark.asyncio @@ -1246,19 +1353,7 @@ async def test_workflow_draft_dispatch_endpoint_dispatches_unsaved_render_graph( json={ "workflow_id": str(workflow_definition.id), "context_id": str(order_line.id), - "config": { - "version": 1, - "ui": {"preset": "custom", "execution_mode": "graph"}, - "nodes": [ - {"id": "setup", "step": "order_line_setup", "params": {}, "ui": {"label": "Setup"}}, - {"id": "template", "step": "resolve_template", "params": {}, "ui": {"label": "Template"}}, - {"id": "render", "step": "blender_still", "params": {"width": 800, "height": 600}, "ui": {"label": "Render"}}, - ], - "edges": [ - {"from": "setup", "to": "template"}, - {"from": "template", "to": "render"}, - ], - }, + "config": _build_valid_custom_still_graph(width=800, 
height=600), }, ) @@ -1306,17 +1401,7 @@ async def test_workflow_draft_dispatch_endpoint_marks_submitted_order_processing headers=auth_headers, json={ "context_id": str(order_line.id), - "config": { - "version": 1, - "ui": {"preset": "custom", "execution_mode": "graph"}, - "nodes": [ - {"id": "setup", "step": "order_line_setup", "params": {}, "ui": {"label": "Setup"}}, - {"id": "render", "step": "blender_still", "params": {}, "ui": {"label": "Render"}}, - ], - "edges": [ - {"from": "setup", "to": "render"}, - ], - }, + "config": _build_valid_custom_still_graph(), }, ) @@ -1413,19 +1498,7 @@ async def test_workflow_preflight_endpoint_supports_direct_cad_file_graphs( ) workflow_definition = WorkflowDefinition( name=f"CAD Workflow {uuid.uuid4().hex[:8]}", - config={ - "version": 1, - "ui": {"preset": "custom", "execution_mode": "graph"}, - "nodes": [ - {"id": "input", "step": "resolve_step_path", "params": {}, "ui": {"label": "Resolve STEP"}}, - {"id": "render", "step": "blender_render", "params": {"width": 512, "height": 512}, "ui": {"label": "Thumbnail"}}, - {"id": "save", "step": "thumbnail_save", "params": {}, "ui": {"label": "Save Thumbnail"}}, - ], - "edges": [ - {"from": "input", "to": "render"}, - {"from": "render", "to": "save"}, - ], - }, + config=build_workflow_blueprint_config("cad_intake"), is_active=True, ) db.add_all([cad_file, workflow_definition]) @@ -1443,7 +1516,7 @@ async def test_workflow_preflight_endpoint_supports_direct_cad_file_graphs( assert body["context_kind"] == "cad_file" assert body["expected_context_kind"] == "cad_file" - assert body["execution_mode"] == "graph" + assert body["execution_mode"] == "legacy" assert body["graph_dispatch_allowed"] is True assert body["resolved_cad_file_id"] == str(cad_file.id) assert all(node["status"] == "ready" for node in body["nodes"]) @@ -1464,19 +1537,7 @@ async def test_workflow_draft_preflight_endpoint_validates_unsaved_render_graph( headers=auth_headers, json={ "context_id": str(order_line.id), - 
"config": { - "version": 1, - "ui": {"preset": "custom", "execution_mode": "graph"}, - "nodes": [ - {"id": "setup", "step": "order_line_setup", "params": {}, "ui": {"label": "Setup"}}, - {"id": "template", "step": "resolve_template", "params": {}, "ui": {"label": "Template"}}, - {"id": "render", "step": "blender_still", "params": {"width": 640, "height": 640}, "ui": {"label": "Render"}}, - ], - "edges": [ - {"from": "setup", "to": "template"}, - {"from": "template", "to": "render"}, - ], - }, + "config": _build_valid_custom_still_graph(width=640, height=640), }, ) @@ -1489,7 +1550,13 @@ async def test_workflow_draft_preflight_endpoint_validates_unsaved_render_graph( assert body["execution_mode"] == "graph" assert body["graph_dispatch_allowed"] is True assert body["resolved_order_line_id"] == str(order_line.id) - assert [node["node_id"] for node in body["nodes"]] == ["setup", "template", "render"] + assert [node["node_id"] for node in body["nodes"]] == [ + "setup", + "template", + "populate_materials", + "resolve_materials", + "render", + ] @pytest.mark.asyncio @@ -1646,7 +1713,9 @@ async def test_workflow_run_comparison_endpoint_reports_metadata_only_difference assert body["exact_match"] is False assert body["dimensions_match"] is True assert body["mean_pixel_delta"] == 0.0 - assert "metadata differs" in body["summary"] + assert body["summary"] == ( + "Observer output matches the authoritative legacy output within the visual pass threshold." 
+ ) @pytest.mark.asyncio @@ -1695,7 +1764,9 @@ async def test_workflow_run_comparison_endpoint_finds_shadow_output_in_step_file admin_user, auth_headers, tmp_path, + monkeypatch, ): + monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads")) order_line = await _seed_renderable_order_line(db, admin_user, tmp_path) workflow_run = WorkflowRun( order_line_id=order_line.id, @@ -1710,7 +1781,7 @@ async def test_workflow_run_comparison_endpoint_finds_shadow_output_in_step_file authoritative_path = render_dir / "authoritative.png" Image.new("RGBA", (12, 12), (32, 160, 255, 255)).save(authoritative_path) - step_shadow_dir = Path("/app/uploads/step_files/renders") + step_shadow_dir = Path(settings.upload_dir) / "step_files" / "renders" / str(order_line.id) step_shadow_dir.mkdir(parents=True, exist_ok=True) shadow_path = step_shadow_dir / f"line_{order_line.id}_shadow-{str(workflow_run.id)[:8]}.png" Image.new("RGBA", (12, 12), (32, 160, 255, 255)).save(shadow_path) @@ -1729,3 +1800,52 @@ async def test_workflow_run_comparison_endpoint_finds_shadow_output_in_step_file assert body["status"] == "matched" assert body["observer_output"]["exists"] is True assert body["observer_output"]["path"] == str(shadow_path) + + +@pytest.mark.asyncio +async def test_workflow_run_comparison_endpoint_treats_near_zero_visual_delta_as_match( + client, + db, + admin_user, + auth_headers, + tmp_path, +): + order_line = await _seed_renderable_order_line(db, admin_user, tmp_path) + workflow_run = WorkflowRun( + order_line_id=order_line.id, + execution_mode="shadow", + status="completed", + ) + db.add(workflow_run) + await db.flush() + + render_dir = tmp_path / "comparison-near-zero" / str(order_line.id) + render_dir.mkdir(parents=True, exist_ok=True) + authoritative_path = render_dir / "authoritative.png" + shadow_path = render_dir / f"line_{order_line.id}_shadow-{str(workflow_run.id)[:8]}.png" + + Image.new("RGBA", (1024, 1024), (106, 106, 106, 255)).save(authoritative_path) + 
Image.new("RGBA", (1024, 1024), (106, 106, 106, 255)).save(shadow_path) + + with Image.open(shadow_path) as image: + image.putpixel((444, 137), (106, 106, 107, 255)) + image.putpixel((651, 142), (105, 106, 106, 255)) + image.save(shadow_path) + + order_line.result_path = str(authoritative_path) + order_line.render_status = "completed" + await db.commit() + + response = await client.get( + f"/api/workflows/runs/{workflow_run.id}/comparison", + headers=auth_headers, + ) + + assert response.status_code == 200 + body = response.json() + assert body["status"] == "matched" + assert body["exact_match"] is False + assert body["dimensions_match"] is True + assert body["mean_pixel_delta"] is not None + assert body["mean_pixel_delta"] <= 1e-6 + assert "pass threshold" in body["summary"] diff --git a/backend/tests/domains/test_workflow_graph_runtime.py b/backend/tests/domains/test_workflow_graph_runtime.py index de761f0..aec2f62 100644 --- a/backend/tests/domains/test_workflow_graph_runtime.py +++ b/backend/tests/domains/test_workflow_graph_runtime.py @@ -6,10 +6,10 @@ from pathlib import Path from types import SimpleNamespace import pytest -from sqlalchemy import create_engine, select, text +from sqlalchemy import select, text from sqlalchemy.orm import Session, selectinload -from app.database import Base +from app.core.render_paths import build_order_line_export_path, build_order_line_step_render_path from app.core.process_steps import StepName from app.domains.auth.models import User, UserRole from app.domains.materials.models import AssetLibrary @@ -27,25 +27,13 @@ from app.domains.rendering.workflow_graph_runtime import ( from app.domains.rendering.workflow_run_service import create_workflow_run from app.domains.rendering.workflow_runtime_services import OrderLineRenderSetupResult -import app.models # noqa: F401 -from tests.db_test_utils import reset_public_schema_sync, resolve_test_db_url +from tests.db_test_utils import sync_test_session as sync_test_session_ctx 
@pytest.fixture def sync_session(): - engine = create_engine(resolve_test_db_url(async_driver=False)) - with engine.begin() as conn: - reset_public_schema_sync(conn) - Base.metadata.create_all(conn) - - session = Session(engine) - try: + with sync_test_session_ctx() as session: yield session - finally: - session.close() - with engine.begin() as conn: - reset_public_schema_sync(conn) - engine.dispose() def _seed_renderable_order_line( @@ -137,6 +125,19 @@ def _seed_renderable_order_line( target_collection="Product", material_replace_enabled=True, lighting_only=False, + workflow_input_schema=[ + { + "key": "studio_variant", + "label": "Studio Variant", + "type": "select", + "section": "Template Inputs", + "default": "default", + "options": [ + {"value": "default", "label": "Default"}, + {"value": "warm", "label": "Warm"}, + ], + } + ], is_active=True, output_types=[output_type], ) @@ -329,6 +330,193 @@ def test_execute_graph_workflow_routes_cad_thumbnail_save_using_upstream_threejs assert node_results["save"].output["predicted_output_path"].endswith(f"{cad_file.id}.png") +def test_execute_graph_workflow_serializes_template_schema_and_template_inputs( + sync_session, + tmp_path, + monkeypatch, +): + line = _seed_renderable_order_line(sync_session, tmp_path) + template = sync_session.execute(select(RenderTemplate)).unique().scalar_one() + + monkeypatch.setattr( + "app.domains.rendering.workflow_runtime_services.resolve_material_map", + lambda raw_map: {key: f"resolved:{value}" for key, value in raw_map.items()}, + ) + + workflow_context = prepare_workflow_context( + { + "version": 1, + "nodes": [ + {"id": "setup", "step": "order_line_setup", "params": {}}, + { + "id": "template", + "step": "resolve_template", + "params": { + "template_id_override": str(template.id), + "template_input__studio_variant": "warm", + }, + }, + ], + "edges": [ + {"from": "setup", "to": "template"}, + ], + }, + context_id=str(line.id), + execution_mode="graph", + ) + run = create_workflow_run( 
+ sync_session, + workflow_def_id=None, + order_line_id=line.id, + workflow_context=workflow_context, + ) + + dispatch_result = execute_graph_workflow(sync_session, workflow_context) + sync_session.commit() + + refreshed_run = sync_session.execute( + select(WorkflowRun) + .where(WorkflowRun.id == run.id) + .options(selectinload(WorkflowRun.node_results)) + ).scalar_one() + node_results = {node_result.node_name: node_result for node_result in refreshed_run.node_results} + + assert dispatch_result.task_ids == [] + assert node_results["template"].status == "completed" + assert node_results["template"].output["workflow_input_schema"] == template.workflow_input_schema + assert node_results["template"].output["template_inputs"] == {"studio_variant": "warm"} + assert node_results["template"].output["template_input_count"] == 1 + + +def test_execute_graph_workflow_passes_template_inputs_to_still_task( + sync_session, + tmp_path, + monkeypatch, +): + line = _seed_renderable_order_line(sync_session, tmp_path) + template = sync_session.execute(select(RenderTemplate)).unique().scalar_one() + + send_calls: list[tuple[str, list[str], dict[str, object]]] = [] + + def _fake_send_task(task_name: str, args: list[str], kwargs: dict[str, object]): + send_calls.append((task_name, args, kwargs)) + return SimpleNamespace(id="task-still-template-inputs") + + monkeypatch.setattr( + "app.tasks.celery_app.celery_app.send_task", + _fake_send_task, + ) + + workflow_context = prepare_workflow_context( + { + "version": 1, + "nodes": [ + {"id": "setup", "step": "order_line_setup", "params": {}}, + { + "id": "template", + "step": "resolve_template", + "params": { + "template_id_override": str(template.id), + "template_input__studio_variant": "warm", + }, + }, + {"id": "render", "step": "blender_still", "params": {}}, + ], + "edges": [ + {"from": "setup", "to": "template"}, + {"from": "template", "to": "render"}, + ], + }, + context_id=str(line.id), + execution_mode="graph", + ) + 
create_workflow_run( + sync_session, + workflow_def_id=None, + order_line_id=line.id, + workflow_context=workflow_context, + ) + + dispatch_result = execute_graph_workflow(sync_session, workflow_context) + sync_session.commit() + + assert dispatch_result.task_ids == ["task-still-template-inputs"] + assert len(send_calls) == 1 + assert send_calls[0][0] == "app.domains.rendering.tasks.render_order_line_still_task" + assert send_calls[0][1] == [str(line.id)] + assert send_calls[0][2]["template_inputs"] == {"studio_variant": "warm"} + + +def test_execute_graph_workflow_passes_template_inputs_and_duration_to_turntable_task( + sync_session, + tmp_path, + monkeypatch, +): + line = _seed_renderable_order_line(sync_session, tmp_path) + template = sync_session.execute(select(RenderTemplate)).unique().scalar_one() + + send_calls: list[tuple[str, list[str], dict[str, object]]] = [] + + def _fake_send_task(task_name: str, args: list[str], kwargs: dict[str, object]): + send_calls.append((task_name, args, kwargs)) + return SimpleNamespace(id="task-turntable-template-inputs") + + monkeypatch.setattr( + "app.tasks.celery_app.celery_app.send_task", + _fake_send_task, + ) + + workflow_context = prepare_workflow_context( + { + "version": 1, + "nodes": [ + {"id": "setup", "step": "order_line_setup", "params": {}}, + { + "id": "template", + "step": "resolve_template", + "params": { + "template_id_override": str(template.id), + "template_input__studio_variant": "warm", + }, + }, + { + "id": "render", + "step": "blender_turntable", + "params": { + "fps": 12, + "duration_s": 7, + "frame_count": 999, + }, + }, + ], + "edges": [ + {"from": "setup", "to": "template"}, + {"from": "template", "to": "render"}, + ], + }, + context_id=str(line.id), + execution_mode="graph", + ) + create_workflow_run( + sync_session, + workflow_def_id=None, + order_line_id=line.id, + workflow_context=workflow_context, + ) + + dispatch_result = execute_graph_workflow(sync_session, workflow_context) + 
sync_session.commit() + + assert dispatch_result.task_ids == ["task-turntable-template-inputs"] + assert len(send_calls) == 1 + assert send_calls[0][0] == "app.domains.rendering.tasks.render_turntable_task" + assert send_calls[0][1] == [str(line.id)] + assert send_calls[0][2]["template_inputs"] == {"studio_variant": "warm"} + assert send_calls[0][2]["duration_s"] == 7.0 + assert send_calls[0][2]["fps"] == 12 + assert send_calls[0][2]["frame_count"] == 84 + + def test_execute_graph_workflow_completes_cad_bridge_only_nodes_without_queueing( sync_session, tmp_path, @@ -660,6 +848,108 @@ def test_build_task_kwargs_autoscales_default_samples_via_shared_render_invocati assert kwargs["samples"] == 64 +def test_build_task_kwargs_ignores_authoritative_still_overrides_without_opt_in( + tmp_path, + monkeypatch, +): + step_path = tmp_path / "cad" / "bearing.step" + step_path.parent.mkdir(parents=True, exist_ok=True) + step_path.write_text("STEP", encoding="utf-8") + + output_type = OutputType( + id=uuid.uuid4(), + name="Still Preview", + renderer="blender", + output_format="png", + render_settings={ + "width": 2048, + "height": 1536, + "engine": "cycles", + "samples": 128, + "noise_threshold": "0.05", + }, + transparent_bg=True, + cycles_device="cuda", + ) + cad_file = CadFile( + id=uuid.uuid4(), + original_name="bearing.step", + stored_path=str(step_path), + file_hash="hash-graph-2", + parsed_objects={"objects": ["InnerRing", "OuterRing"]}, + ) + product = Product( + id=uuid.uuid4(), + pim_id="P-graph-2", + name="Bearing G2", + category_key="bearings", + cad_file_id=cad_file.id, + cad_file=cad_file, + ) + line = OrderLine( + id=uuid.uuid4(), + order_id=uuid.uuid4(), + product_id=product.id, + product=product, + output_type_id=output_type.id, + output_type=output_type, + ) + state = WorkflowGraphState( + setup=OrderLineRenderSetupResult( + status="ready", + order_line=line, + cad_file=cad_file, + part_colors={"InnerRing": "Steel raw"}, + ) + ) + workflow_context = 
SimpleNamespace( + workflow_run_id=uuid.uuid4(), + execution_mode="graph", + ordered_nodes=[], + edges=[], + ) + node = SimpleNamespace( + id="render", + step=StepName.BLENDER_STILL, + params={ + "width": 1024, + "height": 768, + "samples": 16, + "render_engine": "eevee", + "transparent_bg": False, + "cycles_device": "cpu", + "noise_threshold": "0.2", + }, + ) + + monkeypatch.setattr( + "app.domains.rendering.workflow_graph_runtime.resolve_render_position_context", + lambda _session, _line: SimpleNamespace( + rotation_x=0.0, + rotation_y=0.0, + rotation_z=0.0, + focal_length_mm=None, + sensor_width_mm=None, + ), + ) + + kwargs = _build_task_kwargs( + session=object(), + workflow_context=workflow_context, + state=state, + node=node, + ) + + assert kwargs["width"] == 2048 + assert kwargs["height"] == 1536 + assert kwargs["engine"] == "cycles" + assert kwargs["samples"] == 128 + assert kwargs["transparent_bg"] is True + assert kwargs["cycles_device"] == "cuda" + assert kwargs["noise_threshold"] == "0.05" + assert "render_engine" not in kwargs + + def test_execute_graph_workflow_respects_custom_render_settings_opt_in_for_still_task( sync_session, tmp_path, @@ -838,6 +1128,221 @@ def test_execute_graph_workflow_preserves_turntable_timing_without_custom_render assert kwargs["output_name_suffix"].startswith("shadow-") +def test_execute_graph_workflow_respects_custom_render_settings_opt_in_for_turntable_task( + sync_session, + tmp_path, + monkeypatch, +): + line = _seed_renderable_order_line(sync_session, tmp_path) + assert line.output_type is not None + line.output_type.render_settings = { + "width": 2048, + "height": 2048, + "engine": "cycles", + "samples": 128, + "fps": 30, + "frame_count": 180, + } + sync_session.commit() + + send_calls: list[tuple[str, list[str], dict[str, object]]] = [] + + def _fake_send_task(task_name: str, args: list[str], kwargs: dict[str, object]): + send_calls.append((task_name, args, kwargs)) + return 
SimpleNamespace(id="task-custom-turntable") + + monkeypatch.setattr( + "app.tasks.celery_app.celery_app.send_task", + _fake_send_task, + ) + + workflow_context = prepare_workflow_context( + { + "version": 1, + "nodes": [ + {"id": "setup", "step": "order_line_setup", "params": {}}, + {"id": "template", "step": "resolve_template", "params": {}}, + { + "id": "render", + "step": "blender_turntable", + "params": { + "use_custom_render_settings": True, + "width": 1024, + "height": 768, + "samples": 32, + "render_engine": "eevee", + "fps": 12, + "duration_s": 6, + }, + }, + ], + "edges": [ + {"from": "setup", "to": "template"}, + {"from": "template", "to": "render"}, + ], + }, + context_id=str(line.id), + execution_mode="graph", + ) + create_workflow_run( + sync_session, + workflow_def_id=None, + order_line_id=line.id, + workflow_context=workflow_context, + ) + + dispatch_result = execute_graph_workflow(sync_session, workflow_context) + sync_session.commit() + + assert dispatch_result.task_ids == ["task-custom-turntable"] + assert len(send_calls) == 1 + + task_name, args, kwargs = send_calls[0] + assert task_name == "app.domains.rendering.tasks.render_turntable_task" + assert args == [str(line.id)] + assert kwargs["width"] == 1024 + assert kwargs["height"] == 768 + assert kwargs["samples"] == 32 + assert kwargs["render_engine"] == "eevee" + assert kwargs["engine"] == "cycles" + assert kwargs["fps"] == 12 + assert kwargs["duration_s"] == 6.0 + assert kwargs["frame_count"] == 72 + + +def test_execute_graph_workflow_preserves_template_camera_orbit_without_custom_render_settings( + sync_session, + tmp_path, + monkeypatch, +): + line = _seed_renderable_order_line(sync_session, tmp_path) + template = sync_session.execute(select(RenderTemplate)).unique().scalar_one() + template.camera_orbit = False + assert line.output_type is not None + line.output_type.render_settings = { + "width": 2048, + "height": 2048, + "engine": "cycles", + "samples": 128, + "fps": 30, + "frame_count": 
180, + } + sync_session.commit() + + send_calls: list[tuple[str, list[str], dict[str, object]]] = [] + + def _fake_send_task(task_name: str, args: list[str], kwargs: dict[str, object]): + send_calls.append((task_name, args, kwargs)) + return SimpleNamespace(id="task-turntable-camera-orbit") + + monkeypatch.setattr( + "app.tasks.celery_app.celery_app.send_task", + _fake_send_task, + ) + + workflow_context = prepare_workflow_context( + { + "version": 1, + "nodes": [ + {"id": "setup", "step": "order_line_setup", "params": {}}, + {"id": "template", "step": "resolve_template", "params": {}}, + { + "id": "render", + "step": "blender_turntable", + "params": { + "fps": 24, + "frame_count": 120, + }, + }, + ], + "edges": [ + {"from": "setup", "to": "template"}, + {"from": "template", "to": "render"}, + ], + }, + context_id=str(line.id), + execution_mode="graph", + ) + create_workflow_run( + sync_session, + workflow_def_id=None, + order_line_id=line.id, + workflow_context=workflow_context, + ) + + dispatch_result = execute_graph_workflow(sync_session, workflow_context) + sync_session.commit() + + assert dispatch_result.task_ids == ["task-turntable-camera-orbit"] + assert len(send_calls) == 1 + assert send_calls[0][2]["camera_orbit"] is False + + +def test_execute_graph_workflow_serializes_template_override_modes( + sync_session, + tmp_path, + monkeypatch, +): + line = _seed_renderable_order_line(sync_session, tmp_path) + template = sync_session.execute(select(RenderTemplate)).unique().scalar_one() + template.target_collection = "TemplateCollection" + template.material_replace_enabled = False + template.lighting_only = False + template.shadow_catcher_enabled = False + template.camera_orbit = True + sync_session.commit() + + workflow_context = prepare_workflow_context( + { + "version": 1, + "nodes": [ + {"id": "setup", "step": "order_line_setup", "params": {}}, + { + "id": "template", + "step": "resolve_template", + "params": { + "target_collection": "NodeCollection", + 
"material_library_path": "/libraries/materials.blend", + "material_replace_mode": "enabled", + "lighting_only_mode": "enabled", + "shadow_catcher_mode": "enabled", + "camera_orbit_mode": "disabled", + }, + }, + ], + "edges": [ + {"from": "setup", "to": "template"}, + ], + }, + context_id=str(line.id), + execution_mode="graph", + ) + run = create_workflow_run( + sync_session, + workflow_def_id=None, + order_line_id=line.id, + workflow_context=workflow_context, + ) + + dispatch_result = execute_graph_workflow(sync_session, workflow_context) + sync_session.commit() + + refreshed_run = sync_session.execute( + select(WorkflowRun) + .where(WorkflowRun.id == run.id) + .options(selectinload(WorkflowRun.node_results)) + ).scalar_one() + node_results = {node_result.node_name: node_result for node_result in refreshed_run.node_results} + + assert dispatch_result.task_ids == [] + assert node_results["template"].status == "completed" + assert node_results["template"].output["target_collection"] == "NodeCollection" + assert node_results["template"].output["use_materials"] is True + assert node_results["template"].output["lighting_only"] is True + assert node_results["template"].output["shadow_catcher"] is True + assert node_results["template"].output["camera_orbit"] is False + + def test_execute_graph_workflow_retries_bridge_node_and_persists_attempt_metadata( sync_session, monkeypatch, @@ -1010,16 +1515,22 @@ def test_execute_graph_workflow_supports_output_save_bridge_node( assert send_calls[0][2]["graph_authoritative_output_enabled"] is True assert send_calls[0][2]["graph_output_node_ids"] == ["output"] assert node_results["render"].status == "queued" - assert node_results["output"].status == "completed" + assert node_results["output"].status == "pending" assert node_results["output"].output["publication_mode"] == "awaiting_graph_authoritative_save" assert node_results["output"].output["order_line_id"] == str(line.id) + assert node_results["output"].output["handoff_state"] == 
"armed" + assert node_results["output"].output["handoff_node_ids"] == ["render"] assert node_results["output"].output["artifact_count"] == 1 assert node_results["output"].output["upstream_artifacts"] == [ { "node_id": "render", "artifact_role": "render_output", "predicted_output_path": str( - tmp_path / "cad" / "renders" / f"line_{line.id}.png" + build_order_line_step_render_path( + line.product.cad_file.stored_path, + str(line.id), + f"line_{line.id}.png", + ) ), "predicted_asset_type": "still", "publish_asset_enabled": False, @@ -1086,14 +1597,16 @@ def test_execute_graph_workflow_arms_output_save_handoff_for_export_blend( assert send_calls[0][2]["graph_authoritative_output_enabled"] is True assert send_calls[0][2]["graph_output_node_ids"] == ["output"] assert node_results["blend"].status == "queued" - assert node_results["output"].status == "completed" + assert node_results["output"].status == "pending" assert node_results["output"].output["publication_mode"] == "awaiting_graph_authoritative_save" + assert node_results["output"].output["handoff_state"] == "armed" + assert node_results["output"].output["handoff_node_ids"] == ["blend"] assert node_results["output"].output["artifact_count"] == 1 assert node_results["output"].output["upstream_artifacts"] == [ { "node_id": "blend", "artifact_role": "blend_export", - "predicted_output_path": str(tmp_path / "cad" / "bearing_production.blend"), + "predicted_output_path": str(build_order_line_export_path(str(line.id), "bearing_production.blend")), "predicted_asset_type": "blend_production", "publish_asset_enabled": False, "graph_authoritative_output_enabled": True, @@ -1160,14 +1673,18 @@ def test_execute_graph_workflow_arms_output_save_handoff_for_turntable( assert send_calls[0][2]["graph_output_node_ids"] == ["output"] assert send_calls[0][2]["workflow_node_id"] == "turntable" assert node_results["turntable"].status == "queued" - assert node_results["output"].status == "completed" + assert node_results["output"].status 
== "pending" assert node_results["output"].output["publication_mode"] == "awaiting_graph_authoritative_save" + assert node_results["output"].output["handoff_state"] == "armed" + assert node_results["output"].output["handoff_node_ids"] == ["turntable"] assert node_results["output"].output["artifact_count"] == 1 assert node_results["output"].output["upstream_artifacts"] == [ { "node_id": "turntable", "artifact_role": "turntable_output", - "predicted_output_path": str(tmp_path / "cad" / "renders" / "turntable.mp4"), + "predicted_output_path": str( + build_order_line_step_render_path(line.product.cad_file.stored_path, str(line.id), "turntable.mp4") + ), "predicted_asset_type": "turntable", "publish_asset_enabled": False, "graph_authoritative_output_enabled": True, @@ -1178,6 +1695,150 @@ def test_execute_graph_workflow_arms_output_save_handoff_for_turntable( ] +def test_execute_graph_workflow_arms_shadow_output_save_handoff_for_turntable( + sync_session, + tmp_path, + monkeypatch, +): + line = _seed_renderable_order_line(sync_session, tmp_path) + send_calls: list[tuple[str, list[str], dict[str, object]]] = [] + + monkeypatch.setattr( + "app.tasks.celery_app.celery_app.send_task", + lambda task_name, args, kwargs: send_calls.append((task_name, args, kwargs)) + or SimpleNamespace(id="task-shadow-turntable-output-save"), + ) + + workflow_context = prepare_workflow_context( + { + "version": 1, + "nodes": [ + {"id": "setup", "step": "order_line_setup", "params": {}}, + {"id": "turntable", "step": "blender_turntable", "params": {"fps": 24, "frame_count": 96}}, + {"id": "output", "step": "output_save", "params": {}}, + ], + "edges": [ + {"from": "setup", "to": "turntable"}, + {"from": "turntable", "to": "output"}, + ], + }, + context_id=str(line.id), + execution_mode="shadow", + ) + run = create_workflow_run( + sync_session, + workflow_def_id=None, + order_line_id=line.id, + workflow_context=workflow_context, + ) + + dispatch_result = execute_graph_workflow(sync_session, 
workflow_context) + sync_session.commit() + + refreshed_run = sync_session.execute( + select(WorkflowRun) + .where(WorkflowRun.id == run.id) + .options(selectinload(WorkflowRun.node_results)) + ).scalar_one() + node_results = {node_result.node_name: node_result for node_result in refreshed_run.node_results} + + assert dispatch_result.task_ids == ["task-shadow-turntable-output-save"] + assert len(send_calls) == 1 + assert send_calls[0][0] == "app.domains.rendering.tasks.render_turntable_task" + assert send_calls[0][1] == [str(line.id)] + assert send_calls[0][2]["publish_asset_enabled"] is False + assert send_calls[0][2]["observer_output_enabled"] is True + assert send_calls[0][2]["graph_output_node_ids"] == ["output"] + assert "graph_authoritative_output_enabled" not in send_calls[0][2] + assert node_results["turntable"].status == "queued" + assert node_results["output"].status == "pending" + assert node_results["output"].output["publication_mode"] == "shadow_observer_only" + assert node_results["output"].output["handoff_state"] == "armed" + assert node_results["output"].output["handoff_node_ids"] == ["turntable"] + assert node_results["output"].output["artifact_count"] == 1 + assert node_results["output"].output["upstream_artifacts"] == [ + { + "node_id": "turntable", + "artifact_role": "turntable_output", + "predicted_output_path": str( + build_order_line_step_render_path( + line.product.cad_file.stored_path, + str(line.id), + f"turntable_shadow-{str(run.id)[:8]}.mp4", + ) + ), + "predicted_asset_type": "turntable", + "publish_asset_enabled": False, + "graph_authoritative_output_enabled": False, + "graph_output_node_ids": ["output"], + "notify_handoff_enabled": False, + "task_id": "task-shadow-turntable-output-save", + } + ] + + +def test_execute_graph_workflow_routes_shadow_render_tasks_to_light_queue_when_available( + sync_session, + tmp_path, + monkeypatch, +): + line = _seed_renderable_order_line(sync_session, tmp_path) + send_calls: list[tuple[str, list[str], 
dict[str, object], dict[str, object]]] = [] + + monkeypatch.setattr( + "app.domains.rendering.workflow_graph_runtime._inspect_active_worker_queues", + lambda timeout=1.0: {"asset_pipeline", "asset_pipeline_light"}, + ) + + def _fake_send_task(task_name: str, args: list[str], kwargs: dict[str, object], **task_options): + send_calls.append((task_name, args, kwargs, task_options)) + return SimpleNamespace(id="task-shadow-light-queue") + + monkeypatch.setattr( + "app.tasks.celery_app.celery_app.send_task", + _fake_send_task, + ) + + workflow_context = prepare_workflow_context( + { + "version": 1, + "nodes": [ + {"id": "setup", "step": "order_line_setup", "params": {}}, + {"id": "turntable", "step": "blender_turntable", "params": {"fps": 24, "frame_count": 96}}, + {"id": "output", "step": "output_save", "params": {}}, + ], + "edges": [ + {"from": "setup", "to": "turntable"}, + {"from": "turntable", "to": "output"}, + ], + }, + context_id=str(line.id), + execution_mode="shadow", + ) + run = create_workflow_run( + sync_session, + workflow_def_id=None, + order_line_id=line.id, + workflow_context=workflow_context, + ) + + dispatch_result = execute_graph_workflow(sync_session, workflow_context) + sync_session.commit() + + refreshed_run = sync_session.execute( + select(WorkflowRun) + .where(WorkflowRun.id == run.id) + .options(selectinload(WorkflowRun.node_results)) + ).scalar_one() + node_results = {node_result.node_name: node_result for node_result in refreshed_run.node_results} + + assert dispatch_result.task_ids == ["task-shadow-light-queue"] + assert len(send_calls) == 1 + assert send_calls[0][0] == "app.domains.rendering.tasks.render_turntable_task" + assert send_calls[0][3]["queue"] == "asset_pipeline_light" + assert node_results["turntable"].output["task_queue"] == "asset_pipeline_light" + + def test_execute_graph_workflow_routes_output_save_handoffs_per_connected_branch( sync_session, tmp_path, @@ -1240,12 +1901,21 @@ def 
test_execute_graph_workflow_routes_output_save_handoffs_per_connected_branch assert send_calls[0][2]["graph_output_node_ids"] == ["still_output"] assert send_calls[1][0] == "app.domains.rendering.tasks.render_turntable_task" assert send_calls[1][2]["graph_output_node_ids"] == ["turntable_output"] + assert node_results["still_output"].status == "pending" + assert node_results["still_output"].output["handoff_state"] == "armed" + assert node_results["still_output"].output["handoff_node_ids"] == ["still"] assert node_results["still_output"].output["artifact_count"] == 1 assert node_results["still_output"].output["upstream_artifacts"] == [ { "node_id": "still", "artifact_role": "render_output", - "predicted_output_path": str(tmp_path / "cad" / "renders" / f"line_{line.id}.png"), + "predicted_output_path": str( + build_order_line_step_render_path( + line.product.cad_file.stored_path, + str(line.id), + f"line_{line.id}.png", + ) + ), "predicted_asset_type": "still", "publish_asset_enabled": False, "graph_authoritative_output_enabled": True, @@ -1254,12 +1924,17 @@ def test_execute_graph_workflow_routes_output_save_handoffs_per_connected_branch "task_id": "task-branch-1", } ] + assert node_results["turntable_output"].status == "pending" + assert node_results["turntable_output"].output["handoff_state"] == "armed" + assert node_results["turntable_output"].output["handoff_node_ids"] == ["turntable"] assert node_results["turntable_output"].output["artifact_count"] == 1 assert node_results["turntable_output"].output["upstream_artifacts"] == [ { "node_id": "turntable", "artifact_role": "turntable_output", - "predicted_output_path": str(tmp_path / "cad" / "renders" / "turntable.mp4"), + "predicted_output_path": str( + build_order_line_step_render_path(line.product.cad_file.stored_path, str(line.id), "turntable.mp4") + ), "predicted_asset_type": "turntable", "publish_asset_enabled": False, "graph_authoritative_output_enabled": True, @@ -1379,9 +2054,10 @@ def 
test_execute_graph_workflow_arms_notify_handoff_for_graph_render_task( assert send_calls[0][2]["emit_legacy_notifications"] is True assert send_calls[0][2]["graph_notify_node_ids"] == ["notify"] assert node_results["render"].output["graph_notify_node_ids"] == ["notify"] - assert node_results["notify"].status == "completed" + assert node_results["notify"].status == "pending" assert node_results["notify"].output["notification_mode"] == "deferred_to_render_task" assert node_results["notify"].output["armed_node_ids"] == ["render"] + assert node_results["notify"].output["handoff_state"] == "armed" def test_execute_graph_workflow_routes_notify_handoffs_per_connected_branch( @@ -1451,10 +2127,14 @@ def test_execute_graph_workflow_routes_notify_handoffs_per_connected_branch( assert send_calls[1][2]["graph_notify_node_ids"] == ["turntable_notify"] assert node_results["still"].output["graph_notify_node_ids"] == ["still_notify"] assert node_results["turntable"].output["graph_notify_node_ids"] == ["turntable_notify"] - assert node_results["still_notify"].status == "completed" + assert node_results["still_notify"].status == "pending" + assert node_results["still_notify"].output["notification_mode"] == "deferred_to_render_task" assert node_results["still_notify"].output["armed_node_ids"] == ["still"] - assert node_results["turntable_notify"].status == "completed" + assert node_results["still_notify"].output["handoff_state"] == "armed" + assert node_results["turntable_notify"].status == "pending" + assert node_results["turntable_notify"].output["notification_mode"] == "deferred_to_render_task" assert node_results["turntable_notify"].output["armed_node_ids"] == ["turntable"] + assert node_results["turntable_notify"].output["handoff_state"] == "armed" def test_execute_graph_workflow_suppresses_notify_node_in_shadow_mode( diff --git a/backend/tests/domains/test_workflow_node_registry.py b/backend/tests/domains/test_workflow_node_registry.py index 95eb4a6..da2575c 100644 --- 
a/backend/tests/domains/test_workflow_node_registry.py +++ b/backend/tests/domains/test_workflow_node_registry.py @@ -1,7 +1,9 @@ import pytest from app.core.process_steps import StepName -from app.domains.rendering.models import WorkflowDefinition +from app.domains.rendering.models import OutputType, WorkflowDefinition, WorkflowRun +from app.domains.rendering.workflow_config_utils import build_preset_workflow_config +from app.domains.rendering.workflow_graph_runtime import _STILL_TASK_KEYS, _TURNTABLE_TASK_KEYS from app.domains.rendering.workflow_node_registry import ( get_node_definition, list_node_definitions, @@ -14,11 +16,55 @@ def test_node_registry_covers_all_step_names(): expected_steps = {step.value for step in StepName} assert registered_steps == expected_steps - assert all(definition.family in {"cad_file", "order_line"} for definition in definitions) + assert all(definition.family in {"cad_file", "order_line", "shared"} for definition in definitions) assert all(definition.module_key for definition in definitions) assert all(definition.legacy_source for definition in definitions) +def test_node_registry_module_keys_are_unique(): + definitions = list_node_definitions() + module_keys = [definition.module_key for definition in definitions] + + assert len(module_keys) == len(set(module_keys)) + + +def test_node_registry_defaults_match_declared_fields(): + definitions = list_node_definitions() + + for definition in definitions: + field_keys = {field.key for field in definition.fields} + default_keys = set(definition.defaults) + assert default_keys <= field_keys + + +def test_node_registry_contracts_have_valid_shape(): + definitions = list_node_definitions() + + for definition in definitions: + input_context = definition.input_contract.get("context") + output_context = definition.output_contract.get("context") + + if definition.family == "shared": + assert input_context is None + assert output_context is None + else: + assert input_context == definition.family 
+ assert output_context == definition.family + + required = definition.input_contract.get("requires", []) + required_any = definition.input_contract.get("requires_any", []) + provides = definition.output_contract.get("provides", []) + + assert len(required) == len(set(required)) + assert len(required_any) == len(set(required_any)) + assert len(provides) == len(set(provides)) + assert len(definition.artifact_roles_consumed) == len(set(definition.artifact_roles_consumed)) + assert len(definition.artifact_roles_produced) == len(set(definition.artifact_roles_produced)) + + field_keys = [field.key for field in definition.fields] + assert len(field_keys) == len(set(field_keys)) + + def test_turntable_node_definition_exposes_expected_schema(): definition = get_node_definition(StepName.BLENDER_TURNTABLE) @@ -27,7 +73,11 @@ def test_turntable_node_definition_exposes_expected_schema(): assert definition.module_key == "render.production.turntable" assert definition.node_type == "renderFramesNode" assert definition.defaults["fps"] == 24 + assert definition.defaults["frame_count"] == 120 assert definition.defaults["duration_s"] == 5 + assert definition.defaults["turntable_degrees"] == 360 + assert definition.defaults["turntable_axis"] == "world_z" + assert definition.defaults["camera_orbit"] is True assert definition.input_contract["context"] == "order_line" assert definition.output_contract["provides"] == ["rendered_frames", "rendered_video"] assert "material_assignments" in definition.artifact_roles_consumed @@ -55,6 +105,22 @@ def test_turntable_node_definition_exposes_expected_schema(): } +def test_graph_render_node_fields_are_supported_by_runtime_dispatch(): + still_definition = get_node_definition(StepName.BLENDER_STILL) + turntable_definition = get_node_definition(StepName.BLENDER_TURNTABLE) + + assert still_definition is not None + assert turntable_definition is not None + + still_runtime_fields = {field.key for field in still_definition.fields if field.key != 
"use_custom_render_settings"} + turntable_runtime_fields = { + field.key for field in turntable_definition.fields if field.key != "use_custom_render_settings" + } + + assert still_runtime_fields <= _STILL_TASK_KEYS + assert turntable_runtime_fields <= _TURNTABLE_TASK_KEYS + + def test_order_line_setup_and_template_contracts_expose_runtime_outputs(): setup = get_node_definition(StepName.ORDER_LINE_SETUP) template = get_node_definition(StepName.RESOLVE_TEMPLATE) @@ -87,12 +153,40 @@ def test_order_line_setup_and_template_contracts_expose_runtime_outputs(): "use_materials", "override_material", } - assert {field.key for field in bbox.fields} == {"glb_path"} + assert {field.key for field in bbox.fields} == {"glb_path", "source_preference"} + assert bbox.family == "shared" + assert bbox.input_contract == {"requires": ["glb_preview"]} + assert bbox.output_contract == {"provides": ["bbox"]} + assert {field.key for field in template.fields} == { + "template_id_override", + "require_template", + "material_library_path", + "disable_materials", + "target_collection", + "material_replace_mode", + "lighting_only_mode", + "shadow_catcher_mode", + "camera_orbit_mode", + } + assert {field.key for field in get_node_definition(StepName.MATERIAL_MAP_RESOLVE).fields} == { + "disable_materials", + "material_override", + } + assert {field.key for field in get_node_definition(StepName.AUTO_POPULATE_MATERIALS).fields} == { + "persist_updates", + "refresh_material_source", + "include_populated_products", + } assert output.input_contract["requires"] == ["order_line_context"] assert output.input_contract["requires_any"] == ["rendered_image", "rendered_frames", "rendered_video"] assert set(output.output_contract["provides"]) >= {"media_asset", "workflow_result"} + assert {field.key for field in output.fields} == { + "expected_artifact_role", + "require_upstream_artifact", + } assert export_blend.defaults["output_name_suffix"] == "" assert {field.key for field in export_blend.fields} == 
{"output_name_suffix"} + assert notify.defaults == {"channel": "audit_log", "require_armed_render": False} assert notify.input_contract["requires"] == ["order_line_context"] assert notify.input_contract["requires_any"] == [ "rendered_image", @@ -100,6 +194,58 @@ def test_order_line_setup_and_template_contracts_expose_runtime_outputs(): "rendered_video", "workflow_result", ] + assert {field.key for field in notify.fields} == {"channel", "require_armed_render"} + + +def test_cad_and_export_contract_nodes_only_expose_supported_settings(): + occ_glb_export = get_node_definition(StepName.OCC_GLB_EXPORT) + thumbnail_save = get_node_definition(StepName.THUMBNAIL_SAVE) + export_blend = get_node_definition(StepName.EXPORT_BLEND) + stl_cache_generate = get_node_definition(StepName.STL_CACHE_GENERATE) + + assert occ_glb_export is not None + assert thumbnail_save is not None + assert export_blend is not None + assert stl_cache_generate is not None + + assert occ_glb_export.family == "cad_file" + assert occ_glb_export.fields == [] + assert occ_glb_export.defaults == {} + assert occ_glb_export.input_contract == {"context": "cad_file", "requires": ["step_path"]} + assert occ_glb_export.output_contract == {"context": "cad_file", "provides": ["glb_preview"]} + assert occ_glb_export.artifact_roles_consumed == ["step_path"] + assert occ_glb_export.artifact_roles_produced == ["glb_preview"] + assert "does not expose per-node overrides yet" in occ_glb_export.description + + assert thumbnail_save.family == "cad_file" + assert thumbnail_save.fields == [] + assert thumbnail_save.defaults == {} + assert thumbnail_save.input_contract == {"context": "cad_file", "requires": ["rendered_image"]} + assert thumbnail_save.output_contract == {"context": "cad_file", "provides": ["cad_thumbnail_media"]} + assert thumbnail_save.artifact_roles_consumed == ["rendered_image"] + assert thumbnail_save.artifact_roles_produced == ["cad_thumbnail_media"] + assert "connected upstream thumbnail request node" in 
thumbnail_save.description + + assert export_blend.family == "order_line" + assert export_blend.defaults == {"output_name_suffix": ""} + assert {field.key for field in export_blend.fields} == {"output_name_suffix"} + assert export_blend.input_contract == { + "context": "order_line", + "requires": ["order_line_context", "render_template"], + } + assert export_blend.output_contract == {"context": "order_line", "provides": ["blend_asset"]} + assert export_blend.artifact_roles_consumed == ["order_line_context", "render_template"] + assert export_blend.artifact_roles_produced == ["blend_asset"] + assert "Only the optional filename suffix is workflow-configurable today." in export_blend.description + + assert stl_cache_generate.family == "cad_file" + assert stl_cache_generate.fields == [] + assert stl_cache_generate.defaults == {} + assert stl_cache_generate.input_contract == {"context": "cad_file", "requires": ["step_path"]} + assert stl_cache_generate.output_contract == {"context": "cad_file", "provides": ["stl_cache"]} + assert stl_cache_generate.artifact_roles_consumed == ["step_path"] + assert stl_cache_generate.artifact_roles_produced == ["stl_cache"] + assert "Compatibility node for legacy CAD flows." 
in stl_cache_generate.description @pytest.mark.asyncio @@ -146,6 +292,16 @@ async def test_node_definitions_endpoint_returns_registry(client, auth_headers): "material_override", } + blender_turntable = next( + definition for definition in body["definitions"] if definition["step"] == StepName.BLENDER_TURNTABLE.value + ) + assert blender_turntable["defaults"]["fps"] == 24 + assert blender_turntable["defaults"]["frame_count"] == 120 + assert blender_turntable["defaults"]["duration_s"] == 5 + assert blender_turntable["defaults"]["turntable_degrees"] == 360 + assert blender_turntable["defaults"]["turntable_axis"] == "world_z" + assert blender_turntable["defaults"]["camera_orbit"] is True + glb_bbox = next( definition for definition in body["definitions"] if definition["step"] == StepName.GLB_BBOX.value ) @@ -162,7 +318,30 @@ async def test_node_definitions_endpoint_returns_registry(client, auth_headers): "step": None, "unit": None, "options": [], - } + "allow_blank": True, + "max_length": None, + "text_format": "absolute_glb_path", + }, + { + "key": "source_preference", + "label": "Source Preference", + "type": "select", + "description": "Prefer a prepared GLB, force STEP fallback, or fail when no GLB artifact is available.", + "section": "Inputs", + "default": "auto", + "min": None, + "max": None, + "step": None, + "unit": None, + "options": [ + {"value": "auto", "label": "Auto"}, + {"value": "step_only", "label": "STEP Only"}, + {"value": "glb_only", "label": "GLB Only"}, + ], + "allow_blank": True, + "max_length": None, + "text_format": "plain", + }, ] @@ -203,6 +382,85 @@ async def test_workflow_crud_roundtrip_preserves_execution_mode(client, auth_hea assert fetched["config"]["ui"]["execution_mode"] == "shadow" +@pytest.mark.asyncio +async def test_workflow_crud_exposes_supported_artifact_kinds(client, auth_headers): + create_response = await client.post( + "/api/workflows", + headers=auth_headers, + json={ + "name": "Still Workflow Contract", + "config": 
build_preset_workflow_config("still_graph"), + "is_active": True, + }, + ) + + assert create_response.status_code == 201, create_response.text + created = create_response.json() + assert created["family"] == "order_line" + assert created["supported_artifact_kinds"] == ["still_image"] + + get_response = await client.get(f"/api/workflows/{created['id']}", headers=auth_headers) + + assert get_response.status_code == 200 + fetched = get_response.json() + assert fetched["supported_artifact_kinds"] == ["still_image"] + + +@pytest.mark.asyncio +async def test_workflow_crud_exposes_rollout_summary(client, db, auth_headers): + workflow = WorkflowDefinition( + name="Shadow Rollout Workflow", + config=build_preset_workflow_config("still_graph") | { + "ui": { + **(build_preset_workflow_config("still_graph").get("ui") or {}), + "execution_mode": "shadow", + } + }, + is_active=True, + ) + db.add(workflow) + await db.flush() + + output_type = OutputType( + name="Shadow Still Output", + workflow_definition_id=workflow.id, + workflow_family="order_line", + artifact_kind="still_image", + workflow_rollout_mode="shadow", + render_backend="celery", + ) + db.add(output_type) + await db.flush() + + workflow_run = WorkflowRun( + workflow_def_id=workflow.id, + execution_mode="shadow", + status="completed", + ) + db.add(workflow_run) + await db.commit() + + response = await client.get(f"/api/workflows/{workflow.id}", headers=auth_headers) + + assert response.status_code == 200, response.text + body = response.json() + assert body["rollout_summary"]["linked_output_type_count"] == 1 + assert body["rollout_summary"]["linked_output_type_names"] == ["Shadow Still Output"] + assert body["rollout_summary"]["linked_output_types"] == [ + { + "id": str(output_type.id), + "name": "Shadow Still Output", + "is_active": True, + "artifact_kind": "still_image", + "workflow_rollout_mode": "shadow", + } + ] + assert body["rollout_summary"]["rollout_modes"] == ["shadow"] + assert 
body["rollout_summary"]["has_blocking_contracts"] is False + assert body["rollout_summary"]["latest_shadow_run"]["workflow_run_id"] == str(workflow_run.id) + assert body["rollout_summary"]["latest_shadow_run"]["execution_mode"] == "shadow" + + @pytest.mark.asyncio async def test_admin_backfill_workflows_rewrites_legacy_configs(client, db, auth_headers): legacy = WorkflowDefinition( diff --git a/backend/tests/domains/test_workflow_runtime_services.py b/backend/tests/domains/test_workflow_runtime_services.py index 63adc9e..6786689 100644 --- a/backend/tests/domains/test_workflow_runtime_services.py +++ b/backend/tests/domains/test_workflow_runtime_services.py @@ -5,6 +5,7 @@ import uuid from pathlib import Path import pytest +from PIL import Image, PngImagePlugin from sqlalchemy import select, text from sqlalchemy.orm import Session @@ -15,6 +16,7 @@ from app.domains.orders.models import Order, OrderLine, OrderStatus from app.domains.products.models import CadFile, Product from app.domains.rendering.models import OutputType, RenderTemplate from app.domains.rendering.workflow_runtime_services import ( + _build_effective_material_lookup, auto_populate_materials_for_cad, build_order_line_render_invocation, emit_order_line_render_notifications, @@ -101,6 +103,75 @@ def _seed_order_line_graph(session: Session, tmp_path: Path) -> OrderLine: return line +def _write_png_with_metadata(path: Path, *, rgba: tuple[int, int, int, int], date_text: str) -> None: + path.parent.mkdir(parents=True, exist_ok=True) + image = Image.new("RGBA", (8, 8), rgba) + metadata = PngImagePlugin.PngInfo() + metadata.add_text("Date", date_text) + metadata.add_text("Software", "Blender") + image.save(path, pnginfo=metadata) + + +def test_effective_material_lookup_keeps_product_assignments_authoritative_and_adds_manifest_aliases(): + cad_file = CadFile( + id=uuid.uuid4(), + original_name="bearing.step", + stored_path="/tmp/bearing.step", + file_hash=f"hash-{uuid.uuid4().hex}", + 
resolved_material_assignments={ + "inner_ring": { + "source_name": "InnerRing", + "prim_path": "/Root/Assembly/inner_ring", + "canonical_material": "HARTOMAT_010101_Steel-Bare", + }, + "usd_only_part": { + "source_name": "UsdOnlyPart", + "prim_path": "/Root/Assembly/usd_only_part", + "canonical_material": "HARTOMAT_050101_Elastomer-Black", + }, + }, + ) + + effective = _build_effective_material_lookup( + cad_file, + [ + {"part_name": "InnerRing", "material": "Steel raw"}, + ], + ) + + assert effective["InnerRing"] == "Steel raw" + assert effective["inner_ring"] == "Steel raw" + assert effective["UsdOnlyPart"] == "HARTOMAT_050101_Elastomer-Black" + assert effective["usd_only_part"] == "HARTOMAT_050101_Elastomer-Black" + + +def test_effective_material_lookup_backfills_manifest_part_keys_from_legacy_serialized_names(): + cad_file = CadFile( + id=uuid.uuid4(), + original_name="bearing.step", + stored_path="/tmp/bearing.step", + file_hash=f"hash-{uuid.uuid4().hex}", + resolved_material_assignments={ + "rwdr_b_f_802044_tr4_h122bk": { + "source_name": "RWDR_B_F-802044_TR4_H122BK", + "prim_path": "/Root/Assembly/rwdr_b_f_802044_tr4_h122bk", + "canonical_material": "HARTOMAT_010101_Steel-Bare", + }, + }, + ) + + effective = _build_effective_material_lookup( + cad_file, + [ + {"part_name": "RWDR_B_F-802044_TR4_H122B-69186", "material": "Steel--Stahl"}, + ], + ) + + assert effective["RWDR_B_F-802044_TR4_H122B-69186"] == "Steel--Stahl" + assert effective["RWDR_B_F-802044_TR4_H122BK"] == "Steel--Stahl" + assert effective["rwdr_b_f_802044_tr4_h122bk"] == "Steel--Stahl" + + def test_prepare_order_line_render_context_marks_line_processing_and_prefers_usd(sync_session, tmp_path, monkeypatch): from app.config import settings @@ -118,7 +189,10 @@ def test_prepare_order_line_render_context_marks_line_processing_and_prefers_usd } usd_asset_path = upload_dir / "usd" / "bearing.usd" usd_asset_path.parent.mkdir(parents=True, exist_ok=True) - usd_asset_path.write_text("USD", 
encoding="utf-8") + usd_asset_path.write_text( + "hartomat:canonicalMaterialName\nhartomat:partKey\n", + encoding="utf-8", + ) sync_session.add( MediaAsset( @@ -127,6 +201,9 @@ def test_prepare_order_line_render_context_marks_line_processing_and_prefers_usd product_id=line.product_id, asset_type=MediaAssetType.usd_master, storage_key="usd/bearing.usd", + render_config={ + "cache_key": "stephash:0.03:0.05:20.0:materialhash:scriptfingerprint", + }, ) ) sync_session.commit() @@ -230,6 +307,264 @@ def test_prepare_order_line_render_context_queues_refresh_for_legacy_usd(sync_se assert line.render_status == "processing" +def test_prepare_order_line_render_context_queues_refresh_for_legacy_usd_cache_key(sync_session, tmp_path, monkeypatch): + from app.config import settings + + monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads")) + upload_dir = Path(settings.upload_dir) + upload_dir.mkdir(parents=True, exist_ok=True) + + line = _seed_order_line_graph(sync_session, tmp_path) + line.product.cad_file.resolved_material_assignments = { + "inner_ring": { + "source_name": "InnerRing", + "prim_path": "/Root/Assembly/inner_ring", + "canonical_material": "HARTOMAT_010101_Steel-Bare", + } + } + + usd_asset_path = upload_dir / "usd" / "bearing.usd" + usd_asset_path.parent.mkdir(parents=True, exist_ok=True) + usd_asset_path.write_text( + "hartomat:canonicalMaterialName\nhartomat:partKey\n", + encoding="utf-8", + ) + glb_asset_path = upload_dir / "step_files" / "bearing_thumbnail.glb" + glb_asset_path.parent.mkdir(parents=True, exist_ok=True) + glb_asset_path.write_text("GLB", encoding="utf-8") + + sync_session.add_all( + [ + MediaAsset( + id=uuid.uuid4(), + cad_file_id=line.product.cad_file_id, + product_id=line.product_id, + asset_type=MediaAssetType.usd_master, + storage_key="usd/bearing.usd", + render_config={ + "cache_key": "stephash:0.03:0.05:20.0:materialhash", + }, + ), + MediaAsset( + id=uuid.uuid4(), + cad_file_id=line.product.cad_file_id, + 
product_id=line.product_id, + asset_type=MediaAssetType.gltf_geometry, + storage_key="step_files/bearing_thumbnail.glb", + ), + ] + ) + sync_session.commit() + + queued: list[str] = [] + + class _Task: + @staticmethod + def delay(cad_file_id: str) -> None: + queued.append(cad_file_id) + + monkeypatch.setattr( + "app.tasks.step_tasks.generate_usd_master_task", + _Task(), + ) + + result = prepare_order_line_render_context(sync_session, str(line.id)) + + expected_glb = tmp_path / "parts" / "bearing_thumbnail.glb" + assert result.is_ready + assert result.usd_render_path is None + assert result.glb_reuse_path == expected_glb + assert expected_glb.exists() + assert queued == [str(line.product.cad_file_id)] + + +def test_prepare_order_line_render_context_accepts_binary_usd_without_literal_hartomat_markers(sync_session, tmp_path, monkeypatch): + from app.config import settings + + monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads")) + upload_dir = Path(settings.upload_dir) + upload_dir.mkdir(parents=True, exist_ok=True) + + line = _seed_order_line_graph(sync_session, tmp_path) + line.product.cad_file.resolved_material_assignments = { + "inner_ring": { + "source_name": "InnerRing", + "prim_path": "/Root/Assembly/inner_ring", + "canonical_material": "HARTOMAT_010101_Steel-Bare", + } + } + + usd_asset_path = upload_dir / "usd" / "bearing.usd" + usd_asset_path.parent.mkdir(parents=True, exist_ok=True) + usd_asset_path.write_bytes(b"PXR-USDC\x00binary-usd-with-customdata-not-greppable") + + sync_session.add( + MediaAsset( + id=uuid.uuid4(), + cad_file_id=line.product.cad_file_id, + product_id=line.product_id, + asset_type=MediaAssetType.usd_master, + storage_key="usd/bearing.usd", + render_config={ + "cache_key": "stephash:0.03:0.05:20.0:materialhash:scriptfingerprint", + }, + ) + ) + sync_session.commit() + + queued: list[str] = [] + + class _Task: + @staticmethod + def delay(cad_file_id: str) -> None: + queued.append(cad_file_id) + + monkeypatch.setattr( + 
"app.tasks.step_tasks.generate_usd_master_task", + _Task(), + ) + + result = prepare_order_line_render_context(sync_session, str(line.id)) + + assert result.is_ready + assert result.usd_render_path == usd_asset_path + assert result.glb_reuse_path is None + assert queued == [] + + +def test_prepare_order_line_render_context_queues_refresh_for_legacy_usd_file_markers(sync_session, tmp_path, monkeypatch): + from app.config import settings + + monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads")) + upload_dir = Path(settings.upload_dir) + upload_dir.mkdir(parents=True, exist_ok=True) + + line = _seed_order_line_graph(sync_session, tmp_path) + line.product.cad_file.resolved_material_assignments = { + "inner_ring": { + "source_name": "InnerRing", + "prim_path": "/Root/Assembly/inner_ring", + "canonical_material": "HARTOMAT_010101_Steel-Bare", + } + } + + usd_asset_path = upload_dir / "usd" / "bearing.usd" + usd_asset_path.parent.mkdir(parents=True, exist_ok=True) + usd_asset_path.write_text("legacy-usd-without-hartomat-markers", encoding="utf-8") + glb_asset_path = upload_dir / "step_files" / "bearing_thumbnail.glb" + glb_asset_path.parent.mkdir(parents=True, exist_ok=True) + glb_asset_path.write_text("GLB", encoding="utf-8") + + sync_session.add_all( + [ + MediaAsset( + id=uuid.uuid4(), + cad_file_id=line.product.cad_file_id, + product_id=line.product_id, + asset_type=MediaAssetType.usd_master, + storage_key="usd/bearing.usd", + render_config={ + "cache_key": "stephash:0.03:0.05:20.0:materialhash:scriptfingerprint", + }, + ), + MediaAsset( + id=uuid.uuid4(), + cad_file_id=line.product.cad_file_id, + product_id=line.product_id, + asset_type=MediaAssetType.gltf_geometry, + storage_key="step_files/bearing_thumbnail.glb", + ), + ] + ) + sync_session.commit() + + queued: list[str] = [] + + class _Task: + @staticmethod + def delay(cad_file_id: str) -> None: + queued.append(cad_file_id) + + monkeypatch.setattr( + 
"app.tasks.step_tasks.generate_usd_master_task", + _Task(), + ) + + result = prepare_order_line_render_context(sync_session, str(line.id)) + + expected_glb = tmp_path / "parts" / "bearing_thumbnail.glb" + assert result.is_ready + assert result.usd_render_path is None + assert result.glb_reuse_path == expected_glb + assert expected_glb.exists() + assert queued == [str(line.product.cad_file_id)] + + +def test_prepare_order_line_render_context_queues_refresh_for_legacy_usd_material_field(sync_session, tmp_path, monkeypatch): + from app.config import settings + + monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads")) + upload_dir = Path(settings.upload_dir) + upload_dir.mkdir(parents=True, exist_ok=True) + + line = _seed_order_line_graph(sync_session, tmp_path) + line.product.cad_file.resolved_material_assignments = { + "inner_ring": { + "source_name": "InnerRing", + "prim_path": "/Root/Assembly/inner_ring", + "material": "SCHAEFFLER_010101_Steel-Bare", + } + } + + usd_asset_path = upload_dir / "usd" / "bearing.usd" + usd_asset_path.parent.mkdir(parents=True, exist_ok=True) + usd_asset_path.write_text("USD", encoding="utf-8") + glb_asset_path = upload_dir / "step_files" / "bearing_thumbnail.glb" + glb_asset_path.parent.mkdir(parents=True, exist_ok=True) + glb_asset_path.write_text("GLB", encoding="utf-8") + + sync_session.add_all( + [ + MediaAsset( + id=uuid.uuid4(), + cad_file_id=line.product.cad_file_id, + product_id=line.product_id, + asset_type=MediaAssetType.usd_master, + storage_key="usd/bearing.usd", + ), + MediaAsset( + id=uuid.uuid4(), + cad_file_id=line.product.cad_file_id, + product_id=line.product_id, + asset_type=MediaAssetType.gltf_geometry, + storage_key="step_files/bearing_thumbnail.glb", + ), + ] + ) + sync_session.commit() + + queued: list[str] = [] + + class _Task: + @staticmethod + def delay(cad_file_id: str) -> None: + queued.append(cad_file_id) + + monkeypatch.setattr( + "app.tasks.step_tasks.generate_usd_master_task", + _Task(), + 
) + + result = prepare_order_line_render_context(sync_session, str(line.id)) + + expected_glb = tmp_path / "parts" / "bearing_thumbnail.glb" + assert result.is_ready + assert result.usd_render_path is None + assert result.glb_reuse_path == expected_glb + assert expected_glb.exists() + assert queued == [str(line.product.cad_file_id)] + + def test_prepare_order_line_render_context_skips_closed_orders(sync_session, tmp_path, monkeypatch): from app.config import settings @@ -322,6 +657,11 @@ def test_build_order_line_render_invocation_applies_output_and_line_overrides(tm material_map={"InnerRing": "SteelPolished"}, use_materials=True, override_material="Studio White", + target_collection="Assembly", + lighting_only=True, + shadow_catcher=True, + camera_orbit=False, + template_inputs={"studio_variant": "warm"}, category_key="bearings", output_type_id=str(output_type.id), ), @@ -357,6 +697,7 @@ def test_build_order_line_render_invocation_applies_output_and_line_overrides(tm assert invocation.part_names_ordered == ["InnerRing", "OuterRing"] assert invocation.rotation_x == 12.0 assert invocation.focal_length_mm == 50.0 + assert invocation.template_inputs == {"studio_variant": "warm"} still_kwargs = invocation.as_still_renderer_kwargs( step_path=str(step_path), @@ -374,6 +715,7 @@ def test_build_order_line_render_invocation_applies_output_and_line_overrides(tm assert still_kwargs["cycles_device"] == "cuda" assert still_kwargs["material_library_path"] == "/libraries/materials.blend" assert still_kwargs["material_override"] == "Studio White" + assert still_kwargs["template_inputs"] == {"studio_variant": "warm"} assert still_kwargs["job_id"] == "job-1" assert still_kwargs["order_line_id"] == "line-1" @@ -437,6 +779,11 @@ def test_build_order_line_render_invocation_autoscales_samples_and_prefers_mater material_map={"InnerRing": "TemplateSteel"}, use_materials=True, override_material="Template White", + target_collection="Product", + lighting_only=False, + shadow_catcher=False, 
+ camera_orbit=True, + template_inputs={"studio_variant": "warm"}, category_key="bearings", output_type_id=str(output_type.id), ), @@ -480,11 +827,13 @@ def test_build_order_line_render_invocation_autoscales_samples_and_prefers_mater assert turntable_kwargs["samples"] == 64 assert turntable_kwargs["material_map"] == {"InnerRing": "ResolvedSteel"} assert turntable_kwargs["material_library_path"] is None + assert turntable_kwargs["template_inputs"] == {"studio_variant": "warm"} assert cinematic_kwargs["width"] == 1024 assert cinematic_kwargs["height"] == 512 assert cinematic_kwargs["engine"] == "eevee" assert cinematic_kwargs["samples"] == 64 assert cinematic_kwargs["material_override"] == "Resolved White" + assert cinematic_kwargs["template_inputs"] == {"studio_variant": "warm"} def test_resolve_order_line_template_context_uses_exact_template_and_override(sync_session, tmp_path, monkeypatch): @@ -584,6 +933,153 @@ def test_resolve_order_line_template_context_supports_explicit_template_and_libr "InnerRing": "resolved:Steel raw", "OuterRing": "resolved:Steel raw", } + assert result.target_collection == "ForcedCollection" + assert result.lighting_only is False + assert result.shadow_catcher is False + assert result.camera_orbit is True + + +def test_resolve_order_line_template_context_applies_template_override_modes( + sync_session, + tmp_path, + monkeypatch, +): + from app.config import settings + + monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads")) + line = _seed_order_line_graph(sync_session, tmp_path) + template = RenderTemplate( + id=uuid.uuid4(), + name="Overrideable Template", + category_key="bearings", + blend_file_path="/templates/overrideable.blend", + original_filename="overrideable.blend", + target_collection="TemplateCollection", + material_replace_enabled=False, + lighting_only=False, + shadow_catcher_enabled=False, + camera_orbit=True, + is_active=True, + output_types=[line.output_type], + ) + sync_session.add(template) + 
sync_session.add( + AssetLibrary( + id=uuid.uuid4(), + name="Default Library", + blend_file_path="/libraries/materials.blend", + is_active=True, + ) + ) + sync_session.commit() + + monkeypatch.setattr( + "app.domains.rendering.workflow_runtime_services.resolve_material_map", + lambda raw_map: {key: f"resolved:{value}" for key, value in raw_map.items()}, + ) + + setup = prepare_order_line_render_context(sync_session, str(line.id)) + result = resolve_order_line_template_context( + sync_session, + setup, + template_id_override=str(template.id), + material_library_path_override="/libraries/materials.blend", + target_collection_override="NodeCollection", + material_replace_mode="enabled", + lighting_only_mode="enabled", + shadow_catcher_mode="enabled", + camera_orbit_mode="disabled", + ) + + assert result.template is not None + assert result.use_materials is True + assert result.material_map == { + "InnerRing": "resolved:Steel raw", + "OuterRing": "resolved:Steel raw", + } + assert result.target_collection == "NodeCollection" + assert result.lighting_only is True + assert result.shadow_catcher is True + assert result.camera_orbit is False + + +def test_resolve_order_line_template_context_exposes_template_schema_and_invocation_inputs( + sync_session, + tmp_path, + monkeypatch, +): + from app.config import settings + + monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads")) + line = _seed_order_line_graph(sync_session, tmp_path) + template = RenderTemplate( + id=uuid.uuid4(), + name="Schema Template", + category_key="bearings", + blend_file_path="/templates/schema-template.blend", + original_filename="schema-template.blend", + target_collection="Product", + material_replace_enabled=True, + lighting_only=False, + shadow_catcher_enabled=False, + camera_orbit=True, + workflow_input_schema=[ + { + "key": "studio_variant", + "label": "Studio Variant", + "type": "select", + "section": "Template Inputs", + "default": "default", + "options": [ + {"value": 
"default", "label": "Default"}, + {"value": "warm", "label": "Warm"}, + ], + }, + { + "key": "camera_profile", + "label": "Camera Profile", + "type": "text", + "section": "Template Inputs", + "default": "macro", + }, + ], + is_active=True, + output_types=[line.output_type], + ) + sync_session.add(template) + sync_session.add( + AssetLibrary( + id=uuid.uuid4(), + name="Default Library", + blend_file_path="/libraries/materials.blend", + is_active=True, + ) + ) + sync_session.commit() + + monkeypatch.setattr( + "app.domains.rendering.workflow_runtime_services.resolve_material_map", + lambda raw_map: {key: f"resolved:{value}" for key, value in raw_map.items()}, + ) + + setup = prepare_order_line_render_context(sync_session, str(line.id)) + template_context = resolve_order_line_template_context( + sync_session, + setup, + template_id_override=str(template.id), + template_input_overrides={"studio_variant": "warm"}, + ) + invocation = build_order_line_render_invocation(setup, template_context=template_context) + + assert template_context.workflow_input_schema == template.workflow_input_schema + assert template_context.template_inputs == { + "studio_variant": "warm", + "camera_profile": "macro", + } + assert invocation.template_inputs == { + "studio_variant": "warm", + "camera_profile": "macro", + } def test_resolve_order_line_template_context_can_disable_material_resolution(sync_session, tmp_path, monkeypatch): @@ -1077,6 +1573,56 @@ def test_persist_order_line_output_canonicalizes_step_file_outputs(sync_session, assert asset.storage_key == f"renders/{line.id}/{expected_path.name}" +def test_png_persistence_strips_volatile_metadata_for_primary_and_observer_outputs( + sync_session, + tmp_path, + monkeypatch, +): + from app.config import settings + + upload_dir = tmp_path / "uploads" + monkeypatch.setattr(settings, "upload_dir", str(upload_dir)) + line = _seed_order_line_graph(sync_session, tmp_path) + + primary_source = upload_dir / "step_files" / "renders" / 
f"line_{line.id}.png" + observer_source = upload_dir / "step_files" / "renders" / f"line_{line.id}_shadow.png" + _write_png_with_metadata( + primary_source, + rgba=(12, 34, 56, 255), + date_text="2026/04/10 17:05:27", + ) + _write_png_with_metadata( + observer_source, + rgba=(12, 34, 56, 255), + date_text="2026/04/10 17:06:30", + ) + + primary_result = persist_order_line_output( + sync_session, + line, + success=True, + output_path=str(primary_source), + render_log={"renderer": "blender", "engine_used": "cycles"}, + ) + observer_result = persist_order_line_media_asset( + sync_session, + line, + success=True, + output_path=str(observer_source), + asset_type=MediaAssetType.still, + render_log={"renderer": "blender", "engine_used": "cycles"}, + ) + + primary_bytes = Path(primary_result.result_path or "").read_bytes() + observer_bytes = Path(observer_result.result_path or "").read_bytes() + + assert primary_bytes == observer_bytes + assert b"Date" not in primary_bytes + assert b"Date" not in observer_bytes + assert Image.open(primary_result.result_path).getpixel((0, 0)) == (12, 34, 56, 255) + assert Image.open(observer_result.result_path).getpixel((0, 0)) == (12, 34, 56, 255) + + def test_persist_order_line_output_classifies_blend_outputs_as_blend_assets(sync_session, tmp_path, monkeypatch): from app.config import settings diff --git a/backend/tests/domains/test_workflow_schema.py b/backend/tests/domains/test_workflow_schema.py index 95596ac..07168e4 100644 --- a/backend/tests/domains/test_workflow_schema.py +++ b/backend/tests/domains/test_workflow_schema.py @@ -1,6 +1,7 @@ import pytest from pydantic import ValidationError +from app.core.process_steps import StepName from app.domains.rendering.workflow_schema import WorkflowConfig @@ -72,6 +73,35 @@ def test_workflow_schema_rejects_unknown_node_params(): ) +def test_workflow_schema_rejects_unregistered_nodes_from_registry(monkeypatch): + from app.domains.rendering import workflow_schema as schema_module + + original 
= schema_module.get_node_definition + + def fake_get_node_definition(step): + if step == StepName.GLB_BBOX: + return None + return original(step) + + monkeypatch.setattr(schema_module, "get_node_definition", fake_get_node_definition) + + with pytest.raises(ValidationError, match="is not registered in workflow_node_registry"): + WorkflowConfig.model_validate( + { + "version": 1, + "nodes": [ + { + "id": "bbox", + "step": StepName.GLB_BBOX.value, + "params": {}, + }, + ], + "edges": [], + "ui": {"family": "order_line"}, + } + ) + + def test_workflow_schema_accepts_known_node_params(): config = WorkflowConfig.model_validate( { @@ -92,6 +122,149 @@ def test_workflow_schema_accepts_known_node_params(): assert config.ui.family == "order_line" +def test_workflow_schema_rejects_invalid_glb_path_format(): + with pytest.raises(ValidationError, match="must point to a .glb file"): + WorkflowConfig.model_validate( + { + "version": 1, + "nodes": [ + { + "id": "bbox", + "step": "glb_bbox", + "params": {"glb_path": "/tmp/model.gltf"}, + }, + ], + "edges": [], + } + ) + + +def test_workflow_schema_rejects_invalid_template_id_override_format(): + with pytest.raises(ValidationError, match="must be a valid UUID"): + WorkflowConfig.model_validate( + { + "version": 1, + "nodes": [ + { + "id": "template", + "step": "resolve_template", + "params": {"template_id_override": "not-a-uuid"}, + }, + ], + "edges": [], + } + ) + + +def test_workflow_schema_rejects_invalid_material_library_path_format(): + with pytest.raises(ValidationError, match="must point to a .blend file"): + WorkflowConfig.model_validate( + { + "version": 1, + "nodes": [ + { + "id": "template", + "step": "resolve_template", + "params": {"material_library_path": "/tmp/library.txt"}, + }, + ], + "edges": [], + } + ) + + +def test_workflow_schema_rejects_invalid_noise_threshold_format(): + with pytest.raises(ValidationError, match="must be a valid numeric string"): + WorkflowConfig.model_validate( + { + "version": 1, + "nodes": 
[ + { + "id": "render", + "step": "blender_still", + "params": {"noise_threshold": "fast"}, + }, + ], + "edges": [], + } + ) + + +def test_workflow_schema_rejects_invalid_bg_color_format(): + with pytest.raises(ValidationError, match="must be a hex color"): + WorkflowConfig.model_validate( + { + "version": 1, + "nodes": [ + { + "id": "turntable", + "step": "blender_turntable", + "params": {"bg_color": "blue"}, + }, + ], + "edges": [], + } + ) + + +def test_workflow_schema_rejects_invalid_output_name_suffix_format(): + with pytest.raises(ValidationError, match="may only contain letters, numbers"): + WorkflowConfig.model_validate( + { + "version": 1, + "nodes": [ + { + "id": "blend", + "step": "export_blend", + "params": {"output_name_suffix": "../unsafe"}, + }, + ], + "edges": [], + } + ) + + +def test_workflow_schema_accepts_empty_optional_text_overrides(): + config = WorkflowConfig.model_validate( + { + "version": 1, + "nodes": [ + { + "id": "template", + "step": "resolve_template", + "params": { + "template_id_override": "", + "material_library_path": "", + }, + }, + { + "id": "render", + "step": "blender_still", + "params": { + "noise_threshold": "", + "material_override": "", + }, + }, + { + "id": "turntable", + "step": "blender_turntable", + "params": {"bg_color": ""}, + }, + { + "id": "blend", + "step": "export_blend", + "params": {"output_name_suffix": ""}, + }, + ], + "edges": [], + "ui": {"family": "order_line"}, + } + ) + + assert config.ui is not None + assert config.ui.family == "order_line" + + def test_workflow_schema_rejects_ui_family_mismatch(): with pytest.raises(ValidationError, match="ui.family"): WorkflowConfig.model_validate( @@ -226,6 +399,32 @@ def test_workflow_schema_accepts_transitive_contract_wiring(): assert config.ui.execution_mode == "graph" +def test_workflow_schema_accepts_cad_intake_contract_wiring_with_shared_bbox_node(): + config = WorkflowConfig.model_validate( + { + "version": 1, + "nodes": [ + {"id": "resolve_step", "step": 
"resolve_step_path", "params": {}}, + {"id": "export_glb", "step": "occ_glb_export", "params": {}}, + {"id": "bbox", "step": "glb_bbox", "params": {}}, + {"id": "threejs_thumb", "step": "threejs_render", "params": {}}, + {"id": "save", "step": "thumbnail_save", "params": {}}, + ], + "edges": [ + {"from": "resolve_step", "to": "export_glb"}, + {"from": "export_glb", "to": "bbox"}, + {"from": "export_glb", "to": "threejs_thumb"}, + {"from": "bbox", "to": "threejs_thumb"}, + {"from": "threejs_thumb", "to": "save"}, + ], + "ui": {"family": "cad_file", "execution_mode": "graph"}, + } + ) + + assert config.ui is not None + assert config.ui.family == "cad_file" + + def test_workflow_schema_rejects_mixed_family_graph_execution(): with pytest.raises(ValidationError, match="single-family"): WorkflowConfig.model_validate( diff --git a/backend/tests/domains/test_workflow_smoke_harness.py b/backend/tests/domains/test_workflow_smoke_harness.py new file mode 100644 index 0000000..5810293 --- /dev/null +++ b/backend/tests/domains/test_workflow_smoke_harness.py @@ -0,0 +1,227 @@ +from __future__ import annotations + +import importlib.util +from pathlib import Path +import sys +import types + + +def _load_render_pipeline_script(): + candidates = [ + Path(__file__).resolve().parents[3] / "scripts" / "test_render_pipeline.py", + Path("/compose/scripts/test_render_pipeline.py"), + ] + script_path = next((candidate for candidate in candidates if candidate.exists()), None) + assert script_path is not None + if "requests" not in sys.modules: + requests_stub = types.ModuleType("requests") + requests_stub.Response = object + requests_stub.Session = object + requests_stub.exceptions = types.SimpleNamespace( + ConnectionError=RuntimeError, + ChunkedEncodingError=RuntimeError, + ReadTimeout=RuntimeError, + ) + sys.modules["requests"] = requests_stub + spec = importlib.util.spec_from_file_location("test_render_pipeline_script", script_path) + assert spec is not None + assert spec.loader is not 
None + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + return module + + +def test_build_output_type_workflow_link_payload_sets_graph_rollout_mode_explicitly(): + module = _load_render_pipeline_script() + + payload = module.build_output_type_workflow_link_payload( + workflow_definition_id="workflow-graph-123", + execution_mode="graph", + ) + + assert payload == { + "workflow_definition_id": "workflow-graph-123", + "workflow_rollout_mode": "graph", + "is_active": True, + } + + +def test_build_output_type_workflow_link_payload_sets_shadow_rollout_mode_explicitly(): + module = _load_render_pipeline_script() + + payload = module.build_output_type_workflow_link_payload( + workflow_definition_id="workflow-shadow-123", + execution_mode="shadow", + ) + + assert payload == { + "workflow_definition_id": "workflow-shadow-123", + "workflow_rollout_mode": "shadow", + "is_active": True, + } + + +def test_build_output_type_workflow_link_payload_keeps_legacy_rollout_implicit(): + module = _load_render_pipeline_script() + + payload = module.build_output_type_workflow_link_payload( + workflow_definition_id="workflow-legacy-123", + execution_mode="legacy", + ) + + assert payload == { + "workflow_definition_id": "workflow-legacy-123", + "is_active": True, + } + + +def test_build_graph_still_config_matches_canonical_still_graph_contract(): + module = _load_render_pipeline_script() + + config = module.build_graph_still_config( + execution_mode="shadow", + render_params={ + "resolution": [1920, 1080], + "engine": "cycles", + "samples": 128, + }, + ) + + assert config["ui"] == { + "preset": "still_graph", + "execution_mode": "shadow", + "family": "order_line", + } + assert [node["id"] for node in config["nodes"]] == [ + "setup", + "template", + "populate_materials", + "bbox", + "resolve_materials", + "render", + "output", + "notify", + ] + assert config["edges"] == [ + {"from": "setup", "to": "template"}, + {"from": "setup", "to": "populate_materials"}, 
+ {"from": "setup", "to": "bbox"}, + {"from": "template", "to": "resolve_materials"}, + {"from": "populate_materials", "to": "resolve_materials"}, + {"from": "resolve_materials", "to": "render"}, + {"from": "bbox", "to": "render"}, + {"from": "template", "to": "render"}, + {"from": "render", "to": "output"}, + {"from": "render", "to": "notify"}, + ] + + render_node = next(node for node in config["nodes"] if node["id"] == "render") + assert render_node["params"] == { + "width": 1920, + "height": 1080, + "render_engine": "cycles", + "samples": 128, + "use_custom_render_settings": False, + } + + +def test_render_template_candidates_for_output_type_matches_m2m_and_legacy_fields(): + module = _load_render_pipeline_script() + + templates = [ + { + "id": "template-active-m2m", + "is_active": True, + "output_type_ids": ["ot-1", "ot-2"], + "output_type_id": None, + }, + { + "id": "template-active-legacy", + "is_active": True, + "output_type_ids": [], + "output_type_id": "ot-1", + }, + { + "id": "template-inactive", + "is_active": False, + "output_type_ids": ["ot-1"], + "output_type_id": None, + }, + ] + + matches = module.render_template_candidates_for_output_type(templates, "ot-1") + + assert [template["id"] for template in matches] == [ + "template-active-m2m", + "template-active-legacy", + ] + + +def test_build_graph_still_config_can_inherit_output_type_render_settings(): + module = _load_render_pipeline_script() + + config = module.build_graph_still_config( + execution_mode="shadow", + use_custom_render_settings=False, + ) + + render_node = next(node for node in config["nodes"] if node["id"] == "render") + assert render_node["params"] == { + "use_custom_render_settings": False, + } + + +def test_choose_template_backed_output_type_prefers_requested_name(): + module = _load_render_pipeline_script() + + output_types = [ + { + "id": "ot-1", + "name": "HQ-Blender-Alpha-HDR", + "renderer": "blender", + "artifact_kind": "still_image", + "is_animation": False, + }, + { + "id": 
"ot-2", + "name": "Turntable", + "renderer": "blender", + "artifact_kind": "turntable_video", + "is_animation": True, + }, + ] + templates = [ + { + "id": "template-1", + "is_active": True, + "output_type_ids": ["ot-1"], + "output_type_id": None, + } + ] + + output_type, matches = module.choose_template_backed_output_type( + output_types, + templates, + preferred_name="HQ-Blender-Alpha-HDR", + ) + + assert output_type["id"] == "ot-1" + assert [template["id"] for template in matches] == ["template-1"] + + +def test_build_output_type_workflow_snapshot_keeps_restore_contract(): + module = _load_render_pipeline_script() + + snapshot = module.build_output_type_workflow_snapshot( + { + "workflow_definition_id": "workflow-123", + "workflow_rollout_mode": "shadow", + "is_active": False, + } + ) + + assert snapshot == { + "workflow_definition_id": "workflow-123", + "workflow_rollout_mode": "shadow", + "is_active": False, + } diff --git a/backend/tests/integration/test_cad_model_endpoint.py b/backend/tests/integration/test_cad_model_endpoint.py new file mode 100644 index 0000000..705dbb1 --- /dev/null +++ b/backend/tests/integration/test_cad_model_endpoint.py @@ -0,0 +1,48 @@ +from __future__ import annotations + +import uuid + +import pytest + + +@pytest.mark.integration +@pytest.mark.asyncio +async def test_cad_model_endpoint_falls_back_to_gltf_geometry_asset( + client, + db, + auth_headers, + tmp_path, +): + from app.domains.media.models import MediaAsset, MediaAssetType + from app.domains.products.models import CadFile, ProcessingStatus + + glb_path = tmp_path / "example.glb" + glb_path.write_bytes(b"glTF") + + cad = CadFile( + id=uuid.uuid4(), + original_name="example.step", + stored_path=str(tmp_path / "example.step"), + file_hash="cad-model-endpoint-fallback", + file_size=123, + processing_status=ProcessingStatus.completed, + gltf_path=None, + ) + db.add(cad) + await db.flush() + + asset = MediaAsset( + id=uuid.uuid4(), + cad_file_id=cad.id, + 
asset_type=MediaAssetType.gltf_geometry, + storage_key=str(glb_path), + mime_type="model/gltf-binary", + ) + db.add(asset) + await db.commit() + + response = await client.get(f"/api/cad/{cad.id}/model", headers=auth_headers) + + assert response.status_code == 200 + assert response.headers["content-type"] == "model/gltf-binary" + assert response.content == b"glTF" diff --git a/backend/tests/integration/test_media_batch_delete.py b/backend/tests/integration/test_media_batch_delete.py new file mode 100644 index 0000000..86e1a9c --- /dev/null +++ b/backend/tests/integration/test_media_batch_delete.py @@ -0,0 +1,30 @@ +from __future__ import annotations + +import uuid + +import pytest + + +@pytest.mark.asyncio +async def test_batch_delete_assets_awaits_global_admin_guard(client, auth_headers, monkeypatch): + guard_calls: list[str] = [] + deleted_asset_ids: list[str] = [] + + async def _guard(user): + guard_calls.append(str(user.id)) + return user + + async def _delete_media_asset(_db, asset_id): + deleted_asset_ids.append(str(asset_id)) + return True + + monkeypatch.setattr("app.utils.auth.require_global_admin", _guard) + monkeypatch.setattr("app.domains.media.service.delete_media_asset", _delete_media_asset) + + asset_ids = [str(uuid.uuid4()), str(uuid.uuid4())] + response = await client.post("/api/media/batch-delete", json=asset_ids, headers=auth_headers) + + assert response.status_code == 200, response.text + assert len(guard_calls) == 1 + assert deleted_asset_ids == asset_ids + assert response.json() == {"deleted": 2, "requested": 2} diff --git a/backend/tests/test_admin_settings_defaults.py b/backend/tests/test_admin_settings_defaults.py new file mode 100644 index 0000000..5cd8254 --- /dev/null +++ b/backend/tests/test_admin_settings_defaults.py @@ -0,0 +1,12 @@ +from app.api.routers.admin import SETTINGS_DEFAULTS, _settings_to_out + + +def test_settings_to_out_uses_consistent_tessellation_fallbacks() -> None: + raw = dict(SETTINGS_DEFAULTS) + 
raw.pop("scene_angular_deflection", None) + raw.pop("render_angular_deflection", None) + + settings = _settings_to_out(raw) + + assert settings.scene_angular_deflection == 0.1 + assert settings.render_angular_deflection == 0.05 diff --git a/backend/tests/test_asset_library_paths.py b/backend/tests/test_asset_library_paths.py new file mode 100644 index 0000000..05fb8fd --- /dev/null +++ b/backend/tests/test_asset_library_paths.py @@ -0,0 +1,67 @@ +from pathlib import Path + +from app.config import settings +from app.domains.materials.library_paths import ( + asset_library_dir, + list_asset_library_blends, + resolve_asset_library_blend_path, +) + + +def test_asset_library_dir_uses_upload_dir(monkeypatch, tmp_path): + monkeypatch.setattr(settings, "upload_dir", str(tmp_path / "uploads")) + + assert asset_library_dir() == tmp_path / "uploads" / "asset-libraries" + + +def test_resolve_asset_library_blend_path_prefers_existing_configured_path(monkeypatch, tmp_path): + upload_dir = tmp_path / "uploads" + library_dir = upload_dir / "asset-libraries" + library_dir.mkdir(parents=True, exist_ok=True) + configured = tmp_path / "external" / "materials.blend" + configured.parent.mkdir(parents=True, exist_ok=True) + configured.write_bytes(b"blend") + monkeypatch.setattr(settings, "upload_dir", str(upload_dir)) + + resolved = resolve_asset_library_blend_path( + blend_file_path=str(configured), + asset_library_id="ignored", + ) + + assert resolved == str(configured) + + +def test_resolve_asset_library_blend_path_falls_back_to_id_named_file(monkeypatch, tmp_path): + upload_dir = tmp_path / "uploads" + library_dir = upload_dir / "asset-libraries" + library_dir.mkdir(parents=True, exist_ok=True) + expected = library_dir / "1234.blend" + expected.write_bytes(b"blend") + monkeypatch.setattr(settings, "upload_dir", str(upload_dir)) + + resolved = resolve_asset_library_blend_path( + blend_file_path=str(library_dir / "missing.blend"), + asset_library_id="1234", + ) + + assert resolved == 
str(expected) + + +def test_resolve_asset_library_blend_path_falls_back_to_newest_available_file(monkeypatch, tmp_path): + upload_dir = tmp_path / "uploads" + library_dir = upload_dir / "asset-libraries" + library_dir.mkdir(parents=True, exist_ok=True) + older = library_dir / "older.blend" + newer = library_dir / "newer.blend" + older.write_bytes(b"older") + newer.write_bytes(b"newer") + newer.touch() + monkeypatch.setattr(settings, "upload_dir", str(upload_dir)) + + resolved = resolve_asset_library_blend_path( + blend_file_path=str(library_dir / "missing.blend"), + asset_library_id="missing", + ) + + assert resolved == str(newer) + assert list_asset_library_blends() == [newer, older] diff --git a/backend/tests/test_export_glb_task.py b/backend/tests/test_export_glb_task.py new file mode 100644 index 0000000..542b8ee --- /dev/null +++ b/backend/tests/test_export_glb_task.py @@ -0,0 +1,63 @@ +from __future__ import annotations + +import uuid +from pathlib import Path + +import app.models # noqa: F401 Ensures SQLAlchemy relationships are registered. 
+ +from app.domains.media.models import MediaAsset, MediaAssetType +from app.domains.pipeline.tasks.export_glb import _usd_cache_hit_refresh_reason +from app.domains.products.models import CadFile + + +def _build_cad_file() -> CadFile: + return CadFile( + id=uuid.uuid4(), + original_name="bearing.step", + stored_path="/tmp/bearing.step", + file_hash=f"hash-{uuid.uuid4().hex}", + resolved_material_assignments={ + "inner_ring": { + "source_name": "InnerRing", + "prim_path": "/Root/Assembly/inner_ring", + "canonical_material": "HARTOMAT_010101_Steel-Bare", + } + }, + ) + + +def _build_usd_asset() -> MediaAsset: + return MediaAsset( + id=uuid.uuid4(), + cad_file_id=uuid.uuid4(), + asset_type=MediaAssetType.usd_master, + storage_key="step_files/bearing_master.usd", + render_config={ + "cache_key": "stephash:0.03:0.05:20.0:materialhash:scriptfingerprint", + }, + ) + + +def test_usd_cache_hit_refresh_reason_accepts_binary_usd_without_literal_hartomat_tokens(tmp_path: Path): + cad_file = _build_cad_file() + usd_asset = _build_usd_asset() + usd_path = tmp_path / "bearing_master.usd" + usd_path.write_text("#usda 1.0\n", encoding="utf-8") + + refresh_reason = _usd_cache_hit_refresh_reason(cad_file, usd_asset, usd_path) + + assert refresh_reason is None + + +def test_usd_cache_hit_refresh_reason_accepts_current_hartomat_usd(tmp_path: Path): + cad_file = _build_cad_file() + usd_asset = _build_usd_asset() + usd_path = tmp_path / "bearing_master.usd" + usd_path.write_text( + "hartomat:canonicalMaterialName\nhartomat:partKey\n", + encoding="utf-8", + ) + + refresh_reason = _usd_cache_hit_refresh_reason(cad_file, usd_asset, usd_path) + + assert refresh_reason is None diff --git a/backend/tests/test_export_step_to_gltf.py b/backend/tests/test_export_step_to_gltf.py new file mode 100644 index 0000000..66b398e --- /dev/null +++ b/backend/tests/test_export_step_to_gltf.py @@ -0,0 +1,220 @@ +from __future__ import annotations + +import importlib.util +import json +import struct +from 
pathlib import Path + + +def _load_export_module(): + candidates = [ + Path(__file__).resolve().parents[2] / "render-worker" / "scripts" / "export_step_to_gltf.py", + Path("/compose/render-worker/scripts/export_step_to_gltf.py"), + ] + module_path = next((path for path in candidates if path.exists()), None) + assert module_path is not None + spec = importlib.util.spec_from_file_location("test_export_step_to_gltf", module_path) + assert spec is not None + assert spec.loader is not None + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + return module + + +def _write_minimal_glb(path: Path, payload: dict) -> None: + json_bytes = json.dumps(payload, separators=(",", ":")).encode() + pad = (4 - len(json_bytes) % 4) % 4 + json_bytes += b" " * pad + chunk = struct.pack(" dict: + data = path.read_bytes() + json_len = struct.unpack_from("= '3.13'", + "python_full_version < '3.13'", +] + +[[package]] +name = "aiofiles" +version = "25.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/41/c3/534eac40372d8ee36ef40df62ec129bee4fdb5ad9706e58a29be53b2c970/aiofiles-25.1.0.tar.gz", hash = "sha256:a8d728f0a29de45dc521f18f07297428d56992a742f0cd2701ba86e44d23d5b2", size = 46354, upload-time = "2025-10-09T20:51:04.358Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/8a/340a1555ae33d7354dbca4faa54948d76d89a27ceef032c8c3bc661d003e/aiofiles-25.1.0-py3-none-any.whl", hash = "sha256:abe311e527c862958650f9438e859c1fa7568a141b22abcd015e120e86a85695", size = 14668, upload-time = "2025-10-09T20:51:03.174Z" }, +] + +[[package]] +name = "alembic" +version = "1.18.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mako" }, + { name = "sqlalchemy" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/94/13/8b084e0f2efb0275a1d534838844926f798bd766566b1375174e2448cd31/alembic-1.18.4.tar.gz", hash = 
"sha256:cb6e1fd84b6174ab8dbb2329f86d631ba9559dd78df550b57804d607672cedbc", size = 2056725, upload-time = "2026-02-10T16:00:47.195Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/29/6533c317b74f707ea28f8d633734dbda2119bbadfc61b2f3640ba835d0f7/alembic-1.18.4-py3-none-any.whl", hash = "sha256:a5ed4adcf6d8a4cb575f3d759f071b03cd6e5c7618eb796cb52497be25bfe19a", size = 263893, upload-time = "2026-02-10T16:00:49.997Z" }, +] + +[[package]] +name = "amqp" +version = "5.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "vine" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/79/fc/ec94a357dfc6683d8c86f8b4cfa5416a4c36b28052ec8260c77aca96a443/amqp-5.3.1.tar.gz", hash = "sha256:cddc00c725449522023bad949f70fff7b48f0b1ade74d170a6f10ab044739432", size = 129013, upload-time = "2024-11-12T19:55:44.051Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/99/fc813cd978842c26c82534010ea849eee9ab3a13ea2b74e95cb9c99e747b/amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2", size = 50944, upload-time = "2024-11-12T19:55:41.782Z" }, +] + +[[package]] +name = "annotated-doc" +version = "0.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } 
+sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.13.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/14/2c5dd9f512b66549ae92767a9c7b330ae88e1932ca57876909410251fe13/anyio-4.13.0.tar.gz", hash = "sha256:334b70e641fd2221c1505b3890c69882fe4a2df910cba14d97019b90b24439dc", size = 231622, upload-time = "2026-03-24T12:59:09.671Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/42/e921fccf5015463e32a3cf6ee7f980a6ed0f395ceeaa45060b61d86486c2/anyio-4.13.0-py3-none-any.whl", hash = "sha256:08b310f9e24a9594186fd75b4f73f4a4152069e3853f1ed8bfbf58369f4ad708", size = 114353, upload-time = "2026-03-24T12:59:08.246Z" }, +] + +[[package]] +name = "async-timeout" +version = "5.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = 
"sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, +] + +[[package]] +name = "asyncpg" +version = "0.31.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/cc/d18065ce2380d80b1bcce927c24a2642efd38918e33fd724bc4bca904877/asyncpg-0.31.0.tar.gz", hash = "sha256:c989386c83940bfbd787180f2b1519415e2d3d6277a70d9d0f0145ac73500735", size = 993667, upload-time = "2025-11-24T23:27:00.812Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/17/cc02bc49bc350623d050fa139e34ea512cd6e020562f2a7312a7bcae4bc9/asyncpg-0.31.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eee690960e8ab85063ba93af2ce128c0f52fd655fdff9fdb1a28df01329f031d", size = 643159, upload-time = "2025-11-24T23:25:36.443Z" }, + { url = "https://files.pythonhosted.org/packages/a4/62/4ded7d400a7b651adf06f49ea8f73100cca07c6df012119594d1e3447aa6/asyncpg-0.31.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2657204552b75f8288de08ca60faf4a99a65deef3a71d1467454123205a88fab", size = 638157, upload-time = "2025-11-24T23:25:37.89Z" }, + { url = "https://files.pythonhosted.org/packages/d6/5b/4179538a9a72166a0bf60ad783b1ef16efb7960e4d7b9afe9f77a5551680/asyncpg-0.31.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a429e842a3a4b4ea240ea52d7fe3f82d5149853249306f7ff166cb9948faa46c", size = 2918051, upload-time = "2025-11-24T23:25:39.461Z" }, + { url = "https://files.pythonhosted.org/packages/e6/35/c27719ae0536c5b6e61e4701391ffe435ef59539e9360959240d6e47c8c8/asyncpg-0.31.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0807be46c32c963ae40d329b3a686356e417f674c976c07fa49f1b30303f109", size = 2972640, upload-time = "2025-11-24T23:25:41.512Z" }, + { url = 
"https://files.pythonhosted.org/packages/43/f4/01ebb9207f29e645a64699b9ce0eefeff8e7a33494e1d29bb53736f7766b/asyncpg-0.31.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e5d5098f63beeae93512ee513d4c0c53dc12e9aa2b7a1af5a81cddf93fe4e4da", size = 2851050, upload-time = "2025-11-24T23:25:43.153Z" }, + { url = "https://files.pythonhosted.org/packages/3e/f4/03ff1426acc87be0f4e8d40fa2bff5c3952bef0080062af9efc2212e3be8/asyncpg-0.31.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37fc6c00a814e18eef51833545d1891cac9aa69140598bb076b4cd29b3e010b9", size = 2962574, upload-time = "2025-11-24T23:25:44.942Z" }, + { url = "https://files.pythonhosted.org/packages/c7/39/cc788dfca3d4060f9d93e67be396ceec458dfc429e26139059e58c2c244d/asyncpg-0.31.0-cp311-cp311-win32.whl", hash = "sha256:5a4af56edf82a701aece93190cc4e094d2df7d33f6e915c222fb09efbb5afc24", size = 521076, upload-time = "2025-11-24T23:25:46.486Z" }, + { url = "https://files.pythonhosted.org/packages/28/fc/735af5384c029eb7f1ca60ccb8fa95521dbdaeef788edf4cecfc604c3cab/asyncpg-0.31.0-cp311-cp311-win_amd64.whl", hash = "sha256:480c4befbdf079c14c9ca43c8c5e1fe8b6296c96f1f927158d4f1e750aacc047", size = 584980, upload-time = "2025-11-24T23:25:47.938Z" }, + { url = "https://files.pythonhosted.org/packages/2a/a6/59d0a146e61d20e18db7396583242e32e0f120693b67a8de43f1557033e2/asyncpg-0.31.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b44c31e1efc1c15188ef183f287c728e2046abb1d26af4d20858215d50d91fad", size = 662042, upload-time = "2025-11-24T23:25:49.578Z" }, + { url = "https://files.pythonhosted.org/packages/36/01/ffaa189dcb63a2471720615e60185c3f6327716fdc0fc04334436fbb7c65/asyncpg-0.31.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0c89ccf741c067614c9b5fc7f1fc6f3b61ab05ae4aaa966e6fd6b93097c7d20d", size = 638504, upload-time = "2025-11-24T23:25:51.501Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/62/3f699ba45d8bd24c5d65392190d19656d74ff0185f42e19d0bbd973bb371/asyncpg-0.31.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:12b3b2e39dc5470abd5e98c8d3373e4b1d1234d9fbdedf538798b2c13c64460a", size = 3426241, upload-time = "2025-11-24T23:25:53.278Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d1/a867c2150f9c6e7af6462637f613ba67f78a314b00db220cd26ff559d532/asyncpg-0.31.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:aad7a33913fb8bcb5454313377cc330fbb19a0cd5faa7272407d8a0c4257b671", size = 3520321, upload-time = "2025-11-24T23:25:54.982Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1a/cce4c3f246805ecd285a3591222a2611141f1669d002163abef999b60f98/asyncpg-0.31.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3df118d94f46d85b2e434fd62c84cb66d5834d5a890725fe625f498e72e4d5ec", size = 3316685, upload-time = "2025-11-24T23:25:57.43Z" }, + { url = "https://files.pythonhosted.org/packages/40/ae/0fc961179e78cc579e138fad6eb580448ecae64908f95b8cb8ee2f241f67/asyncpg-0.31.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd5b6efff3c17c3202d4b37189969acf8927438a238c6257f66be3c426beba20", size = 3471858, upload-time = "2025-11-24T23:25:59.636Z" }, + { url = "https://files.pythonhosted.org/packages/52/b2/b20e09670be031afa4cbfabd645caece7f85ec62d69c312239de568e058e/asyncpg-0.31.0-cp312-cp312-win32.whl", hash = "sha256:027eaa61361ec735926566f995d959ade4796f6a49d3bde17e5134b9964f9ba8", size = 527852, upload-time = "2025-11-24T23:26:01.084Z" }, + { url = "https://files.pythonhosted.org/packages/b5/f0/f2ed1de154e15b107dc692262395b3c17fc34eafe2a78fc2115931561730/asyncpg-0.31.0-cp312-cp312-win_amd64.whl", hash = "sha256:72d6bdcbc93d608a1158f17932de2321f68b1a967a13e014998db87a72ed3186", size = 597175, upload-time = "2025-11-24T23:26:02.564Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/11/97b5c2af72a5d0b9bc3fa30cd4b9ce22284a9a943a150fdc768763caf035/asyncpg-0.31.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c204fab1b91e08b0f47e90a75d1b3c62174dab21f670ad6c5d0f243a228f015b", size = 661111, upload-time = "2025-11-24T23:26:04.467Z" }, + { url = "https://files.pythonhosted.org/packages/1b/71/157d611c791a5e2d0423f09f027bd499935f0906e0c2a416ce712ba51ef3/asyncpg-0.31.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:54a64f91839ba59008eccf7aad2e93d6e3de688d796f35803235ea1c4898ae1e", size = 636928, upload-time = "2025-11-24T23:26:05.944Z" }, + { url = "https://files.pythonhosted.org/packages/2e/fc/9e3486fb2bbe69d4a867c0b76d68542650a7ff1574ca40e84c3111bb0c6e/asyncpg-0.31.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0e0822b1038dc7253b337b0f3f676cadc4ac31b126c5d42691c39691962e403", size = 3424067, upload-time = "2025-11-24T23:26:07.957Z" }, + { url = "https://files.pythonhosted.org/packages/12/c6/8c9d076f73f07f995013c791e018a1cd5f31823c2a3187fc8581706aa00f/asyncpg-0.31.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bef056aa502ee34204c161c72ca1f3c274917596877f825968368b2c33f585f4", size = 3518156, upload-time = "2025-11-24T23:26:09.591Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3b/60683a0baf50fbc546499cfb53132cb6835b92b529a05f6a81471ab60d0c/asyncpg-0.31.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0bfbcc5b7ffcd9b75ab1558f00db2ae07db9c80637ad1b2469c43df79d7a5ae2", size = 3319636, upload-time = "2025-11-24T23:26:11.168Z" }, + { url = "https://files.pythonhosted.org/packages/50/dc/8487df0f69bd398a61e1792b3cba0e47477f214eff085ba0efa7eac9ce87/asyncpg-0.31.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:22bc525ebbdc24d1261ecbf6f504998244d4e3be1721784b5f64664d61fbe602", size = 3472079, upload-time = "2025-11-24T23:26:13.164Z" }, + { url = 
"https://files.pythonhosted.org/packages/13/a1/c5bbeeb8531c05c89135cb8b28575ac2fac618bcb60119ee9696c3faf71c/asyncpg-0.31.0-cp313-cp313-win32.whl", hash = "sha256:f890de5e1e4f7e14023619399a471ce4b71f5418cd67a51853b9910fdfa73696", size = 527606, upload-time = "2025-11-24T23:26:14.78Z" }, + { url = "https://files.pythonhosted.org/packages/91/66/b25ccb84a246b470eb943b0107c07edcae51804912b824054b3413995a10/asyncpg-0.31.0-cp313-cp313-win_amd64.whl", hash = "sha256:dc5f2fa9916f292e5c5c8b2ac2813763bcd7f58e130055b4ad8a0531314201ab", size = 596569, upload-time = "2025-11-24T23:26:16.189Z" }, + { url = "https://files.pythonhosted.org/packages/3c/36/e9450d62e84a13aea6580c83a47a437f26c7ca6fa0f0fd40b6670793ea30/asyncpg-0.31.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:f6b56b91bb0ffc328c4e3ed113136cddd9deefdf5f79ab448598b9772831df44", size = 660867, upload-time = "2025-11-24T23:26:17.631Z" }, + { url = "https://files.pythonhosted.org/packages/82/4b/1d0a2b33b3102d210439338e1beea616a6122267c0df459ff0265cd5807a/asyncpg-0.31.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:334dec28cf20d7f5bb9e45b39546ddf247f8042a690bff9b9573d00086e69cb5", size = 638349, upload-time = "2025-11-24T23:26:19.689Z" }, + { url = "https://files.pythonhosted.org/packages/41/aa/e7f7ac9a7974f08eff9183e392b2d62516f90412686532d27e196c0f0eeb/asyncpg-0.31.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98cc158c53f46de7bb677fd20c417e264fc02b36d901cc2a43bd6cb0dc6dbfd2", size = 3410428, upload-time = "2025-11-24T23:26:21.275Z" }, + { url = "https://files.pythonhosted.org/packages/6f/de/bf1b60de3dede5c2731e6788617a512bc0ebd9693eac297ee74086f101d7/asyncpg-0.31.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9322b563e2661a52e3cdbc93eed3be7748b289f792e0011cb2720d278b366ce2", size = 3471678, upload-time = "2025-11-24T23:26:23.627Z" }, + { url = 
"https://files.pythonhosted.org/packages/46/78/fc3ade003e22d8bd53aaf8f75f4be48f0b460fa73738f0391b9c856a9147/asyncpg-0.31.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19857a358fc811d82227449b7ca40afb46e75b33eb8897240c3839dd8b744218", size = 3313505, upload-time = "2025-11-24T23:26:25.235Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e9/73eb8a6789e927816f4705291be21f2225687bfa97321e40cd23055e903a/asyncpg-0.31.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ba5f8886e850882ff2c2ace5732300e99193823e8107e2c53ef01c1ebfa1e85d", size = 3434744, upload-time = "2025-11-24T23:26:26.944Z" }, + { url = "https://files.pythonhosted.org/packages/08/4b/f10b880534413c65c5b5862f79b8e81553a8f364e5238832ad4c0af71b7f/asyncpg-0.31.0-cp314-cp314-win32.whl", hash = "sha256:cea3a0b2a14f95834cee29432e4ddc399b95700eb1d51bbc5bfee8f31fa07b2b", size = 532251, upload-time = "2025-11-24T23:26:28.404Z" }, + { url = "https://files.pythonhosted.org/packages/d3/2d/7aa40750b7a19efa5d66e67fc06008ca0f27ba1bd082e457ad82f59aba49/asyncpg-0.31.0-cp314-cp314-win_amd64.whl", hash = "sha256:04d19392716af6b029411a0264d92093b6e5e8285ae97a39957b9a9c14ea72be", size = 604901, upload-time = "2025-11-24T23:26:30.34Z" }, + { url = "https://files.pythonhosted.org/packages/ce/fe/b9dfe349b83b9dee28cc42360d2c86b2cdce4cb551a2c2d27e156bcac84d/asyncpg-0.31.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:bdb957706da132e982cc6856bb2f7b740603472b54c3ebc77fe60ea3e57e1bd2", size = 702280, upload-time = "2025-11-24T23:26:32Z" }, + { url = "https://files.pythonhosted.org/packages/6a/81/e6be6e37e560bd91e6c23ea8a6138a04fd057b08cf63d3c5055c98e81c1d/asyncpg-0.31.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6d11b198111a72f47154fa03b85799f9be63701e068b43f84ac25da0bda9cb31", size = 682931, upload-time = "2025-11-24T23:26:33.572Z" }, + { url = 
"https://files.pythonhosted.org/packages/a6/45/6009040da85a1648dd5bc75b3b0a062081c483e75a1a29041ae63a0bf0dc/asyncpg-0.31.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18c83b03bc0d1b23e6230f5bf8d4f217dc9bc08644ce0502a9d91dc9e634a9c7", size = 3581608, upload-time = "2025-11-24T23:26:35.638Z" }, + { url = "https://files.pythonhosted.org/packages/7e/06/2e3d4d7608b0b2b3adbee0d0bd6a2d29ca0fc4d8a78f8277df04e2d1fd7b/asyncpg-0.31.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e009abc333464ff18b8f6fd146addffd9aaf63e79aa3bb40ab7a4c332d0c5e9e", size = 3498738, upload-time = "2025-11-24T23:26:37.275Z" }, + { url = "https://files.pythonhosted.org/packages/7d/aa/7d75ede780033141c51d83577ea23236ba7d3a23593929b32b49db8ed36e/asyncpg-0.31.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3b1fbcb0e396a5ca435a8826a87e5c2c2cc0c8c68eb6fadf82168056b0e53a8c", size = 3401026, upload-time = "2025-11-24T23:26:39.423Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7a/15e37d45e7f7c94facc1e9148c0e455e8f33c08f0b8a0b1deb2c5171771b/asyncpg-0.31.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8df714dba348efcc162d2adf02d213e5fab1bd9f557e1305633e851a61814a7a", size = 3429426, upload-time = "2025-11-24T23:26:41.032Z" }, + { url = "https://files.pythonhosted.org/packages/13/d5/71437c5f6ae5f307828710efbe62163974e71237d5d46ebd2869ea052d10/asyncpg-0.31.0-cp314-cp314t-win32.whl", hash = "sha256:1b41f1afb1033f2b44f3234993b15096ddc9cd71b21a42dbd87fc6a57b43d65d", size = 614495, upload-time = "2025-11-24T23:26:42.659Z" }, + { url = "https://files.pythonhosted.org/packages/3c/d7/8fb3044eaef08a310acfe23dae9a8e2e07d305edc29a53497e52bc76eca7/asyncpg-0.31.0-cp314-cp314t-win_amd64.whl", hash = "sha256:bd4107bb7cdd0e9e65fae66a62afd3a249663b844fa34d479f6d5b3bef9c04c3", size = 706062, upload-time = "2025-11-24T23:26:44.086Z" }, +] + +[[package]] +name = "bcrypt" +version = "3.2.2" 
+source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e8/36/edc85ab295ceff724506252b774155eff8a238f13730c8b13badd33ef866/bcrypt-3.2.2.tar.gz", hash = "sha256:433c410c2177057705da2a9f2cd01dd157493b2a7ac14c8593a16b3dab6b6bfb", size = 42455, upload-time = "2022-05-01T17:58:52.348Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/c2/05354b1d4351d2e686a32296cc9dd1e63f9909a580636df0f7b06d774600/bcrypt-3.2.2-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:7180d98a96f00b1050e93f5b0f556e658605dd9f524d0b0e68ae7944673f525e", size = 50049, upload-time = "2022-05-01T18:05:47.625Z" }, + { url = "https://files.pythonhosted.org/packages/8c/b3/1257f7d64ee0aa0eb4fb1de5da8c2647a57db7b737da1f2342ac1889d3b8/bcrypt-3.2.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:61bae49580dce88095d669226d5076d0b9d927754cedbdf76c6c9f5099ad6f26", size = 54914, upload-time = "2022-05-01T18:03:00.752Z" }, + { url = "https://files.pythonhosted.org/packages/61/3d/dce83194830183aa700cab07c89822471d21663a86a0b305d1e5c7b02810/bcrypt-3.2.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88273d806ab3a50d06bc6a2fc7c87d737dd669b76ad955f449c43095389bc8fb", size = 54403, upload-time = "2022-05-01T18:03:02.483Z" }, + { url = "https://files.pythonhosted.org/packages/86/1b/f4d7425dfc6cd0e405b48ee484df6d80fb39e05f25963dbfcc2c511e8341/bcrypt-3.2.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6d2cb9d969bfca5bc08e45864137276e4c3d3d7de2b162171def3d188bf9d34a", size = 62337, upload-time = "2022-05-01T18:05:49.524Z" }, + { url = "https://files.pythonhosted.org/packages/3e/df/289db4f31b303de6addb0897c8b5c01b23bd4b8c511ac80a32b08658847c/bcrypt-3.2.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2b02d6bfc6336d1094276f3f588aa1225a598e27f8e3388f4db9948cb707b521", size = 61026, upload-time = "2022-05-01T18:05:51.107Z" }, + { url = "https://files.pythonhosted.org/packages/40/8f/b67b42faa2e4d944b145b1a402fc08db0af8fe2dfa92418c674b5a302496/bcrypt-3.2.2-cp36-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a2c46100e315c3a5b90fdc53e429c006c5f962529bc27e1dfd656292c20ccc40", size = 64672, upload-time = "2022-05-01T18:05:52.748Z" }, + { url = "https://files.pythonhosted.org/packages/fc/9a/e1867f0b27a3f4ce90e21dd7f322f0e15d4aac2434d3b938dcf765e47c6b/bcrypt-3.2.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7d9ba2e41e330d2af4af6b1b6ec9e6128e91343d0b4afb9282e54e5508f31baa", size = 56795, upload-time = "2022-05-01T18:03:04.028Z" }, + { url = "https://files.pythonhosted.org/packages/18/76/057b0637c880e6cb0abdc8a867d080376ddca6ed7d05b7738f589cc5c1a8/bcrypt-3.2.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cd43303d6b8a165c29ec6756afd169faba9396a9472cdff753fe9f19b96ce2fa", size = 62075, upload-time = "2022-05-01T18:05:54.412Z" }, + { url = "https://files.pythonhosted.org/packages/f1/64/cd93e2c3e28a5fa8bcf6753d5cc5e858e4da08bf51404a0adb6a412532de/bcrypt-3.2.2-cp36-abi3-win32.whl", hash = "sha256:4e029cef560967fb0cf4a802bcf4d562d3d6b4b1bf81de5ec1abbe0f1adb027e", size = 27916, upload-time = "2022-05-01T18:05:56.45Z" }, + { url = "https://files.pythonhosted.org/packages/f5/37/7cd297ff571c4d86371ff024c0e008b37b59e895b28f69444a9b6f94ca1a/bcrypt-3.2.2-cp36-abi3-win_amd64.whl", hash = "sha256:7ff2069240c6bbe49109fe84ca80508773a904f5a8cb960e02a977f7f519b129", size = 29581, upload-time = "2022-05-01T18:05:57.878Z" }, +] + +[[package]] +name = "billiard" +version = "4.2.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/23/b12ac0bcdfb7360d664f40a00b1bda139cbbbced012c34e375506dbd0143/billiard-4.2.4.tar.gz", hash = 
"sha256:55f542c371209e03cd5862299b74e52e4fbcba8250ba611ad94276b369b6a85f", size = 156537, upload-time = "2025-11-30T13:28:48.52Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/87/8bab77b323f16d67be364031220069f79159117dd5e43eeb4be2fef1ac9b/billiard-4.2.4-py3-none-any.whl", hash = "sha256:525b42bdec68d2b983347ac312f892db930858495db601b5836ac24e6477cde5", size = 87070, upload-time = "2025-11-30T13:28:47.016Z" }, +] + +[[package]] +name = "boto3" +version = "1.42.85" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, + { name = "jmespath" }, + { name = "s3transfer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/31/9d/a9a7b5a9351e3ff0baae01136f71ba6fc4652fe0dc2da3b0a8ebdfc1be44/boto3-1.42.85.tar.gz", hash = "sha256:1cd3dcbfaba85c6071ba9397c1804b6a94a1a97031b8f1993fdba27c0c5d6eba", size = 112769, upload-time = "2026-04-07T19:40:53.834Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/ab/3167b8ec3cf1d87ad08d2ad5f15823a22945cae7870798274c283c3a18f1/boto3-1.42.85-py3-none-any.whl", hash = "sha256:4f6ac066e41d18ec33f532253fac0f35e0fdca373724458f983ce3d531340b7a", size = 140556, upload-time = "2026-04-07T19:40:52.186Z" }, +] + +[[package]] +name = "botocore" +version = "1.42.85" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jmespath" }, + { name = "python-dateutil" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0a/ac/7f14b05cf43e4baae99f4570b02e10b2aebf242dfd86245523340390c834/botocore-1.42.85.tar.gz", hash = "sha256:2ee61f80b7724a143e16d0a85408ef5fa20b99dce7a3c8ec5d25cc8dced164c1", size = 15159562, upload-time = "2026-04-07T19:40:43.831Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/16/f3/c1fbaff4c509c616fd01f44357283a8992f10b3a05d932b22e602aa3a221/botocore-1.42.85-py3-none-any.whl", hash = "sha256:828b67722caeb7e240eefedee74050e803d1fa102958ead9c4009101eefd5381", size = 
14839741, upload-time = "2026-04-07T19:40:40.733Z" }, +] + +[[package]] +name = "brotli" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f7/16/c92ca344d646e71a43b8bb353f0a6490d7f6e06210f8554c8f874e454285/brotli-1.2.0.tar.gz", hash = "sha256:e310f77e41941c13340a95976fe66a8a95b01e783d430eeaf7a2f87e0a57dd0a", size = 7388632, upload-time = "2025-11-05T18:39:42.86Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/ef/f285668811a9e1ddb47a18cb0b437d5fc2760d537a2fe8a57875ad6f8448/brotli-1.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:15b33fe93cedc4caaff8a0bd1eb7e3dab1c61bb22a0bf5bdfdfd97cd7da79744", size = 863110, upload-time = "2025-11-05T18:38:12.978Z" }, + { url = "https://files.pythonhosted.org/packages/50/62/a3b77593587010c789a9d6eaa527c79e0848b7b860402cc64bc0bc28a86c/brotli-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:898be2be399c221d2671d29eed26b6b2713a02c2119168ed914e7d00ceadb56f", size = 445438, upload-time = "2025-11-05T18:38:14.208Z" }, + { url = "https://files.pythonhosted.org/packages/cd/e1/7fadd47f40ce5549dc44493877db40292277db373da5053aff181656e16e/brotli-1.2.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:350c8348f0e76fff0a0fd6c26755d2653863279d086d3aa2c290a6a7251135dd", size = 1534420, upload-time = "2025-11-05T18:38:15.111Z" }, + { url = "https://files.pythonhosted.org/packages/12/8b/1ed2f64054a5a008a4ccd2f271dbba7a5fb1a3067a99f5ceadedd4c1d5a7/brotli-1.2.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e1ad3fda65ae0d93fec742a128d72e145c9c7a99ee2fcd667785d99eb25a7fe", size = 1632619, upload-time = "2025-11-05T18:38:16.094Z" }, + { url = "https://files.pythonhosted.org/packages/89/5a/7071a621eb2d052d64efd5da2ef55ecdac7c3b0c6e4f9d519e9c66d987ef/brotli-1.2.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:40d918bce2b427a0c4ba189df7a006ac0c7277c180aee4617d99e9ccaaf59e6a", size = 1426014, upload-time = "2025-11-05T18:38:17.177Z" }, + { url = "https://files.pythonhosted.org/packages/26/6d/0971a8ea435af5156acaaccec1a505f981c9c80227633851f2810abd252a/brotli-1.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2a7f1d03727130fc875448b65b127a9ec5d06d19d0148e7554384229706f9d1b", size = 1489661, upload-time = "2025-11-05T18:38:18.41Z" }, + { url = "https://files.pythonhosted.org/packages/f3/75/c1baca8b4ec6c96a03ef8230fab2a785e35297632f402ebb1e78a1e39116/brotli-1.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:9c79f57faa25d97900bfb119480806d783fba83cd09ee0b33c17623935b05fa3", size = 1599150, upload-time = "2025-11-05T18:38:19.792Z" }, + { url = "https://files.pythonhosted.org/packages/0d/1a/23fcfee1c324fd48a63d7ebf4bac3a4115bdb1b00e600f80f727d850b1ae/brotli-1.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:844a8ceb8483fefafc412f85c14f2aae2fb69567bf2a0de53cdb88b73e7c43ae", size = 1493505, upload-time = "2025-11-05T18:38:20.913Z" }, + { url = "https://files.pythonhosted.org/packages/36/e5/12904bbd36afeef53d45a84881a4810ae8810ad7e328a971ebbfd760a0b3/brotli-1.2.0-cp311-cp311-win32.whl", hash = "sha256:aa47441fa3026543513139cb8926a92a8e305ee9c71a6209ef7a97d91640ea03", size = 334451, upload-time = "2025-11-05T18:38:21.94Z" }, + { url = "https://files.pythonhosted.org/packages/02/8b/ecb5761b989629a4758c394b9301607a5880de61ee2ee5fe104b87149ebc/brotli-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:022426c9e99fd65d9475dce5c195526f04bb8be8907607e27e747893f6ee3e24", size = 369035, upload-time = "2025-11-05T18:38:22.941Z" }, + { url = "https://files.pythonhosted.org/packages/11/ee/b0a11ab2315c69bb9b45a2aaed022499c9c24a205c3a49c3513b541a7967/brotli-1.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:35d382625778834a7f3061b15423919aa03e4f5da34ac8e02c074e4b75ab4f84", size = 861543, upload-time = "2025-11-05T18:38:24.183Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/2f/29c1459513cd35828e25531ebfcbf3e92a5e49f560b1777a9af7203eb46e/brotli-1.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7a61c06b334bd99bc5ae84f1eeb36bfe01400264b3c352f968c6e30a10f9d08b", size = 444288, upload-time = "2025-11-05T18:38:25.139Z" }, + { url = "https://files.pythonhosted.org/packages/3d/6f/feba03130d5fceadfa3a1bb102cb14650798c848b1df2a808356f939bb16/brotli-1.2.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:acec55bb7c90f1dfc476126f9711a8e81c9af7fb617409a9ee2953115343f08d", size = 1528071, upload-time = "2025-11-05T18:38:26.081Z" }, + { url = "https://files.pythonhosted.org/packages/2b/38/f3abb554eee089bd15471057ba85f47e53a44a462cfce265d9bf7088eb09/brotli-1.2.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:260d3692396e1895c5034f204f0db022c056f9e2ac841593a4cf9426e2a3faca", size = 1626913, upload-time = "2025-11-05T18:38:27.284Z" }, + { url = "https://files.pythonhosted.org/packages/03/a7/03aa61fbc3c5cbf99b44d158665f9b0dd3d8059be16c460208d9e385c837/brotli-1.2.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:072e7624b1fc4d601036ab3f4f27942ef772887e876beff0301d261210bca97f", size = 1419762, upload-time = "2025-11-05T18:38:28.295Z" }, + { url = "https://files.pythonhosted.org/packages/21/1b/0374a89ee27d152a5069c356c96b93afd1b94eae83f1e004b57eb6ce2f10/brotli-1.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:adedc4a67e15327dfdd04884873c6d5a01d3e3b6f61406f99b1ed4865a2f6d28", size = 1484494, upload-time = "2025-11-05T18:38:29.29Z" }, + { url = "https://files.pythonhosted.org/packages/cf/57/69d4fe84a67aef4f524dcd075c6eee868d7850e85bf01d778a857d8dbe0a/brotli-1.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7a47ce5c2288702e09dc22a44d0ee6152f2c7eda97b3c8482d826a1f3cfc7da7", size = 1593302, upload-time = "2025-11-05T18:38:30.639Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/3b/39e13ce78a8e9a621c5df3aeb5fd181fcc8caba8c48a194cd629771f6828/brotli-1.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:af43b8711a8264bb4e7d6d9a6d004c3a2019c04c01127a868709ec29962b6036", size = 1487913, upload-time = "2025-11-05T18:38:31.618Z" }, + { url = "https://files.pythonhosted.org/packages/62/28/4d00cb9bd76a6357a66fcd54b4b6d70288385584063f4b07884c1e7286ac/brotli-1.2.0-cp312-cp312-win32.whl", hash = "sha256:e99befa0b48f3cd293dafeacdd0d191804d105d279e0b387a32054c1180f3161", size = 334362, upload-time = "2025-11-05T18:38:32.939Z" }, + { url = "https://files.pythonhosted.org/packages/1c/4e/bc1dcac9498859d5e353c9b153627a3752868a9d5f05ce8dedd81a2354ab/brotli-1.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:b35c13ce241abdd44cb8ca70683f20c0c079728a36a996297adb5334adfc1c44", size = 369115, upload-time = "2025-11-05T18:38:33.765Z" }, + { url = "https://files.pythonhosted.org/packages/6c/d4/4ad5432ac98c73096159d9ce7ffeb82d151c2ac84adcc6168e476bb54674/brotli-1.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9e5825ba2c9998375530504578fd4d5d1059d09621a02065d1b6bfc41a8e05ab", size = 861523, upload-time = "2025-11-05T18:38:34.67Z" }, + { url = "https://files.pythonhosted.org/packages/91/9f/9cc5bd03ee68a85dc4bc89114f7067c056a3c14b3d95f171918c088bf88d/brotli-1.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0cf8c3b8ba93d496b2fae778039e2f5ecc7cff99df84df337ca31d8f2252896c", size = 444289, upload-time = "2025-11-05T18:38:35.6Z" }, + { url = "https://files.pythonhosted.org/packages/2e/b6/fe84227c56a865d16a6614e2c4722864b380cb14b13f3e6bef441e73a85a/brotli-1.2.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c8565e3cdc1808b1a34714b553b262c5de5fbda202285782173ec137fd13709f", size = 1528076, upload-time = "2025-11-05T18:38:36.639Z" }, + { url = 
"https://files.pythonhosted.org/packages/55/de/de4ae0aaca06c790371cf6e7ee93a024f6b4bb0568727da8c3de112e726c/brotli-1.2.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:26e8d3ecb0ee458a9804f47f21b74845cc823fd1bb19f02272be70774f56e2a6", size = 1626880, upload-time = "2025-11-05T18:38:37.623Z" }, + { url = "https://files.pythonhosted.org/packages/5f/16/a1b22cbea436642e071adcaf8d4b350a2ad02f5e0ad0da879a1be16188a0/brotli-1.2.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67a91c5187e1eec76a61625c77a6c8c785650f5b576ca732bd33ef58b0dff49c", size = 1419737, upload-time = "2025-11-05T18:38:38.729Z" }, + { url = "https://files.pythonhosted.org/packages/46/63/c968a97cbb3bdbf7f974ef5a6ab467a2879b82afbc5ffb65b8acbb744f95/brotli-1.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4ecdb3b6dc36e6d6e14d3a1bdc6c1057c8cbf80db04031d566eb6080ce283a48", size = 1484440, upload-time = "2025-11-05T18:38:39.916Z" }, + { url = "https://files.pythonhosted.org/packages/06/9d/102c67ea5c9fc171f423e8399e585dabea29b5bc79b05572891e70013cdd/brotli-1.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3e1b35d56856f3ed326b140d3c6d9db91740f22e14b06e840fe4bb1923439a18", size = 1593313, upload-time = "2025-11-05T18:38:41.24Z" }, + { url = "https://files.pythonhosted.org/packages/9e/4a/9526d14fa6b87bc827ba1755a8440e214ff90de03095cacd78a64abe2b7d/brotli-1.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:54a50a9dad16b32136b2241ddea9e4df159b41247b2ce6aac0b3276a66a8f1e5", size = 1487945, upload-time = "2025-11-05T18:38:42.277Z" }, + { url = "https://files.pythonhosted.org/packages/5b/e8/3fe1ffed70cbef83c5236166acaed7bb9c766509b157854c80e2f766b38c/brotli-1.2.0-cp313-cp313-win32.whl", hash = "sha256:1b1d6a4efedd53671c793be6dd760fcf2107da3a52331ad9ea429edf0902f27a", size = 334368, upload-time = "2025-11-05T18:38:43.345Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/91/e739587be970a113b37b821eae8097aac5a48e5f0eca438c22e4c7dd8648/brotli-1.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:b63daa43d82f0cdabf98dee215b375b4058cce72871fd07934f179885aad16e8", size = 369116, upload-time = "2025-11-05T18:38:44.609Z" }, + { url = "https://files.pythonhosted.org/packages/17/e1/298c2ddf786bb7347a1cd71d63a347a79e5712a7c0cba9e3c3458ebd976f/brotli-1.2.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:6c12dad5cd04530323e723787ff762bac749a7b256a5bece32b2243dd5c27b21", size = 863080, upload-time = "2025-11-05T18:38:45.503Z" }, + { url = "https://files.pythonhosted.org/packages/84/0c/aac98e286ba66868b2b3b50338ffbd85a35c7122e9531a73a37a29763d38/brotli-1.2.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:3219bd9e69868e57183316ee19c84e03e8f8b5a1d1f2667e1aa8c2f91cb061ac", size = 445453, upload-time = "2025-11-05T18:38:46.433Z" }, + { url = "https://files.pythonhosted.org/packages/ec/f1/0ca1f3f99ae300372635ab3fe2f7a79fa335fee3d874fa7f9e68575e0e62/brotli-1.2.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:963a08f3bebd8b75ac57661045402da15991468a621f014be54e50f53a58d19e", size = 1528168, upload-time = "2025-11-05T18:38:47.371Z" }, + { url = "https://files.pythonhosted.org/packages/d6/a6/2ebfc8f766d46df8d3e65b880a2e220732395e6d7dc312c1e1244b0f074a/brotli-1.2.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9322b9f8656782414b37e6af884146869d46ab85158201d82bab9abbcb971dc7", size = 1627098, upload-time = "2025-11-05T18:38:48.385Z" }, + { url = "https://files.pythonhosted.org/packages/f3/2f/0976d5b097ff8a22163b10617f76b2557f15f0f39d6a0fe1f02b1a53e92b/brotli-1.2.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cf9cba6f5b78a2071ec6fb1e7bd39acf35071d90a81231d67e92d637776a6a63", size = 1419861, upload-time = "2025-11-05T18:38:49.372Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/97/d76df7176a2ce7616ff94c1fb72d307c9a30d2189fe877f3dd99af00ea5a/brotli-1.2.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7547369c4392b47d30a3467fe8c3330b4f2e0f7730e45e3103d7d636678a808b", size = 1484594, upload-time = "2025-11-05T18:38:50.655Z" }, + { url = "https://files.pythonhosted.org/packages/d3/93/14cf0b1216f43df5609f5b272050b0abd219e0b54ea80b47cef9867b45e7/brotli-1.2.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:fc1530af5c3c275b8524f2e24841cbe2599d74462455e9bae5109e9ff42e9361", size = 1593455, upload-time = "2025-11-05T18:38:51.624Z" }, + { url = "https://files.pythonhosted.org/packages/b3/73/3183c9e41ca755713bdf2cc1d0810df742c09484e2e1ddd693bee53877c1/brotli-1.2.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d2d085ded05278d1c7f65560aae97b3160aeb2ea2c0b3e26204856beccb60888", size = 1488164, upload-time = "2025-11-05T18:38:53.079Z" }, + { url = "https://files.pythonhosted.org/packages/64/6a/0c78d8f3a582859236482fd9fa86a65a60328a00983006bcf6d83b7b2253/brotli-1.2.0-cp314-cp314-win32.whl", hash = "sha256:832c115a020e463c2f67664560449a7bea26b0c1fdd690352addad6d0a08714d", size = 339280, upload-time = "2025-11-05T18:38:54.02Z" }, + { url = "https://files.pythonhosted.org/packages/f5/10/56978295c14794b2c12007b07f3e41ba26acda9257457d7085b0bb3bb90c/brotli-1.2.0-cp314-cp314-win_amd64.whl", hash = "sha256:e7c0af964e0b4e3412a0ebf341ea26ec767fa0b4cf81abb5e897c9338b5ad6a3", size = 375639, upload-time = "2025-11-05T18:38:55.67Z" }, +] + +[[package]] +name = "brotlicffi" +version = "1.2.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8a/b6/017dc5f852ed9b8735af77774509271acbf1de02d238377667145fcee01d/brotlicffi-1.2.0.1.tar.gz", hash = "sha256:c20d5c596278307ad06414a6d95a892377ea274a5c6b790c2548c009385d621c", size = 478156, upload-time = "2026-03-05T19:54:11.547Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ef/f9/dfa56316837fa798eac19358351e974de8e1e2ca9475af4cb90293cd6576/brotlicffi-1.2.0.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c85e65913cf2b79c57a3fdd05b98d9731d9255dc0cb696b09376cc091b9cddd", size = 433046, upload-time = "2026-03-05T19:53:46.209Z" }, + { url = "https://files.pythonhosted.org/packages/4a/f5/f8f492158c76b0d940388801f04f747028971ad5774287bded5f1e53f08d/brotlicffi-1.2.0.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:535f2d05d0273408abc13fc0eebb467afac17b0ad85090c8913690d40207dac5", size = 1541126, upload-time = "2026-03-05T19:53:48.248Z" }, + { url = "https://files.pythonhosted.org/packages/3b/e1/ff87af10ac419600c63e9287a0649c673673ae6b4f2bcf48e96cb2f89f60/brotlicffi-1.2.0.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce17eb798ca59ecec67a9bb3fd7a4304e120d1cd02953ce522d959b9a84d58ac", size = 1541983, upload-time = "2026-03-05T19:53:50.317Z" }, + { url = "https://files.pythonhosted.org/packages/47/c0/80ecd9bd45776109fab14040e478bf63e456967c9ddee2353d8330ed8de1/brotlicffi-1.2.0.1-cp314-cp314t-win32.whl", hash = "sha256:3c9544f83cb715d95d7eab3af4adbbef8b2093ad6382288a83b3a25feb1a57ec", size = 349047, upload-time = "2026-03-05T19:53:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/ab/98/13e5b250236a281b6cd9e92a01ee1ae231029fa78faee932ef3766e1cb24/brotlicffi-1.2.0.1-cp314-cp314t-win_amd64.whl", hash = "sha256:625f8115d32ae9c0740d01ea51518437c3fbaa3e78d41cb18459f6f7ac326000", size = 385652, upload-time = "2026-03-05T19:53:53.892Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9f/b98dcd4af47994cee97aebac866996a006a2e5fc1fd1e2b82a8ad95cf09c/brotlicffi-1.2.0.1-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:91ba5f0ccc040f6ff8f7efaf839f797723d03ed46acb8ae9408f99ffd2572cf4", size = 432608, upload-time = "2026-03-05T19:53:56.736Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/7a/ac4ee56595a061e3718a6d1ea7e921f4df156894acffb28ed88a1fd52022/brotlicffi-1.2.0.1-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be9a670c6811af30a4bd42d7116dc5895d3b41beaa8ed8a89050447a0181f5ce", size = 1534257, upload-time = "2026-03-05T19:53:58.667Z" }, + { url = "https://files.pythonhosted.org/packages/99/39/e7410db7f6f56de57744ea52a115084ceb2735f4d44973f349bb92136586/brotlicffi-1.2.0.1-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6f3314a3476f59e5443f9f72a6dff16edc0c3463c9b318feaef04ae3e4683f5a", size = 1536838, upload-time = "2026-03-05T19:54:00.705Z" }, + { url = "https://files.pythonhosted.org/packages/a6/75/6e7977d1935fc3fbb201cbd619be8f2c7aea25d40a096967132854b34708/brotlicffi-1.2.0.1-cp38-abi3-win32.whl", hash = "sha256:82ea52e2b5d3145b6c406ebd3efb0d55db718b7ad996bd70c62cec0439de1187", size = 343337, upload-time = "2026-03-05T19:54:02.446Z" }, + { url = "https://files.pythonhosted.org/packages/d8/ef/e7e485ce5e4ba3843a0a92feb767c7b6098fd6e65ce752918074d175ae71/brotlicffi-1.2.0.1-cp38-abi3-win_amd64.whl", hash = "sha256:da2e82a08e7778b8bc539d27ca03cdd684113e81394bfaaad8d0dfc6a17ddede", size = 379026, upload-time = "2026-03-05T19:54:04.322Z" }, + { url = "https://files.pythonhosted.org/packages/7f/53/6262c2256513e6f530d81642477cb19367270922063eaa2d7b781d8c723d/brotlicffi-1.2.0.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:e015af99584c6db1490a69a210c765953e473e63adc2d891ac3062a737c9e851", size = 402265, upload-time = "2026-03-05T19:54:05.858Z" }, + { url = "https://files.pythonhosted.org/packages/1f/d9/d5340b43cf5fbe7fe5a083d237e5338cc1caa73bea523be1c5e452c26290/brotlicffi-1.2.0.1-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:37cb587d32bf7168e2218c455e22e409ad1f3157c6c71945879a311f3e6b6abf", size = 406710, upload-time = 
"2026-03-05T19:54:07.272Z" }, + { url = "https://files.pythonhosted.org/packages/a3/82/dbced4c1e0792efdf23fd90ff6d2a320c64ff4dfef7aacc85c04fde9ddd2/brotlicffi-1.2.0.1-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d6ba65dd528892b4d9960beba2ae011a753620bcfc66cf6fa3cee18d7b0baa4", size = 402787, upload-time = "2026-03-05T19:54:08.73Z" }, + { url = "https://files.pythonhosted.org/packages/ef/6f/534205ba7590c9a8716a614f270c5c2ec419b5b7079b3f9cd31b7b5580de/brotlicffi-1.2.0.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f2a5575653b0672638ba039b82fda56854934d7a6a24d4b8b5033f73ab43cbc1", size = 375108, upload-time = "2026-03-05T19:54:10.079Z" }, +] + +[[package]] +name = "celery" +version = "5.6.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "billiard" }, + { name = "click" }, + { name = "click-didyoumean" }, + { name = "click-plugins" }, + { name = "click-repl" }, + { name = "kombu" }, + { name = "python-dateutil" }, + { name = "tzlocal" }, + { name = "vine" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e8/b4/a1233943ab5c8ea05fb877a88a0a0622bf47444b99e4991a8045ac37ea1d/celery-5.6.3.tar.gz", hash = "sha256:177006bd2054b882e9f01be59abd8529e88879ef50d7918a7050c5a9f4e12912", size = 1742243, upload-time = "2026-03-26T12:14:51.76Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/c9/6eccdda96e098f7ae843162db2d3c149c6931a24fda69fe4ab84d0027eb5/celery-5.6.3-py3-none-any.whl", hash = "sha256:0808f42f80909c4d5833202360ffafb2a4f83f4d8e23e1285d926610e9a7afa6", size = 451235, upload-time = "2026-03-26T12:14:49.491Z" }, +] + +[package.optional-dependencies] +redis = [ + { name = "kombu", extra = ["redis"] }, +] + +[[package]] +name = "certifi" +version = "2026.2.25" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, +] + +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 
209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, + { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { 
url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = 
"https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, 
upload-time = "2025-09-08T23:23:44.61Z" }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, + { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, +] + +[[package]] +name = "click" +version = "8.3.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/57/75/31212c6bf2503fdf920d87fee5d7a86a2e3bcf444984126f13d8e4016804/click-8.3.2.tar.gz", hash = "sha256:14162b8b3b3550a7d479eafa77dfd3c38d9dc8951f6f69c78913a8f9a7540fd5", size = 302856, upload-time = "2026-04-03T19:14:45.118Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e4/20/71885d8b97d4f3dde17b1fdb92dbd4908b00541c5a3379787137285f602e/click-8.3.2-py3-none-any.whl", hash = "sha256:1924d2c27c5653561cd2cae4548d1406039cb79b858b747cfea24924bbc1616d", size = 108379, upload-time = "2026-04-03T19:14:43.505Z" }, +] + +[[package]] +name = "click-didyoumean" +version = "0.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/30/ce/217289b77c590ea1e7c24242d9ddd6e249e52c795ff10fac2c50062c48cb/click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463", size = 3089, upload-time = "2024-03-24T08:22:07.499Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1b/5b/974430b5ffdb7a4f1941d13d83c64a0395114503cc357c6b9ae4ce5047ed/click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c", size = 3631, upload-time = "2024-03-24T08:22:06.356Z" }, +] + +[[package]] +name = "click-plugins" +version = "1.1.1.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, +] +sdist = { url 
= "https://files.pythonhosted.org/packages/c3/a4/34847b59150da33690a36da3681d6bbc2ec14ee9a846bc30a6746e5984e4/click_plugins-1.1.1.2.tar.gz", hash = "sha256:d7af3984a99d243c131aa1a828331e7630f4a88a9741fd05c927b204bcf92261", size = 8343, upload-time = "2025-06-25T00:47:37.555Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/9a/2abecb28ae875e39c8cad711eb1186d8d14eab564705325e77e4e6ab9ae5/click_plugins-1.1.1.2-py2.py3-none-any.whl", hash = "sha256:008d65743833ffc1f5417bf0e78e8d2c23aab04d9745ba817bd3e71b0feb6aa6", size = 11051, upload-time = "2025-06-25T00:47:36.731Z" }, +] + +[[package]] +name = "click-repl" +version = "0.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "prompt-toolkit" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cb/a2/57f4ac79838cfae6912f997b4d1a64a858fb0c86d7fcaae6f7b58d267fca/click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9", size = 10449, upload-time = "2023-06-15T12:43:51.141Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/40/9d857001228658f0d59e97ebd4c346fe73e138c6de1bce61dc568a57c7f8/click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812", size = 10289, upload-time = "2023-06-15T12:43:48.626Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = 
"sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coverage" +version = "7.13.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/e0/70553e3000e345daff267cec284ce4cbf3fc141b6da229ac52775b5428f1/coverage-7.13.5.tar.gz", hash = "sha256:c81f6515c4c40141f83f502b07bbfa5c240ba25bbe73da7b33f1e5b6120ff179", size = 915967, upload-time = "2026-03-17T10:33:18.341Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4b/37/d24c8f8220ff07b839b2c043ea4903a33b0f455abe673ae3c03bbdb7f212/coverage-7.13.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66a80c616f80181f4d643b0f9e709d97bcea413ecd9631e1dedc7401c8e6695d", size = 219381, upload-time = "2026-03-17T10:30:14.68Z" }, + { url = "https://files.pythonhosted.org/packages/35/8b/cd129b0ca4afe886a6ce9d183c44d8301acbd4ef248622e7c49a23145605/coverage-7.13.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:145ede53ccbafb297c1c9287f788d1bc3efd6c900da23bf6931b09eafc931587", size = 219880, upload-time = "2026-03-17T10:30:16.231Z" }, + { url = "https://files.pythonhosted.org/packages/55/2f/e0e5b237bffdb5d6c530ce87cc1d413a5b7d7dfd60fb067ad6d254c35c76/coverage-7.13.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0672854dc733c342fa3e957e0605256d2bf5934feeac328da9e0b5449634a642", size = 250303, upload-time = "2026-03-17T10:30:17.748Z" }, + { url = "https://files.pythonhosted.org/packages/92/be/b1afb692be85b947f3401375851484496134c5554e67e822c35f28bf2fbc/coverage-7.13.5-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ec10e2a42b41c923c2209b846126c6582db5e43a33157e9870ba9fb70dc7854b", size = 252218, upload-time = "2026-03-17T10:30:19.804Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/69/2f47bb6fa1b8d1e3e5d0c4be8ccb4313c63d742476a619418f85740d597b/coverage-7.13.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be3d4bbad9d4b037791794ddeedd7d64a56f5933a2c1373e18e9e568b9141686", size = 254326, upload-time = "2026-03-17T10:30:21.321Z" }, + { url = "https://files.pythonhosted.org/packages/d5/d0/79db81da58965bd29dabc8f4ad2a2af70611a57cba9d1ec006f072f30a54/coverage-7.13.5-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4d2afbc5cc54d286bfb54541aa50b64cdb07a718227168c87b9e2fb8f25e1743", size = 256267, upload-time = "2026-03-17T10:30:23.094Z" }, + { url = "https://files.pythonhosted.org/packages/e5/32/d0d7cc8168f91ddab44c0ce4806b969df5f5fdfdbb568eaca2dbc2a04936/coverage-7.13.5-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3ad050321264c49c2fa67bb599100456fc51d004b82534f379d16445da40fb75", size = 250430, upload-time = "2026-03-17T10:30:25.311Z" }, + { url = "https://files.pythonhosted.org/packages/4d/06/a055311d891ddbe231cd69fdd20ea4be6e3603ffebddf8704b8ca8e10a3c/coverage-7.13.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7300c8a6d13335b29bb76d7651c66af6bd8658517c43499f110ddc6717bfc209", size = 252017, upload-time = "2026-03-17T10:30:27.284Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f6/d0fd2d21e29a657b5f77a2fe7082e1568158340dceb941954f776dce1b7b/coverage-7.13.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:eb07647a5738b89baab047f14edd18ded523de60f3b30e75c2acc826f79c839a", size = 250080, upload-time = "2026-03-17T10:30:29.481Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ab/0d7fb2efc2e9a5eb7ddcc6e722f834a69b454b7e6e5888c3a8567ecffb31/coverage-7.13.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:9adb6688e3b53adffefd4a52d72cbd8b02602bfb8f74dcd862337182fd4d1a4e", size = 253843, upload-time = "2026-03-17T10:30:31.301Z" }, + { 
url = "https://files.pythonhosted.org/packages/ba/6f/7467b917bbf5408610178f62a49c0ed4377bb16c1657f689cc61470da8ce/coverage-7.13.5-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7c8d4bc913dd70b93488d6c496c77f3aff5ea99a07e36a18f865bca55adef8bd", size = 249802, upload-time = "2026-03-17T10:30:33.358Z" }, + { url = "https://files.pythonhosted.org/packages/75/2c/1172fb689df92135f5bfbbd69fc83017a76d24ea2e2f3a1154007e2fb9f8/coverage-7.13.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0e3c426ffc4cd952f54ee9ffbdd10345709ecc78a3ecfd796a57236bfad0b9b8", size = 250707, upload-time = "2026-03-17T10:30:35.2Z" }, + { url = "https://files.pythonhosted.org/packages/67/21/9ac389377380a07884e3b48ba7a620fcd9dbfaf1d40565facdc6b36ec9ef/coverage-7.13.5-cp311-cp311-win32.whl", hash = "sha256:259b69bb83ad9894c4b25be2528139eecba9a82646ebdda2d9db1ba28424a6bf", size = 221880, upload-time = "2026-03-17T10:30:36.775Z" }, + { url = "https://files.pythonhosted.org/packages/af/7f/4cd8a92531253f9d7c1bbecd9fa1b472907fb54446ca768c59b531248dc5/coverage-7.13.5-cp311-cp311-win_amd64.whl", hash = "sha256:258354455f4e86e3e9d0d17571d522e13b4e1e19bf0f8596bcf9476d61e7d8a9", size = 222816, upload-time = "2026-03-17T10:30:38.891Z" }, + { url = "https://files.pythonhosted.org/packages/12/a6/1d3f6155fb0010ca68eba7fe48ca6c9da7385058b77a95848710ecf189b1/coverage-7.13.5-cp311-cp311-win_arm64.whl", hash = "sha256:bff95879c33ec8da99fc9b6fe345ddb5be6414b41d6d1ad1c8f188d26f36e028", size = 221483, upload-time = "2026-03-17T10:30:40.463Z" }, + { url = "https://files.pythonhosted.org/packages/a0/c3/a396306ba7db865bf96fc1fb3b7fd29bcbf3d829df642e77b13555163cd6/coverage-7.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:460cf0114c5016fa841214ff5564aa4864f11948da9440bc97e21ad1f4ba1e01", size = 219554, upload-time = "2026-03-17T10:30:42.208Z" }, + { url = 
"https://files.pythonhosted.org/packages/a6/16/a68a19e5384e93f811dccc51034b1fd0b865841c390e3c931dcc4699e035/coverage-7.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e223ce4b4ed47f065bfb123687686512e37629be25cc63728557ae7db261422", size = 219908, upload-time = "2026-03-17T10:30:43.906Z" }, + { url = "https://files.pythonhosted.org/packages/29/72/20b917c6793af3a5ceb7fb9c50033f3ec7865f2911a1416b34a7cfa0813b/coverage-7.13.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6e3370441f4513c6252bf042b9c36d22491142385049243253c7e48398a15a9f", size = 251419, upload-time = "2026-03-17T10:30:45.545Z" }, + { url = "https://files.pythonhosted.org/packages/8c/49/cd14b789536ac6a4778c453c6a2338bc0a2fb60c5a5a41b4008328b9acc1/coverage-7.13.5-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:03ccc709a17a1de074fb1d11f217342fb0d2b1582ed544f554fc9fc3f07e95f5", size = 254159, upload-time = "2026-03-17T10:30:47.204Z" }, + { url = "https://files.pythonhosted.org/packages/9d/00/7b0edcfe64e2ed4c0340dac14a52ad0f4c9bd0b8b5e531af7d55b703db7c/coverage-7.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3f4818d065964db3c1c66dc0fbdac5ac692ecbc875555e13374fdbe7eedb4376", size = 255270, upload-time = "2026-03-17T10:30:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/93/89/7ffc4ba0f5d0a55c1e84ea7cee39c9fc06af7b170513d83fbf3bbefce280/coverage-7.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:012d5319e66e9d5a218834642d6c35d265515a62f01157a45bcc036ecf947256", size = 257538, upload-time = "2026-03-17T10:30:50.77Z" }, + { url = "https://files.pythonhosted.org/packages/81/bd/73ddf85f93f7e6fa83e77ccecb6162d9415c79007b4bc124008a4995e4a7/coverage-7.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:8dd02af98971bdb956363e4827d34425cb3df19ee550ef92855b0acb9c7ce51c", size = 251821, upload-time = "2026-03-17T10:30:52.5Z" }, + { url = "https://files.pythonhosted.org/packages/a0/81/278aff4e8dec4926a0bcb9486320752811f543a3ce5b602cc7a29978d073/coverage-7.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f08fd75c50a760c7eb068ae823777268daaf16a80b918fa58eea888f8e3919f5", size = 253191, upload-time = "2026-03-17T10:30:54.543Z" }, + { url = "https://files.pythonhosted.org/packages/70/ee/fe1621488e2e0a58d7e94c4800f0d96f79671553488d401a612bebae324b/coverage-7.13.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:843ea8643cf967d1ac7e8ecd4bb00c99135adf4816c0c0593fdcc47b597fcf09", size = 251337, upload-time = "2026-03-17T10:30:56.663Z" }, + { url = "https://files.pythonhosted.org/packages/37/a6/f79fb37aa104b562207cc23cb5711ab6793608e246cae1e93f26b2236ed9/coverage-7.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:9d44d7aa963820b1b971dbecd90bfe5fe8f81cff79787eb6cca15750bd2f79b9", size = 255404, upload-time = "2026-03-17T10:30:58.427Z" }, + { url = "https://files.pythonhosted.org/packages/75/f0/ed15262a58ec81ce457ceb717b7f78752a1713556b19081b76e90896e8d4/coverage-7.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:7132bed4bd7b836200c591410ae7d97bf7ae8be6fc87d160b2bd881df929e7bf", size = 250903, upload-time = "2026-03-17T10:31:00.093Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e9/9129958f20e7e9d4d56d51d42ccf708d15cac355ff4ac6e736e97a9393d2/coverage-7.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a698e363641b98843c517817db75373c83254781426e94ada3197cabbc2c919c", size = 252780, upload-time = "2026-03-17T10:31:01.916Z" }, + { url = "https://files.pythonhosted.org/packages/a4/d7/0ad9b15812d81272db94379fe4c6df8fd17781cc7671fdfa30c76ba5ff7b/coverage-7.13.5-cp312-cp312-win32.whl", hash = "sha256:bdba0a6b8812e8c7df002d908a9a2ea3c36e92611b5708633c50869e6d922fdf", size = 222093, upload-time = "2026-03-17T10:31:03.642Z" 
}, + { url = "https://files.pythonhosted.org/packages/29/3d/821a9a5799fac2556bcf0bd37a70d1d11fa9e49784b6d22e92e8b2f85f18/coverage-7.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:d2c87e0c473a10bffe991502eac389220533024c8082ec1ce849f4218dded810", size = 222900, upload-time = "2026-03-17T10:31:05.651Z" }, + { url = "https://files.pythonhosted.org/packages/d4/fa/2238c2ad08e35cf4f020ea721f717e09ec3152aea75d191a7faf3ef009a8/coverage-7.13.5-cp312-cp312-win_arm64.whl", hash = "sha256:bf69236a9a81bdca3bff53796237aab096cdbf8d78a66ad61e992d9dac7eb2de", size = 221515, upload-time = "2026-03-17T10:31:07.293Z" }, + { url = "https://files.pythonhosted.org/packages/74/8c/74fedc9663dcf168b0a059d4ea756ecae4da77a489048f94b5f512a8d0b3/coverage-7.13.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ec4af212df513e399cf11610cc27063f1586419e814755ab362e50a85ea69c1", size = 219576, upload-time = "2026-03-17T10:31:09.045Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c9/44fb661c55062f0818a6ffd2685c67aa30816200d5f2817543717d4b92eb/coverage-7.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:941617e518602e2d64942c88ec8499f7fbd49d3f6c4327d3a71d43a1973032f3", size = 219942, upload-time = "2026-03-17T10:31:10.708Z" }, + { url = "https://files.pythonhosted.org/packages/5f/13/93419671cee82b780bab7ea96b67c8ef448f5f295f36bf5031154ec9a790/coverage-7.13.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:da305e9937617ee95c2e39d8ff9f040e0487cbf1ac174f777ed5eddd7a7c1f26", size = 250935, upload-time = "2026-03-17T10:31:12.392Z" }, + { url = "https://files.pythonhosted.org/packages/ac/68/1666e3a4462f8202d836920114fa7a5ee9275d1fa45366d336c551a162dd/coverage-7.13.5-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:78e696e1cc714e57e8b25760b33a8b1026b7048d270140d25dafe1b0a1ee05a3", size = 253541, upload-time = "2026-03-17T10:31:14.247Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/5e/3ee3b835647be646dcf3c65a7c6c18f87c27326a858f72ab22c12730773d/coverage-7.13.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:02ca0eed225b2ff301c474aeeeae27d26e2537942aa0f87491d3e147e784a82b", size = 254780, upload-time = "2026-03-17T10:31:16.193Z" }, + { url = "https://files.pythonhosted.org/packages/44/b3/cb5bd1a04cfcc49ede6cd8409d80bee17661167686741e041abc7ee1b9a9/coverage-7.13.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:04690832cbea4e4663d9149e05dba142546ca05cb1848816760e7f58285c970a", size = 256912, upload-time = "2026-03-17T10:31:17.89Z" }, + { url = "https://files.pythonhosted.org/packages/1b/66/c1dceb7b9714473800b075f5c8a84f4588f887a90eb8645282031676e242/coverage-7.13.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0590e44dd2745c696a778f7bab6aa95256de2cbc8b8cff4f7db8ff09813d6969", size = 251165, upload-time = "2026-03-17T10:31:19.605Z" }, + { url = "https://files.pythonhosted.org/packages/b7/62/5502b73b97aa2e53ea22a39cf8649ff44827bef76d90bf638777daa27a9d/coverage-7.13.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d7cfad2d6d81dd298ab6b89fe72c3b7b05ec7544bdda3b707ddaecff8d25c161", size = 252908, upload-time = "2026-03-17T10:31:21.312Z" }, + { url = "https://files.pythonhosted.org/packages/7d/37/7792c2d69854397ca77a55c4646e5897c467928b0e27f2d235d83b5d08c6/coverage-7.13.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e092b9499de38ae0fbfbc603a74660eb6ff3e869e507b50d85a13b6db9863e15", size = 250873, upload-time = "2026-03-17T10:31:23.565Z" }, + { url = "https://files.pythonhosted.org/packages/a3/23/bc866fb6163be52a8a9e5d708ba0d3b1283c12158cefca0a8bbb6e247a43/coverage-7.13.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:48c39bc4a04d983a54a705a6389512883d4a3b9862991b3617d547940e9f52b1", size = 255030, upload-time = "2026-03-17T10:31:25.58Z" }, + { url 
= "https://files.pythonhosted.org/packages/7d/8b/ef67e1c222ef49860701d346b8bbb70881bef283bd5f6cbba68a39a086c7/coverage-7.13.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2d3807015f138ffea1ed9afeeb8624fd781703f2858b62a8dd8da5a0994c57b6", size = 250694, upload-time = "2026-03-17T10:31:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/46/0d/866d1f74f0acddbb906db212e096dee77a8e2158ca5e6bb44729f9d93298/coverage-7.13.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee2aa19e03161671ec964004fb74b2257805d9710bf14a5c704558b9d8dbaf17", size = 252469, upload-time = "2026-03-17T10:31:29.472Z" }, + { url = "https://files.pythonhosted.org/packages/7a/f5/be742fec31118f02ce42b21c6af187ad6a344fed546b56ca60caacc6a9a0/coverage-7.13.5-cp313-cp313-win32.whl", hash = "sha256:ce1998c0483007608c8382f4ff50164bfc5bd07a2246dd272aa4043b75e61e85", size = 222112, upload-time = "2026-03-17T10:31:31.526Z" }, + { url = "https://files.pythonhosted.org/packages/66/40/7732d648ab9d069a46e686043241f01206348e2bbf128daea85be4d6414b/coverage-7.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:631efb83f01569670a5e866ceb80fe483e7c159fac6f167e6571522636104a0b", size = 222923, upload-time = "2026-03-17T10:31:33.633Z" }, + { url = "https://files.pythonhosted.org/packages/48/af/fea819c12a095781f6ccd504890aaddaf88b8fab263c4940e82c7b770124/coverage-7.13.5-cp313-cp313-win_arm64.whl", hash = "sha256:f4cd16206ad171cbc2470dbea9103cf9a7607d5fe8c242fdf1edf36174020664", size = 221540, upload-time = "2026-03-17T10:31:35.445Z" }, + { url = "https://files.pythonhosted.org/packages/23/d2/17879af479df7fbbd44bd528a31692a48f6b25055d16482fdf5cdb633805/coverage-7.13.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0428cbef5783ad91fe240f673cc1f76b25e74bbfe1a13115e4aa30d3f538162d", size = 220262, upload-time = "2026-03-17T10:31:37.184Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/4c/d20e554f988c8f91d6a02c5118f9abbbf73a8768a3048cb4962230d5743f/coverage-7.13.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e0b216a19534b2427cc201a26c25da4a48633f29a487c61258643e89d28200c0", size = 220617, upload-time = "2026-03-17T10:31:39.245Z" }, + { url = "https://files.pythonhosted.org/packages/29/9c/f9f5277b95184f764b24e7231e166dfdb5780a46d408a2ac665969416d61/coverage-7.13.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:972a9cd27894afe4bc2b1480107054e062df08e671df7c2f18c205e805ccd806", size = 261912, upload-time = "2026-03-17T10:31:41.324Z" }, + { url = "https://files.pythonhosted.org/packages/d5/f6/7f1ab39393eeb50cfe4747ae8ef0e4fc564b989225aa1152e13a180d74f8/coverage-7.13.5-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4b59148601efcd2bac8c4dbf1f0ad6391693ccf7a74b8205781751637076aee3", size = 263987, upload-time = "2026-03-17T10:31:43.724Z" }, + { url = "https://files.pythonhosted.org/packages/a0/d7/62c084fb489ed9c6fbdf57e006752e7c516ea46fd690e5ed8b8617c7d52e/coverage-7.13.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:505d7083c8b0c87a8fa8c07370c285847c1f77739b22e299ad75a6af6c32c5c9", size = 266416, upload-time = "2026-03-17T10:31:45.769Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f6/df63d8660e1a0bff6125947afda112a0502736f470d62ca68b288ea762d8/coverage-7.13.5-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:60365289c3741e4db327e7baff2a4aaacf22f788e80fa4683393891b70a89fbd", size = 267558, upload-time = "2026-03-17T10:31:48.293Z" }, + { url = "https://files.pythonhosted.org/packages/5b/02/353ca81d36779bd108f6d384425f7139ac3c58c750dcfaafe5d0bee6436b/coverage-7.13.5-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:1b88c69c8ef5d4b6fe7dea66d6636056a0f6a7527c440e890cf9259011f5e606", size = 261163, upload-time = "2026-03-17T10:31:50.125Z" }, + { url = "https://files.pythonhosted.org/packages/2c/16/2e79106d5749bcaf3aee6d309123548e3276517cd7851faa8da213bc61bf/coverage-7.13.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5b13955d31d1633cf9376908089b7cebe7d15ddad7aeaabcbe969a595a97e95e", size = 263981, upload-time = "2026-03-17T10:31:51.961Z" }, + { url = "https://files.pythonhosted.org/packages/29/c7/c29e0c59ffa6942030ae6f50b88ae49988e7e8da06de7ecdbf49c6d4feae/coverage-7.13.5-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:f70c9ab2595c56f81a89620e22899eea8b212a4041bd728ac6f4a28bf5d3ddd0", size = 261604, upload-time = "2026-03-17T10:31:53.872Z" }, + { url = "https://files.pythonhosted.org/packages/40/48/097cdc3db342f34006a308ab41c3a7c11c3f0d84750d340f45d88a782e00/coverage-7.13.5-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:084b84a8c63e8d6fc7e3931b316a9bcafca1458d753c539db82d31ed20091a87", size = 265321, upload-time = "2026-03-17T10:31:55.997Z" }, + { url = "https://files.pythonhosted.org/packages/bb/1f/4994af354689e14fd03a75f8ec85a9a68d94e0188bbdab3fc1516b55e512/coverage-7.13.5-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:ad14385487393e386e2ea988b09d62dd42c397662ac2dabc3832d71253eee479", size = 260502, upload-time = "2026-03-17T10:31:58.308Z" }, + { url = "https://files.pythonhosted.org/packages/22/c6/9bb9ef55903e628033560885f5c31aa227e46878118b63ab15dc7ba87797/coverage-7.13.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7f2c47b36fe7709a6e83bfadf4eefb90bd25fbe4014d715224c4316f808e59a2", size = 262688, upload-time = "2026-03-17T10:32:00.141Z" }, + { url = "https://files.pythonhosted.org/packages/14/4f/f5df9007e50b15e53e01edea486814783a7f019893733d9e4d6caad75557/coverage-7.13.5-cp313-cp313t-win32.whl", hash = "sha256:67e9bc5449801fad0e5dff329499fb090ba4c5800b86805c80617b4e29809b2a", size = 222788, upload-time = 
"2026-03-17T10:32:02.246Z" }, + { url = "https://files.pythonhosted.org/packages/e1/98/aa7fccaa97d0f3192bec013c4e6fd6d294a6ed44b640e6bb61f479e00ed5/coverage-7.13.5-cp313-cp313t-win_amd64.whl", hash = "sha256:da86cdcf10d2519e10cabb8ac2de03da1bcb6e4853790b7fbd48523332e3a819", size = 223851, upload-time = "2026-03-17T10:32:04.416Z" }, + { url = "https://files.pythonhosted.org/packages/3d/8b/e5c469f7352651e5f013198e9e21f97510b23de957dd06a84071683b4b60/coverage-7.13.5-cp313-cp313t-win_arm64.whl", hash = "sha256:0ecf12ecb326fe2c339d93fc131816f3a7367d223db37817208905c89bded911", size = 222104, upload-time = "2026-03-17T10:32:06.65Z" }, + { url = "https://files.pythonhosted.org/packages/8e/77/39703f0d1d4b478bfd30191d3c14f53caf596fac00efb3f8f6ee23646439/coverage-7.13.5-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:fbabfaceaeb587e16f7008f7795cd80d20ec548dc7f94fbb0d4ec2e038ce563f", size = 219621, upload-time = "2026-03-17T10:32:08.589Z" }, + { url = "https://files.pythonhosted.org/packages/e2/3e/51dff36d99ae14639a133d9b164d63e628532e2974d8b1edb99dd1ebc733/coverage-7.13.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9bb2a28101a443669a423b665939381084412b81c3f8c0fcfbac57f4e30b5b8e", size = 219953, upload-time = "2026-03-17T10:32:10.507Z" }, + { url = "https://files.pythonhosted.org/packages/6a/6c/1f1917b01eb647c2f2adc9962bd66c79eb978951cab61bdc1acab3290c07/coverage-7.13.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bd3a2fbc1c6cccb3c5106140d87cc6a8715110373ef42b63cf5aea29df8c217a", size = 250992, upload-time = "2026-03-17T10:32:12.41Z" }, + { url = "https://files.pythonhosted.org/packages/22/e5/06b1f88f42a5a99df42ce61208bdec3bddb3d261412874280a19796fc09c/coverage-7.13.5-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6c36ddb64ed9d7e496028d1d00dfec3e428e0aabf4006583bb1839958d280510", size = 253503, upload-time = "2026-03-17T10:32:14.449Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/28/2a148a51e5907e504fa7b85490277734e6771d8844ebcc48764a15e28155/coverage-7.13.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:380e8e9084d8eb38db3a9176a1a4f3c0082c3806fa0dc882d1d87abc3c789247", size = 254852, upload-time = "2026-03-17T10:32:16.56Z" }, + { url = "https://files.pythonhosted.org/packages/61/77/50e8d3d85cc0b7ebe09f30f151d670e302c7ff4a1bf6243f71dd8b0981fa/coverage-7.13.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e808af52a0513762df4d945ea164a24b37f2f518cbe97e03deaa0ee66139b4d6", size = 257161, upload-time = "2026-03-17T10:32:19.004Z" }, + { url = "https://files.pythonhosted.org/packages/3b/c4/b5fd1d4b7bf8d0e75d997afd3925c59ba629fc8616f1b3aae7605132e256/coverage-7.13.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e301d30dd7e95ae068671d746ba8c34e945a82682e62918e41b2679acd2051a0", size = 251021, upload-time = "2026-03-17T10:32:21.344Z" }, + { url = "https://files.pythonhosted.org/packages/f8/66/6ea21f910e92d69ef0b1c3346ea5922a51bad4446c9126db2ae96ee24c4c/coverage-7.13.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:800bc829053c80d240a687ceeb927a94fd108bbdc68dfbe505d0d75ab578a882", size = 252858, upload-time = "2026-03-17T10:32:23.506Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ea/879c83cb5d61aa2a35fb80e72715e92672daef8191b84911a643f533840c/coverage-7.13.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:0b67af5492adb31940ee418a5a655c28e48165da5afab8c7fa6fd72a142f8740", size = 250823, upload-time = "2026-03-17T10:32:25.516Z" }, + { url = "https://files.pythonhosted.org/packages/8a/fb/616d95d3adb88b9803b275580bdeee8bd1b69a886d057652521f83d7322f/coverage-7.13.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c9136ff29c3a91e25b1d1552b5308e53a1e0653a23e53b6366d7c2dcbbaf8a16", size = 255099, upload-time = "2026-03-17T10:32:27.944Z" }, + { url 
= "https://files.pythonhosted.org/packages/1c/93/25e6917c90ec1c9a56b0b26f6cad6408e5f13bb6b35d484a0d75c9cf000d/coverage-7.13.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:cff784eef7f0b8f6cb28804fbddcfa99f89efe4cc35fb5627e3ac58f91ed3ac0", size = 250638, upload-time = "2026-03-17T10:32:29.914Z" }, + { url = "https://files.pythonhosted.org/packages/fc/7b/dc1776b0464145a929deed214aef9fb1493f159b59ff3c7eeeedf91eddd0/coverage-7.13.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:68a4953be99b17ac3c23b6efbc8a38330d99680c9458927491d18700ef23ded0", size = 252295, upload-time = "2026-03-17T10:32:31.981Z" }, + { url = "https://files.pythonhosted.org/packages/ea/fb/99cbbc56a26e07762a2740713f3c8f9f3f3106e3a3dd8cc4474954bccd34/coverage-7.13.5-cp314-cp314-win32.whl", hash = "sha256:35a31f2b1578185fbe6aa2e74cea1b1d0bbf4c552774247d9160d29b80ed56cc", size = 222360, upload-time = "2026-03-17T10:32:34.233Z" }, + { url = "https://files.pythonhosted.org/packages/8d/b7/4758d4f73fb536347cc5e4ad63662f9d60ba9118cb6785e9616b2ce5d7fa/coverage-7.13.5-cp314-cp314-win_amd64.whl", hash = "sha256:2aa055ae1857258f9e0045be26a6d62bdb47a72448b62d7b55f4820f361a2633", size = 223174, upload-time = "2026-03-17T10:32:36.369Z" }, + { url = "https://files.pythonhosted.org/packages/2c/f2/24d84e1dfe70f8ac9fdf30d338239860d0d1d5da0bda528959d0ebc9da28/coverage-7.13.5-cp314-cp314-win_arm64.whl", hash = "sha256:1b11eef33edeae9d142f9b4358edb76273b3bfd30bc3df9a4f95d0e49caf94e8", size = 221739, upload-time = "2026-03-17T10:32:38.736Z" }, + { url = "https://files.pythonhosted.org/packages/60/5b/4a168591057b3668c2428bff25dd3ebc21b629d666d90bcdfa0217940e84/coverage-7.13.5-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:10a0c37f0b646eaff7cce1874c31d1f1ccb297688d4c747291f4f4c70741cc8b", size = 220351, upload-time = "2026-03-17T10:32:41.196Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/21/1fd5c4dbfe4a58b6b99649125635df46decdfd4a784c3cd6d410d303e370/coverage-7.13.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b5db73ba3c41c7008037fa731ad5459fc3944cb7452fc0aa9f822ad3533c583c", size = 220612, upload-time = "2026-03-17T10:32:43.204Z" }, + { url = "https://files.pythonhosted.org/packages/d6/fe/2a924b3055a5e7e4512655a9d4609781b0d62334fa0140c3e742926834e2/coverage-7.13.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:750db93a81e3e5a9831b534be7b1229df848b2e125a604fe6651e48aa070e5f9", size = 261985, upload-time = "2026-03-17T10:32:45.514Z" }, + { url = "https://files.pythonhosted.org/packages/d7/0d/c8928f2bd518c45990fe1a2ab8db42e914ef9b726c975facc4282578c3eb/coverage-7.13.5-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ddb4f4a5479f2539644be484da179b653273bca1a323947d48ab107b3ed1f29", size = 264107, upload-time = "2026-03-17T10:32:47.971Z" }, + { url = "https://files.pythonhosted.org/packages/ef/ae/4ae35bbd9a0af9d820362751f0766582833c211224b38665c0f8de3d487f/coverage-7.13.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8a7a2049c14f413163e2bdabd37e41179b1d1ccb10ffc6ccc4b7a718429c607", size = 266513, upload-time = "2026-03-17T10:32:50.1Z" }, + { url = "https://files.pythonhosted.org/packages/9c/20/d326174c55af36f74eac6ae781612d9492f060ce8244b570bb9d50d9d609/coverage-7.13.5-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1c85e0b6c05c592ea6d8768a66a254bfb3874b53774b12d4c89c481eb78cb90", size = 267650, upload-time = "2026-03-17T10:32:52.391Z" }, + { url = "https://files.pythonhosted.org/packages/7a/5e/31484d62cbd0eabd3412e30d74386ece4a0837d4f6c3040a653878bfc019/coverage-7.13.5-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:777c4d1eff1b67876139d24288aaf1817f6c03d6bae9c5cc8d27b83bcfe38fe3", size = 261089, upload-time = "2026-03-17T10:32:54.544Z" }, + { url = "https://files.pythonhosted.org/packages/e9/d8/49a72d6de146eebb0b7e48cc0f4bc2c0dd858e3d4790ab2b39a2872b62bd/coverage-7.13.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6697e29b93707167687543480a40f0db8f356e86d9f67ddf2e37e2dfd91a9dab", size = 263982, upload-time = "2026-03-17T10:32:56.803Z" }, + { url = "https://files.pythonhosted.org/packages/06/3b/0351f1bd566e6e4dd39e978efe7958bde1d32f879e85589de147654f57bb/coverage-7.13.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:8fdf453a942c3e4d99bd80088141c4c6960bb232c409d9c3558e2dbaa3998562", size = 261579, upload-time = "2026-03-17T10:32:59.466Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ce/796a2a2f4017f554d7810f5c573449b35b1e46788424a548d4d19201b222/coverage-7.13.5-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:32ca0c0114c9834a43f045a87dcebd69d108d8ffb666957ea65aa132f50332e2", size = 265316, upload-time = "2026-03-17T10:33:01.847Z" }, + { url = "https://files.pythonhosted.org/packages/3d/16/d5ae91455541d1a78bc90abf495be600588aff8f6db5c8b0dae739fa39c9/coverage-7.13.5-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:8769751c10f339021e2638cd354e13adeac54004d1941119b2c96fe5276d45ea", size = 260427, upload-time = "2026-03-17T10:33:03.945Z" }, + { url = "https://files.pythonhosted.org/packages/48/11/07f413dba62db21fb3fad5d0de013a50e073cc4e2dc4306e770360f6dfc8/coverage-7.13.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cec2d83125531bd153175354055cdb7a09987af08a9430bd173c937c6d0fba2a", size = 262745, upload-time = "2026-03-17T10:33:06.285Z" }, + { url = "https://files.pythonhosted.org/packages/91/15/d792371332eb4663115becf4bad47e047d16234b1aff687b1b18c58d60ae/coverage-7.13.5-cp314-cp314t-win32.whl", hash = "sha256:0cd9ed7a8b181775459296e402ca4fb27db1279740a24e93b3b41942ebe4b215", size = 223146, upload-time = 
"2026-03-17T10:33:08.756Z" }, + { url = "https://files.pythonhosted.org/packages/db/51/37221f59a111dca5e85be7dbf09696323b5b9f13ff65e0641d535ed06ea8/coverage-7.13.5-cp314-cp314t-win_amd64.whl", hash = "sha256:301e3b7dfefecaca37c9f1aa6f0049b7d4ab8dd933742b607765d757aca77d43", size = 224254, upload-time = "2026-03-17T10:33:11.174Z" }, + { url = "https://files.pythonhosted.org/packages/54/83/6acacc889de8987441aa7d5adfbdbf33d288dad28704a67e574f1df9bcbb/coverage-7.13.5-cp314-cp314t-win_arm64.whl", hash = "sha256:9dacc2ad679b292709e0f5fc1ac74a6d4d5562e424058962c7bb0c658ad25e45", size = 222276, upload-time = "2026-03-17T10:33:13.466Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ee/a4cf96b8ce1e566ed238f0659ac2d3f007ed1d14b181bcb684e19561a69a/coverage-7.13.5-py3-none-any.whl", hash = "sha256:34b02417cf070e173989b3db962f7ed56d2f644307b2cf9d5a0f258e13084a61", size = 211346, upload-time = "2026-03-17T10:33:15.691Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11'" }, +] + +[[package]] +name = "cryptography" +version = "46.0.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/47/93/ac8f3d5ff04d54bc814e961a43ae5b0b146154c89c61b47bb07557679b18/cryptography-46.0.7.tar.gz", hash = "sha256:e4cfd68c5f3e0bfdad0d38e023239b96a2fe84146481852dffbcca442c245aa5", size = 750652, upload-time = "2026-04-08T01:57:54.692Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/5d/4a8f770695d73be252331e60e526291e3df0c9b27556a90a6b47bccca4c2/cryptography-46.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:ea42cbe97209df307fdc3b155f1b6fa2577c0defa8f1f7d3be7d31d189108ad4", size = 7179869, upload-time = "2026-04-08T01:56:17.157Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/45/6d80dc379b0bbc1f9d1e429f42e4cb9e1d319c7a8201beffd967c516ea01/cryptography-46.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b36a4695e29fe69215d75960b22577197aca3f7a25b9cf9d165dcfe9d80bc325", size = 4275492, upload-time = "2026-04-08T01:56:19.36Z" }, + { url = "https://files.pythonhosted.org/packages/4a/9a/1765afe9f572e239c3469f2cb429f3ba7b31878c893b246b4b2994ffe2fe/cryptography-46.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ad9ef796328c5e3c4ceed237a183f5d41d21150f972455a9d926593a1dcb308", size = 4426670, upload-time = "2026-04-08T01:56:21.415Z" }, + { url = "https://files.pythonhosted.org/packages/8f/3e/af9246aaf23cd4ee060699adab1e47ced3f5f7e7a8ffdd339f817b446462/cryptography-46.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:73510b83623e080a2c35c62c15298096e2a5dc8d51c3b4e1740211839d0dea77", size = 4280275, upload-time = "2026-04-08T01:56:23.539Z" }, + { url = "https://files.pythonhosted.org/packages/0f/54/6bbbfc5efe86f9d71041827b793c24811a017c6ac0fd12883e4caa86b8ed/cryptography-46.0.7-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cbd5fb06b62bd0721e1170273d3f4d5a277044c47ca27ee257025146c34cbdd1", size = 4928402, upload-time = "2026-04-08T01:56:25.624Z" }, + { url = "https://files.pythonhosted.org/packages/2d/cf/054b9d8220f81509939599c8bdbc0c408dbd2bdd41688616a20731371fe0/cryptography-46.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:420b1e4109cc95f0e5700eed79908cef9268265c773d3a66f7af1eef53d409ef", size = 4459985, upload-time = "2026-04-08T01:56:27.309Z" }, + { url = "https://files.pythonhosted.org/packages/f9/46/4e4e9c6040fb01c7467d47217d2f882daddeb8828f7df800cb806d8a2288/cryptography-46.0.7-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:24402210aa54baae71d99441d15bb5a1919c195398a87b563df84468160a65de", size = 3990652, upload-time = "2026-04-08T01:56:29.095Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/5f/313586c3be5a2fbe87e4c9a254207b860155a8e1f3cca99f9910008e7d08/cryptography-46.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8a469028a86f12eb7d2fe97162d0634026d92a21f3ae0ac87ed1c4a447886c83", size = 4279805, upload-time = "2026-04-08T01:56:30.928Z" }, + { url = "https://files.pythonhosted.org/packages/69/33/60dfc4595f334a2082749673386a4d05e4f0cf4df8248e63b2c3437585f2/cryptography-46.0.7-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9694078c5d44c157ef3162e3bf3946510b857df5a3955458381d1c7cfc143ddb", size = 4892883, upload-time = "2026-04-08T01:56:32.614Z" }, + { url = "https://files.pythonhosted.org/packages/c7/0b/333ddab4270c4f5b972f980adef4faa66951a4aaf646ca067af597f15563/cryptography-46.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:42a1e5f98abb6391717978baf9f90dc28a743b7d9be7f0751a6f56a75d14065b", size = 4459756, upload-time = "2026-04-08T01:56:34.306Z" }, + { url = "https://files.pythonhosted.org/packages/d2/14/633913398b43b75f1234834170947957c6b623d1701ffc7a9600da907e89/cryptography-46.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91bbcb08347344f810cbe49065914fe048949648f6bd5c2519f34619142bbe85", size = 4410244, upload-time = "2026-04-08T01:56:35.977Z" }, + { url = "https://files.pythonhosted.org/packages/10/f2/19ceb3b3dc14009373432af0c13f46aa08e3ce334ec6eff13492e1812ccd/cryptography-46.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5d1c02a14ceb9148cc7816249f64f623fbfee39e8c03b3650d842ad3f34d637e", size = 4674868, upload-time = "2026-04-08T01:56:38.034Z" }, + { url = "https://files.pythonhosted.org/packages/1a/bb/a5c213c19ee94b15dfccc48f363738633a493812687f5567addbcbba9f6f/cryptography-46.0.7-cp311-abi3-win32.whl", hash = "sha256:d23c8ca48e44ee015cd0a54aeccdf9f09004eba9fc96f38c911011d9ff1bd457", size = 3026504, upload-time = "2026-04-08T01:56:39.666Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/02/7788f9fefa1d060ca68717c3901ae7fffa21ee087a90b7f23c7a603c32ae/cryptography-46.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:397655da831414d165029da9bc483bed2fe0e75dde6a1523ec2fe63f3c46046b", size = 3488363, upload-time = "2026-04-08T01:56:41.893Z" }, + { url = "https://files.pythonhosted.org/packages/7b/56/15619b210e689c5403bb0540e4cb7dbf11a6bf42e483b7644e471a2812b3/cryptography-46.0.7-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:d151173275e1728cf7839aaa80c34fe550c04ddb27b34f48c232193df8db5842", size = 7119671, upload-time = "2026-04-08T01:56:44Z" }, + { url = "https://files.pythonhosted.org/packages/74/66/e3ce040721b0b5599e175ba91ab08884c75928fbeb74597dd10ef13505d2/cryptography-46.0.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:db0f493b9181c7820c8134437eb8b0b4792085d37dbb24da050476ccb664e59c", size = 4268551, upload-time = "2026-04-08T01:56:46.071Z" }, + { url = "https://files.pythonhosted.org/packages/03/11/5e395f961d6868269835dee1bafec6a1ac176505a167f68b7d8818431068/cryptography-46.0.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ebd6daf519b9f189f85c479427bbd6e9c9037862cf8fe89ee35503bd209ed902", size = 4408887, upload-time = "2026-04-08T01:56:47.718Z" }, + { url = "https://files.pythonhosted.org/packages/40/53/8ed1cf4c3b9c8e611e7122fb56f1c32d09e1fff0f1d77e78d9ff7c82653e/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:b7b412817be92117ec5ed95f880defe9cf18a832e8cafacf0a22337dc1981b4d", size = 4271354, upload-time = "2026-04-08T01:56:49.312Z" }, + { url = "https://files.pythonhosted.org/packages/50/46/cf71e26025c2e767c5609162c866a78e8a2915bbcfa408b7ca495c6140c4/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:fbfd0e5f273877695cb93baf14b185f4878128b250cc9f8e617ea0c025dfb022", size = 4905845, upload-time = "2026-04-08T01:56:50.916Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/ea/01276740375bac6249d0a971ebdf6b4dc9ead0ee0a34ef3b5a88c1a9b0d4/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:ffca7aa1d00cf7d6469b988c581598f2259e46215e0140af408966a24cf086ce", size = 4444641, upload-time = "2026-04-08T01:56:52.882Z" }, + { url = "https://files.pythonhosted.org/packages/3d/4c/7d258f169ae71230f25d9f3d06caabcff8c3baf0978e2b7d65e0acac3827/cryptography-46.0.7-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:60627cf07e0d9274338521205899337c5d18249db56865f943cbe753aa96f40f", size = 3967749, upload-time = "2026-04-08T01:56:54.597Z" }, + { url = "https://files.pythonhosted.org/packages/b5/2a/2ea0767cad19e71b3530e4cad9605d0b5e338b6a1e72c37c9c1ceb86c333/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:80406c3065e2c55d7f49a9550fe0c49b3f12e5bfff5dedb727e319e1afb9bf99", size = 4270942, upload-time = "2026-04-08T01:56:56.416Z" }, + { url = "https://files.pythonhosted.org/packages/41/3d/fe14df95a83319af25717677e956567a105bb6ab25641acaa093db79975d/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:c5b1ccd1239f48b7151a65bc6dd54bcfcc15e028c8ac126d3fada09db0e07ef1", size = 4871079, upload-time = "2026-04-08T01:56:58.31Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/4a479e0f36f8f378d397f4eab4c850b4ffb79a2f0d58704b8fa0703ddc11/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:d5f7520159cd9c2154eb61eb67548ca05c5774d39e9c2c4339fd793fe7d097b2", size = 4443999, upload-time = "2026-04-08T01:57:00.508Z" }, + { url = "https://files.pythonhosted.org/packages/28/17/b59a741645822ec6d04732b43c5d35e4ef58be7bfa84a81e5ae6f05a1d33/cryptography-46.0.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fcd8eac50d9138c1d7fc53a653ba60a2bee81a505f9f8850b6b2888555a45d0e", size = 4399191, upload-time = "2026-04-08T01:57:02.654Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/6a/bb2e166d6d0e0955f1e9ff70f10ec4b2824c9cfcdb4da772c7dd69cc7d80/cryptography-46.0.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:65814c60f8cc400c63131584e3e1fad01235edba2614b61fbfbfa954082db0ee", size = 4655782, upload-time = "2026-04-08T01:57:04.592Z" }, + { url = "https://files.pythonhosted.org/packages/95/b6/3da51d48415bcb63b00dc17c2eff3a651b7c4fed484308d0f19b30e8cb2c/cryptography-46.0.7-cp314-cp314t-win32.whl", hash = "sha256:fdd1736fed309b4300346f88f74cd120c27c56852c3838cab416e7a166f67298", size = 3002227, upload-time = "2026-04-08T01:57:06.91Z" }, + { url = "https://files.pythonhosted.org/packages/32/a8/9f0e4ed57ec9cebe506e58db11ae472972ecb0c659e4d52bbaee80ca340a/cryptography-46.0.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e06acf3c99be55aa3b516397fe42f5855597f430add9c17fa46bf2e0fb34c9bb", size = 3475332, upload-time = "2026-04-08T01:57:08.807Z" }, + { url = "https://files.pythonhosted.org/packages/a7/7f/cd42fc3614386bc0c12f0cb3c4ae1fc2bbca5c9662dfed031514911d513d/cryptography-46.0.7-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:462ad5cb1c148a22b2e3bcc5ad52504dff325d17daf5df8d88c17dda1f75f2a4", size = 7165618, upload-time = "2026-04-08T01:57:10.645Z" }, + { url = "https://files.pythonhosted.org/packages/a5/d0/36a49f0262d2319139d2829f773f1b97ef8aef7f97e6e5bd21455e5a8fb5/cryptography-46.0.7-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:84d4cced91f0f159a7ddacad249cc077e63195c36aac40b4150e7a57e84fffe7", size = 4270628, upload-time = "2026-04-08T01:57:12.885Z" }, + { url = "https://files.pythonhosted.org/packages/8a/6c/1a42450f464dda6ffbe578a911f773e54dd48c10f9895a23a7e88b3e7db5/cryptography-46.0.7-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:128c5edfe5e5938b86b03941e94fac9ee793a94452ad1365c9fc3f4f62216832", size = 4415405, upload-time = "2026-04-08T01:57:14.923Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/92/4ed714dbe93a066dc1f4b4581a464d2d7dbec9046f7c8b7016f5286329e2/cryptography-46.0.7-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5e51be372b26ef4ba3de3c167cd3d1022934bc838ae9eaad7e644986d2a3d163", size = 4272715, upload-time = "2026-04-08T01:57:16.638Z" }, + { url = "https://files.pythonhosted.org/packages/b7/e6/a26b84096eddd51494bba19111f8fffe976f6a09f132706f8f1bf03f51f7/cryptography-46.0.7-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cdf1a610ef82abb396451862739e3fc93b071c844399e15b90726ef7470eeaf2", size = 4918400, upload-time = "2026-04-08T01:57:19.021Z" }, + { url = "https://files.pythonhosted.org/packages/c7/08/ffd537b605568a148543ac3c2b239708ae0bd635064bab41359252ef88ed/cryptography-46.0.7-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1d25aee46d0c6f1a501adcddb2d2fee4b979381346a78558ed13e50aa8a59067", size = 4450634, upload-time = "2026-04-08T01:57:21.185Z" }, + { url = "https://files.pythonhosted.org/packages/16/01/0cd51dd86ab5b9befe0d031e276510491976c3a80e9f6e31810cce46c4ad/cryptography-46.0.7-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:cdfbe22376065ffcf8be74dc9a909f032df19bc58a699456a21712d6e5eabfd0", size = 3985233, upload-time = "2026-04-08T01:57:22.862Z" }, + { url = "https://files.pythonhosted.org/packages/92/49/819d6ed3a7d9349c2939f81b500a738cb733ab62fbecdbc1e38e83d45e12/cryptography-46.0.7-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:abad9dac36cbf55de6eb49badd4016806b3165d396f64925bf2999bcb67837ba", size = 4271955, upload-time = "2026-04-08T01:57:24.814Z" }, + { url = "https://files.pythonhosted.org/packages/80/07/ad9b3c56ebb95ed2473d46df0847357e01583f4c52a85754d1a55e29e4d0/cryptography-46.0.7-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:935ce7e3cfdb53e3536119a542b839bb94ec1ad081013e9ab9b7cfd478b05006", size = 4879888, upload-time = "2026-04-08T01:57:26.88Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/c7/201d3d58f30c4c2bdbe9b03844c291feb77c20511cc3586daf7edc12a47b/cryptography-46.0.7-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:35719dc79d4730d30f1c2b6474bd6acda36ae2dfae1e3c16f2051f215df33ce0", size = 4449961, upload-time = "2026-04-08T01:57:29.068Z" }, + { url = "https://files.pythonhosted.org/packages/a5/ef/649750cbf96f3033c3c976e112265c33906f8e462291a33d77f90356548c/cryptography-46.0.7-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:7bbc6ccf49d05ac8f7d7b5e2e2c33830d4fe2061def88210a126d130d7f71a85", size = 4401696, upload-time = "2026-04-08T01:57:31.029Z" }, + { url = "https://files.pythonhosted.org/packages/41/52/a8908dcb1a389a459a29008c29966c1d552588d4ae6d43f3a1a4512e0ebe/cryptography-46.0.7-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a1529d614f44b863a7b480c6d000fe93b59acee9c82ffa027cfadc77521a9f5e", size = 4664256, upload-time = "2026-04-08T01:57:33.144Z" }, + { url = "https://files.pythonhosted.org/packages/4b/fa/f0ab06238e899cc3fb332623f337a7364f36f4bb3f2534c2bb95a35b132c/cryptography-46.0.7-cp38-abi3-win32.whl", hash = "sha256:f247c8c1a1fb45e12586afbb436ef21ff1e80670b2861a90353d9b025583d246", size = 3013001, upload-time = "2026-04-08T01:57:34.933Z" }, + { url = "https://files.pythonhosted.org/packages/d2/f1/00ce3bde3ca542d1acd8f8cfa38e446840945aa6363f9b74746394b14127/cryptography-46.0.7-cp38-abi3-win_amd64.whl", hash = "sha256:506c4ff91eff4f82bdac7633318a526b1d1309fc07ca76a3ad182cb5b686d6d3", size = 3472985, upload-time = "2026-04-08T01:57:36.714Z" }, + { url = "https://files.pythonhosted.org/packages/63/0c/dca8abb64e7ca4f6b2978769f6fea5ad06686a190cec381f0a796fdcaaba/cryptography-46.0.7-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:fc9ab8856ae6cf7c9358430e49b368f3108f050031442eaeb6b9d87e4dcf4e4f", size = 3476879, upload-time = "2026-04-08T01:57:38.664Z" }, + { url = 
"https://files.pythonhosted.org/packages/3a/ea/075aac6a84b7c271578d81a2f9968acb6e273002408729f2ddff517fed4a/cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d3b99c535a9de0adced13d159c5a9cf65c325601aa30f4be08afd680643e9c15", size = 4219700, upload-time = "2026-04-08T01:57:40.625Z" }, + { url = "https://files.pythonhosted.org/packages/6c/7b/1c55db7242b5e5612b29fc7a630e91ee7a6e3c8e7bf5406d22e206875fbd/cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d02c738dacda7dc2a74d1b2b3177042009d5cab7c7079db74afc19e56ca1b455", size = 4385982, upload-time = "2026-04-08T01:57:42.725Z" }, + { url = "https://files.pythonhosted.org/packages/cb/da/9870eec4b69c63ef5925bf7d8342b7e13bc2ee3d47791461c4e49ca212f4/cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:04959522f938493042d595a736e7dbdff6eb6cc2339c11465b3ff89343b65f65", size = 4219115, upload-time = "2026-04-08T01:57:44.939Z" }, + { url = "https://files.pythonhosted.org/packages/f4/72/05aa5832b82dd341969e9a734d1812a6aadb088d9eb6f0430fc337cc5a8f/cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:3986ac1dee6def53797289999eabe84798ad7817f3e97779b5061a95b0ee4968", size = 4385479, upload-time = "2026-04-08T01:57:46.86Z" }, + { url = "https://files.pythonhosted.org/packages/20/2a/1b016902351a523aa2bd446b50a5bc1175d7a7d1cf90fe2ef904f9b84ebc/cryptography-46.0.7-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:258514877e15963bd43b558917bc9f54cf7cf866c38aa576ebf47a77ddbc43a4", size = 3412829, upload-time = "2026-04-08T01:57:48.874Z" }, +] + +[[package]] +name = "cssselect2" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tinycss2" }, + { name = "webencodings" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e0/20/92eaa6b0aec7189fa4b75c890640e076e9e793095721db69c5c81142c2e1/cssselect2-0.9.0.tar.gz", hash = 
"sha256:759aa22c216326356f65e62e791d66160a0f9c91d1424e8d8adc5e74dddfc6fb", size = 35595, upload-time = "2026-02-12T17:16:39.614Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/21/0e/8459ca4413e1a21a06c97d134bfaf18adfd27cea068813dc0faae06cbf00/cssselect2-0.9.0-py3-none-any.whl", hash = "sha256:6a99e5f91f9a016a304dd929b0966ca464bcfda15177b6fb4a118fc0fb5d9563", size = 15453, upload-time = "2026-02-12T17:16:38.317Z" }, +] + +[[package]] +name = "dataclasses-json" +version = "0.6.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "marshmallow" }, + { name = "typing-inspect" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/64/a4/f71d9cf3a5ac257c993b5ca3f93df5f7fb395c725e7f1e6479d2514173c3/dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0", size = 32227, upload-time = "2024-06-09T16:20:19.103Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/be/d0d44e092656fe7a06b55e6103cbce807cdbdee17884a5367c68c9860853/dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a", size = 28686, upload-time = "2024-06-09T16:20:16.715Z" }, +] + +[[package]] +name = "deprecated" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/49/85/12f0a49a7c4ffb70572b6c2ef13c90c88fd190debda93b23f026b25f9634/deprecated-1.3.1.tar.gz", hash = "sha256:b1b50e0ff0c1fddaa5708a2c6b0a6588bb09b892825ab2b214ac9ea9d92a5223", size = 2932523, upload-time = "2025-10-30T08:19:02.757Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/d0/205d54408c08b13550c733c4b85429e7ead111c7f0014309637425520a9a/deprecated-1.3.1-py2.py3-none-any.whl", hash = "sha256:597bfef186b6f60181535a29fbe44865ce137a5079f295b479886c82729d5f3f", size = 11298, upload-time = "2025-10-30T08:19:00.758Z" }, +] + 
+[[package]] +name = "distro" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, +] + +[[package]] +name = "dnspython" +version = "2.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/8b/57666417c0f90f08bcafa776861060426765fdb422eb10212086fb811d26/dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f", size = 368251, upload-time = "2025-09-07T18:58:00.022Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094, upload-time = "2025-09-07T18:57:58.071Z" }, +] + +[[package]] +name = "ecdsa" +version = "0.19.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/25/ca/8de7744cb3bc966c85430ca2d0fcaeea872507c6a4cf6e007f7fe269ed9d/ecdsa-0.19.2.tar.gz", hash = "sha256:62635b0ac1ca2e027f82122b5b81cb706edc38cd91c63dda28e4f3455a2bf930", size = 202432, upload-time = "2026-03-26T09:58:17.675Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/79/119091c98e2bf49e24ed9f3ae69f816d715d2904aefa6a2baa039a2ba0b0/ecdsa-0.19.2-py2.py3-none-any.whl", hash = 
"sha256:840f5dc5e375c68f36c1a7a5b9caad28f95daa65185c9253c0c08dd952bb7399", size = 150818, upload-time = "2026-03-26T09:58:15.808Z" }, +] + +[[package]] +name = "email-validator" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dnspython" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/22/900cb125c76b7aaa450ce02fd727f452243f2e91a61af068b40adba60ea9/email_validator-2.3.0.tar.gz", hash = "sha256:9fc05c37f2f6cf439ff414f8fc46d917929974a82244c20eb10231ba60c54426", size = 51238, upload-time = "2025-08-26T13:09:06.831Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/15/545e2b6cf2e3be84bc1ed85613edd75b8aea69807a71c26f4ca6a9258e82/email_validator-2.3.0-py3-none-any.whl", hash = "sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4", size = 35604, upload-time = "2025-08-26T13:09:05.858Z" }, +] + +[[package]] +name = "et-xmlfile" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d3/38/af70d7ab1ae9d4da450eeec1fa3918940a5fafb9055e934af8d6eb0c2313/et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54", size = 17234, upload-time = "2024-10-25T17:25:40.039Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/8b/5fe2cc11fee489817272089c4203e679c63b570a5aaeb18d852ae3cbba6a/et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa", size = 18059, upload-time = "2024-10-25T17:25:39.051Z" }, +] + +[[package]] +name = "factory-boy" +version = "3.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "faker" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ba/98/75cacae9945f67cfe323829fc2ac451f64517a8a330b572a06a323997065/factory_boy-3.3.3.tar.gz", hash = 
"sha256:866862d226128dfac7f2b4160287e899daf54f2612778327dd03d0e2cb1e3d03", size = 164146, upload-time = "2025-02-03T09:49:04.433Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/8d/2bc5f5546ff2ccb3f7de06742853483ab75bf74f36a92254702f8baecc79/factory_boy-3.3.3-py2.py3-none-any.whl", hash = "sha256:1c39e3289f7e667c4285433f305f8d506efc2fe9c73aaea4151ebd5cdea394fc", size = 37036, upload-time = "2025-02-03T09:49:01.659Z" }, +] + +[[package]] +name = "faker" +version = "40.13.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tzdata", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/89/95/4822ffe94723553789aef783104f4f18fc20d7c4c68e1bbd633e11d09758/faker-40.13.0.tar.gz", hash = "sha256:a0751c84c3abac17327d7bb4c98e8afe70ebf7821e01dd7d0b15cd8856415525", size = 1962043, upload-time = "2026-04-06T16:44:55.68Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/8a/708103325edff16a0b0e004de0d37db8ba216a32713948c64d71f6d4a4c2/faker-40.13.0-py3-none-any.whl", hash = "sha256:c1298fd0d819b3688fb5fd358c4ba8f56c7c8c740b411fd3dbd8e30bf2c05019", size = 1994597, upload-time = "2026-04-06T16:44:53.698Z" }, +] + +[[package]] +name = "fastapi" +version = "0.135.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-doc" }, + { name = "pydantic" }, + { name = "starlette" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f7/e6/7adb4c5fa231e82c35b8f5741a9f2d055f520c29af5546fd70d3e8e1cd2e/fastapi-0.135.3.tar.gz", hash = "sha256:bd6d7caf1a2bdd8d676843cdcd2287729572a1ef524fc4d65c17ae002a1be654", size = 396524, upload-time = "2026-04-01T16:23:58.188Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/a4/5caa2de7f917a04ada20018eccf60d6cc6145b0199d55ca3711b0fc08312/fastapi-0.135.3-py3-none-any.whl", hash = 
"sha256:9b0f590c813acd13d0ab43dd8494138eb58e484bfac405db1f3187cfc5810d98", size = 117734, upload-time = "2026-04-01T16:23:59.328Z" }, +] + +[[package]] +name = "fonttools" +version = "4.62.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9a/08/7012b00a9a5874311b639c3920270c36ee0c445b69d9989a85e5c92ebcb0/fonttools-4.62.1.tar.gz", hash = "sha256:e54c75fd6041f1122476776880f7c3c3295ffa31962dc6ebe2543c00dca58b5d", size = 3580737, upload-time = "2026-03-13T13:54:25.52Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/39/23ff32561ec8d45a4d48578b4d241369d9270dc50926c017570e60893701/fonttools-4.62.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:40975849bac44fb0b9253d77420c6d8b523ac4dcdcefeff6e4d706838a5b80f7", size = 2871039, upload-time = "2026-03-13T13:52:33.127Z" }, + { url = "https://files.pythonhosted.org/packages/24/7f/66d3f8a9338a9b67fe6e1739f47e1cd5cee78bd3bc1206ef9b0b982289a5/fonttools-4.62.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9dde91633f77fa576879a0c76b1d89de373cae751a98ddf0109d54e173b40f14", size = 2416346, upload-time = "2026-03-13T13:52:35.676Z" }, + { url = "https://files.pythonhosted.org/packages/aa/53/5276ceba7bff95da7793a07c5284e1da901cf00341ce5e2f3273056c0cca/fonttools-4.62.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6acb4109f8bee00fec985c8c7afb02299e35e9c94b57287f3ea542f28bd0b0a7", size = 5100897, upload-time = "2026-03-13T13:52:38.102Z" }, + { url = "https://files.pythonhosted.org/packages/cc/a1/40a5c4d8e28b0851d53a8eeeb46fbd73c325a2a9a165f290a5ed90e6c597/fonttools-4.62.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1c5c25671ce8805e0d080e2ffdeca7f1e86778c5cbfbeae86d7f866d8830517b", size = 5071078, upload-time = "2026-03-13T13:52:41.305Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/be/d378fca4c65ea1956fee6d90ace6e861776809cbbc5af22388a090c3c092/fonttools-4.62.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a5d8825e1140f04e6c99bb7d37a9e31c172f3bc208afbe02175339e699c710e1", size = 5076908, upload-time = "2026-03-13T13:52:44.122Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d9/ae6a1d0693a4185a84605679c8a1f719a55df87b9c6e8e817bfdd9ef5936/fonttools-4.62.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:268abb1cb221e66c014acc234e872b7870d8b5d4657a83a8f4205094c32d2416", size = 5202275, upload-time = "2026-03-13T13:52:46.591Z" }, + { url = "https://files.pythonhosted.org/packages/54/6c/af95d9c4efb15cabff22642b608342f2bd67137eea6107202d91b5b03184/fonttools-4.62.1-cp311-cp311-win32.whl", hash = "sha256:942b03094d7edbb99bdf1ae7e9090898cad7bf9030b3d21f33d7072dbcb51a53", size = 2293075, upload-time = "2026-03-13T13:52:48.711Z" }, + { url = "https://files.pythonhosted.org/packages/d3/97/bf54c5b3f2be34e1f143e6db838dfdc54f2ffa3e68c738934c82f3b2a08d/fonttools-4.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:e8514f4924375f77084e81467e63238b095abda5107620f49421c368a6017ed2", size = 2344593, upload-time = "2026-03-13T13:52:50.725Z" }, + { url = "https://files.pythonhosted.org/packages/47/d4/dbacced3953544b9a93088cc10ef2b596d348c983d5c67a404fa41ec51ba/fonttools-4.62.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:90365821debbd7db678809c7491ca4acd1e0779b9624cdc6ddaf1f31992bf974", size = 2870219, upload-time = "2026-03-13T13:52:53.664Z" }, + { url = "https://files.pythonhosted.org/packages/66/9e/a769c8e99b81e5a87ab7e5e7236684de4e96246aae17274e5347d11ebd78/fonttools-4.62.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12859ff0b47dd20f110804c3e0d0970f7b832f561630cd879969011541a464a9", size = 2414891, upload-time = "2026-03-13T13:52:56.493Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/64/f19a9e3911968c37e1e620e14dfc5778299e1474f72f4e57c5ec771d9489/fonttools-4.62.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c125ffa00c3d9003cdaaf7f2c79e6e535628093e14b5de1dccb08859b680936", size = 5033197, upload-time = "2026-03-13T13:52:59.179Z" }, + { url = "https://files.pythonhosted.org/packages/9b/8a/99c8b3c3888c5c474c08dbfd7c8899786de9604b727fcefb055b42c84bba/fonttools-4.62.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:149f7d84afca659d1a97e39a4778794a2f83bf344c5ee5134e09995086cc2392", size = 4988768, upload-time = "2026-03-13T13:53:02.761Z" }, + { url = "https://files.pythonhosted.org/packages/d1/c6/0f904540d3e6ab463c1243a0d803504826a11604c72dd58c2949796a1762/fonttools-4.62.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0aa72c43a601cfa9273bb1ae0518f1acadc01ee181a6fc60cd758d7fdadffc04", size = 4971512, upload-time = "2026-03-13T13:53:05.678Z" }, + { url = "https://files.pythonhosted.org/packages/29/0b/5cbef6588dc9bd6b5c9ad6a4d5a8ca384d0cea089da31711bbeb4f9654a6/fonttools-4.62.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:19177c8d96c7c36359266e571c5173bcee9157b59cfc8cb0153c5673dc5a3a7d", size = 5122723, upload-time = "2026-03-13T13:53:08.662Z" }, + { url = "https://files.pythonhosted.org/packages/4a/47/b3a5342d381595ef439adec67848bed561ab7fdb1019fa522e82101b7d9c/fonttools-4.62.1-cp312-cp312-win32.whl", hash = "sha256:a24decd24d60744ee8b4679d38e88b8303d86772053afc29b19d23bb8207803c", size = 2281278, upload-time = "2026-03-13T13:53:10.998Z" }, + { url = "https://files.pythonhosted.org/packages/28/b1/0c2ab56a16f409c6c8a68816e6af707827ad5d629634691ff60a52879792/fonttools-4.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:9e7863e10b3de72376280b515d35b14f5eeed639d1aa7824f4cf06779ec65e42", size = 2331414, upload-time = "2026-03-13T13:53:13.992Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/56/6f389de21c49555553d6a5aeed5ac9767631497ac836c4f076273d15bd72/fonttools-4.62.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c22b1014017111c401469e3acc5433e6acf6ebcc6aa9efb538a533c800971c79", size = 2865155, upload-time = "2026-03-13T13:53:16.132Z" }, + { url = "https://files.pythonhosted.org/packages/03/c5/0e3966edd5ec668d41dfe418787726752bc07e2f5fd8c8f208615e61fa89/fonttools-4.62.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:68959f5fc58ed4599b44aad161c2837477d7f35f5f79402d97439974faebfebe", size = 2412802, upload-time = "2026-03-13T13:53:18.878Z" }, + { url = "https://files.pythonhosted.org/packages/52/94/e6ac4b44026de7786fe46e3bfa0c87e51d5d70a841054065d49cd62bb909/fonttools-4.62.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef46db46c9447103b8f3ff91e8ba009d5fe181b1920a83757a5762551e32bb68", size = 5013926, upload-time = "2026-03-13T13:53:21.379Z" }, + { url = "https://files.pythonhosted.org/packages/e2/98/8b1e801939839d405f1f122e7d175cebe9aeb4e114f95bfc45e3152af9a7/fonttools-4.62.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6706d1cb1d5e6251a97ad3c1b9347505c5615c112e66047abbef0f8545fa30d1", size = 4964575, upload-time = "2026-03-13T13:53:23.857Z" }, + { url = "https://files.pythonhosted.org/packages/46/76/7d051671e938b1881670528fec69cc4044315edd71a229c7fd712eaa5119/fonttools-4.62.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2e7abd2b1e11736f58c1de27819e1955a53267c21732e78243fa2fa2e5c1e069", size = 4953693, upload-time = "2026-03-13T13:53:26.569Z" }, + { url = "https://files.pythonhosted.org/packages/1f/ae/b41f8628ec0be3c1b934fc12b84f4576a5c646119db4d3bdd76a217c90b5/fonttools-4.62.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:403d28ce06ebfc547fbcb0cb8b7f7cc2f7a2d3e1a67ba9a34b14632df9e080f9", size = 5094920, upload-time = "2026-03-13T13:53:29.329Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/f6/53a1e9469331a23dcc400970a27a4caa3d9f6edbf5baab0260285238b884/fonttools-4.62.1-cp313-cp313-win32.whl", hash = "sha256:93c316e0f5301b2adbe6a5f658634307c096fd5aae60a5b3412e4f3e1728ab24", size = 2279928, upload-time = "2026-03-13T13:53:32.352Z" }, + { url = "https://files.pythonhosted.org/packages/38/60/35186529de1db3c01f5ad625bde07c1f576305eab6d86bbda4c58445f721/fonttools-4.62.1-cp313-cp313-win_amd64.whl", hash = "sha256:7aa21ff53e28a9c2157acbc44e5b401149d3c9178107130e82d74ceb500e5056", size = 2330514, upload-time = "2026-03-13T13:53:34.991Z" }, + { url = "https://files.pythonhosted.org/packages/36/f0/2888cdac391807d68d90dcb16ef858ddc1b5309bfc6966195a459dd326e2/fonttools-4.62.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:fa1d16210b6b10a826d71bed68dd9ec24a9e218d5a5e2797f37c573e7ec215ca", size = 2864442, upload-time = "2026-03-13T13:53:37.509Z" }, + { url = "https://files.pythonhosted.org/packages/4b/b2/e521803081f8dc35990816b82da6360fa668a21b44da4b53fc9e77efcd62/fonttools-4.62.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:aa69d10ed420d8121118e628ad47d86e4caa79ba37f968597b958f6cceab7eca", size = 2410901, upload-time = "2026-03-13T13:53:40.55Z" }, + { url = "https://files.pythonhosted.org/packages/00/a4/8c3511ff06e53110039358dbbdc1a65d72157a054638387aa2ada300a8b8/fonttools-4.62.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bd13b7999d59c5eb1c2b442eb2d0c427cb517a0b7a1f5798fc5c9e003f5ff782", size = 4999608, upload-time = "2026-03-13T13:53:42.798Z" }, + { url = "https://files.pythonhosted.org/packages/28/63/cd0c3b26afe60995a5295f37c246a93d454023726c3261cfbb3559969bb9/fonttools-4.62.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8d337fdd49a79b0d51c4da87bc38169d21c3abbf0c1aa9367eff5c6656fb6dae", size = 4912726, upload-time = "2026-03-13T13:53:45.405Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/b9/ac677cb07c24c685cf34f64e140617d58789d67a3dd524164b63648c6114/fonttools-4.62.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d241cdc4a67b5431c6d7f115fdf63335222414995e3a1df1a41e1182acd4bcc7", size = 4951422, upload-time = "2026-03-13T13:53:48.326Z" }, + { url = "https://files.pythonhosted.org/packages/e6/10/11c08419a14b85b7ca9a9faca321accccc8842dd9e0b1c8a72908de05945/fonttools-4.62.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c05557a78f8fa514da0f869556eeda40887a8abc77c76ee3f74cf241778afd5a", size = 5060979, upload-time = "2026-03-13T13:53:51.366Z" }, + { url = "https://files.pythonhosted.org/packages/4e/3c/12eea4a4cf054e7ab058ed5ceada43b46809fce2bf319017c4d63ae55bb4/fonttools-4.62.1-cp314-cp314-win32.whl", hash = "sha256:49a445d2f544ce4a69338694cad575ba97b9a75fff02720da0882d1a73f12800", size = 2283733, upload-time = "2026-03-13T13:53:53.606Z" }, + { url = "https://files.pythonhosted.org/packages/6b/67/74b070029043186b5dd13462c958cb7c7f811be0d2e634309d9a1ffb1505/fonttools-4.62.1-cp314-cp314-win_amd64.whl", hash = "sha256:1eecc128c86c552fb963fe846ca4e011b1be053728f798185a1687502f6d398e", size = 2335663, upload-time = "2026-03-13T13:53:56.23Z" }, + { url = "https://files.pythonhosted.org/packages/42/c5/4d2ed3ca6e33617fc5624467da353337f06e7f637707478903c785bd8e20/fonttools-4.62.1-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:1596aeaddf7f78e21e68293c011316a25267b3effdaccaf4d59bc9159d681b82", size = 2947288, upload-time = "2026-03-13T13:53:59.397Z" }, + { url = "https://files.pythonhosted.org/packages/1f/e9/7ab11ddfda48ed0f89b13380e5595ba572619c27077be0b2c447a63ff351/fonttools-4.62.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:8f8fca95d3bb3208f59626a4b0ea6e526ee51f5a8ad5d91821c165903e8d9260", size = 2449023, upload-time = "2026-03-13T13:54:01.642Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/10/a800fa090b5e8819942e54e19b55fc7c21fe14a08757c3aa3ca8db358939/fonttools-4.62.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee91628c08e76f77b533d65feb3fbe6d9dad699f95be51cf0d022db94089cdc4", size = 5137599, upload-time = "2026-03-13T13:54:04.495Z" }, + { url = "https://files.pythonhosted.org/packages/37/dc/8ccd45033fffd74deb6912fa1ca524643f584b94c87a16036855b498a1ed/fonttools-4.62.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5f37df1cac61d906e7b836abe356bc2f34c99d4477467755c216b72aa3dc748b", size = 4920933, upload-time = "2026-03-13T13:54:07.557Z" }, + { url = "https://files.pythonhosted.org/packages/99/eb/e618adefb839598d25ac8136cd577925d6c513dc0d931d93b8af956210f0/fonttools-4.62.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:92bb00a947e666169c99b43753c4305fc95a890a60ef3aeb2a6963e07902cc87", size = 5016232, upload-time = "2026-03-13T13:54:10.611Z" }, + { url = "https://files.pythonhosted.org/packages/d9/5f/9b5c9bfaa8ec82def8d8168c4f13615990d6ce5996fe52bd49bfb5e05134/fonttools-4.62.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:bdfe592802ef939a0e33106ea4a318eeb17822c7ee168c290273cbd5fabd746c", size = 5042987, upload-time = "2026-03-13T13:54:13.569Z" }, + { url = "https://files.pythonhosted.org/packages/90/aa/dfbbe24c6a6afc5c203d90cc0343e24bcbb09e76d67c4d6eef8c2558d7ba/fonttools-4.62.1-cp314-cp314t-win32.whl", hash = "sha256:b820fcb92d4655513d8402d5b219f94481c4443d825b4372c75a2072aa4b357a", size = 2348021, upload-time = "2026-03-13T13:54:16.98Z" }, + { url = "https://files.pythonhosted.org/packages/13/6f/ae9c4e4dd417948407b680855c2c7790efb52add6009aaecff1e3bc50e8e/fonttools-4.62.1-cp314-cp314t-win_amd64.whl", hash = "sha256:59b372b4f0e113d3746b88985f1c796e7bf830dd54b28374cd85c2b8acd7583e", size = 2414147, upload-time = "2026-03-13T13:54:19.416Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/ba/56147c165442cc5ba7e82ecf301c9a68353cede498185869e6e02b4c264f/fonttools-4.62.1-py3-none-any.whl", hash = "sha256:7487782e2113861f4ddcc07c3436450659e3caa5e470b27dc2177cade2d8e7fd", size = 1152647, upload-time = "2026-03-13T13:54:22.735Z" }, +] + +[package.optional-dependencies] +woff = [ + { name = "brotli", marker = "platform_python_implementation == 'CPython'" }, + { name = "brotlicffi", marker = "platform_python_implementation != 'CPython'" }, + { name = "zopfli" }, +] + +[[package]] +name = "greenlet" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/86/94/a5935717b307d7c71fe877b52b884c6af707d2d2090db118a03fbd799369/greenlet-3.4.0.tar.gz", hash = "sha256:f50a96b64dafd6169e595a5c56c9146ef80333e67d4476a65a9c55f400fc22ff", size = 195913, upload-time = "2026-04-08T17:08:00.863Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/c6/dba32cab7e3a625b011aa5647486e2d28423a48845a2998c126dd69c85e1/greenlet-3.4.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:805bebb4945094acbab757d34d6e1098be6de8966009ab9ca54f06ff492def58", size = 285504, upload-time = "2026-04-08T15:52:14.071Z" }, + { url = "https://files.pythonhosted.org/packages/54/f4/7cb5c2b1feb9a1f50e038be79980dfa969aa91979e5e3a18fdbcfad2c517/greenlet-3.4.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:439fc2f12b9b512d9dfa681c5afe5f6b3232c708d13e6f02c845e0d9f4c2d8c6", size = 605476, upload-time = "2026-04-08T16:24:37.064Z" }, + { url = "https://files.pythonhosted.org/packages/d6/af/b66ab0b2f9a4c5a867c136bf66d9599f34f21a1bcca26a2884a29c450bd9/greenlet-3.4.0-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a70ed1cb0295bee1df57b63bf7f46b4e56a5c93709eea769c1fec1bb23a95875", size = 618336, upload-time = "2026-04-08T16:30:56.59Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/5c/8c5633ece6ba611d64bf2770219a98dd439921d6424e4e8cf16b0ac74ea5/greenlet-3.4.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c660bce1940a1acae5f51f0a064f1bc785d07ea16efcb4bc708090afc4d69e83", size = 613515, upload-time = "2026-04-08T15:56:32.478Z" }, + { url = "https://files.pythonhosted.org/packages/a9/df/950d15bca0d90a0e7395eb777903060504cdb509b7b705631e8fb69ff415/greenlet-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ee407d4d1ca9dc632265aee1c8732c4a2d60adff848057cdebfe5fe94eb2c8a2", size = 1574623, upload-time = "2026-04-08T16:26:18.596Z" }, + { url = "https://files.pythonhosted.org/packages/1a/e7/0839afab829fcb7333c9ff6d80c040949510055d2d4d63251f0d1c7c804e/greenlet-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:956215d5e355fffa7c021d168728321fd4d31fd730ac609b1653b450f6a4bc71", size = 1639579, upload-time = "2026-04-08T15:57:29.231Z" }, + { url = "https://files.pythonhosted.org/packages/d9/2b/b4482401e9bcaf9f5c97f67ead38db89c19520ff6d0d6699979c6efcc200/greenlet-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:5cb614ace7c27571270354e9c9f696554d073f8aa9319079dcba466bbdead711", size = 238233, upload-time = "2026-04-08T17:02:54.286Z" }, + { url = "https://files.pythonhosted.org/packages/0c/4d/d8123a4e0bcd583d5cfc8ddae0bbe29c67aab96711be331a7cc935a35966/greenlet-3.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:04403ac74fe295a361f650818de93be11b5038a78f49ccfb64d3b1be8fbf1267", size = 235045, upload-time = "2026-04-08T17:04:05.072Z" }, + { url = "https://files.pythonhosted.org/packages/65/8b/3669ad3b3f247a791b2b4aceb3aa5a31f5f6817bf547e4e1ff712338145a/greenlet-3.4.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:1a54a921561dd9518d31d2d3db4d7f80e589083063ab4d3e2e950756ef809e1a", size = 286902, upload-time = "2026-04-08T15:52:12.138Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/3e/3c0e19b82900873e2d8469b590a6c4b3dfd2b316d0591f1c26b38a4879a5/greenlet-3.4.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:16dec271460a9a2b154e3b1c2fa1050ce6280878430320e85e08c166772e3f97", size = 606099, upload-time = "2026-04-08T16:24:38.408Z" }, + { url = "https://files.pythonhosted.org/packages/b5/33/99fef65e7754fc76a4ed14794074c38c9ed3394a5bd129d7f61b705f3168/greenlet-3.4.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:90036ce224ed6fe75508c1907a77e4540176dcf0744473627785dd519c6f9996", size = 618837, upload-time = "2026-04-08T16:30:58.298Z" }, + { url = "https://files.pythonhosted.org/packages/36/f7/229f3aed6948faa20e0616a0b8568da22e365ede6a54d7d369058b128afd/greenlet-3.4.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a1c4f6b453006efb8310affb2d132832e9bbb4fc01ce6df6b70d810d38f1f6dc", size = 615062, upload-time = "2026-04-08T15:56:33.766Z" }, + { url = "https://files.pythonhosted.org/packages/08/97/d988180011aa40135c46cd0d0cf01dd97f7162bae14139b4a3ef54889ba5/greenlet-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9b2d9a138ffa0e306d0e2b72976d2fb10b97e690d40ab36a472acaab0838e2de", size = 1573511, upload-time = "2026-04-08T16:26:20.058Z" }, + { url = "https://files.pythonhosted.org/packages/d4/0f/a5a26fe152fb3d12e6a474181f6e9848283504d0afd095f353d85726374b/greenlet-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8424683caf46eb0eb6f626cb95e008e8cc30d0cb675bdfa48200925c79b38a08", size = 1640396, upload-time = "2026-04-08T15:57:30.88Z" }, + { url = "https://files.pythonhosted.org/packages/42/cf/bb2c32d9a100e36ee9f6e38fad6b1e082b8184010cb06259b49e1266ca01/greenlet-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0a53fb071531d003b075c444014ff8f8b1a9898d36bb88abd9ac7b3524648a2", size = 238892, upload-time = "2026-04-08T17:03:10.094Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/47/6c41314bac56e71436ce551c7fbe3cc830ed857e6aa9708dbb9c65142eb6/greenlet-3.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:f38b81880ba28f232f1f675893a39cf7b6db25b31cc0a09bb50787ecf957e85e", size = 235599, upload-time = "2026-04-08T15:52:54.3Z" }, + { url = "https://files.pythonhosted.org/packages/7a/75/7e9cd1126a1e1f0cd67b0eda02e5221b28488d352684704a78ed505bd719/greenlet-3.4.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:43748988b097f9c6f09364f260741aa73c80747f63389824435c7a50bfdfd5c1", size = 285856, upload-time = "2026-04-08T15:52:45.82Z" }, + { url = "https://files.pythonhosted.org/packages/9d/c4/3e2df392e5cb199527c4d9dbcaa75c14edcc394b45040f0189f649631e3c/greenlet-3.4.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5566e4e2cd7a880e8c27618e3eab20f3494452d12fd5129edef7b2f7aa9a36d1", size = 610208, upload-time = "2026-04-08T16:24:39.674Z" }, + { url = "https://files.pythonhosted.org/packages/da/af/750cdfda1d1bd30a6c28080245be8d0346e669a98fdbae7f4102aa95fff3/greenlet-3.4.0-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1054c5a3c78e2ab599d452f23f7adafef55062a783a8e241d24f3b633ba6ff82", size = 621269, upload-time = "2026-04-08T16:30:59.767Z" }, + { url = "https://files.pythonhosted.org/packages/54/78/0cbc693622cd54ebe25207efbb3a0eb07c2639cb8594f6e3aaaa0bb077a8/greenlet-3.4.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f82cb6cddc27dd81c96b1506f4aa7def15070c3b2a67d4e46fd19016aacce6cf", size = 617549, upload-time = "2026-04-08T15:56:34.893Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c0/8966767de01343c1ff47e8b855dc78e7d1a8ed2b7b9c83576a57e289f81d/greenlet-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:227a46251ecba4ff46ae742bc5ce95c91d5aceb4b02f885487aff269c127a729", size = 1575310, upload-time = "2026-04-08T16:26:21.671Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/38/bcdc71ba05e9a5fda87f63ffc2abcd1f15693b659346df994a48c968003d/greenlet-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5b99e87be7eba788dd5b75ba1cde5639edffdec5f91fe0d734a249535ec3408c", size = 1640435, upload-time = "2026-04-08T15:57:32.572Z" }, + { url = "https://files.pythonhosted.org/packages/a1/c2/19b664b7173b9e4ef5f77e8cef9f14c20ec7fce7920dc1ccd7afd955d093/greenlet-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:849f8bc17acd6295fcb5de8e46d55cc0e52381c56eaf50a2afd258e97bc65940", size = 238760, upload-time = "2026-04-08T17:04:03.878Z" }, + { url = "https://files.pythonhosted.org/packages/9b/96/795619651d39c7fbd809a522f881aa6f0ead504cc8201c3a5b789dfaef99/greenlet-3.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:9390ad88b652b1903814eaabd629ca184db15e0eeb6fe8a390bbf8b9106ae15a", size = 235498, upload-time = "2026-04-08T17:05:00.584Z" }, + { url = "https://files.pythonhosted.org/packages/78/02/bde66806e8f169cf90b14d02c500c44cdbe02c8e224c9c67bafd1b8cadd1/greenlet-3.4.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:10a07aca6babdd18c16a3f4f8880acfffc2b88dfe431ad6aa5f5740759d7d75e", size = 286291, upload-time = "2026-04-08T17:09:34.307Z" }, + { url = "https://files.pythonhosted.org/packages/05/1f/39da1c336a87d47c58352fb8a78541ce63d63ae57c5b9dae1fe02801bbc2/greenlet-3.4.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:076e21040b3a917d3ce4ad68fb5c3c6b32f1405616c4a57aa83120979649bd3d", size = 656749, upload-time = "2026-04-08T16:24:41.721Z" }, + { url = "https://files.pythonhosted.org/packages/d3/6c/90ee29a4ee27af7aa2e2ec408799eeb69ee3fcc5abcecac6ddd07a5cd0f2/greenlet-3.4.0-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e82689eea4a237e530bb5cb41b180ef81fa2160e1f89422a67be7d90da67f615", size = 669084, upload-time = "2026-04-08T16:31:01.372Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/49/d4cad6e5381a50947bb973d2f6cf6592621451b09368b8c20d9b8af49c5b/greenlet-3.4.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4df3b0b2289ec686d3c821a5fee44259c05cfe824dd5e6e12c8e5f5df23085cf", size = 665621, upload-time = "2026-04-08T15:56:35.995Z" }, + { url = "https://files.pythonhosted.org/packages/37/31/d1edd54f424761b5d47718822f506b435b6aab2f3f93b465441143ea5119/greenlet-3.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8bff29d586ea415688f4cec96a591fcc3bf762d046a796cdadc1fdb6e7f2d5bf", size = 1622259, upload-time = "2026-04-08T16:26:23.201Z" }, + { url = "https://files.pythonhosted.org/packages/b0/c6/6d3f9cdcb21c4e12a79cb332579f1c6aa1af78eb68059c5a957c7812d95e/greenlet-3.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8a569c2fb840c53c13a2b8967c63621fafbd1a0e015b9c82f408c33d626a2fda", size = 1686916, upload-time = "2026-04-08T15:57:34.282Z" }, + { url = "https://files.pythonhosted.org/packages/63/45/c1ca4a1ad975de4727e52d3ffe641ae23e1d7a8ffaa8ff7a0477e1827b92/greenlet-3.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:207ba5b97ea8b0b60eb43ffcacf26969dd83726095161d676aac03ff913ee50d", size = 239821, upload-time = "2026-04-08T17:03:48.423Z" }, + { url = "https://files.pythonhosted.org/packages/71/c4/6f621023364d7e85a4769c014c8982f98053246d142420e0328980933ceb/greenlet-3.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:f8296d4e2b92af34ebde81085a01690f26a51eb9ac09a0fcadb331eb36dbc802", size = 236932, upload-time = "2026-04-08T17:04:33.551Z" }, + { url = "https://files.pythonhosted.org/packages/d4/8f/18d72b629783f5e8d045a76f5325c1e938e659a9e4da79c7dcd10169a48d/greenlet-3.4.0-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:d70012e51df2dbbccfaf63a40aaf9b40c8bed37c3e3a38751c926301ce538ece", size = 294681, upload-time = "2026-04-08T15:52:35.778Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/ad/5fa86ec46769c4153820d58a04062285b3b9e10ba3d461ee257b68dcbf53/greenlet-3.4.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a58bec0751f43068cd40cff31bb3ca02ad6000b3a51ca81367af4eb5abc480c8", size = 658899, upload-time = "2026-04-08T16:24:43.32Z" }, + { url = "https://files.pythonhosted.org/packages/43/f0/4e8174ca0e87ae748c409f055a1ba161038c43cc0a5a6f1433a26ac2e5bf/greenlet-3.4.0-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:05fa0803561028f4b2e3b490ee41216a842eaee11aed004cc343a996d9523aa2", size = 665284, upload-time = "2026-04-08T16:31:02.833Z" }, + { url = "https://files.pythonhosted.org/packages/19/da/991cf7cd33662e2df92a1274b7eb4d61769294d38a1bba8a45f31364845e/greenlet-3.4.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e60d38719cb80b3ab5e85f9f1aed4960acfde09868af6762ccb27b260d68f4ed", size = 661861, upload-time = "2026-04-08T15:56:37.269Z" }, + { url = "https://files.pythonhosted.org/packages/36/c5/6c2c708e14db3d9caea4b459d8464f58c32047451142fe2cfd90e7458f41/greenlet-3.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7f50c804733b43eded05ae694691c9aa68bca7d0a867d67d4a3f514742a2d53f", size = 1622182, upload-time = "2026-04-08T16:26:24.777Z" }, + { url = "https://files.pythonhosted.org/packages/7a/4c/50c5fed19378e11a29fabab1f6be39ea95358f4a0a07e115a51ca93385d8/greenlet-3.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:2d4f0635dc4aa638cda4b2f5a07ae9a2cff9280327b581a3fcb6f317b4fbc38a", size = 1685050, upload-time = "2026-04-08T15:57:36.453Z" }, + { url = "https://files.pythonhosted.org/packages/db/72/85ae954d734703ab48e622c59d4ce35d77ce840c265814af9c078cacc7aa/greenlet-3.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:1a4a48f24681300c640f143ba7c404270e1ebbbcf34331d7104a4ff40f8ea705", size = 245554, upload-time = "2026-04-08T17:03:50.044Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = 
{ registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "hartomat-backend" +version = "0.1.0" +source = { editable = "." } +dependencies = [ + { name = "aiofiles" }, + { name = "alembic" }, + { name = "asyncpg" }, + { name = "bcrypt" }, + { name = "boto3" }, + { name = "celery", extra = ["redis"] }, + { name = "fastapi" }, + { name = "httpx" }, + { name = "openai" }, + { name = "openpyxl" }, + { name = "passlib", extra = ["bcrypt"] }, + { name = "pillow" }, + { name = "psycopg2-binary" }, + { name = "pydantic", extra = ["email"] }, + { name = "pydantic-settings" }, + { name = "python-dotenv" }, + { name = "python-jose", extra = ["cryptography"] }, + { name = "python-multipart" }, + { name = "redis" }, + { name = "sqlalchemy" }, + { name = "uvicorn", extra = ["standard"] }, + { name = "weasyprint" }, +] + +[package.optional-dependencies] +cad = [ + { name = "pygltflib" }, + { name = "trimesh" }, +] +dev = [ + { name = "factory-boy" }, + { name = "httpx" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-cov" }, +] + +[package.metadata] +requires-dist = [ + { name = "aiofiles", specifier = ">=23.2.1" }, + { name = "alembic", specifier = ">=1.13.0" }, + { name = "asyncpg", specifier = ">=0.29.0" }, + { name = "bcrypt", specifier = ">=3.0.0,<4.0.0" }, + { name = "boto3", specifier = ">=1.34.0" }, + { name = "celery", extras = ["redis"], specifier = ">=5.3.6" 
}, + { name = "factory-boy", marker = "extra == 'dev'", specifier = ">=3.3.0" }, + { name = "fastapi", specifier = ">=0.110.0" }, + { name = "httpx", specifier = ">=0.27.0" }, + { name = "httpx", marker = "extra == 'dev'", specifier = ">=0.27.0" }, + { name = "openai", specifier = ">=1.14.0" }, + { name = "openpyxl", specifier = ">=3.1.2" }, + { name = "passlib", extras = ["bcrypt"], specifier = ">=1.7.4" }, + { name = "pillow", specifier = ">=10.2.0" }, + { name = "psycopg2-binary", specifier = ">=2.9.9" }, + { name = "pydantic", extras = ["email"], specifier = ">=2.6.0" }, + { name = "pydantic-settings", specifier = ">=2.2.0" }, + { name = "pygltflib", marker = "extra == 'cad'", specifier = ">=1.16.1" }, + { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.0.0" }, + { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.23.5" }, + { name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=5.0.0" }, + { name = "python-dotenv", specifier = ">=1.0.1" }, + { name = "python-jose", extras = ["cryptography"], specifier = ">=3.3.0" }, + { name = "python-multipart", specifier = ">=0.0.9" }, + { name = "redis", specifier = ">=5.0.1" }, + { name = "sqlalchemy", specifier = ">=2.0.0" }, + { name = "trimesh", marker = "extra == 'cad'", specifier = ">=4.2.0" }, + { name = "uvicorn", extras = ["standard"], specifier = ">=0.27.0" }, + { name = "weasyprint", specifier = ">=62.0" }, +] +provides-extras = ["dev", "cad"] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httptools" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/46/120a669232c7bdedb9d52d4aeae7e6c7dfe151e99dc70802e2fc7a5e1993/httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9", size = 258961, upload-time = "2025-10-10T03:55:08.559Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/08/17e07e8d89ab8f343c134616d72eebfe03798835058e2ab579dcc8353c06/httptools-0.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:474d3b7ab469fefcca3697a10d11a32ee2b9573250206ba1e50d5980910da657", size = 206521, upload-time = "2025-10-10T03:54:31.002Z" }, + { url = "https://files.pythonhosted.org/packages/aa/06/c9c1b41ff52f16aee526fd10fbda99fa4787938aa776858ddc4a1ea825ec/httptools-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3c3b7366bb6c7b96bd72d0dbe7f7d5eead261361f013be5f6d9590465ea1c70", size = 110375, upload-time = "2025-10-10T03:54:31.941Z" }, + { url = "https://files.pythonhosted.org/packages/cc/cc/10935db22fda0ee34c76f047590ca0a8bd9de531406a3ccb10a90e12ea21/httptools-0.7.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:379b479408b8747f47f3b253326183d7c009a3936518cdb70db58cffd369d9df", size = 456621, upload-time = "2025-10-10T03:54:33.176Z" }, + { url = "https://files.pythonhosted.org/packages/0e/84/875382b10d271b0c11aa5d414b44f92f8dd53e9b658aec338a79164fa548/httptools-0.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cad6b591a682dcc6cf1397c3900527f9affef1e55a06c4547264796bbd17cf5e", size = 454954, 
upload-time = "2025-10-10T03:54:34.226Z" }, + { url = "https://files.pythonhosted.org/packages/30/e1/44f89b280f7e46c0b1b2ccee5737d46b3bb13136383958f20b580a821ca0/httptools-0.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:eb844698d11433d2139bbeeb56499102143beb582bd6c194e3ba69c22f25c274", size = 440175, upload-time = "2025-10-10T03:54:35.942Z" }, + { url = "https://files.pythonhosted.org/packages/6f/7e/b9287763159e700e335028bc1824359dc736fa9b829dacedace91a39b37e/httptools-0.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f65744d7a8bdb4bda5e1fa23e4ba16832860606fcc09d674d56e425e991539ec", size = 440310, upload-time = "2025-10-10T03:54:37.1Z" }, + { url = "https://files.pythonhosted.org/packages/b3/07/5b614f592868e07f5c94b1f301b5e14a21df4e8076215a3bccb830a687d8/httptools-0.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:135fbe974b3718eada677229312e97f3b31f8a9c8ffa3ae6f565bf808d5b6bcb", size = 86875, upload-time = "2025-10-10T03:54:38.421Z" }, + { url = "https://files.pythonhosted.org/packages/53/7f/403e5d787dc4942316e515e949b0c8a013d84078a915910e9f391ba9b3ed/httptools-0.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:38e0c83a2ea9746ebbd643bdfb521b9aa4a91703e2cd705c20443405d2fd16a5", size = 206280, upload-time = "2025-10-10T03:54:39.274Z" }, + { url = "https://files.pythonhosted.org/packages/2a/0d/7f3fd28e2ce311ccc998c388dd1c53b18120fda3b70ebb022b135dc9839b/httptools-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f25bbaf1235e27704f1a7b86cd3304eabc04f569c828101d94a0e605ef7205a5", size = 110004, upload-time = "2025-10-10T03:54:40.403Z" }, + { url = "https://files.pythonhosted.org/packages/84/a6/b3965e1e146ef5762870bbe76117876ceba51a201e18cc31f5703e454596/httptools-0.7.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c15f37ef679ab9ecc06bfc4e6e8628c32a8e4b305459de7cf6785acd57e4d03", size = 517655, upload-time = "2025-10-10T03:54:41.347Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/7d/71fee6f1844e6fa378f2eddde6c3e41ce3a1fb4b2d81118dd544e3441ec0/httptools-0.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7fe6e96090df46b36ccfaf746f03034e5ab723162bc51b0a4cf58305324036f2", size = 511440, upload-time = "2025-10-10T03:54:42.452Z" }, + { url = "https://files.pythonhosted.org/packages/22/a5/079d216712a4f3ffa24af4a0381b108aa9c45b7a5cc6eb141f81726b1823/httptools-0.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f72fdbae2dbc6e68b8239defb48e6a5937b12218e6ffc2c7846cc37befa84362", size = 495186, upload-time = "2025-10-10T03:54:43.937Z" }, + { url = "https://files.pythonhosted.org/packages/e9/9e/025ad7b65278745dee3bd0ebf9314934c4592560878308a6121f7f812084/httptools-0.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e99c7b90a29fd82fea9ef57943d501a16f3404d7b9ee81799d41639bdaae412c", size = 499192, upload-time = "2025-10-10T03:54:45.003Z" }, + { url = "https://files.pythonhosted.org/packages/6d/de/40a8f202b987d43afc4d54689600ff03ce65680ede2f31df348d7f368b8f/httptools-0.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:3e14f530fefa7499334a79b0cf7e7cd2992870eb893526fb097d51b4f2d0f321", size = 86694, upload-time = "2025-10-10T03:54:45.923Z" }, + { url = "https://files.pythonhosted.org/packages/09/8f/c77b1fcbfd262d422f12da02feb0d218fa228d52485b77b953832105bb90/httptools-0.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6babce6cfa2a99545c60bfef8bee0cc0545413cb0018f617c8059a30ad985de3", size = 202889, upload-time = "2025-10-10T03:54:47.089Z" }, + { url = "https://files.pythonhosted.org/packages/0a/1a/22887f53602feaa066354867bc49a68fc295c2293433177ee90870a7d517/httptools-0.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:601b7628de7504077dd3dcb3791c6b8694bbd967148a6d1f01806509254fb1ca", size = 108180, upload-time = "2025-10-10T03:54:48.052Z" }, + { url = 
"https://files.pythonhosted.org/packages/32/6a/6aaa91937f0010d288d3d124ca2946d48d60c3a5ee7ca62afe870e3ea011/httptools-0.7.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:04c6c0e6c5fb0739c5b8a9eb046d298650a0ff38cf42537fc372b28dc7e4472c", size = 478596, upload-time = "2025-10-10T03:54:48.919Z" }, + { url = "https://files.pythonhosted.org/packages/6d/70/023d7ce117993107be88d2cbca566a7c1323ccbaf0af7eabf2064fe356f6/httptools-0.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:69d4f9705c405ae3ee83d6a12283dc9feba8cc6aaec671b412917e644ab4fa66", size = 473268, upload-time = "2025-10-10T03:54:49.993Z" }, + { url = "https://files.pythonhosted.org/packages/32/4d/9dd616c38da088e3f436e9a616e1d0cc66544b8cdac405cc4e81c8679fc7/httptools-0.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:44c8f4347d4b31269c8a9205d8a5ee2df5322b09bbbd30f8f862185bb6b05346", size = 455517, upload-time = "2025-10-10T03:54:51.066Z" }, + { url = "https://files.pythonhosted.org/packages/1d/3a/a6c595c310b7df958e739aae88724e24f9246a514d909547778d776799be/httptools-0.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:465275d76db4d554918aba40bf1cbebe324670f3dfc979eaffaa5d108e2ed650", size = 458337, upload-time = "2025-10-10T03:54:52.196Z" }, + { url = "https://files.pythonhosted.org/packages/fd/82/88e8d6d2c51edc1cc391b6e044c6c435b6aebe97b1abc33db1b0b24cd582/httptools-0.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:322d00c2068d125bd570f7bf78b2d367dad02b919d8581d7476d8b75b294e3e6", size = 85743, upload-time = "2025-10-10T03:54:53.448Z" }, + { url = "https://files.pythonhosted.org/packages/34/50/9d095fcbb6de2d523e027a2f304d4551855c2f46e0b82befd718b8b20056/httptools-0.7.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:c08fe65728b8d70b6923ce31e3956f859d5e1e8548e6f22ec520a962c6757270", size = 203619, upload-time = "2025-10-10T03:54:54.321Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/f0/89720dc5139ae54b03f861b5e2c55a37dba9a5da7d51e1e824a1f343627f/httptools-0.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7aea2e3c3953521c3c51106ee11487a910d45586e351202474d45472db7d72d3", size = 108714, upload-time = "2025-10-10T03:54:55.163Z" }, + { url = "https://files.pythonhosted.org/packages/b3/cb/eea88506f191fb552c11787c23f9a405f4c7b0c5799bf73f2249cd4f5228/httptools-0.7.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0e68b8582f4ea9166be62926077a3334064d422cf08ab87d8b74664f8e9058e1", size = 472909, upload-time = "2025-10-10T03:54:56.056Z" }, + { url = "https://files.pythonhosted.org/packages/e0/4a/a548bdfae6369c0d078bab5769f7b66f17f1bfaa6fa28f81d6be6959066b/httptools-0.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:df091cf961a3be783d6aebae963cc9b71e00d57fa6f149025075217bc6a55a7b", size = 470831, upload-time = "2025-10-10T03:54:57.219Z" }, + { url = "https://files.pythonhosted.org/packages/4d/31/14df99e1c43bd132eec921c2e7e11cda7852f65619bc0fc5bdc2d0cb126c/httptools-0.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f084813239e1eb403ddacd06a30de3d3e09a9b76e7894dcda2b22f8a726e9c60", size = 452631, upload-time = "2025-10-10T03:54:58.219Z" }, + { url = "https://files.pythonhosted.org/packages/22/d2/b7e131f7be8d854d48cb6d048113c30f9a46dca0c9a8b08fcb3fcd588cdc/httptools-0.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7347714368fb2b335e9063bc2b96f2f87a9ceffcd9758ac295f8bbcd3ffbc0ca", size = 452910, upload-time = "2025-10-10T03:54:59.366Z" }, + { url = "https://files.pythonhosted.org/packages/53/cf/878f3b91e4e6e011eff6d1fa9ca39f7eb17d19c9d7971b04873734112f30/httptools-0.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:cfabda2a5bb85aa2a904ce06d974a3f30fb36cc63d7feaddec05d2050acede96", size = 88205, upload-time = "2025-10-10T03:55:00.389Z" }, +] + +[[package]] +name = "httpx" +version = 
"0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "jiter" +version = "0.13.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/5e/4ec91646aee381d01cdb9974e30882c9cd3b8c5d1079d6b5ff4af522439a/jiter-0.13.0.tar.gz", hash = "sha256:f2839f9c2c7e2dffc1bc5929a510e14ce0a946be9365fd1219e7ef342dae14f4", size = 164847, upload-time = "2026-02-02T12:37:56.441Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/71/29/499f8c9eaa8a16751b1c0e45e6f5f1761d180da873d417996cc7bddc8eef/jiter-0.13.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ea026e70a9a28ebbdddcbcf0f1323128a8db66898a06eaad3a4e62d2f554d096", size = 311157, upload-time = "2026-02-02T12:35:37.758Z" }, + { url = "https://files.pythonhosted.org/packages/50/f6/566364c777d2ab450b92100bea11333c64c38d32caf8dc378b48e5b20c46/jiter-0.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66aa3e663840152d18cc8ff1e4faad3dd181373491b9cfdc6004b92198d67911", size = 319729, upload-time = "2026-02-02T12:35:39.246Z" }, + { url = "https://files.pythonhosted.org/packages/73/dd/560f13ec5e4f116d8ad2658781646cca91b617ae3b8758d4a5076b278f70/jiter-0.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3524798e70655ff19aec58c7d05adb1f074fecff62da857ea9be2b908b6d701", size = 354766, upload-time = "2026-02-02T12:35:40.662Z" }, + { url = "https://files.pythonhosted.org/packages/7c/0d/061faffcfe94608cbc28a0d42a77a74222bdf5055ccdbe5fd2292b94f510/jiter-0.13.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec7e287d7fbd02cb6e22f9a00dd9c9cd504c40a61f2c61e7e1f9690a82726b4c", size = 362587, upload-time = "2026-02-02T12:35:42.025Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/c9/c66a7864982fd38a9773ec6e932e0398d1262677b8c60faecd02ffb67bf3/jiter-0.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:47455245307e4debf2ce6c6e65a717550a0244231240dcf3b8f7d64e4c2f22f4", size = 487537, upload-time = "2026-02-02T12:35:43.459Z" }, + { url = "https://files.pythonhosted.org/packages/6c/86/84eb4352cd3668f16d1a88929b5888a3fe0418ea8c1dfc2ad4e7bf6e069a/jiter-0.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ee9da221dca6e0429c2704c1b3655fe7b025204a71d4d9b73390c759d776d165", size = 373717, upload-time = "2026-02-02T12:35:44.928Z" }, + { url = "https://files.pythonhosted.org/packages/6e/09/9fe4c159358176f82d4390407a03f506a8659ed13ca3ac93a843402acecf/jiter-0.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24ab43126d5e05f3d53a36a8e11eb2f23304c6c1117844aaaf9a0aa5e40b5018", size = 362683, upload-time = "2026-02-02T12:35:46.636Z" }, + { url = "https://files.pythonhosted.org/packages/c9/5e/85f3ab9caca0c1d0897937d378b4a515cae9e119730563572361ea0c48ae/jiter-0.13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9da38b4fedde4fb528c740c2564628fbab737166a0e73d6d46cb4bb5463ff411", size = 392345, upload-time = "2026-02-02T12:35:48.088Z" }, + { url = "https://files.pythonhosted.org/packages/12/4c/05b8629ad546191939e6f0c2f17e29f542a398f4a52fb987bc70b6d1eb8b/jiter-0.13.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0b34c519e17658ed88d5047999a93547f8889f3c1824120c26ad6be5f27b6cf5", size = 517775, upload-time = "2026-02-02T12:35:49.482Z" }, + { url = "https://files.pythonhosted.org/packages/4d/88/367ea2eb6bc582c7052e4baf5ddf57ebe5ab924a88e0e09830dfb585c02d/jiter-0.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2a6394e6af690d462310a86b53c47ad75ac8c21dc79f120714ea449979cb1d3", size = 551325, upload-time = "2026-02-02T12:35:51.104Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/12/fa377ffb94a2f28c41afaed093e0d70cfe512035d5ecb0cad0ae4792d35e/jiter-0.13.0-cp311-cp311-win32.whl", hash = "sha256:0f0c065695f616a27c920a56ad0d4fc46415ef8b806bf8fc1cacf25002bd24e1", size = 204709, upload-time = "2026-02-02T12:35:52.467Z" }, + { url = "https://files.pythonhosted.org/packages/cb/16/8e8203ce92f844dfcd3d9d6a5a7322c77077248dbb12da52d23193a839cd/jiter-0.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:0733312953b909688ae3c2d58d043aa040f9f1a6a75693defed7bc2cc4bf2654", size = 204560, upload-time = "2026-02-02T12:35:53.925Z" }, + { url = "https://files.pythonhosted.org/packages/44/26/97cc40663deb17b9e13c3a5cf29251788c271b18ee4d262c8f94798b8336/jiter-0.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:5d9b34ad56761b3bf0fbe8f7e55468704107608512350962d3317ffd7a4382d5", size = 189608, upload-time = "2026-02-02T12:35:55.304Z" }, + { url = "https://files.pythonhosted.org/packages/2e/30/7687e4f87086829955013ca12a9233523349767f69653ebc27036313def9/jiter-0.13.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0a2bd69fc1d902e89925fc34d1da51b2128019423d7b339a45d9e99c894e0663", size = 307958, upload-time = "2026-02-02T12:35:57.165Z" }, + { url = "https://files.pythonhosted.org/packages/c3/27/e57f9a783246ed95481e6749cc5002a8a767a73177a83c63ea71f0528b90/jiter-0.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f917a04240ef31898182f76a332f508f2cc4b57d2b4d7ad2dbfebbfe167eb505", size = 318597, upload-time = "2026-02-02T12:35:58.591Z" }, + { url = "https://files.pythonhosted.org/packages/cf/52/e5719a60ac5d4d7c5995461a94ad5ef962a37c8bf5b088390e6fad59b2ff/jiter-0.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1e2b199f446d3e82246b4fd9236d7cb502dc2222b18698ba0d986d2fecc6152", size = 348821, upload-time = "2026-02-02T12:36:00.093Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/db/c1efc32b8ba4c740ab3fc2d037d8753f67685f475e26b9d6536a4322bcdd/jiter-0.13.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04670992b576fa65bd056dbac0c39fe8bd67681c380cb2b48efa885711d9d726", size = 364163, upload-time = "2026-02-02T12:36:01.937Z" }, + { url = "https://files.pythonhosted.org/packages/55/8a/fb75556236047c8806995671a18e4a0ad646ed255276f51a20f32dceaeec/jiter-0.13.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a1aff1fbdb803a376d4d22a8f63f8e7ccbce0b4890c26cc7af9e501ab339ef0", size = 483709, upload-time = "2026-02-02T12:36:03.41Z" }, + { url = "https://files.pythonhosted.org/packages/7e/16/43512e6ee863875693a8e6f6d532e19d650779d6ba9a81593ae40a9088ff/jiter-0.13.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b3fb8c2053acaef8580809ac1d1f7481a0a0bdc012fd7f5d8b18fb696a5a089", size = 370480, upload-time = "2026-02-02T12:36:04.791Z" }, + { url = "https://files.pythonhosted.org/packages/f8/4c/09b93e30e984a187bc8aaa3510e1ec8dcbdcd71ca05d2f56aac0492453aa/jiter-0.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdaba7d87e66f26a2c45d8cbadcbfc4bf7884182317907baf39cfe9775bb4d93", size = 360735, upload-time = "2026-02-02T12:36:06.994Z" }, + { url = "https://files.pythonhosted.org/packages/1a/1b/46c5e349019874ec5dfa508c14c37e29864ea108d376ae26d90bee238cd7/jiter-0.13.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7b88d649135aca526da172e48083da915ec086b54e8e73a425ba50999468cc08", size = 391814, upload-time = "2026-02-02T12:36:08.368Z" }, + { url = "https://files.pythonhosted.org/packages/15/9e/26184760e85baee7162ad37b7912797d2077718476bf91517641c92b3639/jiter-0.13.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e404ea551d35438013c64b4f357b0474c7abf9f781c06d44fcaf7a14c69ff9e2", size = 513990, upload-time = "2026-02-02T12:36:09.993Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/34/2c9355247d6debad57a0a15e76ab1566ab799388042743656e566b3b7de1/jiter-0.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1f4748aad1b4a93c8bdd70f604d0f748cdc0e8744c5547798acfa52f10e79228", size = 548021, upload-time = "2026-02-02T12:36:11.376Z" }, + { url = "https://files.pythonhosted.org/packages/ac/4a/9f2c23255d04a834398b9c2e0e665382116911dc4d06b795710503cdad25/jiter-0.13.0-cp312-cp312-win32.whl", hash = "sha256:0bf670e3b1445fc4d31612199f1744f67f889ee1bbae703c4b54dc097e5dd394", size = 203024, upload-time = "2026-02-02T12:36:12.682Z" }, + { url = "https://files.pythonhosted.org/packages/09/ee/f0ae675a957ae5a8f160be3e87acea6b11dc7b89f6b7ab057e77b2d2b13a/jiter-0.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:15db60e121e11fe186c0b15236bd5d18381b9ddacdcf4e659feb96fc6c969c92", size = 205424, upload-time = "2026-02-02T12:36:13.93Z" }, + { url = "https://files.pythonhosted.org/packages/1b/02/ae611edf913d3cbf02c97cdb90374af2082c48d7190d74c1111dde08bcdd/jiter-0.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:41f92313d17989102f3cb5dd533a02787cdb99454d494344b0361355da52fcb9", size = 186818, upload-time = "2026-02-02T12:36:15.308Z" }, + { url = "https://files.pythonhosted.org/packages/91/9c/7ee5a6ff4b9991e1a45263bfc46731634c4a2bde27dfda6c8251df2d958c/jiter-0.13.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1f8a55b848cbabf97d861495cd65f1e5c590246fabca8b48e1747c4dfc8f85bf", size = 306897, upload-time = "2026-02-02T12:36:16.748Z" }, + { url = "https://files.pythonhosted.org/packages/7c/02/be5b870d1d2be5dd6a91bdfb90f248fbb7dcbd21338f092c6b89817c3dbf/jiter-0.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f556aa591c00f2c45eb1b89f68f52441a016034d18b65da60e2d2875bbbf344a", size = 317507, upload-time = "2026-02-02T12:36:18.351Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/92/b25d2ec333615f5f284f3a4024f7ce68cfa0604c322c6808b2344c7f5d2b/jiter-0.13.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7e1d61da332ec412350463891923f960c3073cf1aae93b538f0bb4c8cd46efb", size = 350560, upload-time = "2026-02-02T12:36:19.746Z" }, + { url = "https://files.pythonhosted.org/packages/be/ec/74dcb99fef0aca9fbe56b303bf79f6bd839010cb18ad41000bf6cc71eec0/jiter-0.13.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3097d665a27bc96fd9bbf7f86178037db139f319f785e4757ce7ccbf390db6c2", size = 363232, upload-time = "2026-02-02T12:36:21.243Z" }, + { url = "https://files.pythonhosted.org/packages/1b/37/f17375e0bb2f6a812d4dd92d7616e41917f740f3e71343627da9db2824ce/jiter-0.13.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d01ecc3a8cbdb6f25a37bd500510550b64ddf9f7d64a107d92f3ccb25035d0f", size = 483727, upload-time = "2026-02-02T12:36:22.688Z" }, + { url = "https://files.pythonhosted.org/packages/77/d2/a71160a5ae1a1e66c1395b37ef77da67513b0adba73b993a27fbe47eb048/jiter-0.13.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed9bbc30f5d60a3bdf63ae76beb3f9db280d7f195dfcfa61af792d6ce912d159", size = 370799, upload-time = "2026-02-02T12:36:24.106Z" }, + { url = "https://files.pythonhosted.org/packages/01/99/ed5e478ff0eb4e8aa5fd998f9d69603c9fd3f32de3bd16c2b1194f68361c/jiter-0.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98fbafb6e88256f4454de33c1f40203d09fc33ed19162a68b3b257b29ca7f663", size = 359120, upload-time = "2026-02-02T12:36:25.519Z" }, + { url = "https://files.pythonhosted.org/packages/16/be/7ffd08203277a813f732ba897352797fa9493faf8dc7995b31f3d9cb9488/jiter-0.13.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5467696f6b827f1116556cb0db620440380434591e93ecee7fd14d1a491b6daa", size = 390664, upload-time = "2026-02-02T12:36:26.866Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/84/e0787856196d6d346264d6dcccb01f741e5f0bd014c1d9a2ebe149caf4f3/jiter-0.13.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:2d08c9475d48b92892583df9da592a0e2ac49bcd41fae1fec4f39ba6cf107820", size = 513543, upload-time = "2026-02-02T12:36:28.217Z" }, + { url = "https://files.pythonhosted.org/packages/65/50/ecbd258181c4313cf79bca6c88fb63207d04d5bf5e4f65174114d072aa55/jiter-0.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:aed40e099404721d7fcaf5b89bd3b4568a4666358bcac7b6b15c09fb6252ab68", size = 547262, upload-time = "2026-02-02T12:36:29.678Z" }, + { url = "https://files.pythonhosted.org/packages/27/da/68f38d12e7111d2016cd198161b36e1f042bd115c169255bcb7ec823a3bf/jiter-0.13.0-cp313-cp313-win32.whl", hash = "sha256:36ebfbcffafb146d0e6ffb3e74d51e03d9c35ce7c625c8066cdbfc7b953bdc72", size = 200630, upload-time = "2026-02-02T12:36:31.808Z" }, + { url = "https://files.pythonhosted.org/packages/25/65/3bd1a972c9a08ecd22eb3b08a95d1941ebe6938aea620c246cf426ae09c2/jiter-0.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:8d76029f077379374cf0dbc78dbe45b38dec4a2eb78b08b5194ce836b2517afc", size = 202602, upload-time = "2026-02-02T12:36:33.679Z" }, + { url = "https://files.pythonhosted.org/packages/15/fe/13bd3678a311aa67686bb303654792c48206a112068f8b0b21426eb6851e/jiter-0.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:bb7613e1a427cfcb6ea4544f9ac566b93d5bf67e0d48c787eca673ff9c9dff2b", size = 185939, upload-time = "2026-02-02T12:36:35.065Z" }, + { url = "https://files.pythonhosted.org/packages/49/19/a929ec002ad3228bc97ca01dbb14f7632fffdc84a95ec92ceaf4145688ae/jiter-0.13.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fa476ab5dd49f3bf3a168e05f89358c75a17608dbabb080ef65f96b27c19ab10", size = 316616, upload-time = "2026-02-02T12:36:36.579Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/56/d19a9a194afa37c1728831e5fb81b7722c3de18a3109e8f282bfc23e587a/jiter-0.13.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade8cb6ff5632a62b7dbd4757d8c5573f7a2e9ae285d6b5b841707d8363205ef", size = 346850, upload-time = "2026-02-02T12:36:38.058Z" }, + { url = "https://files.pythonhosted.org/packages/36/4a/94e831c6bf287754a8a019cb966ed39ff8be6ab78cadecf08df3bb02d505/jiter-0.13.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9950290340acc1adaded363edd94baebcee7dabdfa8bee4790794cd5cfad2af6", size = 358551, upload-time = "2026-02-02T12:36:39.417Z" }, + { url = "https://files.pythonhosted.org/packages/a2/ec/a4c72c822695fa80e55d2b4142b73f0012035d9fcf90eccc56bc060db37c/jiter-0.13.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2b4972c6df33731aac0742b64fd0d18e0a69bc7d6e03108ce7d40c85fd9e3e6d", size = 201950, upload-time = "2026-02-02T12:36:40.791Z" }, + { url = "https://files.pythonhosted.org/packages/b6/00/393553ec27b824fbc29047e9c7cd4a3951d7fbe4a76743f17e44034fa4e4/jiter-0.13.0-cp313-cp313t-win_arm64.whl", hash = "sha256:701a1e77d1e593c1b435315ff625fd071f0998c5f02792038a5ca98899261b7d", size = 185852, upload-time = "2026-02-02T12:36:42.077Z" }, + { url = "https://files.pythonhosted.org/packages/6e/f5/f1997e987211f6f9bd71b8083047b316208b4aca0b529bb5f8c96c89ef3e/jiter-0.13.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:cc5223ab19fe25e2f0bf2643204ad7318896fe3729bf12fde41b77bfc4fafff0", size = 308804, upload-time = "2026-02-02T12:36:43.496Z" }, + { url = "https://files.pythonhosted.org/packages/cd/8f/5482a7677731fd44881f0204981ce2d7175db271f82cba2085dd2212e095/jiter-0.13.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9776ebe51713acf438fd9b4405fcd86893ae5d03487546dae7f34993217f8a91", size = 318787, upload-time = "2026-02-02T12:36:45.071Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/b9/7257ac59778f1cd025b26a23c5520a36a424f7f1b068f2442a5b499b7464/jiter-0.13.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:879e768938e7b49b5e90b7e3fecc0dbec01b8cb89595861fb39a8967c5220d09", size = 353880, upload-time = "2026-02-02T12:36:47.365Z" }, + { url = "https://files.pythonhosted.org/packages/c3/87/719eec4a3f0841dad99e3d3604ee4cba36af4419a76f3cb0b8e2e691ad67/jiter-0.13.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:682161a67adea11e3aae9038c06c8b4a9a71023228767477d683f69903ebc607", size = 366702, upload-time = "2026-02-02T12:36:48.871Z" }, + { url = "https://files.pythonhosted.org/packages/d2/65/415f0a75cf6921e43365a1bc227c565cb949caca8b7532776e430cbaa530/jiter-0.13.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a13b68cd1cd8cc9de8f244ebae18ccb3e4067ad205220ef324c39181e23bbf66", size = 486319, upload-time = "2026-02-02T12:36:53.006Z" }, + { url = "https://files.pythonhosted.org/packages/54/a2/9e12b48e82c6bbc6081fd81abf915e1443add1b13d8fc586e1d90bb02bb8/jiter-0.13.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87ce0f14c6c08892b610686ae8be350bf368467b6acd5085a5b65441e2bf36d2", size = 372289, upload-time = "2026-02-02T12:36:54.593Z" }, + { url = "https://files.pythonhosted.org/packages/4e/c1/e4693f107a1789a239c759a432e9afc592366f04e901470c2af89cfd28e1/jiter-0.13.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c365005b05505a90d1c47856420980d0237adf82f70c4aff7aebd3c1cc143ad", size = 360165, upload-time = "2026-02-02T12:36:56.112Z" }, + { url = "https://files.pythonhosted.org/packages/17/08/91b9ea976c1c758240614bd88442681a87672eebc3d9a6dde476874e706b/jiter-0.13.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1317fdffd16f5873e46ce27d0e0f7f4f90f0cdf1d86bf6abeaea9f63ca2c401d", size = 389634, upload-time = "2026-02-02T12:36:57.495Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/23/58325ef99390d6d40427ed6005bf1ad54f2577866594bcf13ce55675f87d/jiter-0.13.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c05b450d37ba0c9e21c77fef1f205f56bcee2330bddca68d344baebfc55ae0df", size = 514933, upload-time = "2026-02-02T12:36:58.909Z" }, + { url = "https://files.pythonhosted.org/packages/5b/25/69f1120c7c395fd276c3996bb8adefa9c6b84c12bb7111e5c6ccdcd8526d/jiter-0.13.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:775e10de3849d0631a97c603f996f518159272db00fdda0a780f81752255ee9d", size = 548842, upload-time = "2026-02-02T12:37:00.433Z" }, + { url = "https://files.pythonhosted.org/packages/18/05/981c9669d86850c5fbb0d9e62bba144787f9fba84546ba43d624ee27ef29/jiter-0.13.0-cp314-cp314-win32.whl", hash = "sha256:632bf7c1d28421c00dd8bbb8a3bac5663e1f57d5cd5ed962bce3c73bf62608e6", size = 202108, upload-time = "2026-02-02T12:37:01.718Z" }, + { url = "https://files.pythonhosted.org/packages/8d/96/cdcf54dd0b0341db7d25413229888a346c7130bd20820530905fdb65727b/jiter-0.13.0-cp314-cp314-win_amd64.whl", hash = "sha256:f22ef501c3f87ede88f23f9b11e608581c14f04db59b6a801f354397ae13739f", size = 204027, upload-time = "2026-02-02T12:37:03.075Z" }, + { url = "https://files.pythonhosted.org/packages/fb/f9/724bcaaab7a3cd727031fe4f6995cb86c4bd344909177c186699c8dec51a/jiter-0.13.0-cp314-cp314-win_arm64.whl", hash = "sha256:07b75fe09a4ee8e0c606200622e571e44943f47254f95e2436c8bdcaceb36d7d", size = 187199, upload-time = "2026-02-02T12:37:04.414Z" }, + { url = "https://files.pythonhosted.org/packages/62/92/1661d8b9fd6a3d7a2d89831db26fe3c1509a287d83ad7838831c7b7a5c7e/jiter-0.13.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:964538479359059a35fb400e769295d4b315ae61e4105396d355a12f7fef09f0", size = 318423, upload-time = "2026-02-02T12:37:05.806Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/3b/f77d342a54d4ebcd128e520fc58ec2f5b30a423b0fd26acdfc0c6fef8e26/jiter-0.13.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e104da1db1c0991b3eaed391ccd650ae8d947eab1480c733e5a3fb28d4313e40", size = 351438, upload-time = "2026-02-02T12:37:07.189Z" }, + { url = "https://files.pythonhosted.org/packages/76/b3/ba9a69f0e4209bd3331470c723c2f5509e6f0482e416b612431a5061ed71/jiter-0.13.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e3a5f0cde8ff433b8e88e41aa40131455420fb3649a3c7abdda6145f8cb7202", size = 364774, upload-time = "2026-02-02T12:37:08.579Z" }, + { url = "https://files.pythonhosted.org/packages/b3/16/6cdb31fa342932602458dbb631bfbd47f601e03d2e4950740e0b2100b570/jiter-0.13.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:57aab48f40be1db920a582b30b116fe2435d184f77f0e4226f546794cedd9cf0", size = 487238, upload-time = "2026-02-02T12:37:10.066Z" }, + { url = "https://files.pythonhosted.org/packages/ed/b1/956cc7abaca8d95c13aa8d6c9b3f3797241c246cd6e792934cc4c8b250d2/jiter-0.13.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7772115877c53f62beeb8fd853cab692dbc04374ef623b30f997959a4c0e7e95", size = 372892, upload-time = "2026-02-02T12:37:11.656Z" }, + { url = "https://files.pythonhosted.org/packages/26/c4/97ecde8b1e74f67b8598c57c6fccf6df86ea7861ed29da84629cdbba76c4/jiter-0.13.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1211427574b17b633cfceba5040de8081e5abf114f7a7602f73d2e16f9fdaa59", size = 360309, upload-time = "2026-02-02T12:37:13.244Z" }, + { url = "https://files.pythonhosted.org/packages/4b/d7/eabe3cf46715854ccc80be2cd78dd4c36aedeb30751dbf85a1d08c14373c/jiter-0.13.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7beae3a3d3b5212d3a55d2961db3c292e02e302feb43fce6a3f7a31b90ea6dfe", size = 389607, upload-time = "2026-02-02T12:37:14.881Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/2d/03963fc0804e6109b82decfb9974eb92df3797fe7222428cae12f8ccaa0c/jiter-0.13.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:e5562a0f0e90a6223b704163ea28e831bd3a9faa3512a711f031611e6b06c939", size = 514986, upload-time = "2026-02-02T12:37:16.326Z" }, + { url = "https://files.pythonhosted.org/packages/f6/6c/8c83b45eb3eb1c1e18d841fe30b4b5bc5619d781267ca9bc03e005d8fd0a/jiter-0.13.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:6c26a424569a59140fb51160a56df13f438a2b0967365e987889186d5fc2f6f9", size = 548756, upload-time = "2026-02-02T12:37:17.736Z" }, + { url = "https://files.pythonhosted.org/packages/47/66/eea81dfff765ed66c68fd2ed8c96245109e13c896c2a5015c7839c92367e/jiter-0.13.0-cp314-cp314t-win32.whl", hash = "sha256:24dc96eca9f84da4131cdf87a95e6ce36765c3b156fc9ae33280873b1c32d5f6", size = 201196, upload-time = "2026-02-02T12:37:19.101Z" }, + { url = "https://files.pythonhosted.org/packages/ff/32/4ac9c7a76402f8f00d00842a7f6b83b284d0cf7c1e9d4227bc95aa6d17fa/jiter-0.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0a8d76c7524087272c8ae913f5d9d608bd839154b62c4322ef65723d2e5bb0b8", size = 204215, upload-time = "2026-02-02T12:37:20.495Z" }, + { url = "https://files.pythonhosted.org/packages/f9/8e/7def204fea9f9be8b3c21a6f2dd6c020cf56c7d5ff753e0e23ed7f9ea57e/jiter-0.13.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2c26cf47e2cad140fa23b6d58d435a7c0161f5c514284802f25e87fddfe11024", size = 187152, upload-time = "2026-02-02T12:37:22.124Z" }, + { url = "https://files.pythonhosted.org/packages/79/b3/3c29819a27178d0e461a8571fb63c6ae38be6dc36b78b3ec2876bbd6a910/jiter-0.13.0-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b1cbfa133241d0e6bdab48dcdc2604e8ba81512f6bbd68ec3e8e1357dd3c316c", size = 307016, upload-time = "2026-02-02T12:37:42.755Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/ae/60993e4b07b1ac5ebe46da7aa99fdbb802eb986c38d26e3883ac0125c4e0/jiter-0.13.0-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:db367d8be9fad6e8ebbac4a7578b7af562e506211036cba2c06c3b998603c3d2", size = 305024, upload-time = "2026-02-02T12:37:44.774Z" }, + { url = "https://files.pythonhosted.org/packages/77/fa/2227e590e9cf98803db2811f172b2d6460a21539ab73006f251c66f44b14/jiter-0.13.0-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45f6f8efb2f3b0603092401dc2df79fa89ccbc027aaba4174d2d4133ed661434", size = 339337, upload-time = "2026-02-02T12:37:46.668Z" }, + { url = "https://files.pythonhosted.org/packages/2d/92/015173281f7eb96c0ef580c997da8ef50870d4f7f4c9e03c845a1d62ae04/jiter-0.13.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:597245258e6ad085d064780abfb23a284d418d3e61c57362d9449c6c7317ee2d", size = 346395, upload-time = "2026-02-02T12:37:48.09Z" }, + { url = "https://files.pythonhosted.org/packages/80/60/e50fa45dd7e2eae049f0ce964663849e897300433921198aef94b6ffa23a/jiter-0.13.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:3d744a6061afba08dd7ae375dcde870cffb14429b7477e10f67e9e6d68772a0a", size = 305169, upload-time = "2026-02-02T12:37:50.376Z" }, + { url = "https://files.pythonhosted.org/packages/d2/73/a009f41c5eed71c49bec53036c4b33555afcdee70682a18c6f66e396c039/jiter-0.13.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:ff732bd0a0e778f43d5009840f20b935e79087b4dc65bd36f1cd0f9b04b8ff7f", size = 303808, upload-time = "2026-02-02T12:37:52.092Z" }, + { url = "https://files.pythonhosted.org/packages/c4/10/528b439290763bff3d939268085d03382471b442f212dca4ff5f12802d43/jiter-0.13.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab44b178f7981fcaea7e0a5df20e773c663d06ffda0198f1a524e91b2fde7e59", size = 337384, 
upload-time = "2026-02-02T12:37:53.582Z" }, + { url = "https://files.pythonhosted.org/packages/67/8a/a342b2f0251f3dac4ca17618265d93bf244a2a4d089126e81e4c1056ac50/jiter-0.13.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bb00b6d26db67a05fe3e12c76edc75f32077fb51deed13822dc648fa373bc19", size = 343768, upload-time = "2026-02-02T12:37:55.055Z" }, +] + +[[package]] +name = "jmespath" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d3/59/322338183ecda247fb5d1763a6cbe46eff7222eaeebafd9fa65d4bf5cb11/jmespath-1.1.0.tar.gz", hash = "sha256:472c87d80f36026ae83c6ddd0f1d05d4e510134ed462851fd5f754c8c3cbb88d", size = 27377, upload-time = "2026-01-22T16:35:26.279Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/2f/967ba146e6d58cf6a652da73885f52fc68001525b4197effc174321d70b4/jmespath-1.1.0-py3-none-any.whl", hash = "sha256:a5663118de4908c91729bea0acadca56526eb2698e83de10cd116ae0f4e97c64", size = 20419, upload-time = "2026-01-22T16:35:24.919Z" }, +] + +[[package]] +name = "kombu" +version = "5.6.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "amqp" }, + { name = "packaging" }, + { name = "tzdata" }, + { name = "vine" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b6/a5/607e533ed6c83ae1a696969b8e1c137dfebd5759a2e9682e26ff1b97740b/kombu-5.6.2.tar.gz", hash = "sha256:8060497058066c6f5aed7c26d7cd0d3b574990b09de842a8c5aaed0b92cc5a55", size = 472594, upload-time = "2025-12-29T20:30:07.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/0f/834427d8c03ff1d7e867d3db3d176470c64871753252b21b4f4897d1fa45/kombu-5.6.2-py3-none-any.whl", hash = "sha256:efcfc559da324d41d61ca311b0c64965ea35b4c55cc04ee36e55386145dace93", size = 214219, upload-time = "2025-12-29T20:30:05.74Z" }, +] + +[package.optional-dependencies] +redis = [ + { name = "redis" }, +] + +[[package]] +name = 
"mako" +version = "1.3.10" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474, upload-time = "2025-04-10T12:44:31.16Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, + { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, + { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" }, + { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" }, + { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" }, + { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" }, + { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = 
"sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" }, + { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" }, + { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" }, + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = 
"2025-09-27T18:36:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, + { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, + { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, + { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, + { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = 
"2025-09-27T18:36:46.916Z" }, + { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, + { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, + { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, + { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, + { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, + { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, + { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, + { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, + { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, + { 
url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, + { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, + { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, + { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, + { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, + { url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" }, + { url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" }, + { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" }, + { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" }, + { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" }, + { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" }, + { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" }, + { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" }, + { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" }, + { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" }, + { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" }, + { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" }, + { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" }, + { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" }, + 
{ url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" }, + { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, +] + +[[package]] +name = "marshmallow" +version = "3.26.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/79/de6c16cc902f4fc372236926b0ce2ab7845268dcc30fb2fbb7f71b418631/marshmallow-3.26.2.tar.gz", hash = "sha256:bbe2adb5a03e6e3571b573f42527c6fe926e17467833660bebd11593ab8dfd57", size = 222095, upload-time = "2025-12-22T06:53:53.309Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/2f/5108cb3ee4ba6501748c4908b908e55f42a5b66245b4cfe0c99326e1ef6e/marshmallow-3.26.2-py3-none-any.whl", hash = "sha256:013fa8a3c4c276c24d26d84ce934dc964e2aa794345a0f8c7e5a7191482c8a73", size = 50964, upload-time = "2025-12-22T06:53:51.801Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = 
"sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "numpy" +version = "2.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/9f/b8cef5bffa569759033adda9481211426f12f53299629b410340795c2514/numpy-2.4.4.tar.gz", hash = "sha256:2d390634c5182175533585cc89f3608a4682ccb173cc9bb940b2881c8d6f8fa0", size = 20731587, upload-time = "2026-03-29T13:22:01.298Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/c6/4218570d8c8ecc9704b5157a3348e486e84ef4be0ed3e38218ab473c83d2/numpy-2.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f983334aea213c99992053ede6168500e5f086ce74fbc4acc3f2b00f5762e9db", size = 16976799, upload-time = "2026-03-29T13:18:15.438Z" }, + { url = "https://files.pythonhosted.org/packages/dd/92/b4d922c4a5f5dab9ed44e6153908a5c665b71acf183a83b93b690996e39b/numpy-2.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72944b19f2324114e9dc86a159787333b77874143efcf89a5167ef83cfee8af0", size = 14971552, upload-time = "2026-03-29T13:18:18.606Z" }, + { url = "https://files.pythonhosted.org/packages/8a/dc/df98c095978fa6ee7b9a9387d1d58cbb3d232d0e69ad169a4ce784bde4fd/numpy-2.4.4-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:86b6f55f5a352b48d7fbfd2dbc3d5b780b2d79f4d3c121f33eb6efb22e9a2015", size = 5476566, upload-time = "2026-03-29T13:18:21.532Z" }, + { url = "https://files.pythonhosted.org/packages/28/34/b3fdcec6e725409223dd27356bdf5a3c2cc2282e428218ecc9cb7acc9763/numpy-2.4.4-cp311-cp311-macosx_14_0_x86_64.whl", hash = 
"sha256:ba1f4fc670ed79f876f70082eff4f9583c15fb9a4b89d6188412de4d18ae2f40", size = 6806482, upload-time = "2026-03-29T13:18:23.634Z" }, + { url = "https://files.pythonhosted.org/packages/68/62/63417c13aa35d57bee1337c67446761dc25ea6543130cf868eace6e8157b/numpy-2.4.4-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a87ec22c87be071b6bdbd27920b129b94f2fc964358ce38f3822635a3e2e03d", size = 15973376, upload-time = "2026-03-29T13:18:26.677Z" }, + { url = "https://files.pythonhosted.org/packages/cf/c5/9fcb7e0e69cef59cf10c746b84f7d58b08bc66a6b7d459783c5a4f6101a6/numpy-2.4.4-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:df3775294accfdd75f32c74ae39fcba920c9a378a2fc18a12b6820aa8c1fb502", size = 16925137, upload-time = "2026-03-29T13:18:30.14Z" }, + { url = "https://files.pythonhosted.org/packages/7e/43/80020edacb3f84b9efdd1591120a4296462c23fd8db0dde1666f6ef66f13/numpy-2.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0d4e437e295f18ec29bc79daf55e8a47a9113df44d66f702f02a293d93a2d6dd", size = 17329414, upload-time = "2026-03-29T13:18:33.733Z" }, + { url = "https://files.pythonhosted.org/packages/fd/06/af0658593b18a5f73532d377188b964f239eb0894e664a6c12f484472f97/numpy-2.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6aa3236c78803afbcb255045fbef97a9e25a1f6c9888357d205ddc42f4d6eba5", size = 18658397, upload-time = "2026-03-29T13:18:37.511Z" }, + { url = "https://files.pythonhosted.org/packages/e6/ce/13a09ed65f5d0ce5c7dd0669250374c6e379910f97af2c08c57b0608eee4/numpy-2.4.4-cp311-cp311-win32.whl", hash = "sha256:30caa73029a225b2d40d9fae193e008e24b2026b7ee1a867b7ee8d96ca1a448e", size = 6239499, upload-time = "2026-03-29T13:18:40.372Z" }, + { url = "https://files.pythonhosted.org/packages/bd/63/05d193dbb4b5eec1eca73822d80da98b511f8328ad4ae3ca4caf0f4db91d/numpy-2.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:6bbe4eb67390b0a0265a2c25458f6b90a409d5d069f1041e6aff1e27e3d9a79e", size = 12614257, upload-time = 
"2026-03-29T13:18:42.95Z" }, + { url = "https://files.pythonhosted.org/packages/87/c5/8168052f080c26fa984c413305012be54741c9d0d74abd7fbeeccae3889f/numpy-2.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:fcfe2045fd2e8f3cb0ce9d4ba6dba6333b8fa05bb8a4939c908cd43322d14c7e", size = 10486775, upload-time = "2026-03-29T13:18:45.835Z" }, + { url = "https://files.pythonhosted.org/packages/28/05/32396bec30fb2263770ee910142f49c1476d08e8ad41abf8403806b520ce/numpy-2.4.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:15716cfef24d3a9762e3acdf87e27f58dc823d1348f765bbea6bef8c639bfa1b", size = 16689272, upload-time = "2026-03-29T13:18:49.223Z" }, + { url = "https://files.pythonhosted.org/packages/c5/f3/a983d28637bfcd763a9c7aafdb6d5c0ebf3d487d1e1459ffdb57e2f01117/numpy-2.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23cbfd4c17357c81021f21540da84ee282b9c8fba38a03b7b9d09ba6b951421e", size = 14699573, upload-time = "2026-03-29T13:18:52.629Z" }, + { url = "https://files.pythonhosted.org/packages/9b/fd/e5ecca1e78c05106d98028114f5c00d3eddb41207686b2b7de3e477b0e22/numpy-2.4.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:8b3b60bb7cba2c8c81837661c488637eee696f59a877788a396d33150c35d842", size = 5204782, upload-time = "2026-03-29T13:18:55.579Z" }, + { url = "https://files.pythonhosted.org/packages/de/2f/702a4594413c1a8632092beae8aba00f1d67947389369b3777aed783fdca/numpy-2.4.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e4a010c27ff6f210ff4c6ef34394cd61470d01014439b192ec22552ee867f2a8", size = 6552038, upload-time = "2026-03-29T13:18:57.769Z" }, + { url = "https://files.pythonhosted.org/packages/7f/37/eed308a8f56cba4d1fdf467a4fc67ef4ff4bf1c888f5fc980481890104b1/numpy-2.4.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f9e75681b59ddaa5e659898085ae0eaea229d054f2ac0c7e563a62205a700121", size = 15670666, upload-time = "2026-03-29T13:19:00.341Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/0d/0e3ecece05b7a7e87ab9fb587855548da437a061326fff64a223b6dcb78a/numpy-2.4.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:81f4a14bee47aec54f883e0cad2d73986640c1590eb9bfaaba7ad17394481e6e", size = 16645480, upload-time = "2026-03-29T13:19:03.63Z" }, + { url = "https://files.pythonhosted.org/packages/34/49/f2312c154b82a286758ee2f1743336d50651f8b5195db18cdb63675ff649/numpy-2.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:62d6b0f03b694173f9fcb1fb317f7222fd0b0b103e784c6549f5e53a27718c44", size = 17020036, upload-time = "2026-03-29T13:19:07.428Z" }, + { url = "https://files.pythonhosted.org/packages/7b/e9/736d17bd77f1b0ec4f9901aaec129c00d59f5d84d5e79bba540ef12c2330/numpy-2.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fbc356aae7adf9e6336d336b9c8111d390a05df88f1805573ebb0807bd06fd1d", size = 18368643, upload-time = "2026-03-29T13:19:10.775Z" }, + { url = "https://files.pythonhosted.org/packages/63/f6/d417977c5f519b17c8a5c3bc9e8304b0908b0e21136fe43bf628a1343914/numpy-2.4.4-cp312-cp312-win32.whl", hash = "sha256:0d35aea54ad1d420c812bfa0385c71cd7cc5bcf7c65fed95fc2cd02fe8c79827", size = 5961117, upload-time = "2026-03-29T13:19:13.464Z" }, + { url = "https://files.pythonhosted.org/packages/2d/5b/e1deebf88ff431b01b7406ca3583ab2bbb90972bbe1c568732e49c844f7e/numpy-2.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:b5f0362dc928a6ecd9db58868fca5e48485205e3855957bdedea308f8672ea4a", size = 12320584, upload-time = "2026-03-29T13:19:16.155Z" }, + { url = "https://files.pythonhosted.org/packages/58/89/e4e856ac82a68c3ed64486a544977d0e7bdd18b8da75b78a577ca31c4395/numpy-2.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:846300f379b5b12cc769334464656bc882e0735d27d9726568bc932fdc49d5ec", size = 10221450, upload-time = "2026-03-29T13:19:18.994Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/1d/d0a583ce4fefcc3308806a749a536c201ed6b5ad6e1322e227ee4848979d/numpy-2.4.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:08f2e31ed5e6f04b118e49821397f12767934cfdd12a1ce86a058f91e004ee50", size = 16684933, upload-time = "2026-03-29T13:19:22.47Z" }, + { url = "https://files.pythonhosted.org/packages/c1/62/2b7a48fbb745d344742c0277f01286dead15f3f68e4f359fbfcf7b48f70f/numpy-2.4.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e823b8b6edc81e747526f70f71a9c0a07ac4e7ad13020aa736bb7c9d67196115", size = 14694532, upload-time = "2026-03-29T13:19:25.581Z" }, + { url = "https://files.pythonhosted.org/packages/e5/87/499737bfba066b4a3bebff24a8f1c5b2dee410b209bc6668c9be692580f0/numpy-2.4.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4a19d9dba1a76618dd86b164d608566f393f8ec6ac7c44f0cc879011c45e65af", size = 5199661, upload-time = "2026-03-29T13:19:28.31Z" }, + { url = "https://files.pythonhosted.org/packages/cd/da/464d551604320d1491bc345efed99b4b7034143a85787aab78d5691d5a0e/numpy-2.4.4-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:d2a8490669bfe99a233298348acc2d824d496dee0e66e31b66a6022c2ad74a5c", size = 6547539, upload-time = "2026-03-29T13:19:30.97Z" }, + { url = "https://files.pythonhosted.org/packages/7d/90/8d23e3b0dafd024bf31bdec225b3bb5c2dbfa6912f8a53b8659f21216cbf/numpy-2.4.4-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:45dbed2ab436a9e826e302fcdcbe9133f9b0006e5af7168afb8963a6520da103", size = 15668806, upload-time = "2026-03-29T13:19:33.887Z" }, + { url = "https://files.pythonhosted.org/packages/d1/73/a9d864e42a01896bb5974475438f16086be9ba1f0d19d0bb7a07427c4a8b/numpy-2.4.4-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c901b15172510173f5cb310eae652908340f8dede90fff9e3bf6c0d8dfd92f83", size = 16632682, upload-time = "2026-03-29T13:19:37.336Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/fb/14570d65c3bde4e202a031210475ae9cde9b7686a2e7dc97ee67d2833b35/numpy-2.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:99d838547ace2c4aace6c4f76e879ddfe02bb58a80c1549928477862b7a6d6ed", size = 17019810, upload-time = "2026-03-29T13:19:40.963Z" }, + { url = "https://files.pythonhosted.org/packages/8a/77/2ba9d87081fd41f6d640c83f26fb7351e536b7ce6dd9061b6af5904e8e46/numpy-2.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0aec54fd785890ecca25a6003fd9a5aed47ad607bbac5cd64f836ad8666f4959", size = 18357394, upload-time = "2026-03-29T13:19:44.859Z" }, + { url = "https://files.pythonhosted.org/packages/a2/23/52666c9a41708b0853fa3b1a12c90da38c507a3074883823126d4e9d5b30/numpy-2.4.4-cp313-cp313-win32.whl", hash = "sha256:07077278157d02f65c43b1b26a3886bce886f95d20aabd11f87932750dfb14ed", size = 5959556, upload-time = "2026-03-29T13:19:47.661Z" }, + { url = "https://files.pythonhosted.org/packages/57/fb/48649b4971cde70d817cf97a2a2fdc0b4d8308569f1dd2f2611959d2e0cf/numpy-2.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:5c70f1cc1c4efbe316a572e2d8b9b9cc44e89b95f79ca3331553fbb63716e2bf", size = 12317311, upload-time = "2026-03-29T13:19:50.67Z" }, + { url = "https://files.pythonhosted.org/packages/ba/d8/11490cddd564eb4de97b4579ef6bfe6a736cc07e94c1598590ae25415e01/numpy-2.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:ef4059d6e5152fa1a39f888e344c73fdc926e1b2dd58c771d67b0acfbf2aa67d", size = 10222060, upload-time = "2026-03-29T13:19:54.229Z" }, + { url = "https://files.pythonhosted.org/packages/99/5d/dab4339177a905aad3e2221c915b35202f1ec30d750dd2e5e9d9a72b804b/numpy-2.4.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4bbc7f303d125971f60ec0aaad5e12c62d0d2c925f0ab1273debd0e4ba37aba5", size = 14822302, upload-time = "2026-03-29T13:19:57.585Z" }, + { url = "https://files.pythonhosted.org/packages/eb/e4/0564a65e7d3d97562ed6f9b0fd0fb0a6f559ee444092f105938b50043876/numpy-2.4.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = 
"sha256:4d6d57903571f86180eb98f8f0c839fa9ebbfb031356d87f1361be91e433f5b7", size = 5327407, upload-time = "2026-03-29T13:20:00.601Z" }, + { url = "https://files.pythonhosted.org/packages/29/8d/35a3a6ce5ad371afa58b4700f1c820f8f279948cca32524e0a695b0ded83/numpy-2.4.4-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:4636de7fd195197b7535f231b5de9e4b36d2c440b6e566d2e4e4746e6af0ca93", size = 6647631, upload-time = "2026-03-29T13:20:02.855Z" }, + { url = "https://files.pythonhosted.org/packages/f4/da/477731acbd5a58a946c736edfdabb2ac5b34c3d08d1ba1a7b437fa0884df/numpy-2.4.4-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ad2e2ef14e0b04e544ea2fa0a36463f847f113d314aa02e5b402fdf910ef309e", size = 15727691, upload-time = "2026-03-29T13:20:06.004Z" }, + { url = "https://files.pythonhosted.org/packages/e6/db/338535d9b152beabeb511579598418ba0212ce77cf9718edd70262cc4370/numpy-2.4.4-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a285b3b96f951841799528cd1f4f01cd70e7e0204b4abebac9463eecfcf2a40", size = 16681241, upload-time = "2026-03-29T13:20:09.417Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a9/ad248e8f58beb7a0219b413c9c7d8151c5d285f7f946c3e26695bdbbe2df/numpy-2.4.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:f8474c4241bc18b750be2abea9d7a9ec84f46ef861dbacf86a4f6e043401f79e", size = 17085767, upload-time = "2026-03-29T13:20:13.126Z" }, + { url = "https://files.pythonhosted.org/packages/b5/1a/3b88ccd3694681356f70da841630e4725a7264d6a885c8d442a697e1146b/numpy-2.4.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4e874c976154687c1f71715b034739b45c7711bec81db01914770373d125e392", size = 18403169, upload-time = "2026-03-29T13:20:17.096Z" }, + { url = "https://files.pythonhosted.org/packages/c2/c9/fcfd5d0639222c6eac7f304829b04892ef51c96a75d479214d77e3ce6e33/numpy-2.4.4-cp313-cp313t-win32.whl", hash = "sha256:9c585a1790d5436a5374bac930dad6ed244c046ed91b2b2a3634eb2971d21008", size = 6083477, 
upload-time = "2026-03-29T13:20:20.195Z" }, + { url = "https://files.pythonhosted.org/packages/d5/e3/3938a61d1c538aaec8ed6fd6323f57b0c2d2d2219512434c5c878db76553/numpy-2.4.4-cp313-cp313t-win_amd64.whl", hash = "sha256:93e15038125dc1e5345d9b5b68aa7f996ec33b98118d18c6ca0d0b7d6198b7e8", size = 12457487, upload-time = "2026-03-29T13:20:22.946Z" }, + { url = "https://files.pythonhosted.org/packages/97/6a/7e345032cc60501721ef94e0e30b60f6b0bd601f9174ebd36389a2b86d40/numpy-2.4.4-cp313-cp313t-win_arm64.whl", hash = "sha256:0dfd3f9d3adbe2920b68b5cd3d51444e13a10792ec7154cd0a2f6e74d4ab3233", size = 10292002, upload-time = "2026-03-29T13:20:25.909Z" }, + { url = "https://files.pythonhosted.org/packages/6e/06/c54062f85f673dd5c04cbe2f14c3acb8c8b95e3384869bb8cc9bff8cb9df/numpy-2.4.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:f169b9a863d34f5d11b8698ead99febeaa17a13ca044961aa8e2662a6c7766a0", size = 16684353, upload-time = "2026-03-29T13:20:29.504Z" }, + { url = "https://files.pythonhosted.org/packages/4c/39/8a320264a84404c74cc7e79715de85d6130fa07a0898f67fb5cd5bd79908/numpy-2.4.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2483e4584a1cb3092da4470b38866634bafb223cbcd551ee047633fd2584599a", size = 14704914, upload-time = "2026-03-29T13:20:33.547Z" }, + { url = "https://files.pythonhosted.org/packages/91/fb/287076b2614e1d1044235f50f03748f31fa287e3dbe6abeb35cdfa351eca/numpy-2.4.4-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:2d19e6e2095506d1736b7d80595e0f252d76b89f5e715c35e06e937679ea7d7a", size = 5210005, upload-time = "2026-03-29T13:20:36.45Z" }, + { url = "https://files.pythonhosted.org/packages/63/eb/fcc338595309910de6ecabfcef2419a9ce24399680bfb149421fa2df1280/numpy-2.4.4-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:6a246d5914aa1c820c9443ddcee9c02bec3e203b0c080349533fae17727dfd1b", size = 6544974, upload-time = "2026-03-29T13:20:39.014Z" }, + { url = 
"https://files.pythonhosted.org/packages/44/5d/e7e9044032a716cdfaa3fba27a8e874bf1c5f1912a1ddd4ed071bf8a14a6/numpy-2.4.4-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:989824e9faf85f96ec9c7761cd8d29c531ad857bfa1daa930cba85baaecf1a9a", size = 15684591, upload-time = "2026-03-29T13:20:42.146Z" }, + { url = "https://files.pythonhosted.org/packages/98/7c/21252050676612625449b4807d6b695b9ce8a7c9e1c197ee6216c8a65c7c/numpy-2.4.4-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:27a8d92cd10f1382a67d7cf4db7ce18341b66438bdd9f691d7b0e48d104c2a9d", size = 16637700, upload-time = "2026-03-29T13:20:46.204Z" }, + { url = "https://files.pythonhosted.org/packages/b1/29/56d2bbef9465db24ef25393383d761a1af4f446a1df9b8cded4fe3a5a5d7/numpy-2.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e44319a2953c738205bf3354537979eaa3998ed673395b964c1176083dd46252", size = 17035781, upload-time = "2026-03-29T13:20:50.242Z" }, + { url = "https://files.pythonhosted.org/packages/e3/2b/a35a6d7589d21f44cea7d0a98de5ddcbb3d421b2622a5c96b1edf18707c3/numpy-2.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e892aff75639bbef0d2a2cfd55535510df26ff92f63c92cd84ef8d4ba5a5557f", size = 18362959, upload-time = "2026-03-29T13:20:54.019Z" }, + { url = "https://files.pythonhosted.org/packages/64/c9/d52ec581f2390e0f5f85cbfd80fb83d965fc15e9f0e1aec2195faa142cde/numpy-2.4.4-cp314-cp314-win32.whl", hash = "sha256:1378871da56ca8943c2ba674530924bb8ca40cd228358a3b5f302ad60cf875fc", size = 6008768, upload-time = "2026-03-29T13:20:56.912Z" }, + { url = "https://files.pythonhosted.org/packages/fa/22/4cc31a62a6c7b74a8730e31a4274c5dc80e005751e277a2ce38e675e4923/numpy-2.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:715d1c092715954784bc79e1174fc2a90093dc4dc84ea15eb14dad8abdcdeb74", size = 12449181, upload-time = "2026-03-29T13:20:59.548Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/2e/14cda6f4d8e396c612d1bf97f22958e92148801d7e4f110cabebdc0eef4b/numpy-2.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:2c194dd721e54ecad9ad387c1d35e63dce5c4450c6dc7dd5611283dda239aabb", size = 10496035, upload-time = "2026-03-29T13:21:02.524Z" }, + { url = "https://files.pythonhosted.org/packages/b1/e8/8fed8c8d848d7ecea092dc3469643f9d10bc3a134a815a3b033da1d2039b/numpy-2.4.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2aa0613a5177c264ff5921051a5719d20095ea586ca88cc802c5c218d1c67d3e", size = 14824958, upload-time = "2026-03-29T13:21:05.671Z" }, + { url = "https://files.pythonhosted.org/packages/05/1a/d8007a5138c179c2bf33ef44503e83d70434d2642877ee8fbb230e7c0548/numpy-2.4.4-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:42c16925aa5a02362f986765f9ebabf20de75cdefdca827d14315c568dcab113", size = 5330020, upload-time = "2026-03-29T13:21:08.635Z" }, + { url = "https://files.pythonhosted.org/packages/99/64/ffb99ac6ae93faf117bcbd5c7ba48a7f45364a33e8e458545d3633615dda/numpy-2.4.4-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:874f200b2a981c647340f841730fc3a2b54c9d940566a3c4149099591e2c4c3d", size = 6650758, upload-time = "2026-03-29T13:21:10.949Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6e/795cc078b78a384052e73b2f6281ff7a700e9bf53bcce2ee579d4f6dd879/numpy-2.4.4-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9b39d38a9bd2ae1becd7eac1303d031c5c110ad31f2b319c6e7d98b135c934d", size = 15729948, upload-time = "2026-03-29T13:21:14.047Z" }, + { url = "https://files.pythonhosted.org/packages/5f/86/2acbda8cc2af5f3d7bfc791192863b9e3e19674da7b5e533fded124d1299/numpy-2.4.4-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b268594bccac7d7cf5844c7732e3f20c50921d94e36d7ec9b79e9857694b1b2f", size = 16679325, upload-time = "2026-03-29T13:21:17.561Z" }, + { url = 
"https://files.pythonhosted.org/packages/bc/59/cafd83018f4aa55e0ac6fa92aa066c0a1877b77a615ceff1711c260ffae8/numpy-2.4.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ac6b31e35612a26483e20750126d30d0941f949426974cace8e6b5c58a3657b0", size = 17084883, upload-time = "2026-03-29T13:21:21.106Z" }, + { url = "https://files.pythonhosted.org/packages/f0/85/a42548db84e65ece46ab2caea3d3f78b416a47af387fcbb47ec28e660dc2/numpy-2.4.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8e3ed142f2728df44263aaf5fb1f5b0b99f4070c553a0d7f033be65338329150", size = 18403474, upload-time = "2026-03-29T13:21:24.828Z" }, + { url = "https://files.pythonhosted.org/packages/ed/ad/483d9e262f4b831000062e5d8a45e342166ec8aaa1195264982bca267e62/numpy-2.4.4-cp314-cp314t-win32.whl", hash = "sha256:dddbbd259598d7240b18c9d87c56a9d2fb3b02fe266f49a7c101532e78c1d871", size = 6155500, upload-time = "2026-03-29T13:21:28.205Z" }, + { url = "https://files.pythonhosted.org/packages/c7/03/2fc4e14c7bd4ff2964b74ba90ecb8552540b6315f201df70f137faa5c589/numpy-2.4.4-cp314-cp314t-win_amd64.whl", hash = "sha256:a7164afb23be6e37ad90b2f10426149fd75aee07ca55653d2aa41e66c4ef697e", size = 12637755, upload-time = "2026-03-29T13:21:31.107Z" }, + { url = "https://files.pythonhosted.org/packages/58/78/548fb8e07b1a341746bfbecb32f2c268470f45fa028aacdbd10d9bc73aab/numpy-2.4.4-cp314-cp314t-win_arm64.whl", hash = "sha256:ba203255017337d39f89bdd58417f03c4426f12beed0440cfd933cb15f8669c7", size = 10566643, upload-time = "2026-03-29T13:21:34.339Z" }, + { url = "https://files.pythonhosted.org/packages/6b/33/8fae8f964a4f63ed528264ddf25d2b683d0b663e3cba26961eb838a7c1bd/numpy-2.4.4-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:58c8b5929fcb8287cbd6f0a3fae19c6e03a5c48402ae792962ac465224a629a4", size = 16854491, upload-time = "2026-03-29T13:21:38.03Z" }, + { url = 
"https://files.pythonhosted.org/packages/bc/d0/1aabee441380b981cf8cdda3ae7a46aa827d1b5a8cce84d14598bc94d6d9/numpy-2.4.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:eea7ac5d2dce4189771cedb559c738a71512768210dc4e4753b107a2048b3d0e", size = 14895830, upload-time = "2026-03-29T13:21:41.509Z" }, + { url = "https://files.pythonhosted.org/packages/a5/b8/aafb0d1065416894fccf4df6b49ef22b8db045187949545bced89c034b8e/numpy-2.4.4-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:51fc224f7ca4d92656d5a5eb315f12eb5fe2c97a66249aa7b5f562528a3be38c", size = 5400927, upload-time = "2026-03-29T13:21:44.747Z" }, + { url = "https://files.pythonhosted.org/packages/d6/77/063baa20b08b431038c7f9ff5435540c7b7265c78cf56012a483019ca72d/numpy-2.4.4-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:28a650663f7314afc3e6ec620f44f333c386aad9f6fc472030865dc0ebb26ee3", size = 6715557, upload-time = "2026-03-29T13:21:47.406Z" }, + { url = "https://files.pythonhosted.org/packages/c7/a8/379542d45a14f149444c5c4c4e7714707239ce9cc1de8c2803958889da14/numpy-2.4.4-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:19710a9ca9992d7174e9c52f643d4272dcd1558c5f7af7f6f8190f633bd651a7", size = 15804253, upload-time = "2026-03-29T13:21:50.753Z" }, + { url = "https://files.pythonhosted.org/packages/a2/c8/f0a45426d6d21e7ea3310a15cf90c43a14d9232c31a837702dba437f3373/numpy-2.4.4-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9b2aec6af35c113b05695ebb5749a787acd63cafc83086a05771d1e1cd1e555f", size = 16753552, upload-time = "2026-03-29T13:21:54.344Z" }, + { url = "https://files.pythonhosted.org/packages/04/74/f4c001f4714c3ad9ce037e18cf2b9c64871a84951eaa0baf683a9ca9301c/numpy-2.4.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f2cf083b324a467e1ab358c105f6cad5ea950f50524668a80c486ff1db24e119", size = 12509075, upload-time = "2026-03-29T13:21:57.644Z" }, +] + +[[package]] +name = "openai" +version = "2.30.0" +source = { 
registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "httpx" }, + { name = "jiter" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "tqdm" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/88/15/52580c8fbc16d0675d516e8749806eda679b16de1e4434ea06fb6feaa610/openai-2.30.0.tar.gz", hash = "sha256:92f7661c990bda4b22a941806c83eabe4896c3094465030dd882a71abe80c885", size = 676084, upload-time = "2026-03-25T22:08:59.96Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/9e/5bfa2270f902d5b92ab7d41ce0475b8630572e71e349b2a4996d14bdda93/openai-2.30.0-py3-none-any.whl", hash = "sha256:9a5ae616888eb2748ec5e0c5b955a51592e0b201a11f4262db920f2a78c5231d", size = 1146656, upload-time = "2026-03-25T22:08:58.2Z" }, +] + +[[package]] +name = "openpyxl" +version = "3.1.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "et-xmlfile" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/f9/88d94a75de065ea32619465d2f77b29a0469500e99012523b91cc4141cd1/openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050", size = 186464, upload-time = "2024-06-28T14:03:44.161Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c0/da/977ded879c29cbd04de313843e76868e6e13408a94ed6b987245dc7c8506/openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2", size = 250910, upload-time = "2024-06-28T14:03:41.161Z" }, +] + +[[package]] +name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +wheels 
= [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, +] + +[[package]] +name = "passlib" +version = "1.7.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b6/06/9da9ee59a67fae7761aab3ccc84fa4f3f33f125b370f1ccdb915bf967c11/passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04", size = 689844, upload-time = "2020-10-08T19:00:52.121Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/a4/ab6b7589382ca3df236e03faa71deac88cae040af60c071a78d254a62172/passlib-1.7.4-py2.py3-none-any.whl", hash = "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1", size = 525554, upload-time = "2020-10-08T19:00:49.856Z" }, +] + +[package.optional-dependencies] +bcrypt = [ + { name = "bcrypt" }, +] + +[[package]] +name = "pillow" +version = "12.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/21/c2bcdd5906101a30244eaffc1b6e6ce71a31bd0742a01eb89e660ebfac2d/pillow-12.2.0.tar.gz", hash = "sha256:a830b1a40919539d07806aa58e1b114df53ddd43213d9c8b75847eee6c0182b5", size = 46987819, upload-time = "2026-04-01T14:46:17.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/e1/748f5663efe6edcfc4e74b2b93edfb9b8b99b67f21a854c3ae416500a2d9/pillow-12.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:8be29e59487a79f173507c30ddf57e733a357f67881430449bb32614075a40ab", size = 5354347, upload-time = "2026-04-01T14:42:44.255Z" }, + { url = "https://files.pythonhosted.org/packages/47/a1/d5ff69e747374c33a3b53b9f98cca7889fce1fd03d79cdc4e1bccc6c5a87/pillow-12.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:71cde9a1e1551df7d34a25462fc60325e8a11a82cc2e2f54578e5e9a1e153d65", size = 4695873, upload-time = "2026-04-01T14:42:46.452Z" }, + { url = "https://files.pythonhosted.org/packages/df/21/e3fbdf54408a973c7f7f89a23b2cb97a7ef30c61ab4142af31eee6aebc88/pillow-12.2.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f490f9368b6fc026f021db16d7ec2fbf7d89e2edb42e8ec09d2c60505f5729c7", size = 6280168, upload-time = "2026-04-01T14:42:49.228Z" }, + { url = "https://files.pythonhosted.org/packages/d3/f1/00b7278c7dd52b17ad4329153748f87b6756ec195ff786c2bdf12518337d/pillow-12.2.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8bd7903a5f2a4545f6fd5935c90058b89d30045568985a71c79f5fd6edf9b91e", size = 8088188, upload-time = "2026-04-01T14:42:51.735Z" }, + { url = "https://files.pythonhosted.org/packages/ad/cf/220a5994ef1b10e70e85748b75649d77d506499352be135a4989c957b701/pillow-12.2.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3997232e10d2920a68d25191392e3a4487d8183039e1c74c2297f00ed1c50705", size = 6394401, upload-time = "2026-04-01T14:42:54.343Z" }, + { url = "https://files.pythonhosted.org/packages/e9/bd/e51a61b1054f09437acfbc2ff9106c30d1eb76bc1453d428399946781253/pillow-12.2.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e74473c875d78b8e9d5da2a70f7099549f9eb37ded4e2f6a463e60125bccd176", size = 7079655, upload-time = "2026-04-01T14:42:56.954Z" }, + { url = "https://files.pythonhosted.org/packages/6b/3d/45132c57d5fb4b5744567c3817026480ac7fc3ce5d4c47902bc0e7f6f853/pillow-12.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:56a3f9c60a13133a98ecff6197af34d7824de9b7b38c3654861a725c970c197b", size = 6503105, upload-time = "2026-04-01T14:42:59.847Z" }, + { url = "https://files.pythonhosted.org/packages/7d/2e/9df2fc1e82097b1df3dce58dc43286aa01068e918c07574711fcc53e6fb4/pillow-12.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:90e6f81de50ad6b534cab6e5aef77ff6e37722b2f5d908686f4a5c9eba17a909", size = 7203402, upload-time = "2026-04-01T14:43:02.664Z" }, + { url = "https://files.pythonhosted.org/packages/bd/2e/2941e42858ebb67e50ae741473de81c2984e6eff7b397017623c676e2e8d/pillow-12.2.0-cp311-cp311-win32.whl", hash = "sha256:8c984051042858021a54926eb597d6ee3012393ce9c181814115df4c60b9a808", size = 6378149, upload-time = "2026-04-01T14:43:05.274Z" }, + { url = "https://files.pythonhosted.org/packages/69/42/836b6f3cd7f3e5fa10a1f1a5420447c17966044c8fbf589cc0452d5502db/pillow-12.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6e6b2a0c538fc200b38ff9eb6628228b77908c319a005815f2dde585a0664b60", size = 7082626, upload-time = "2026-04-01T14:43:08.557Z" }, + { url = "https://files.pythonhosted.org/packages/c2/88/549194b5d6f1f494b485e493edc6693c0a16f4ada488e5bd974ed1f42fad/pillow-12.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:9a8a34cc89c67a65ea7437ce257cea81a9dad65b29805f3ecee8c8fe8ff25ffe", size = 2463531, upload-time = "2026-04-01T14:43:10.743Z" }, + { url = "https://files.pythonhosted.org/packages/58/be/7482c8a5ebebbc6470b3eb791812fff7d5e0216c2be3827b30b8bb6603ed/pillow-12.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2d192a155bbcec180f8564f693e6fd9bccff5a7af9b32e2e4bf8c9c69dbad6b5", size = 5308279, upload-time = "2026-04-01T14:43:13.246Z" }, + { url = "https://files.pythonhosted.org/packages/d8/95/0a351b9289c2b5cbde0bacd4a83ebc44023e835490a727b2a3bd60ddc0f4/pillow-12.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f3f40b3c5a968281fd507d519e444c35f0ff171237f4fdde090dd60699458421", size = 4695490, upload-time = "2026-04-01T14:43:15.584Z" }, + { url = "https://files.pythonhosted.org/packages/de/af/4e8e6869cbed569d43c416fad3dc4ecb944cb5d9492defaed89ddd6fe871/pillow-12.2.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:03e7e372d5240cc23e9f07deca4d775c0817bffc641b01e9c3af208dbd300987", size = 6284462, upload-time = "2026-04-01T14:43:18.268Z" }, + { 
url = "https://files.pythonhosted.org/packages/e9/9e/c05e19657fd57841e476be1ab46c4d501bffbadbafdc31a6d665f8b737b6/pillow-12.2.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b86024e52a1b269467a802258c25521e6d742349d760728092e1bc2d135b4d76", size = 8094744, upload-time = "2026-04-01T14:43:20.716Z" }, + { url = "https://files.pythonhosted.org/packages/2b/54/1789c455ed10176066b6e7e6da1b01e50e36f94ba584dc68d9eebfe9156d/pillow-12.2.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7371b48c4fa448d20d2714c9a1f775a81155050d383333e0a6c15b1123dda005", size = 6398371, upload-time = "2026-04-01T14:43:23.443Z" }, + { url = "https://files.pythonhosted.org/packages/43/e3/fdc657359e919462369869f1c9f0e973f353f9a9ee295a39b1fea8ee1a77/pillow-12.2.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62f5409336adb0663b7caa0da5c7d9e7bdbaae9ce761d34669420c2a801b2780", size = 7087215, upload-time = "2026-04-01T14:43:26.758Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f8/2f6825e441d5b1959d2ca5adec984210f1ec086435b0ed5f52c19b3b8a6e/pillow-12.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:01afa7cf67f74f09523699b4e88c73fb55c13346d212a59a2db1f86b0a63e8c5", size = 6509783, upload-time = "2026-04-01T14:43:29.56Z" }, + { url = "https://files.pythonhosted.org/packages/67/f9/029a27095ad20f854f9dba026b3ea6428548316e057e6fc3545409e86651/pillow-12.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc3d34d4a8fbec3e88a79b92e5465e0f9b842b628675850d860b8bd300b159f5", size = 7212112, upload-time = "2026-04-01T14:43:32.091Z" }, + { url = "https://files.pythonhosted.org/packages/be/42/025cfe05d1be22dbfdb4f264fe9de1ccda83f66e4fc3aac94748e784af04/pillow-12.2.0-cp312-cp312-win32.whl", hash = "sha256:58f62cc0f00fd29e64b29f4fd923ffdb3859c9f9e6105bfc37ba1d08994e8940", size = 6378489, upload-time = "2026-04-01T14:43:34.601Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/7b/25a221d2c761c6a8ae21bfa3874988ff2583e19cf8a27bf2fee358df7942/pillow-12.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:7f84204dee22a783350679a0333981df803dac21a0190d706a50475e361c93f5", size = 7084129, upload-time = "2026-04-01T14:43:37.213Z" }, + { url = "https://files.pythonhosted.org/packages/10/e1/542a474affab20fd4a0f1836cb234e8493519da6b76899e30bcc5d990b8b/pillow-12.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:af73337013e0b3b46f175e79492d96845b16126ddf79c438d7ea7ff27783a414", size = 2463612, upload-time = "2026-04-01T14:43:39.421Z" }, + { url = "https://files.pythonhosted.org/packages/4a/01/53d10cf0dbad820a8db274d259a37ba50b88b24768ddccec07355382d5ad/pillow-12.2.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:8297651f5b5679c19968abefd6bb84d95fe30ef712eb1b2d9b2d31ca61267f4c", size = 4100837, upload-time = "2026-04-01T14:43:41.506Z" }, + { url = "https://files.pythonhosted.org/packages/0f/98/f3a6657ecb698c937f6c76ee564882945f29b79bad496abcba0e84659ec5/pillow-12.2.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:50d8520da2a6ce0af445fa6d648c4273c3eeefbc32d7ce049f22e8b5c3daecc2", size = 4176528, upload-time = "2026-04-01T14:43:43.773Z" }, + { url = "https://files.pythonhosted.org/packages/69/bc/8986948f05e3ea490b8442ea1c1d4d990b24a7e43d8a51b2c7d8b1dced36/pillow-12.2.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:766cef22385fa1091258ad7e6216792b156dc16d8d3fa607e7545b2b72061f1c", size = 3640401, upload-time = "2026-04-01T14:43:45.87Z" }, + { url = "https://files.pythonhosted.org/packages/34/46/6c717baadcd62bc8ed51d238d521ab651eaa74838291bda1f86fe1f864c9/pillow-12.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5d2fd0fa6b5d9d1de415060363433f28da8b1526c1c129020435e186794b3795", size = 5308094, upload-time = "2026-04-01T14:43:48.438Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/43/905a14a8b17fdb1ccb58d282454490662d2cb89a6bfec26af6d3520da5ec/pillow-12.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:56b25336f502b6ed02e889f4ece894a72612fe885889a6e8c4c80239ff6e5f5f", size = 4695402, upload-time = "2026-04-01T14:43:51.292Z" }, + { url = "https://files.pythonhosted.org/packages/73/dd/42107efcb777b16fa0393317eac58f5b5cf30e8392e266e76e51cff28c3d/pillow-12.2.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f1c943e96e85df3d3478f7b691f229887e143f81fedab9b20205349ab04d73ed", size = 6280005, upload-time = "2026-04-01T14:43:54.242Z" }, + { url = "https://files.pythonhosted.org/packages/a8/68/b93e09e5e8549019e61acf49f65b1a8530765a7f812c77a7461bca7e4494/pillow-12.2.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:03f6fab9219220f041c74aeaa2939ff0062bd5c364ba9ce037197f4c6d498cd9", size = 8090669, upload-time = "2026-04-01T14:43:57.335Z" }, + { url = "https://files.pythonhosted.org/packages/4b/6e/3ccb54ce8ec4ddd1accd2d89004308b7b0b21c4ac3d20fa70af4760a4330/pillow-12.2.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5cdfebd752ec52bf5bb4e35d9c64b40826bc5b40a13df7c3cda20a2c03a0f5ed", size = 6395194, upload-time = "2026-04-01T14:43:59.864Z" }, + { url = "https://files.pythonhosted.org/packages/67/ee/21d4e8536afd1a328f01b359b4d3997b291ffd35a237c877b331c1c3b71c/pillow-12.2.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eedf4b74eda2b5a4b2b2fb4c006d6295df3bf29e459e198c90ea48e130dc75c3", size = 7082423, upload-time = "2026-04-01T14:44:02.74Z" }, + { url = "https://files.pythonhosted.org/packages/78/5f/e9f86ab0146464e8c133fe85df987ed9e77e08b29d8d35f9f9f4d6f917ba/pillow-12.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:00a2865911330191c0b818c59103b58a5e697cae67042366970a6b6f1b20b7f9", size = 6505667, upload-time = "2026-04-01T14:44:05.381Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/1e/409007f56a2fdce61584fd3acbc2bbc259857d555196cedcadc68c015c82/pillow-12.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1e1757442ed87f4912397c6d35a0db6a7b52592156014706f17658ff58bbf795", size = 7208580, upload-time = "2026-04-01T14:44:08.39Z" }, + { url = "https://files.pythonhosted.org/packages/23/c4/7349421080b12fb35414607b8871e9534546c128a11965fd4a7002ccfbee/pillow-12.2.0-cp313-cp313-win32.whl", hash = "sha256:144748b3af2d1b358d41286056d0003f47cb339b8c43a9ea42f5fea4d8c66b6e", size = 6375896, upload-time = "2026-04-01T14:44:11.197Z" }, + { url = "https://files.pythonhosted.org/packages/3f/82/8a3739a5e470b3c6cbb1d21d315800d8e16bff503d1f16b03a4ec3212786/pillow-12.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:390ede346628ccc626e5730107cde16c42d3836b89662a115a921f28440e6a3b", size = 7081266, upload-time = "2026-04-01T14:44:13.947Z" }, + { url = "https://files.pythonhosted.org/packages/c3/25/f968f618a062574294592f668218f8af564830ccebdd1fa6200f598e65c5/pillow-12.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:8023abc91fba39036dbce14a7d6535632f99c0b857807cbbbf21ecc9f4717f06", size = 2463508, upload-time = "2026-04-01T14:44:16.312Z" }, + { url = "https://files.pythonhosted.org/packages/4d/a4/b342930964e3cb4dce5038ae34b0eab4653334995336cd486c5a8c25a00c/pillow-12.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:042db20a421b9bafecc4b84a8b6e444686bd9d836c7fd24542db3e7df7baad9b", size = 5309927, upload-time = "2026-04-01T14:44:18.89Z" }, + { url = "https://files.pythonhosted.org/packages/9f/de/23198e0a65a9cf06123f5435a5d95cea62a635697f8f03d134d3f3a96151/pillow-12.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:dd025009355c926a84a612fecf58bb315a3f6814b17ead51a8e48d3823d9087f", size = 4698624, upload-time = "2026-04-01T14:44:21.115Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/a6/1265e977f17d93ea37aa28aa81bad4fa597933879fac2520d24e021c8da3/pillow-12.2.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:88ddbc66737e277852913bd1e07c150cc7bb124539f94c4e2df5344494e0a612", size = 6321252, upload-time = "2026-04-01T14:44:23.663Z" }, + { url = "https://files.pythonhosted.org/packages/3c/83/5982eb4a285967baa70340320be9f88e57665a387e3a53a7f0db8231a0cd/pillow-12.2.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d362d1878f00c142b7e1a16e6e5e780f02be8195123f164edf7eddd911eefe7c", size = 8126550, upload-time = "2026-04-01T14:44:26.772Z" }, + { url = "https://files.pythonhosted.org/packages/4e/48/6ffc514adce69f6050d0753b1a18fd920fce8cac87620d5a31231b04bfc5/pillow-12.2.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2c727a6d53cb0018aadd8018c2b938376af27914a68a492f59dfcaca650d5eea", size = 6433114, upload-time = "2026-04-01T14:44:29.615Z" }, + { url = "https://files.pythonhosted.org/packages/36/a3/f9a77144231fb8d40ee27107b4463e205fa4677e2ca2548e14da5cf18dce/pillow-12.2.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:efd8c21c98c5cc60653bcb311bef2ce0401642b7ce9d09e03a7da87c878289d4", size = 7115667, upload-time = "2026-04-01T14:44:32.773Z" }, + { url = "https://files.pythonhosted.org/packages/c1/fc/ac4ee3041e7d5a565e1c4fd72a113f03b6394cc72ab7089d27608f8aaccb/pillow-12.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9f08483a632889536b8139663db60f6724bfcb443c96f1b18855860d7d5c0fd4", size = 6538966, upload-time = "2026-04-01T14:44:35.252Z" }, + { url = "https://files.pythonhosted.org/packages/c0/a8/27fb307055087f3668f6d0a8ccb636e7431d56ed0750e07a60547b1e083e/pillow-12.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dac8d77255a37e81a2efcbd1fc05f1c15ee82200e6c240d7e127e25e365c39ea", size = 7238241, upload-time = "2026-04-01T14:44:37.875Z" }, + { url = 
"https://files.pythonhosted.org/packages/ad/4b/926ab182c07fccae9fcb120043464e1ff1564775ec8864f21a0ebce6ac25/pillow-12.2.0-cp313-cp313t-win32.whl", hash = "sha256:ee3120ae9dff32f121610bb08e4313be87e03efeadfc6c0d18f89127e24d0c24", size = 6379592, upload-time = "2026-04-01T14:44:40.336Z" }, + { url = "https://files.pythonhosted.org/packages/c2/c4/f9e476451a098181b30050cc4c9a3556b64c02cf6497ea421ac047e89e4b/pillow-12.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:325ca0528c6788d2a6c3d40e3568639398137346c3d6e66bb61db96b96511c98", size = 7085542, upload-time = "2026-04-01T14:44:43.251Z" }, + { url = "https://files.pythonhosted.org/packages/00/a4/285f12aeacbe2d6dc36c407dfbbe9e96d4a80b0fb710a337f6d2ad978c75/pillow-12.2.0-cp313-cp313t-win_arm64.whl", hash = "sha256:2e5a76d03a6c6dcef67edabda7a52494afa4035021a79c8558e14af25313d453", size = 2465765, upload-time = "2026-04-01T14:44:45.996Z" }, + { url = "https://files.pythonhosted.org/packages/bf/98/4595daa2365416a86cb0d495248a393dfc84e96d62ad080c8546256cb9c0/pillow-12.2.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:3adc9215e8be0448ed6e814966ecf3d9952f0ea40eb14e89a102b87f450660d8", size = 4100848, upload-time = "2026-04-01T14:44:48.48Z" }, + { url = "https://files.pythonhosted.org/packages/0b/79/40184d464cf89f6663e18dfcf7ca21aae2491fff1a16127681bf1fa9b8cf/pillow-12.2.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:6a9adfc6d24b10f89588096364cc726174118c62130c817c2837c60cf08a392b", size = 4176515, upload-time = "2026-04-01T14:44:51.353Z" }, + { url = "https://files.pythonhosted.org/packages/b0/63/703f86fd4c422a9cf722833670f4f71418fb116b2853ff7da722ea43f184/pillow-12.2.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:6a6e67ea2e6feda684ed370f9a1c52e7a243631c025ba42149a2cc5934dec295", size = 3640159, upload-time = "2026-04-01T14:44:53.588Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/e0/fb22f797187d0be2270f83500aab851536101b254bfa1eae10795709d283/pillow-12.2.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2bb4a8d594eacdfc59d9e5ad972aa8afdd48d584ffd5f13a937a664c3e7db0ed", size = 5312185, upload-time = "2026-04-01T14:44:56.039Z" }, + { url = "https://files.pythonhosted.org/packages/ba/8c/1a9e46228571de18f8e28f16fabdfc20212a5d019f3e3303452b3f0a580d/pillow-12.2.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:80b2da48193b2f33ed0c32c38140f9d3186583ce7d516526d462645fd98660ae", size = 4695386, upload-time = "2026-04-01T14:44:58.663Z" }, + { url = "https://files.pythonhosted.org/packages/70/62/98f6b7f0c88b9addd0e87c217ded307b36be024d4ff8869a812b241d1345/pillow-12.2.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22db17c68434de69d8ecfc2fe821569195c0c373b25cccb9cbdacf2c6e53c601", size = 6280384, upload-time = "2026-04-01T14:45:01.5Z" }, + { url = "https://files.pythonhosted.org/packages/5e/03/688747d2e91cfbe0e64f316cd2e8005698f76ada3130d0194664174fa5de/pillow-12.2.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7b14cc0106cd9aecda615dd6903840a058b4700fcb817687d0ee4fc8b6e389be", size = 8091599, upload-time = "2026-04-01T14:45:04.5Z" }, + { url = "https://files.pythonhosted.org/packages/f6/35/577e22b936fcdd66537329b33af0b4ccfefaeabd8aec04b266528cddb33c/pillow-12.2.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cbeb542b2ebc6fcdacabf8aca8c1a97c9b3ad3927d46b8723f9d4f033288a0f", size = 6396021, upload-time = "2026-04-01T14:45:07.117Z" }, + { url = "https://files.pythonhosted.org/packages/11/8d/d2532ad2a603ca2b93ad9f5135732124e57811d0168155852f37fbce2458/pillow-12.2.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4bfd07bc812fbd20395212969e41931001fd59eb55a60658b0e5710872e95286", size = 7083360, upload-time = "2026-04-01T14:45:09.763Z" }, + { url = 
"https://files.pythonhosted.org/packages/5e/26/d325f9f56c7e039034897e7380e9cc202b1e368bfd04d4cbe6a441f02885/pillow-12.2.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9aba9a17b623ef750a4d11b742cbafffeb48a869821252b30ee21b5e91392c50", size = 6507628, upload-time = "2026-04-01T14:45:12.378Z" }, + { url = "https://files.pythonhosted.org/packages/5f/f7/769d5632ffb0988f1c5e7660b3e731e30f7f8ec4318e94d0a5d674eb65a4/pillow-12.2.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:deede7c263feb25dba4e82ea23058a235dcc2fe1f6021025dc71f2b618e26104", size = 7209321, upload-time = "2026-04-01T14:45:15.122Z" }, + { url = "https://files.pythonhosted.org/packages/6a/7a/c253e3c645cd47f1aceea6a8bacdba9991bf45bb7dfe927f7c893e89c93c/pillow-12.2.0-cp314-cp314-win32.whl", hash = "sha256:632ff19b2778e43162304d50da0181ce24ac5bb8180122cbe1bf4673428328c7", size = 6479723, upload-time = "2026-04-01T14:45:17.797Z" }, + { url = "https://files.pythonhosted.org/packages/cd/8b/601e6566b957ca50e28725cb6c355c59c2c8609751efbecd980db44e0349/pillow-12.2.0-cp314-cp314-win_amd64.whl", hash = "sha256:4e6c62e9d237e9b65fac06857d511e90d8461a32adcc1b9065ea0c0fa3a28150", size = 7217400, upload-time = "2026-04-01T14:45:20.529Z" }, + { url = "https://files.pythonhosted.org/packages/d6/94/220e46c73065c3e2951bb91c11a1fb636c8c9ad427ac3ce7d7f3359b9b2f/pillow-12.2.0-cp314-cp314-win_arm64.whl", hash = "sha256:b1c1fbd8a5a1af3412a0810d060a78b5136ec0836c8a4ef9aa11807f2a22f4e1", size = 2554835, upload-time = "2026-04-01T14:45:23.162Z" }, + { url = "https://files.pythonhosted.org/packages/b6/ab/1b426a3974cb0e7da5c29ccff4807871d48110933a57207b5a676cccc155/pillow-12.2.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:57850958fe9c751670e49b2cecf6294acc99e562531f4bd317fa5ddee2068463", size = 5314225, upload-time = "2026-04-01T14:45:25.637Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/1e/dce46f371be2438eecfee2a1960ee2a243bbe5e961890146d2dee1ff0f12/pillow-12.2.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:d5d38f1411c0ed9f97bcb49b7bd59b6b7c314e0e27420e34d99d844b9ce3b6f3", size = 4698541, upload-time = "2026-04-01T14:45:28.355Z" }, + { url = "https://files.pythonhosted.org/packages/55/c3/7fbecf70adb3a0c33b77a300dc52e424dc22ad8cdc06557a2e49523b703d/pillow-12.2.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5c0a9f29ca8e79f09de89293f82fc9b0270bb4af1d58bc98f540cc4aedf03166", size = 6322251, upload-time = "2026-04-01T14:45:30.924Z" }, + { url = "https://files.pythonhosted.org/packages/1c/3c/7fbc17cfb7e4fe0ef1642e0abc17fc6c94c9f7a16be41498e12e2ba60408/pillow-12.2.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1610dd6c61621ae1cf811bef44d77e149ce3f7b95afe66a4512f8c59f25d9ebe", size = 8127807, upload-time = "2026-04-01T14:45:33.908Z" }, + { url = "https://files.pythonhosted.org/packages/ff/c3/a8ae14d6defd2e448493ff512fae903b1e9bd40b72efb6ec55ce0048c8ce/pillow-12.2.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a34329707af4f73cf1782a36cd2289c0368880654a2c11f027bcee9052d35dd", size = 6433935, upload-time = "2026-04-01T14:45:36.623Z" }, + { url = "https://files.pythonhosted.org/packages/6e/32/2880fb3a074847ac159d8f902cb43278a61e85f681661e7419e6596803ed/pillow-12.2.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8e9c4f5b3c546fa3458a29ab22646c1c6c787ea8f5ef51300e5a60300736905e", size = 7116720, upload-time = "2026-04-01T14:45:39.258Z" }, + { url = "https://files.pythonhosted.org/packages/46/87/495cc9c30e0129501643f24d320076f4cc54f718341df18cc70ec94c44e1/pillow-12.2.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fb043ee2f06b41473269765c2feae53fc2e2fbf96e5e22ca94fb5ad677856f06", size = 6540498, upload-time = "2026-04-01T14:45:41.879Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/53/773f5edca692009d883a72211b60fdaf8871cbef075eaa9d577f0a2f989e/pillow-12.2.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f278f034eb75b4e8a13a54a876cc4a5ab39173d2cdd93a638e1b467fc545ac43", size = 7239413, upload-time = "2026-04-01T14:45:44.705Z" }, + { url = "https://files.pythonhosted.org/packages/c9/e4/4b64a97d71b2a83158134abbb2f5bd3f8a2ea691361282f010998f339ec7/pillow-12.2.0-cp314-cp314t-win32.whl", hash = "sha256:6bb77b2dcb06b20f9f4b4a8454caa581cd4dd0643a08bacf821216a16d9c8354", size = 6482084, upload-time = "2026-04-01T14:45:47.568Z" }, + { url = "https://files.pythonhosted.org/packages/ba/13/306d275efd3a3453f72114b7431c877d10b1154014c1ebbedd067770d629/pillow-12.2.0-cp314-cp314t-win_amd64.whl", hash = "sha256:6562ace0d3fb5f20ed7290f1f929cae41b25ae29528f2af1722966a0a02e2aa1", size = 7225152, upload-time = "2026-04-01T14:45:50.032Z" }, + { url = "https://files.pythonhosted.org/packages/ff/6e/cf826fae916b8658848d7b9f38d88da6396895c676e8086fc0988073aaf8/pillow-12.2.0-cp314-cp314t-win_arm64.whl", hash = "sha256:aa88ccfe4e32d362816319ed727a004423aab09c5cea43c01a4b435643fa34eb", size = 2556579, upload-time = "2026-04-01T14:45:52.529Z" }, + { url = "https://files.pythonhosted.org/packages/4e/b7/2437044fb910f499610356d1352e3423753c98e34f915252aafecc64889f/pillow-12.2.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0538bd5e05efec03ae613fd89c4ce0368ecd2ba239cc25b9f9be7ed426b0af1f", size = 5273969, upload-time = "2026-04-01T14:45:55.538Z" }, + { url = "https://files.pythonhosted.org/packages/f6/f4/8316e31de11b780f4ac08ef3654a75555e624a98db1056ecb2122d008d5a/pillow-12.2.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:394167b21da716608eac917c60aa9b969421b5dcbbe02ae7f013e7b85811c69d", size = 4659674, upload-time = "2026-04-01T14:45:58.093Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/37/664fca7201f8bb2aa1d20e2c3d5564a62e6ae5111741966c8319ca802361/pillow-12.2.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5d04bfa02cc2d23b497d1e90a0f927070043f6cbf303e738300532379a4b4e0f", size = 5288479, upload-time = "2026-04-01T14:46:01.141Z" }, + { url = "https://files.pythonhosted.org/packages/49/62/5b0ed78fce87346be7a5cfcfaaad91f6a1f98c26f86bdbafa2066c647ef6/pillow-12.2.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0c838a5125cee37e68edec915651521191cef1e6aa336b855f495766e77a366e", size = 7032230, upload-time = "2026-04-01T14:46:03.874Z" }, + { url = "https://files.pythonhosted.org/packages/c3/28/ec0fc38107fc32536908034e990c47914c57cd7c5a3ece4d8d8f7ffd7e27/pillow-12.2.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a6c9fa44005fa37a91ebfc95d081e8079757d2e904b27103f4f5fa6f0bf78c0", size = 5355404, upload-time = "2026-04-01T14:46:06.33Z" }, + { url = "https://files.pythonhosted.org/packages/5e/8b/51b0eddcfa2180d60e41f06bd6d0a62202b20b59c68f5a132e615b75aecf/pillow-12.2.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:25373b66e0dd5905ed63fa3cae13c82fbddf3079f2c8bf15c6fb6a35586324c1", size = 6002215, upload-time = "2026-04-01T14:46:08.83Z" }, + { url = "https://files.pythonhosted.org/packages/bc/60/5382c03e1970de634027cee8e1b7d39776b778b81812aaf45b694dfe9e28/pillow-12.2.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:bfa9c230d2fe991bed5318a5f119bd6780cda2915cca595393649fc118ab895e", size = 7080946, upload-time = "2026-04-01T14:46:11.734Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = 
"sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "prompt-toolkit" +version = "3.0.52" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "wcwidth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198, upload-time = "2025-08-27T15:24:02.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" }, +] + +[[package]] +name = "psycopg2-binary" +version = "2.9.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ac/6c/8767aaa597ba424643dc87348c6f1754dd9f48e80fdc1b9f7ca5c3a7c213/psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c", size = 379620, upload-time = "2025-10-10T11:14:48.041Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/ae/8d8266f6dd183ab4d48b95b9674034e1b482a3f8619b33a0d86438694577/psycopg2_binary-2.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0e8480afd62362d0a6a27dd09e4ca2def6fa50ed3a4e7c09165266106b2ffa10", size = 3756452, upload-time = "2025-10-10T11:11:11.583Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/34/aa03d327739c1be70e09d01182619aca8ebab5970cd0cfa50dd8b9cec2ac/psycopg2_binary-2.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:763c93ef1df3da6d1a90f86ea7f3f806dc06b21c198fa87c3c25504abec9404a", size = 3863957, upload-time = "2025-10-10T11:11:16.932Z" }, + { url = "https://files.pythonhosted.org/packages/48/89/3fdb5902bdab8868bbedc1c6e6023a4e08112ceac5db97fc2012060e0c9a/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e164359396576a3cc701ba8af4751ae68a07235d7a380c631184a611220d9a4", size = 4410955, upload-time = "2025-10-10T11:11:21.21Z" }, + { url = "https://files.pythonhosted.org/packages/ce/24/e18339c407a13c72b336e0d9013fbbbde77b6fd13e853979019a1269519c/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d57c9c387660b8893093459738b6abddbb30a7eab058b77b0d0d1c7d521ddfd7", size = 4468007, upload-time = "2025-10-10T11:11:24.831Z" }, + { url = "https://files.pythonhosted.org/packages/91/7e/b8441e831a0f16c159b5381698f9f7f7ed54b77d57bc9c5f99144cc78232/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2c226ef95eb2250974bf6fa7a842082b31f68385c4f3268370e3f3870e7859ee", size = 4165012, upload-time = "2025-10-10T11:11:29.51Z" }, + { url = "https://files.pythonhosted.org/packages/0d/61/4aa89eeb6d751f05178a13da95516c036e27468c5d4d2509bb1e15341c81/psycopg2_binary-2.9.11-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a311f1edc9967723d3511ea7d2708e2c3592e3405677bf53d5c7246753591fbb", size = 3981881, upload-time = "2025-10-30T02:55:07.332Z" }, + { url = "https://files.pythonhosted.org/packages/76/a1/2f5841cae4c635a9459fe7aca8ed771336e9383b6429e05c01267b0774cf/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb415404821b6d1c47353ebe9c8645967a5235e6d88f914147e7fd411419e6f", size = 3650985, upload-time = 
"2025-10-10T11:11:34.975Z" }, + { url = "https://files.pythonhosted.org/packages/84/74/4defcac9d002bca5709951b975173c8c2fa968e1a95dc713f61b3a8d3b6a/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f07c9c4a5093258a03b28fab9b4f151aa376989e7f35f855088234e656ee6a94", size = 3296039, upload-time = "2025-10-10T11:11:40.432Z" }, + { url = "https://files.pythonhosted.org/packages/6d/c2/782a3c64403d8ce35b5c50e1b684412cf94f171dc18111be8c976abd2de1/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00ce1830d971f43b667abe4a56e42c1e2d594b32da4802e44a73bacacb25535f", size = 3043477, upload-time = "2025-10-30T02:55:11.182Z" }, + { url = "https://files.pythonhosted.org/packages/c8/31/36a1d8e702aa35c38fc117c2b8be3f182613faa25d794b8aeaab948d4c03/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cffe9d7697ae7456649617e8bb8d7a45afb71cd13f7ab22af3e5c61f04840908", size = 3345842, upload-time = "2025-10-10T11:11:45.366Z" }, + { url = "https://files.pythonhosted.org/packages/6e/b4/a5375cda5b54cb95ee9b836930fea30ae5a8f14aa97da7821722323d979b/psycopg2_binary-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:304fd7b7f97eef30e91b8f7e720b3db75fee010b520e434ea35ed1ff22501d03", size = 2713894, upload-time = "2025-10-10T11:11:48.775Z" }, + { url = "https://files.pythonhosted.org/packages/d8/91/f870a02f51be4a65987b45a7de4c2e1897dd0d01051e2b559a38fa634e3e/psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4", size = 3756603, upload-time = "2025-10-10T11:11:52.213Z" }, + { url = "https://files.pythonhosted.org/packages/27/fa/cae40e06849b6c9a95eb5c04d419942f00d9eaac8d81626107461e268821/psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc", size = 3864509, upload-time = "2025-10-10T11:11:56.452Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/75/364847b879eb630b3ac8293798e380e441a957c53657995053c5ec39a316/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a", size = 4411159, upload-time = "2025-10-10T11:12:00.49Z" }, + { url = "https://files.pythonhosted.org/packages/6f/a0/567f7ea38b6e1c62aafd58375665a547c00c608a471620c0edc364733e13/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e", size = 4468234, upload-time = "2025-10-10T11:12:04.892Z" }, + { url = "https://files.pythonhosted.org/packages/30/da/4e42788fb811bbbfd7b7f045570c062f49e350e1d1f3df056c3fb5763353/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db", size = 4166236, upload-time = "2025-10-10T11:12:11.674Z" }, + { url = "https://files.pythonhosted.org/packages/3c/94/c1777c355bc560992af848d98216148be5f1be001af06e06fc49cbded578/psycopg2_binary-2.9.11-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a1cf393f1cdaf6a9b57c0a719a1068ba1069f022a59b8b1fe44b006745b59757", size = 3983083, upload-time = "2025-10-30T02:55:15.73Z" }, + { url = "https://files.pythonhosted.org/packages/bd/42/c9a21edf0e3daa7825ed04a4a8588686c6c14904344344a039556d78aa58/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3", size = 3652281, upload-time = "2025-10-10T11:12:17.713Z" }, + { url = "https://files.pythonhosted.org/packages/12/22/dedfbcfa97917982301496b6b5e5e6c5531d1f35dd2b488b08d1ebc52482/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a", size = 3298010, upload-time = 
"2025-10-10T11:12:22.671Z" }, + { url = "https://files.pythonhosted.org/packages/66/ea/d3390e6696276078bd01b2ece417deac954dfdd552d2edc3d03204416c0c/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:edcb3aeb11cb4bf13a2af3c53a15b3d612edeb6409047ea0b5d6a21a9d744b34", size = 3044641, upload-time = "2025-10-30T02:55:19.929Z" }, + { url = "https://files.pythonhosted.org/packages/12/9a/0402ded6cbd321da0c0ba7d34dc12b29b14f5764c2fc10750daa38e825fc/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d", size = 3347940, upload-time = "2025-10-10T11:12:26.529Z" }, + { url = "https://files.pythonhosted.org/packages/b1/d2/99b55e85832ccde77b211738ff3925a5d73ad183c0b37bcbbe5a8ff04978/psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d", size = 2714147, upload-time = "2025-10-10T11:12:29.535Z" }, + { url = "https://files.pythonhosted.org/packages/ff/a8/a2709681b3ac11b0b1786def10006b8995125ba268c9a54bea6f5ae8bd3e/psycopg2_binary-2.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b8fb3db325435d34235b044b199e56cdf9ff41223a4b9752e8576465170bb38c", size = 3756572, upload-time = "2025-10-10T11:12:32.873Z" }, + { url = "https://files.pythonhosted.org/packages/62/e1/c2b38d256d0dafd32713e9f31982a5b028f4a3651f446be70785f484f472/psycopg2_binary-2.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:366df99e710a2acd90efed3764bb1e28df6c675d33a7fb40df9b7281694432ee", size = 3864529, upload-time = "2025-10-10T11:12:36.791Z" }, + { url = "https://files.pythonhosted.org/packages/11/32/b2ffe8f3853c181e88f0a157c5fb4e383102238d73c52ac6d93a5c8bffe6/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8c55b385daa2f92cb64b12ec4536c66954ac53654c7f15a203578da4e78105c0", size = 4411242, upload-time = "2025-10-10T11:12:42.388Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/04/6ca7477e6160ae258dc96f67c371157776564679aefd247b66f4661501a2/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c0377174bf1dd416993d16edc15357f6eb17ac998244cca19bc67cdc0e2e5766", size = 4468258, upload-time = "2025-10-10T11:12:48.654Z" }, + { url = "https://files.pythonhosted.org/packages/3c/7e/6a1a38f86412df101435809f225d57c1a021307dd0689f7a5e7fe83588b1/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5c6ff3335ce08c75afaed19e08699e8aacf95d4a260b495a4a8545244fe2ceb3", size = 4166295, upload-time = "2025-10-10T11:12:52.525Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7d/c07374c501b45f3579a9eb761cbf2604ddef3d96ad48679112c2c5aa9c25/psycopg2_binary-2.9.11-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:84011ba3109e06ac412f95399b704d3d6950e386b7994475b231cf61eec2fc1f", size = 3983133, upload-time = "2025-10-30T02:55:24.329Z" }, + { url = "https://files.pythonhosted.org/packages/82/56/993b7104cb8345ad7d4516538ccf8f0d0ac640b1ebd8c754a7b024e76878/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ba34475ceb08cccbdd98f6b46916917ae6eeb92b5ae111df10b544c3a4621dc4", size = 3652383, upload-time = "2025-10-10T11:12:56.387Z" }, + { url = "https://files.pythonhosted.org/packages/2d/ac/eaeb6029362fd8d454a27374d84c6866c82c33bfc24587b4face5a8e43ef/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b31e90fdd0f968c2de3b26ab014314fe814225b6c324f770952f7d38abf17e3c", size = 3298168, upload-time = "2025-10-10T11:13:00.403Z" }, + { url = "https://files.pythonhosted.org/packages/2b/39/50c3facc66bded9ada5cbc0de867499a703dc6bca6be03070b4e3b65da6c/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:d526864e0f67f74937a8fce859bd56c979f5e2ec57ca7c627f5f1071ef7fee60", size = 3044712, upload-time = "2025-10-30T02:55:27.975Z" }, + { 
url = "https://files.pythonhosted.org/packages/9c/8e/b7de019a1f562f72ada81081a12823d3c1590bedc48d7d2559410a2763fe/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04195548662fa544626c8ea0f06561eb6203f1984ba5b4562764fbeb4c3d14b1", size = 3347549, upload-time = "2025-10-10T11:13:03.971Z" }, + { url = "https://files.pythonhosted.org/packages/80/2d/1bb683f64737bbb1f86c82b7359db1eb2be4e2c0c13b947f80efefa7d3e5/psycopg2_binary-2.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:efff12b432179443f54e230fdf60de1f6cc726b6c832db8701227d089310e8aa", size = 2714215, upload-time = "2025-10-10T11:13:07.14Z" }, + { url = "https://files.pythonhosted.org/packages/64/12/93ef0098590cf51d9732b4f139533732565704f45bdc1ffa741b7c95fb54/psycopg2_binary-2.9.11-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:92e3b669236327083a2e33ccfa0d320dd01b9803b3e14dd986a4fc54aa00f4e1", size = 3756567, upload-time = "2025-10-10T11:13:11.885Z" }, + { url = "https://files.pythonhosted.org/packages/7c/a9/9d55c614a891288f15ca4b5209b09f0f01e3124056924e17b81b9fa054cc/psycopg2_binary-2.9.11-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:e0deeb03da539fa3577fcb0b3f2554a97f7e5477c246098dbb18091a4a01c16f", size = 3864755, upload-time = "2025-10-10T11:13:17.727Z" }, + { url = "https://files.pythonhosted.org/packages/13/1e/98874ce72fd29cbde93209977b196a2edae03f8490d1bd8158e7f1daf3a0/psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b52a3f9bb540a3e4ec0f6ba6d31339727b2950c9772850d6545b7eae0b9d7c5", size = 4411646, upload-time = "2025-10-10T11:13:24.432Z" }, + { url = "https://files.pythonhosted.org/packages/5a/bd/a335ce6645334fb8d758cc358810defca14a1d19ffbc8a10bd38a2328565/psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:db4fd476874ccfdbb630a54426964959e58da4c61c9feba73e6094d51303d7d8", size = 4468701, upload-time = "2025-10-10T11:13:29.266Z" }, + { url = 
"https://files.pythonhosted.org/packages/44/d6/c8b4f53f34e295e45709b7568bf9b9407a612ea30387d35eb9fa84f269b4/psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47f212c1d3be608a12937cc131bd85502954398aaa1320cb4c14421a0ffccf4c", size = 4166293, upload-time = "2025-10-10T11:13:33.336Z" }, + { url = "https://files.pythonhosted.org/packages/4b/e0/f8cc36eadd1b716ab36bb290618a3292e009867e5c97ce4aba908cb99644/psycopg2_binary-2.9.11-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e35b7abae2b0adab776add56111df1735ccc71406e56203515e228a8dc07089f", size = 3983184, upload-time = "2025-10-30T02:55:32.483Z" }, + { url = "https://files.pythonhosted.org/packages/53/3e/2a8fe18a4e61cfb3417da67b6318e12691772c0696d79434184a511906dc/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fcf21be3ce5f5659daefd2b3b3b6e4727b028221ddc94e6c1523425579664747", size = 3652650, upload-time = "2025-10-10T11:13:38.181Z" }, + { url = "https://files.pythonhosted.org/packages/76/36/03801461b31b29fe58d228c24388f999fe814dfc302856e0d17f97d7c54d/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:9bd81e64e8de111237737b29d68039b9c813bdf520156af36d26819c9a979e5f", size = 3298663, upload-time = "2025-10-10T11:13:44.878Z" }, + { url = "https://files.pythonhosted.org/packages/97/77/21b0ea2e1a73aa5fa9222b2a6b8ba325c43c3a8d54272839c991f2345656/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:32770a4d666fbdafab017086655bcddab791d7cb260a16679cc5a7338b64343b", size = 3044737, upload-time = "2025-10-30T02:55:35.69Z" }, + { url = "https://files.pythonhosted.org/packages/67/69/f36abe5f118c1dca6d3726ceae164b9356985805480731ac6712a63f24f0/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c3cb3a676873d7506825221045bd70e0427c905b9c8ee8d6acd70cfcbd6e576d", size = 3347643, upload-time = "2025-10-10T11:13:53.499Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/36/9c0c326fe3a4227953dfb29f5d0c8ae3b8eb8c1cd2967aa569f50cb3c61f/psycopg2_binary-2.9.11-cp314-cp314-win_amd64.whl", hash = "sha256:4012c9c954dfaccd28f94e84ab9f94e12df76b4afb22331b1f0d3154893a6316", size = 2803913, upload-time = "2025-10-10T11:13:57.058Z" }, +] + +[[package]] +name = "pyasn1" +version = "0.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5c/5f/6583902b6f79b399c9c40674ac384fd9cd77805f9e6205075f828ef11fb2/pyasn1-0.6.3.tar.gz", hash = "sha256:697a8ecd6d98891189184ca1fa05d1bb00e2f84b5977c481452050549c8a72cf", size = 148685, upload-time = "2026-03-17T01:06:53.382Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/a0/7d793dce3fa811fe047d6ae2431c672364b462850c6235ae306c0efd025f/pyasn1-0.6.3-py3-none-any.whl", hash = "sha256:a80184d120f0864a52a073acc6fc642847d0be408e7c7252f31390c0f4eadcde", size = 83997, upload-time = "2026-03-17T01:06:52.036Z" }, +] + +[[package]] +name = "pycparser" +version = "3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, +] + +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + +[package.optional-dependencies] +email = [ + { name = "email-validator" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" }, + { url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" }, + { url = "https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" }, + { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" }, + { url = "https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" }, + { url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, upload-time = "2025-11-04T13:39:44.553Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" }, + { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" }, + { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" }, + { url = "https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" }, + { url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" }, + { url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { 
url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = 
"https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, 
upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + 
{ url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" }, + { url = "https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" }, + { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = 
"2025-11-04T13:42:47.156Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" }, + { url = "https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" }, + { url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" }, + { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" }, + { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = 
"sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, +] + +[[package]] +name = "pydantic-settings" +version = "2.13.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/52/6d/fffca34caecc4a3f97bda81b2098da5e8ab7efc9a66e819074a11955d87e/pydantic_settings-2.13.1.tar.gz", hash = "sha256:b4c11847b15237fb0171e1462bf540e294affb9b86db4d9aa5c01730bdbe4025", size = 223826, upload-time = "2026-02-19T13:45:08.055Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/4b/ccc026168948fec4f7555b9164c724cf4125eac006e176541483d2c959be/pydantic_settings-2.13.1-py3-none-any.whl", hash = "sha256:d56fd801823dbeae7f0975e1f8c8e25c258eb75d278ea7abb5d9cebb01b56237", size = 58929, upload-time = "2026-02-19T13:45:06.034Z" }, +] + +[[package]] +name = "pydyf" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/36/ee/fb410c5c854b6a081a49077912a9765aeffd8e07cbb0663cfda310b01fb4/pydyf-0.12.1.tar.gz", hash = "sha256:fbd7e759541ac725c29c506612003de393249b94310ea78ae44cb1d04b220095", size = 17716, upload-time = "2025-12-02T14:52:14.244Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/22/11/47efe2f66ba848a107adfd490b508f5c0cedc82127950553dca44d29e6c4/pydyf-0.12.1-py3-none-any.whl", hash = "sha256:ea25b4e1fe7911195cb57067560daaa266639184e8335365cc3ee5214e7eaadc", size = 8028, upload-time = "2025-12-02T14:52:12.938Z" }, +] + +[[package]] +name = "pygltflib" +version = "1.16.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dataclasses-json" }, + { name = "deprecated" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/22/e8/f8232abdf9c085333689b0a428dcd1d0f83edd1ecafa6ed878a633d8c9d5/pygltflib-1.16.5.tar.gz", hash = "sha256:1f15740d5a7aaf71a5083e285af6b361184958e255659132f4ba8fe4f3d21ea9", size = 43272, upload-time = "2025-07-24T06:35:38.611Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/d6/7eb8a0e4eb30add2b76c957a41107a5f2ba26472d656e2733728bec0476b/pygltflib-1.16.5-py3-none-any.whl", hash = "sha256:41d3349c59dcf1586faeaee29c967be07ac2bf7cecdb8ae2b527da1f25afdaac", size = 27557, upload-time = "2025-07-24T06:35:37.328Z" }, +] + +[[package]] +name = "pygments" +version = "2.20.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/b2/bc9c9196916376152d655522fdcebac55e66de6603a76a02bca1b6414f6c/pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f", size = 4955991, upload-time = "2026-03-29T13:29:33.898Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/7e/a72dd26f3b0f4f2bf1dd8923c85f7ceb43172af56d63c7383eb62b332364/pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176", size = 1231151, upload-time = "2026-03-29T13:29:30.038Z" }, +] + +[[package]] +name = "pyphen" +version = "0.17.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/69/56/e4d7e1bd70d997713649c5ce530b2d15a5fc2245a74ca820fc2d51d89d4d/pyphen-0.17.2.tar.gz", hash = "sha256:f60647a9c9b30ec6c59910097af82bc5dd2d36576b918e44148d8b07ef3b4aa3", size = 2079470, upload-time = "2025-01-20T13:18:36.296Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/1f/c2142d2edf833a90728e5cdeb10bdbdc094dde8dbac078cee0cf33f5e11b/pyphen-0.17.2-py3-none-any.whl", hash = "sha256:3a07fb017cb2341e1d9ff31b8634efb1ae4dc4b130468c7c39dd3d32e7c3affd", size = 2079358, upload-time = "2025-01-20T13:18:29.629Z" }, +] + 
+[[package]] +name = "pytest" +version = "9.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7d/0d/549bd94f1a0a402dc8cf64563a117c0f3765662e2e668477624baeec44d5/pytest-9.0.3.tar.gz", hash = "sha256:b86ada508af81d19edeb213c681b1d48246c1a91d304c6c81a427674c17eb91c", size = 1572165, upload-time = "2026-04-07T17:16:18.027Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d4/24/a372aaf5c9b7208e7112038812994107bc65a84cd00e0354a88c2c77a617/pytest-9.0.3-py3-none-any.whl", hash = "sha256:2c5efc453d45394fdd706ade797c0a81091eccd1d6e4bccfcd476e2b8e0ab5d9", size = 375249, upload-time = "2026-04-07T17:16:16.13Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" }, +] + +[[package]] +name = "pytest-cov" +version = "7.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "pluggy" }, + { name = "pytest" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/b1/51/a849f96e117386044471c8ec2bd6cfebacda285da9525c9106aeb28da671/pytest_cov-7.1.0.tar.gz", hash = "sha256:30674f2b5f6351aa09702a9c8c364f6a01c27aae0c1366ae8016160d1efc56b2", size = 55592, upload-time = "2026-03-21T20:11:16.284Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9d/7a/d968e294073affff457b041c2be9868a40c1c71f4a35fcc1e45e5493067b/pytest_cov-7.1.0-py3-none-any.whl", hash = "sha256:a0461110b7865f9a271aa1b51e516c9a95de9d696734a2f71e3e78f46e1d4678", size = 22876, upload-time = "2026-03-21T20:11:14.438Z" }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/82/ed/0301aeeac3e5353ef3d94b6ec08bbcabd04a72018415dcb29e588514bba8/python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3", size = 50135, upload-time = "2026-03-01T16:00:26.196Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = 
"sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" }, +] + +[[package]] +name = "python-jose" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ecdsa" }, + { name = "pyasn1" }, + { name = "rsa" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/77/3a1c9039db7124eb039772b935f2244fbb73fc8ee65b9acf2375da1c07bf/python_jose-3.5.0.tar.gz", hash = "sha256:fb4eaa44dbeb1c26dcc69e4bd7ec54a1cb8dd64d3b4d81ef08d90ff453f2b01b", size = 92726, upload-time = "2025-05-28T17:31:54.288Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/c3/0bd11992072e6a1c513b16500a5d07f91a24017c5909b02c72c62d7ad024/python_jose-3.5.0-py2.py3-none-any.whl", hash = "sha256:abd1202f23d34dfad2c3d28cb8617b90acf34132c7afd60abd0b0b7d3cb55771", size = 34624, upload-time = "2025-05-28T17:31:52.802Z" }, +] + +[package.optional-dependencies] +cryptography = [ + { name = "cryptography" }, +] + +[[package]] +name = "python-multipart" +version = "0.0.24" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8a/45/e23b5dc14ddb9918ae4a625379506b17b6f8fc56ca1d82db62462f59aea6/python_multipart-0.0.24.tar.gz", hash = "sha256:9574c97e1c026e00bc30340ef7c7d76739512ab4dfd428fec8c330fa6a5cc3c8", size = 37695, upload-time = "2026-04-05T20:49:13.829Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/73/89930efabd4da63cea44a3f438aeb753d600123570e6d6264e763617a9ce/python_multipart-0.0.24-py3-none-any.whl", hash = "sha256:9b110a98db707df01a53c194f0af075e736a770dc5058089650d70b4a182f950", size = 24420, upload-time = "2026-04-05T20:49:12.555Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = 
"sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", 
hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = 
"2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = 
"sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = 
"sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "redis" +version = "6.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "async-timeout", marker = "python_full_version < '3.11.3'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0d/d6/e8b92798a5bd67d659d51a18170e91c16ac3b59738d91894651ee255ed49/redis-6.4.0.tar.gz", hash = "sha256:b01bc7282b8444e28ec36b261df5375183bb47a07eb9c603f284e89cbc5ef010", size = 4647399, upload-time = "2025-08-07T08:10:11.441Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/e8/02/89e2ed7e85db6c93dfa9e8f691c5087df4e3551ab39081a4d7c6d1f90e05/redis-6.4.0-py3-none-any.whl", hash = "sha256:f0544fa9604264e9464cdf4814e7d4830f74b165d52f2a330a760a88dd248b7f", size = 279847, upload-time = "2025-08-07T08:10:09.84Z" }, +] + +[[package]] +name = "rsa" +version = "4.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034, upload-time = "2025-04-16T09:51:18.218Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" }, +] + +[[package]] +name = "s3transfer" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/05/04/74127fc843314818edfa81b5540e26dd537353b123a4edc563109d8f17dd/s3transfer-0.16.0.tar.gz", hash = "sha256:8e990f13268025792229cd52fa10cb7163744bf56e719e0b9cb925ab79abf920", size = 153827, upload-time = "2025-12-01T02:30:59.114Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/51/727abb13f44c1fcf6d145979e1535a35794db0f6e450a0cb46aa24732fe2/s3transfer-0.16.0-py3-none-any.whl", hash = "sha256:18e25d66fed509e3868dc1572b3f427ff947dd2c56f844a5bf09481ad3f3b2fe", size = 86830, upload-time = "2025-12-01T02:30:57.729Z" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.49" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/45/461788f35e0364a8da7bda51a1fe1b09762d0c32f12f63727998d85a873b/sqlalchemy-2.0.49.tar.gz", hash = "sha256:d15950a57a210e36dd4cec1aac22787e2a4d57ba9318233e2ef8b2daf9ff2d5f", size = 9898221, upload-time = "2026-04-03T16:38:11.704Z" } +wheels = [ + { 
url = "https://files.pythonhosted.org/packages/60/b5/e3617cc67420f8f403efebd7b043128f94775e57e5b84e7255203390ceae/sqlalchemy-2.0.49-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5070135e1b7409c4161133aa525419b0062088ed77c92b1da95366ec5cbebbe", size = 2159126, upload-time = "2026-04-03T16:50:13.242Z" }, + { url = "https://files.pythonhosted.org/packages/20/9b/91ca80403b17cd389622a642699e5f6564096b698e7cdcbcbb6409898bc4/sqlalchemy-2.0.49-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ac7a3e245fd0310fd31495eb61af772e637bdf7d88ee81e7f10a3f271bff014", size = 3315509, upload-time = "2026-04-03T16:54:49.332Z" }, + { url = "https://files.pythonhosted.org/packages/b1/61/0722511d98c54de95acb327824cb759e8653789af2b1944ab1cc69d32565/sqlalchemy-2.0.49-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d4e5a0ceba319942fa6b585cf82539288a61e314ef006c1209f734551ab9536", size = 3315014, upload-time = "2026-04-03T16:56:56.376Z" }, + { url = "https://files.pythonhosted.org/packages/46/55/d514a653ffeb4cebf4b54c47bec32ee28ad89d39fafba16eeed1d81dccd5/sqlalchemy-2.0.49-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3ddcb27fb39171de36e207600116ac9dfd4ae46f86c82a9bf3934043e80ebb88", size = 3267388, upload-time = "2026-04-03T16:54:51.272Z" }, + { url = "https://files.pythonhosted.org/packages/2f/16/0dcc56cb6d3335c1671a2258f5d2cb8267c9a2260e27fde53cbfb1b3540a/sqlalchemy-2.0.49-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:32fe6a41ad97302db2931f05bb91abbcc65b5ce4c675cd44b972428dd2947700", size = 3289602, upload-time = "2026-04-03T16:56:57.63Z" }, + { url = "https://files.pythonhosted.org/packages/51/6c/f8ab6fb04470a133cd80608db40aa292e6bae5f162c3a3d4ab19544a67af/sqlalchemy-2.0.49-cp311-cp311-win32.whl", hash = "sha256:46d51518d53edfbe0563662c96954dc8fcace9832332b914375f45a99b77cc9a", size = 2119044, upload-time = "2026-04-03T17:00:53.455Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/59/55a6d627d04b6ebb290693681d7683c7da001eddf90b60cfcc41ee907978/sqlalchemy-2.0.49-cp311-cp311-win_amd64.whl", hash = "sha256:951d4a210744813be63019f3df343bf233b7432aadf0db54c75802247330d3af", size = 2143642, upload-time = "2026-04-03T17:00:54.769Z" }, + { url = "https://files.pythonhosted.org/packages/49/b3/2de412451330756aaaa72d27131db6dde23995efe62c941184e15242a5fa/sqlalchemy-2.0.49-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4bbccb45260e4ff1b7db0be80a9025bb1e6698bdb808b83fff0000f7a90b2c0b", size = 2157681, upload-time = "2026-04-03T16:53:07.132Z" }, + { url = "https://files.pythonhosted.org/packages/50/84/b2a56e2105bd11ebf9f0b93abddd748e1a78d592819099359aa98134a8bf/sqlalchemy-2.0.49-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb37f15714ec2652d574f021d479e78cd4eb9d04396dca36568fdfffb3487982", size = 3338976, upload-time = "2026-04-03T17:07:40Z" }, + { url = "https://files.pythonhosted.org/packages/2c/fa/65fcae2ed62f84ab72cf89536c7c3217a156e71a2c111b1305ab6f0690e2/sqlalchemy-2.0.49-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb9ec6436a820a4c006aad1ac351f12de2f2dbdaad171692ee457a02429b672", size = 3351937, upload-time = "2026-04-03T17:12:23.374Z" }, + { url = "https://files.pythonhosted.org/packages/f8/2f/6fd118563572a7fe475925742eb6b3443b2250e346a0cc27d8d408e73773/sqlalchemy-2.0.49-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8d6efc136f44a7e8bc8088507eaabbb8c2b55b3dbb63fe102c690da0ddebe55e", size = 3281646, upload-time = "2026-04-03T17:07:41.949Z" }, + { url = "https://files.pythonhosted.org/packages/c5/d7/410f4a007c65275b9cf82354adb4bb8ba587b176d0a6ee99caa16fe638f8/sqlalchemy-2.0.49-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e06e617e3d4fd9e51d385dfe45b077a41e9d1b033a7702551e3278ac597dc750", size = 3316695, upload-time = "2026-04-03T17:12:25.642Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/95/81f594aa60ded13273a844539041ccf1e66c5a7bed0a8e27810a3b52d522/sqlalchemy-2.0.49-cp312-cp312-win32.whl", hash = "sha256:83101a6930332b87653886c01d1ee7e294b1fe46a07dd9a2d2b4f91bcc88eec0", size = 2117483, upload-time = "2026-04-03T17:05:40.896Z" }, + { url = "https://files.pythonhosted.org/packages/47/9e/fd90114059175cac64e4fafa9bf3ac20584384d66de40793ae2e2f26f3bb/sqlalchemy-2.0.49-cp312-cp312-win_amd64.whl", hash = "sha256:618a308215b6cececb6240b9abde545e3acdabac7ae3e1d4e666896bf5ba44b4", size = 2144494, upload-time = "2026-04-03T17:05:42.282Z" }, + { url = "https://files.pythonhosted.org/packages/ae/81/81755f50eb2478eaf2049728491d4ea4f416c1eb013338682173259efa09/sqlalchemy-2.0.49-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df2d441bacf97022e81ad047e1597552eb3f83ca8a8f1a1fdd43cd7fe3898120", size = 2154547, upload-time = "2026-04-03T16:53:08.64Z" }, + { url = "https://files.pythonhosted.org/packages/a2/bc/3494270da80811d08bcfa247404292428c4fe16294932bce5593f215cad9/sqlalchemy-2.0.49-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8e20e511dc15265fb433571391ba313e10dd8ea7e509d51686a51313b4ac01a2", size = 3280782, upload-time = "2026-04-03T17:07:43.508Z" }, + { url = "https://files.pythonhosted.org/packages/cd/f5/038741f5e747a5f6ea3e72487211579d8cbea5eb9827a9cbd61d0108c4bd/sqlalchemy-2.0.49-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:47604cb2159f8bbd5a1ab48a714557156320f20871ee64d550d8bf2683d980d3", size = 3297156, upload-time = "2026-04-03T17:12:27.697Z" }, + { url = "https://files.pythonhosted.org/packages/88/50/a6af0ff9dc954b43a65ca9b5367334e45d99684c90a3d3413fc19a02d43c/sqlalchemy-2.0.49-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:22d8798819f86720bc646ab015baff5ea4c971d68121cb36e2ebc2ee43ead2b7", size = 3228832, upload-time = "2026-04-03T17:07:45.38Z" }, + { url = 
"https://files.pythonhosted.org/packages/bc/d1/5f6bdad8de0bf546fc74370939621396515e0cdb9067402d6ba1b8afbe9a/sqlalchemy-2.0.49-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9b1c058c171b739e7c330760044803099c7fff11511e3ab3573e5327116a9c33", size = 3267000, upload-time = "2026-04-03T17:12:29.657Z" }, + { url = "https://files.pythonhosted.org/packages/f7/30/ad62227b4a9819a5e1c6abff77c0f614fa7c9326e5a3bdbee90f7139382b/sqlalchemy-2.0.49-cp313-cp313-win32.whl", hash = "sha256:a143af2ea6672f2af3f44ed8f9cd020e9cc34c56f0e8db12019d5d9ecf41cb3b", size = 2115641, upload-time = "2026-04-03T17:05:43.989Z" }, + { url = "https://files.pythonhosted.org/packages/17/3a/7215b1b7d6d49dc9a87211be44562077f5f04f9bb5a59552c1c8e2d98173/sqlalchemy-2.0.49-cp313-cp313-win_amd64.whl", hash = "sha256:12b04d1db2663b421fe072d638a138460a51d5a862403295671c4f3987fb9148", size = 2141498, upload-time = "2026-04-03T17:05:45.7Z" }, + { url = "https://files.pythonhosted.org/packages/28/4b/52a0cb2687a9cd1648252bb257be5a1ba2c2ded20ba695c65756a55a15a4/sqlalchemy-2.0.49-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:24bd94bb301ec672d8f0623eba9226cc90d775d25a0c92b5f8e4965d7f3a1518", size = 3560807, upload-time = "2026-04-03T16:58:31.666Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d8/fda95459204877eed0458550d6c7c64c98cc50c2d8d618026737de9ed41a/sqlalchemy-2.0.49-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a51d3db74ba489266ef55c7a4534eb0b8db9a326553df481c11e5d7660c8364d", size = 3527481, upload-time = "2026-04-03T17:06:00.155Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0a/2aac8b78ac6487240cf7afef8f203ca783e8796002dc0cf65c4ee99ff8bb/sqlalchemy-2.0.49-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:55250fe61d6ebfd6934a272ee16ef1244e0f16b7af6cd18ab5b1fc9f08631db0", size = 3468565, upload-time = "2026-04-03T16:58:33.414Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/3d/ce71cfa82c50a373fd2148b3c870be05027155ce791dc9a5dcf439790b8b/sqlalchemy-2.0.49-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:46796877b47034b559a593d7e4b549aba151dae73f9e78212a3478161c12ab08", size = 3477769, upload-time = "2026-04-03T17:06:02.787Z" }, + { url = "https://files.pythonhosted.org/packages/d5/e8/0a9f5c1f7c6f9ca480319bf57c2d7423f08d31445974167a27d14483c948/sqlalchemy-2.0.49-cp313-cp313t-win32.whl", hash = "sha256:9c4969a86e41454f2858256c39bdfb966a20961e9b58bf8749b65abf447e9a8d", size = 2143319, upload-time = "2026-04-03T17:02:04.328Z" }, + { url = "https://files.pythonhosted.org/packages/0e/51/fb5240729fbec73006e137c4f7a7918ffd583ab08921e6ff81a999d6517a/sqlalchemy-2.0.49-cp313-cp313t-win_amd64.whl", hash = "sha256:b9870d15ef00e4d0559ae10ee5bc71b654d1f20076dbe8bc7ed19b4c0625ceba", size = 2175104, upload-time = "2026-04-03T17:02:05.989Z" }, + { url = "https://files.pythonhosted.org/packages/55/33/bf28f618c0a9597d14e0b9ee7d1e0622faff738d44fe986ee287cdf1b8d0/sqlalchemy-2.0.49-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:233088b4b99ebcbc5258c755a097aa52fbf90727a03a5a80781c4b9c54347a2e", size = 2156356, upload-time = "2026-04-03T16:53:09.914Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a7/5f476227576cb8644650eff68cc35fa837d3802b997465c96b8340ced1e2/sqlalchemy-2.0.49-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:57ca426a48eb2c682dae8204cd89ea8ab7031e2675120a47924fabc7caacbc2a", size = 3276486, upload-time = "2026-04-03T17:07:46.9Z" }, + { url = "https://files.pythonhosted.org/packages/2e/84/efc7c0bf3a1c5eef81d397f6fddac855becdbb11cb38ff957888603014a7/sqlalchemy-2.0.49-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:685e93e9c8f399b0c96a624799820176312f5ceef958c0f88215af4013d29066", size = 3281479, upload-time = "2026-04-03T17:12:32.226Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/68/bb406fa4257099c67bd75f3f2261b129c63204b9155de0d450b37f004698/sqlalchemy-2.0.49-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9e0400fa22f79acc334d9a6b185dc00a44a8e6578aa7e12d0ddcd8434152b187", size = 3226269, upload-time = "2026-04-03T17:07:48.678Z" }, + { url = "https://files.pythonhosted.org/packages/67/84/acb56c00cca9f251f437cb49e718e14f7687505749ea9255d7bd8158a6df/sqlalchemy-2.0.49-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a05977bffe9bffd2229f477fa75eabe3192b1b05f408961d1bebff8d1cd4d401", size = 3248260, upload-time = "2026-04-03T17:12:34.381Z" }, + { url = "https://files.pythonhosted.org/packages/56/19/6a20ea25606d1efd7bd1862149bb2a22d1451c3f851d23d887969201633f/sqlalchemy-2.0.49-cp314-cp314-win32.whl", hash = "sha256:0f2fa354ba106eafff2c14b0cc51f22801d1e8b2e4149342023bd6f0955de5f5", size = 2118463, upload-time = "2026-04-03T17:05:47.093Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4f/8297e4ed88e80baa1f5aa3c484a0ee29ef3c69c7582f206c916973b75057/sqlalchemy-2.0.49-cp314-cp314-win_amd64.whl", hash = "sha256:77641d299179c37b89cf2343ca9972c88bb6eef0d5fc504a2f86afd15cd5adf5", size = 2144204, upload-time = "2026-04-03T17:05:48.694Z" }, + { url = "https://files.pythonhosted.org/packages/1f/33/95e7216df810c706e0cd3655a778604bbd319ed4f43333127d465a46862d/sqlalchemy-2.0.49-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c1dc3368794d522f43914e03312202523cc89692f5389c32bea0233924f8d977", size = 3565474, upload-time = "2026-04-03T16:58:35.128Z" }, + { url = "https://files.pythonhosted.org/packages/0c/a4/ed7b18d8ccf7f954a83af6bb73866f5bc6f5636f44c7731fbb741f72cc4f/sqlalchemy-2.0.49-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7c821c47ecfe05cc32140dcf8dc6fd5d21971c86dbd56eabfe5ba07a64910c01", size = 3530567, upload-time = "2026-04-03T17:06:04.587Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/a3/20faa869c7e21a827c4a2a42b41353a54b0f9f5e96df5087629c306df71e/sqlalchemy-2.0.49-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:9c04bff9a5335eb95c6ecf1c117576a0aa560def274876fd156cfe5510fccc61", size = 3474282, upload-time = "2026-04-03T16:58:37.131Z" }, + { url = "https://files.pythonhosted.org/packages/b7/50/276b9a007aa0764304ad467eceb70b04822dc32092492ee5f322d559a4dc/sqlalchemy-2.0.49-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7f605a456948c35260e7b2a39f8952a26f077fd25653c37740ed186b90aaa68a", size = 3480406, upload-time = "2026-04-03T17:06:07.176Z" }, + { url = "https://files.pythonhosted.org/packages/e5/c3/c80fcdb41905a2df650c2a3e0337198b6848876e63d66fe9188ef9003d24/sqlalchemy-2.0.49-cp314-cp314t-win32.whl", hash = "sha256:6270d717b11c5476b0cbb21eedc8d4dbb7d1a956fd6c15a23e96f197a6193158", size = 2149151, upload-time = "2026-04-03T17:02:07.281Z" }, + { url = "https://files.pythonhosted.org/packages/05/52/9f1a62feab6ed368aff068524ff414f26a6daebc7361861035ae00b05530/sqlalchemy-2.0.49-cp314-cp314t-win_amd64.whl", hash = "sha256:275424295f4256fd301744b8f335cff367825d270f155d522b30c7bf49903ee7", size = 2184178, upload-time = "2026-04-03T17:02:08.623Z" }, + { url = "https://files.pythonhosted.org/packages/e5/30/8519fdde58a7bdf155b714359791ad1dc018b47d60269d5d160d311fdc36/sqlalchemy-2.0.49-py3-none-any.whl", hash = "sha256:ec44cfa7ef1a728e88ad41674de50f6db8cfdb3e2af84af86e0041aaf02d43d0", size = 1942158, upload-time = "2026-04-03T16:53:44.135Z" }, +] + +[[package]] +name = "starlette" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/81/69/17425771797c36cded50b7fe44e850315d039f28b15901ab44839e70b593/starlette-1.0.0.tar.gz", hash = "sha256:6a4beaf1f81bb472fd19ea9b918b50dc3a77a6f2e190a12954b25e6ed5eea149", size = 
2655289, upload-time = "2026-03-22T18:29:46.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/c9/584bc9651441b4ba60cc4d557d8a547b5aff901af35bda3a4ee30c819b82/starlette-1.0.0-py3-none-any.whl", hash = "sha256:d3ec55e0bb321692d275455ddfd3df75fff145d009685eb40dc91fc66b03d38b", size = 72651, upload-time = "2026-03-22T18:29:45.111Z" }, +] + +[[package]] +name = "tinycss2" +version = "1.5.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "webencodings" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/ae/2ca4913e5c0f09781d75482874c3a95db9105462a92ddd303c7d285d3df2/tinycss2-1.5.1.tar.gz", hash = "sha256:d339d2b616ba90ccce58da8495a78f46e55d4d25f9fd71dfd526f07e7d53f957", size = 88195, upload-time = "2025-11-23T10:29:10.082Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/60/45/c7b5c3168458db837e8ceab06dc77824e18202679d0463f0e8f002143a97/tinycss2-1.5.1-py3-none-any.whl", hash = "sha256:3415ba0f5839c062696996998176c4a3751d18b7edaaeeb658c9ce21ec150661", size = 28404, upload-time = "2025-11-23T10:29:08.676Z" }, +] + +[[package]] +name = "tinyhtml5" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "webencodings" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/1f/cfe2f6b30557c92b3f31d41707e09cef5c1efbd87392bc6c0430c46b0e4d/tinyhtml5-2.1.0.tar.gz", hash = "sha256:60a50ec3d938a37e491efa01af895853060943dcebb5627de5b10d188b338a67", size = 179242, upload-time = "2026-03-05T17:06:30.704Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/48/01695a036b695f83fea7aef6955d735db0f517b1c8e25ddb399ac0bdbcbf/tinyhtml5-2.1.0-py3-none-any.whl", hash = "sha256:6e11cfff38515834268daf89d5f85bbde0b6dd02e8d9e212d1385c2289b89f0a", size = 39686, upload-time = "2026-03-05T17:06:28.498Z" }, +] + +[[package]] +name = "tomli" +version = "2.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/22/de/48c59722572767841493b26183a0d1cc411d54fd759c5607c4590b6563a6/tomli-2.4.1.tar.gz", hash = "sha256:7c7e1a961a0b2f2472c1ac5b69affa0ae1132c39adcb67aba98568702b9cc23f", size = 17543, upload-time = "2026-03-25T20:22:03.828Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/11/db3d5885d8528263d8adc260bb2d28ebf1270b96e98f0e0268d32b8d9900/tomli-2.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f8f0fc26ec2cc2b965b7a3b87cd19c5c6b8c5e5f436b984e85f486d652285c30", size = 154704, upload-time = "2026-03-25T20:21:10.473Z" }, + { url = "https://files.pythonhosted.org/packages/6d/f7/675db52c7e46064a9aa928885a9b20f4124ecb9bc2e1ce74c9106648d202/tomli-2.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4ab97e64ccda8756376892c53a72bd1f964e519c77236368527f758fbc36a53a", size = 149454, upload-time = "2026-03-25T20:21:12.036Z" }, + { url = "https://files.pythonhosted.org/packages/61/71/81c50943cf953efa35bce7646caab3cf457a7d8c030b27cfb40d7235f9ee/tomli-2.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96481a5786729fd470164b47cdb3e0e58062a496f455ee41b4403be77cb5a076", size = 237561, upload-time = "2026-03-25T20:21:13.098Z" }, + { url = "https://files.pythonhosted.org/packages/48/c1/f41d9cb618acccca7df82aaf682f9b49013c9397212cb9f53219e3abac37/tomli-2.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a881ab208c0baf688221f8cecc5401bd291d67e38a1ac884d6736cbcd8247e9", size = 243824, upload-time = "2026-03-25T20:21:14.569Z" }, + { url = "https://files.pythonhosted.org/packages/22/e4/5a816ecdd1f8ca51fb756ef684b90f2780afc52fc67f987e3c61d800a46d/tomli-2.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47149d5bd38761ac8be13a84864bf0b7b70bc051806bc3669ab1cbc56216b23c", size = 242227, upload-time = "2026-03-25T20:21:15.712Z" }, + { url = 
"https://files.pythonhosted.org/packages/6b/49/2b2a0ef529aa6eec245d25f0c703e020a73955ad7edf73e7f54ddc608aa5/tomli-2.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ec9bfaf3ad2df51ace80688143a6a4ebc09a248f6ff781a9945e51937008fcbc", size = 247859, upload-time = "2026-03-25T20:21:17.001Z" }, + { url = "https://files.pythonhosted.org/packages/83/bd/6c1a630eaca337e1e78c5903104f831bda934c426f9231429396ce3c3467/tomli-2.4.1-cp311-cp311-win32.whl", hash = "sha256:ff2983983d34813c1aeb0fa89091e76c3a22889ee83ab27c5eeb45100560c049", size = 97204, upload-time = "2026-03-25T20:21:18.079Z" }, + { url = "https://files.pythonhosted.org/packages/42/59/71461df1a885647e10b6bb7802d0b8e66480c61f3f43079e0dcd315b3954/tomli-2.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:5ee18d9ebdb417e384b58fe414e8d6af9f4e7a0ae761519fb50f721de398dd4e", size = 108084, upload-time = "2026-03-25T20:21:18.978Z" }, + { url = "https://files.pythonhosted.org/packages/b8/83/dceca96142499c069475b790e7913b1044c1a4337e700751f48ed723f883/tomli-2.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:c2541745709bad0264b7d4705ad453b76ccd191e64aa6f0fc66b69a293a45ece", size = 95285, upload-time = "2026-03-25T20:21:20.309Z" }, + { url = "https://files.pythonhosted.org/packages/c1/ba/42f134a3fe2b370f555f44b1d72feebb94debcab01676bf918d0cb70e9aa/tomli-2.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c742f741d58a28940ce01d58f0ab2ea3ced8b12402f162f4d534dfe18ba1cd6a", size = 155924, upload-time = "2026-03-25T20:21:21.626Z" }, + { url = "https://files.pythonhosted.org/packages/dc/c7/62d7a17c26487ade21c5422b646110f2162f1fcc95980ef7f63e73c68f14/tomli-2.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7f86fd587c4ed9dd76f318225e7d9b29cfc5a9d43de44e5754db8d1128487085", size = 150018, upload-time = "2026-03-25T20:21:23.002Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/05/79d13d7c15f13bdef410bdd49a6485b1c37d28968314eabee452c22a7fda/tomli-2.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ff18e6a727ee0ab0388507b89d1bc6a22b138d1e2fa56d1ad494586d61d2eae9", size = 244948, upload-time = "2026-03-25T20:21:24.04Z" }, + { url = "https://files.pythonhosted.org/packages/10/90/d62ce007a1c80d0b2c93e02cab211224756240884751b94ca72df8a875ca/tomli-2.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:136443dbd7e1dee43c68ac2694fde36b2849865fa258d39bf822c10e8068eac5", size = 253341, upload-time = "2026-03-25T20:21:25.177Z" }, + { url = "https://files.pythonhosted.org/packages/1a/7e/caf6496d60152ad4ed09282c1885cca4eea150bfd007da84aea07bcc0a3e/tomli-2.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5e262d41726bc187e69af7825504c933b6794dc3fbd5945e41a79bb14c31f585", size = 248159, upload-time = "2026-03-25T20:21:26.364Z" }, + { url = "https://files.pythonhosted.org/packages/99/e7/c6f69c3120de34bbd882c6fba7975f3d7a746e9218e56ab46a1bc4b42552/tomli-2.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5cb41aa38891e073ee49d55fbc7839cfdb2bc0e600add13874d048c94aadddd1", size = 253290, upload-time = "2026-03-25T20:21:27.46Z" }, + { url = "https://files.pythonhosted.org/packages/d6/2f/4a3c322f22c5c66c4b836ec58211641a4067364f5dcdd7b974b4c5da300c/tomli-2.4.1-cp312-cp312-win32.whl", hash = "sha256:da25dc3563bff5965356133435b757a795a17b17d01dbc0f42fb32447ddfd917", size = 98141, upload-time = "2026-03-25T20:21:28.492Z" }, + { url = "https://files.pythonhosted.org/packages/24/22/4daacd05391b92c55759d55eaee21e1dfaea86ce5c571f10083360adf534/tomli-2.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:52c8ef851d9a240f11a88c003eacb03c31fc1c9c4ec64a99a0f922b93874fda9", size = 108847, upload-time = "2026-03-25T20:21:29.386Z" }, + { url = 
"https://files.pythonhosted.org/packages/68/fd/70e768887666ddd9e9f5d85129e84910f2db2796f9096aa02b721a53098d/tomli-2.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:f758f1b9299d059cc3f6546ae2af89670cb1c4d48ea29c3cacc4fe7de3058257", size = 95088, upload-time = "2026-03-25T20:21:30.677Z" }, + { url = "https://files.pythonhosted.org/packages/07/06/b823a7e818c756d9a7123ba2cda7d07bc2dd32835648d1a7b7b7a05d848d/tomli-2.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:36d2bd2ad5fb9eaddba5226aa02c8ec3fa4f192631e347b3ed28186d43be6b54", size = 155866, upload-time = "2026-03-25T20:21:31.65Z" }, + { url = "https://files.pythonhosted.org/packages/14/6f/12645cf7f08e1a20c7eb8c297c6f11d31c1b50f316a7e7e1e1de6e2e7b7e/tomli-2.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:eb0dc4e38e6a1fd579e5d50369aa2e10acfc9cace504579b2faabb478e76941a", size = 149887, upload-time = "2026-03-25T20:21:33.028Z" }, + { url = "https://files.pythonhosted.org/packages/5c/e0/90637574e5e7212c09099c67ad349b04ec4d6020324539297b634a0192b0/tomli-2.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c7f2c7f2b9ca6bdeef8f0fa897f8e05085923eb091721675170254cbc5b02897", size = 243704, upload-time = "2026-03-25T20:21:34.51Z" }, + { url = "https://files.pythonhosted.org/packages/10/8f/d3ddb16c5a4befdf31a23307f72828686ab2096f068eaf56631e136c1fdd/tomli-2.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f3c6818a1a86dd6dca7ddcaaf76947d5ba31aecc28cb1b67009a5877c9a64f3f", size = 251628, upload-time = "2026-03-25T20:21:36.012Z" }, + { url = "https://files.pythonhosted.org/packages/e3/f1/dbeeb9116715abee2485bf0a12d07a8f31af94d71608c171c45f64c0469d/tomli-2.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d312ef37c91508b0ab2cee7da26ec0b3ed2f03ce12bd87a588d771ae15dcf82d", size = 247180, upload-time = "2026-03-25T20:21:37.136Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/74/16336ffd19ed4da28a70959f92f506233bd7cfc2332b20bdb01591e8b1d1/tomli-2.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:51529d40e3ca50046d7606fa99ce3956a617f9b36380da3b7f0dd3dd28e68cb5", size = 251674, upload-time = "2026-03-25T20:21:38.298Z" }, + { url = "https://files.pythonhosted.org/packages/16/f9/229fa3434c590ddf6c0aa9af64d3af4b752540686cace29e6281e3458469/tomli-2.4.1-cp313-cp313-win32.whl", hash = "sha256:2190f2e9dd7508d2a90ded5ed369255980a1bcdd58e52f7fe24b8162bf9fedbd", size = 97976, upload-time = "2026-03-25T20:21:39.316Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1e/71dfd96bcc1c775420cb8befe7a9d35f2e5b1309798f009dca17b7708c1e/tomli-2.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:8d65a2fbf9d2f8352685bc1364177ee3923d6baf5e7f43ea4959d7d8bc326a36", size = 108755, upload-time = "2026-03-25T20:21:40.248Z" }, + { url = "https://files.pythonhosted.org/packages/83/7a/d34f422a021d62420b78f5c538e5b102f62bea616d1d75a13f0a88acb04a/tomli-2.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:4b605484e43cdc43f0954ddae319fb75f04cc10dd80d830540060ee7cd0243cd", size = 95265, upload-time = "2026-03-25T20:21:41.219Z" }, + { url = "https://files.pythonhosted.org/packages/3c/fb/9a5c8d27dbab540869f7c1f8eb0abb3244189ce780ba9cd73f3770662072/tomli-2.4.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:fd0409a3653af6c147209d267a0e4243f0ae46b011aa978b1080359fddc9b6cf", size = 155726, upload-time = "2026-03-25T20:21:42.23Z" }, + { url = "https://files.pythonhosted.org/packages/62/05/d2f816630cc771ad836af54f5001f47a6f611d2d39535364f148b6a92d6b/tomli-2.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a120733b01c45e9a0c34aeef92bf0cf1d56cfe81ed9d47d562f9ed591a9828ac", size = 149859, upload-time = "2026-03-25T20:21:43.386Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/48/66341bdb858ad9bd0ceab5a86f90eddab127cf8b046418009f2125630ecb/tomli-2.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:559db847dc486944896521f68d8190be1c9e719fced785720d2216fe7022b662", size = 244713, upload-time = "2026-03-25T20:21:44.474Z" }, + { url = "https://files.pythonhosted.org/packages/df/6d/c5fad00d82b3c7a3ab6189bd4b10e60466f22cfe8a08a9394185c8a8111c/tomli-2.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01f520d4f53ef97964a240a035ec2a869fe1a37dde002b57ebc4417a27ccd853", size = 252084, upload-time = "2026-03-25T20:21:45.62Z" }, + { url = "https://files.pythonhosted.org/packages/00/71/3a69e86f3eafe8c7a59d008d245888051005bd657760e96d5fbfb0b740c2/tomli-2.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7f94b27a62cfad8496c8d2513e1a222dd446f095fca8987fceef261225538a15", size = 247973, upload-time = "2026-03-25T20:21:46.937Z" }, + { url = "https://files.pythonhosted.org/packages/67/50/361e986652847fec4bd5e4a0208752fbe64689c603c7ae5ea7cb16b1c0ca/tomli-2.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ede3e6487c5ef5d28634ba3f31f989030ad6af71edfb0055cbbd14189ff240ba", size = 256223, upload-time = "2026-03-25T20:21:48.467Z" }, + { url = "https://files.pythonhosted.org/packages/8c/9a/b4173689a9203472e5467217e0154b00e260621caa227b6fa01feab16998/tomli-2.4.1-cp314-cp314-win32.whl", hash = "sha256:3d48a93ee1c9b79c04bb38772ee1b64dcf18ff43085896ea460ca8dec96f35f6", size = 98973, upload-time = "2026-03-25T20:21:49.526Z" }, + { url = "https://files.pythonhosted.org/packages/14/58/640ac93bf230cd27d002462c9af0d837779f8773bc03dee06b5835208214/tomli-2.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:88dceee75c2c63af144e456745e10101eb67361050196b0b6af5d717254dddf7", size = 109082, upload-time = "2026-03-25T20:21:50.506Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/2f/702d5e05b227401c1068f0d386d79a589bb12bf64c3d2c72ce0631e3bc49/tomli-2.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:b8c198f8c1805dc42708689ed6864951fd2494f924149d3e4bce7710f8eb5232", size = 96490, upload-time = "2026-03-25T20:21:51.474Z" }, + { url = "https://files.pythonhosted.org/packages/45/4b/b877b05c8ba62927d9865dd980e34a755de541eb65fffba52b4cc495d4d2/tomli-2.4.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:d4d8fe59808a54658fcc0160ecfb1b30f9089906c50b23bcb4c69eddc19ec2b4", size = 164263, upload-time = "2026-03-25T20:21:52.543Z" }, + { url = "https://files.pythonhosted.org/packages/24/79/6ab420d37a270b89f7195dec5448f79400d9e9c1826df982f3f8e97b24fd/tomli-2.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7008df2e7655c495dd12d2a4ad038ff878d4ca4b81fccaf82b714e07eae4402c", size = 160736, upload-time = "2026-03-25T20:21:53.674Z" }, + { url = "https://files.pythonhosted.org/packages/02/e0/3630057d8eb170310785723ed5adcdfb7d50cb7e6455f85ba8a3deed642b/tomli-2.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1d8591993e228b0c930c4bb0db464bdad97b3289fb981255d6c9a41aedc84b2d", size = 270717, upload-time = "2026-03-25T20:21:55.129Z" }, + { url = "https://files.pythonhosted.org/packages/7a/b4/1613716072e544d1a7891f548d8f9ec6ce2faf42ca65acae01d76ea06bb0/tomli-2.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:734e20b57ba95624ecf1841e72b53f6e186355e216e5412de414e3c51e5e3c41", size = 278461, upload-time = "2026-03-25T20:21:56.228Z" }, + { url = "https://files.pythonhosted.org/packages/05/38/30f541baf6a3f6df77b3df16b01ba319221389e2da59427e221ef417ac0c/tomli-2.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8a650c2dbafa08d42e51ba0b62740dae4ecb9338eefa093aa5c78ceb546fcd5c", size = 274855, upload-time = "2026-03-25T20:21:57.653Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/a3/ec9dd4fd2c38e98de34223b995a3b34813e6bdadf86c75314c928350ed14/tomli-2.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:504aa796fe0569bb43171066009ead363de03675276d2d121ac1a4572397870f", size = 283144, upload-time = "2026-03-25T20:21:59.089Z" }, + { url = "https://files.pythonhosted.org/packages/ef/be/605a6261cac79fba2ec0c9827e986e00323a1945700969b8ee0b30d85453/tomli-2.4.1-cp314-cp314t-win32.whl", hash = "sha256:b1d22e6e9387bf4739fbe23bfa80e93f6b0373a7f1b96c6227c32bef95a4d7a8", size = 108683, upload-time = "2026-03-25T20:22:00.214Z" }, + { url = "https://files.pythonhosted.org/packages/12/64/da524626d3b9cc40c168a13da8335fe1c51be12c0a63685cc6db7308daae/tomli-2.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:2c1c351919aca02858f740c6d33adea0c5deea37f9ecca1cc1ef9e884a619d26", size = 121196, upload-time = "2026-03-25T20:22:01.169Z" }, + { url = "https://files.pythonhosted.org/packages/5a/cd/e80b62269fc78fc36c9af5a6b89c835baa8af28ff5ad28c7028d60860320/tomli-2.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:eab21f45c7f66c13f2a9e0e1535309cee140182a9cdae1e041d02e47291e8396", size = 100393, upload-time = "2026-03-25T20:22:02.137Z" }, + { url = "https://files.pythonhosted.org/packages/7b/61/cceae43728b7de99d9b847560c262873a1f6c98202171fd5ed62640b494b/tomli-2.4.1-py3-none-any.whl", hash = "sha256:0d85819802132122da43cb86656f8d1f8c6587d54ae7dcaf30e90533028b49fe", size = 14583, upload-time = "2026-03-25T20:22:03.012Z" }, +] + +[[package]] +name = "tqdm" +version = "4.67.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/a9/6ba95a270c6f1fbcd8dac228323f2777d886cb206987444e4bce66338dd4/tqdm-4.67.3.tar.gz", hash = "sha256:7d825f03f89244ef73f1d4ce193cb1774a8179fd96f31d7e1dcde62092b960bb", size = 169598, upload-time = "2026-02-03T17:35:53.048Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl", hash = "sha256:ee1e4c0e59148062281c49d80b25b67771a127c85fc9676d3be5f243206826bf", size = 78374, upload-time = "2026-02-03T17:35:50.982Z" }, +] + +[[package]] +name = "trimesh" +version = "4.11.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0d/bf/53b69f3b6708c20ceb4d1d1250c7dc205733eb646659e5e55771f76ffabd/trimesh-4.11.5.tar.gz", hash = "sha256:b90e6cdd6ada51c52d4a7d32947f4ce44b6751c5b7cab2b04e271ecea1e397d3", size = 836449, upload-time = "2026-03-25T01:08:24.216Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/24/83/72e812f772daee66651f468c7b2535fa05eac27db26df7e614cae823c832/trimesh-4.11.5-py3-none-any.whl", hash = "sha256:b225a94c8af79569f7167ca7eaaab4fd05c260da58a075599453d655835258ef", size = 740833, upload-time = "2026-03-25T01:08:21.397Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mypy-extensions" }, + { name = "typing-extensions" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/dc/74/1789779d91f1961fa9438e9a8710cdae6bd138c80d7303996933d117264a/typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78", size = 13825, upload-time = "2023-05-24T20:25:47.612Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/65/f3/107a22063bf27bdccf2024833d3445f4eea42b2e598abfbd46f6a63b6cb0/typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f", size = 8827, upload-time = "2023-05-24T20:25:45.287Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "tzdata" +version = "2026.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/19/f5/cd531b2d15a671a40c0f66cf06bc3570a12cd56eef98960068ebbad1bf5a/tzdata-2026.1.tar.gz", hash = "sha256:67658a1903c75917309e753fdc349ac0efd8c27db7a0cb406a25be4840f87f98", size = 197639, upload-time = "2026-04-03T11:25:22.002Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b0/70/d460bd685a170790ec89317e9bd33047988e4bce507b831f5db771e142de/tzdata-2026.1-py2.py3-none-any.whl", hash = 
"sha256:4b1d2be7ac37ceafd7327b961aa3a54e467efbdb563a23655fbfe0d39cfc42a9", size = 348952, upload-time = "2026-04-03T11:25:20.313Z" }, +] + +[[package]] +name = "tzlocal" +version = "5.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tzdata", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8b/2e/c14812d3d4d9cd1773c6be938f89e5735a1f11a9f184ac3639b93cef35d5/tzlocal-5.3.1.tar.gz", hash = "sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd", size = 30761, upload-time = "2025-03-05T21:17:41.549Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/14/e2a54fabd4f08cd7af1c07030603c3356b74da07f7cc056e600436edfa17/tzlocal-5.3.1-py3-none-any.whl", hash = "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d", size = 18026, upload-time = "2025-03-05T21:17:39.857Z" }, +] + +[[package]] +name = "urllib3" +version = "2.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, +] + +[[package]] +name = "uvicorn" +version = "0.44.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/da/6eee1ff8b6cbeed47eeb5229749168e81eb4b7b999a1a15a7176e51410c9/uvicorn-0.44.0.tar.gz", hash = 
"sha256:6c942071b68f07e178264b9152f1f16dfac5da85880c4ce06366a96d70d4f31e", size = 86947, upload-time = "2026-04-06T09:23:22.826Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/23/a5bbd9600dd607411fa644c06ff4951bec3a4d82c4b852374024359c19c0/uvicorn-0.44.0-py3-none-any.whl", hash = "sha256:ce937c99a2cc70279556967274414c087888e8cec9f9c94644dfca11bd3ced89", size = 69425, upload-time = "2026-04-06T09:23:21.524Z" }, +] + +[package.optional-dependencies] +standard = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "httptools" }, + { name = "python-dotenv" }, + { name = "pyyaml" }, + { name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" }, + { name = "watchfiles" }, + { name = "websockets" }, +] + +[[package]] +name = "uvloop" +version = "0.22.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/06/f0/18d39dbd1971d6d62c4629cc7fa67f74821b0dc1f5a77af43719de7936a7/uvloop-0.22.1.tar.gz", hash = "sha256:6c84bae345b9147082b17371e3dd5d42775bddce91f885499017f4607fdaf39f", size = 2443250, upload-time = "2025-10-16T22:17:19.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/d5/69900f7883235562f1f50d8184bb7dd84a2fb61e9ec63f3782546fdbd057/uvloop-0.22.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c60ebcd36f7b240b30788554b6f0782454826a0ed765d8430652621b5de674b9", size = 1352420, upload-time = "2025-10-16T22:16:21.187Z" }, + { url = "https://files.pythonhosted.org/packages/a8/73/c4e271b3bce59724e291465cc936c37758886a4868787da0278b3b56b905/uvloop-0.22.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b7f102bf3cb1995cfeaee9321105e8f5da76fdb104cdad8986f85461a1b7b77", size = 748677, upload-time = "2025-10-16T22:16:22.558Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/94/9fb7fad2f824d25f8ecac0d70b94d0d48107ad5ece03769a9c543444f78a/uvloop-0.22.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53c85520781d84a4b8b230e24a5af5b0778efdb39142b424990ff1ef7c48ba21", size = 3753819, upload-time = "2025-10-16T22:16:23.903Z" }, + { url = "https://files.pythonhosted.org/packages/74/4f/256aca690709e9b008b7108bc85fba619a2bc37c6d80743d18abad16ee09/uvloop-0.22.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:56a2d1fae65fd82197cb8c53c367310b3eabe1bbb9fb5a04d28e3e3520e4f702", size = 3804529, upload-time = "2025-10-16T22:16:25.246Z" }, + { url = "https://files.pythonhosted.org/packages/7f/74/03c05ae4737e871923d21a76fe28b6aad57f5c03b6e6bfcfa5ad616013e4/uvloop-0.22.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40631b049d5972c6755b06d0bfe8233b1bd9a8a6392d9d1c45c10b6f9e9b2733", size = 3621267, upload-time = "2025-10-16T22:16:26.819Z" }, + { url = "https://files.pythonhosted.org/packages/75/be/f8e590fe61d18b4a92070905497aec4c0e64ae1761498cad09023f3f4b3e/uvloop-0.22.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:535cc37b3a04f6cd2c1ef65fa1d370c9a35b6695df735fcff5427323f2cd5473", size = 3723105, upload-time = "2025-10-16T22:16:28.252Z" }, + { url = "https://files.pythonhosted.org/packages/3d/ff/7f72e8170be527b4977b033239a83a68d5c881cc4775fca255c677f7ac5d/uvloop-0.22.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fe94b4564e865d968414598eea1a6de60adba0c040ba4ed05ac1300de402cd42", size = 1359936, upload-time = "2025-10-16T22:16:29.436Z" }, + { url = "https://files.pythonhosted.org/packages/c3/c6/e5d433f88fd54d81ef4be58b2b7b0cea13c442454a1db703a1eea0db1a59/uvloop-0.22.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:51eb9bd88391483410daad430813d982010f9c9c89512321f5b60e2cddbdddd6", size = 752769, upload-time = "2025-10-16T22:16:30.493Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/68/a6ac446820273e71aa762fa21cdcc09861edd3536ff47c5cd3b7afb10eeb/uvloop-0.22.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:700e674a166ca5778255e0e1dc4e9d79ab2acc57b9171b79e65feba7184b3370", size = 4317413, upload-time = "2025-10-16T22:16:31.644Z" }, + { url = "https://files.pythonhosted.org/packages/5f/6f/e62b4dfc7ad6518e7eff2516f680d02a0f6eb62c0c212e152ca708a0085e/uvloop-0.22.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b5b1ac819a3f946d3b2ee07f09149578ae76066d70b44df3fa990add49a82e4", size = 4426307, upload-time = "2025-10-16T22:16:32.917Z" }, + { url = "https://files.pythonhosted.org/packages/90/60/97362554ac21e20e81bcef1150cb2a7e4ffdaf8ea1e5b2e8bf7a053caa18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e047cc068570bac9866237739607d1313b9253c3051ad84738cbb095be0537b2", size = 4131970, upload-time = "2025-10-16T22:16:34.015Z" }, + { url = "https://files.pythonhosted.org/packages/99/39/6b3f7d234ba3964c428a6e40006340f53ba37993f46ed6e111c6e9141d18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:512fec6815e2dd45161054592441ef76c830eddaad55c8aa30952e6fe1ed07c0", size = 4296343, upload-time = "2025-10-16T22:16:35.149Z" }, + { url = "https://files.pythonhosted.org/packages/89/8c/182a2a593195bfd39842ea68ebc084e20c850806117213f5a299dfc513d9/uvloop-0.22.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:561577354eb94200d75aca23fbde86ee11be36b00e52a4eaf8f50fb0c86b7705", size = 1358611, upload-time = "2025-10-16T22:16:36.833Z" }, + { url = "https://files.pythonhosted.org/packages/d2/14/e301ee96a6dc95224b6f1162cd3312f6d1217be3907b79173b06785f2fe7/uvloop-0.22.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cdf5192ab3e674ca26da2eada35b288d2fa49fdd0f357a19f0e7c4e7d5077c8", size = 751811, upload-time = "2025-10-16T22:16:38.275Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/02/654426ce265ac19e2980bfd9ea6590ca96a56f10c76e63801a2df01c0486/uvloop-0.22.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e2ea3d6190a2968f4a14a23019d3b16870dd2190cd69c8180f7c632d21de68d", size = 4288562, upload-time = "2025-10-16T22:16:39.375Z" }, + { url = "https://files.pythonhosted.org/packages/15/c0/0be24758891ef825f2065cd5db8741aaddabe3e248ee6acc5e8a80f04005/uvloop-0.22.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0530a5fbad9c9e4ee3f2b33b148c6a64d47bbad8000ea63704fa8260f4cf728e", size = 4366890, upload-time = "2025-10-16T22:16:40.547Z" }, + { url = "https://files.pythonhosted.org/packages/d2/53/8369e5219a5855869bcee5f4d317f6da0e2c669aecf0ef7d371e3d084449/uvloop-0.22.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bc5ef13bbc10b5335792360623cc378d52d7e62c2de64660616478c32cd0598e", size = 4119472, upload-time = "2025-10-16T22:16:41.694Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ba/d69adbe699b768f6b29a5eec7b47dd610bd17a69de51b251126a801369ea/uvloop-0.22.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1f38ec5e3f18c8a10ded09742f7fb8de0108796eb673f30ce7762ce1b8550cad", size = 4239051, upload-time = "2025-10-16T22:16:43.224Z" }, + { url = "https://files.pythonhosted.org/packages/90/cd/b62bdeaa429758aee8de8b00ac0dd26593a9de93d302bff3d21439e9791d/uvloop-0.22.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3879b88423ec7e97cd4eba2a443aa26ed4e59b45e6b76aabf13fe2f27023a142", size = 1362067, upload-time = "2025-10-16T22:16:44.503Z" }, + { url = "https://files.pythonhosted.org/packages/0d/f8/a132124dfda0777e489ca86732e85e69afcd1ff7686647000050ba670689/uvloop-0.22.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4baa86acedf1d62115c1dc6ad1e17134476688f08c6efd8a2ab076e815665c74", size = 752423, upload-time = "2025-10-16T22:16:45.968Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/94/94af78c156f88da4b3a733773ad5ba0b164393e357cc4bd0ab2e2677a7d6/uvloop-0.22.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:297c27d8003520596236bdb2335e6b3f649480bd09e00d1e3a99144b691d2a35", size = 4272437, upload-time = "2025-10-16T22:16:47.451Z" }, + { url = "https://files.pythonhosted.org/packages/b5/35/60249e9fd07b32c665192cec7af29e06c7cd96fa1d08b84f012a56a0b38e/uvloop-0.22.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1955d5a1dd43198244d47664a5858082a3239766a839b2102a269aaff7a4e25", size = 4292101, upload-time = "2025-10-16T22:16:49.318Z" }, + { url = "https://files.pythonhosted.org/packages/02/62/67d382dfcb25d0a98ce73c11ed1a6fba5037a1a1d533dcbb7cab033a2636/uvloop-0.22.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b31dc2fccbd42adc73bc4e7cdbae4fc5086cf378979e53ca5d0301838c5682c6", size = 4114158, upload-time = "2025-10-16T22:16:50.517Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/f1171b4a882a5d13c8b7576f348acfe6074d72eaf52cccef752f748d4a9f/uvloop-0.22.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:93f617675b2d03af4e72a5333ef89450dfaa5321303ede6e67ba9c9d26878079", size = 4177360, upload-time = "2025-10-16T22:16:52.646Z" }, + { url = "https://files.pythonhosted.org/packages/79/7b/b01414f31546caf0919da80ad57cbfe24c56b151d12af68cee1b04922ca8/uvloop-0.22.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:37554f70528f60cad66945b885eb01f1bb514f132d92b6eeed1c90fd54ed6289", size = 1454790, upload-time = "2025-10-16T22:16:54.355Z" }, + { url = "https://files.pythonhosted.org/packages/d4/31/0bb232318dd838cad3fa8fb0c68c8b40e1145b32025581975e18b11fab40/uvloop-0.22.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:b76324e2dc033a0b2f435f33eb88ff9913c156ef78e153fb210e03c13da746b3", size = 796783, upload-time = "2025-10-16T22:16:55.906Z" }, + { url = 
"https://files.pythonhosted.org/packages/42/38/c9b09f3271a7a723a5de69f8e237ab8e7803183131bc57c890db0b6bb872/uvloop-0.22.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:badb4d8e58ee08dad957002027830d5c3b06aea446a6a3744483c2b3b745345c", size = 4647548, upload-time = "2025-10-16T22:16:57.008Z" }, + { url = "https://files.pythonhosted.org/packages/c1/37/945b4ca0ac27e3dc4952642d4c900edd030b3da6c9634875af6e13ae80e5/uvloop-0.22.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b91328c72635f6f9e0282e4a57da7470c7350ab1c9f48546c0f2866205349d21", size = 4467065, upload-time = "2025-10-16T22:16:58.206Z" }, + { url = "https://files.pythonhosted.org/packages/97/cc/48d232f33d60e2e2e0b42f4e73455b146b76ebe216487e862700457fbf3c/uvloop-0.22.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:daf620c2995d193449393d6c62131b3fbd40a63bf7b307a1527856ace637fe88", size = 4328384, upload-time = "2025-10-16T22:16:59.36Z" }, + { url = "https://files.pythonhosted.org/packages/e4/16/c1fd27e9549f3c4baf1dc9c20c456cd2f822dbf8de9f463824b0c0357e06/uvloop-0.22.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6cde23eeda1a25c75b2e07d39970f3374105d5eafbaab2a4482be82f272d5a5e", size = 4296730, upload-time = "2025-10-16T22:17:00.744Z" }, +] + +[[package]] +name = "vine" +version = "5.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bd/e4/d07b5f29d283596b9727dd5275ccbceb63c44a1a82aa9e4bfd20426762ac/vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0", size = 48980, upload-time = "2023-11-05T08:46:53.857Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/ff/7c0c86c43b3cbb927e0ccc0255cb4057ceba4799cd44ae95174ce8e8b5b2/vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc", size = 9636, upload-time = 
"2023-11-05T08:46:51.205Z" }, +] + +[[package]] +name = "watchfiles" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c2/c9/8869df9b2a2d6c59d79220a4db37679e74f807c559ffe5265e08b227a210/watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2", size = 94440, upload-time = "2025-10-14T15:06:21.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/f8/2c5f479fb531ce2f0564eda479faecf253d886b1ab3630a39b7bf7362d46/watchfiles-1.1.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f57b396167a2565a4e8b5e56a5a1c537571733992b226f4f1197d79e94cf0ae5", size = 406529, upload-time = "2025-10-14T15:04:32.899Z" }, + { url = "https://files.pythonhosted.org/packages/fe/cd/f515660b1f32f65df671ddf6f85bfaca621aee177712874dc30a97397977/watchfiles-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:421e29339983e1bebc281fab40d812742268ad057db4aee8c4d2bce0af43b741", size = 394384, upload-time = "2025-10-14T15:04:33.761Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c3/28b7dc99733eab43fca2d10f55c86e03bd6ab11ca31b802abac26b23d161/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e43d39a741e972bab5d8100b5cdacf69db64e34eb19b6e9af162bccf63c5cc6", size = 448789, upload-time = "2025-10-14T15:04:34.679Z" }, + { url = "https://files.pythonhosted.org/packages/4a/24/33e71113b320030011c8e4316ccca04194bf0cbbaeee207f00cbc7d6b9f5/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f537afb3276d12814082a2e9b242bdcf416c2e8fd9f799a737990a1dbe906e5b", size = 460521, upload-time = "2025-10-14T15:04:35.963Z" }, + { url = "https://files.pythonhosted.org/packages/f4/c3/3c9a55f255aa57b91579ae9e98c88704955fa9dac3e5614fb378291155df/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:b2cd9e04277e756a2e2d2543d65d1e2166d6fd4c9b183f8808634fda23f17b14", size = 488722, upload-time = "2025-10-14T15:04:37.091Z" }, + { url = "https://files.pythonhosted.org/packages/49/36/506447b73eb46c120169dc1717fe2eff07c234bb3232a7200b5f5bd816e9/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3f58818dc0b07f7d9aa7fe9eb1037aecb9700e63e1f6acfed13e9fef648f5d", size = 596088, upload-time = "2025-10-14T15:04:38.39Z" }, + { url = "https://files.pythonhosted.org/packages/82/ab/5f39e752a9838ec4d52e9b87c1e80f1ee3ccdbe92e183c15b6577ab9de16/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb9f66367023ae783551042d31b1d7fd422e8289eedd91f26754a66f44d5cff", size = 472923, upload-time = "2025-10-14T15:04:39.666Z" }, + { url = "https://files.pythonhosted.org/packages/af/b9/a419292f05e302dea372fa7e6fda5178a92998411f8581b9830d28fb9edb/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebfd0861a83e6c3d1110b78ad54704486555246e542be3e2bb94195eabb2606", size = 456080, upload-time = "2025-10-14T15:04:40.643Z" }, + { url = "https://files.pythonhosted.org/packages/b0/c3/d5932fd62bde1a30c36e10c409dc5d54506726f08cb3e1d8d0ba5e2bc8db/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fac835b4ab3c6487b5dbad78c4b3724e26bcc468e886f8ba8cc4306f68f6701", size = 629432, upload-time = "2025-10-14T15:04:41.789Z" }, + { url = "https://files.pythonhosted.org/packages/f7/77/16bddd9779fafb795f1a94319dc965209c5641db5bf1edbbccace6d1b3c0/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:399600947b170270e80134ac854e21b3ccdefa11a9529a3decc1327088180f10", size = 623046, upload-time = "2025-10-14T15:04:42.718Z" }, + { url = "https://files.pythonhosted.org/packages/46/ef/f2ecb9a0f342b4bfad13a2787155c6ee7ce792140eac63a34676a2feeef2/watchfiles-1.1.1-cp311-cp311-win32.whl", hash = 
"sha256:de6da501c883f58ad50db3a32ad397b09ad29865b5f26f64c24d3e3281685849", size = 271473, upload-time = "2025-10-14T15:04:43.624Z" }, + { url = "https://files.pythonhosted.org/packages/94/bc/f42d71125f19731ea435c3948cad148d31a64fccde3867e5ba4edee901f9/watchfiles-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:35c53bd62a0b885bf653ebf6b700d1bf05debb78ad9292cf2a942b23513dc4c4", size = 287598, upload-time = "2025-10-14T15:04:44.516Z" }, + { url = "https://files.pythonhosted.org/packages/57/c9/a30f897351f95bbbfb6abcadafbaca711ce1162f4db95fc908c98a9165f3/watchfiles-1.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:57ca5281a8b5e27593cb7d82c2ac927ad88a96ed406aa446f6344e4328208e9e", size = 277210, upload-time = "2025-10-14T15:04:45.883Z" }, + { url = "https://files.pythonhosted.org/packages/74/d5/f039e7e3c639d9b1d09b07ea412a6806d38123f0508e5f9b48a87b0a76cc/watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d", size = 404745, upload-time = "2025-10-14T15:04:46.731Z" }, + { url = "https://files.pythonhosted.org/packages/a5/96/a881a13aa1349827490dab2d363c8039527060cfcc2c92cc6d13d1b1049e/watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610", size = 391769, upload-time = "2025-10-14T15:04:48.003Z" }, + { url = "https://files.pythonhosted.org/packages/4b/5b/d3b460364aeb8da471c1989238ea0e56bec24b6042a68046adf3d9ddb01c/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af", size = 449374, upload-time = "2025-10-14T15:04:49.179Z" }, + { url = "https://files.pythonhosted.org/packages/b9/44/5769cb62d4ed055cb17417c0a109a92f007114a4e07f30812a73a4efdb11/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2edc3553362b1c38d9f06242416a5d8e9fe235c204a4072e988ce2e5bb1f69f6", size = 459485, 
upload-time = "2025-10-14T15:04:50.155Z" }, + { url = "https://files.pythonhosted.org/packages/19/0c/286b6301ded2eccd4ffd0041a1b726afda999926cf720aab63adb68a1e36/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30f7da3fb3f2844259cba4720c3fc7138eb0f7b659c38f3bfa65084c7fc7abce", size = 488813, upload-time = "2025-10-14T15:04:51.059Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2b/8530ed41112dd4a22f4dcfdb5ccf6a1baad1ff6eed8dc5a5f09e7e8c41c7/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8979280bdafff686ba5e4d8f97840f929a87ed9cdf133cbbd42f7766774d2aa", size = 594816, upload-time = "2025-10-14T15:04:52.031Z" }, + { url = "https://files.pythonhosted.org/packages/ce/d2/f5f9fb49489f184f18470d4f99f4e862a4b3e9ac2865688eb2099e3d837a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcc5c24523771db3a294c77d94771abcfcb82a0e0ee8efd910c37c59ec1b31bb", size = 475186, upload-time = "2025-10-14T15:04:53.064Z" }, + { url = "https://files.pythonhosted.org/packages/cf/68/5707da262a119fb06fbe214d82dd1fe4a6f4af32d2d14de368d0349eb52a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db5d7ae38ff20153d542460752ff397fcf5c96090c1230803713cf3147a6803", size = 456812, upload-time = "2025-10-14T15:04:55.174Z" }, + { url = "https://files.pythonhosted.org/packages/66/ab/3cbb8756323e8f9b6f9acb9ef4ec26d42b2109bce830cc1f3468df20511d/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:28475ddbde92df1874b6c5c8aaeb24ad5be47a11f87cde5a28ef3835932e3e94", size = 630196, upload-time = "2025-10-14T15:04:56.22Z" }, + { url = "https://files.pythonhosted.org/packages/78/46/7152ec29b8335f80167928944a94955015a345440f524d2dfe63fc2f437b/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:36193ed342f5b9842edd3532729a2ad55c4160ffcfa3700e0d54be496b70dd43", size = 622657, upload-time = 
"2025-10-14T15:04:57.521Z" }, + { url = "https://files.pythonhosted.org/packages/0a/bf/95895e78dd75efe9a7f31733607f384b42eb5feb54bd2eb6ed57cc2e94f4/watchfiles-1.1.1-cp312-cp312-win32.whl", hash = "sha256:859e43a1951717cc8de7f4c77674a6d389b106361585951d9e69572823f311d9", size = 272042, upload-time = "2025-10-14T15:04:59.046Z" }, + { url = "https://files.pythonhosted.org/packages/87/0a/90eb755f568de2688cb220171c4191df932232c20946966c27a59c400850/watchfiles-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:91d4c9a823a8c987cce8fa2690923b069966dabb196dd8d137ea2cede885fde9", size = 288410, upload-time = "2025-10-14T15:05:00.081Z" }, + { url = "https://files.pythonhosted.org/packages/36/76/f322701530586922fbd6723c4f91ace21364924822a8772c549483abed13/watchfiles-1.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:a625815d4a2bdca61953dbba5a39d60164451ef34c88d751f6c368c3ea73d404", size = 278209, upload-time = "2025-10-14T15:05:01.168Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/f750b29225fe77139f7ae5de89d4949f5a99f934c65a1f1c0b248f26f747/watchfiles-1.1.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:130e4876309e8686a5e37dba7d5e9bc77e6ed908266996ca26572437a5271e18", size = 404321, upload-time = "2025-10-14T15:05:02.063Z" }, + { url = "https://files.pythonhosted.org/packages/2b/f9/f07a295cde762644aa4c4bb0f88921d2d141af45e735b965fb2e87858328/watchfiles-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f3bde70f157f84ece3765b42b4a52c6ac1a50334903c6eaf765362f6ccca88a", size = 391783, upload-time = "2025-10-14T15:05:03.052Z" }, + { url = "https://files.pythonhosted.org/packages/bc/11/fc2502457e0bea39a5c958d86d2cb69e407a4d00b85735ca724bfa6e0d1a/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e0b1fe858430fc0251737ef3824c54027bedb8c37c38114488b8e131cf8219", size = 449279, upload-time = "2025-10-14T15:05:04.004Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/1f/d66bc15ea0b728df3ed96a539c777acfcad0eb78555ad9efcaa1274688f0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f27db948078f3823a6bb3b465180db8ebecf26dd5dae6f6180bd87383b6b4428", size = 459405, upload-time = "2025-10-14T15:05:04.942Z" }, + { url = "https://files.pythonhosted.org/packages/be/90/9f4a65c0aec3ccf032703e6db02d89a157462fbb2cf20dd415128251cac0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059098c3a429f62fc98e8ec62b982230ef2c8df68c79e826e37b895bc359a9c0", size = 488976, upload-time = "2025-10-14T15:05:05.905Z" }, + { url = "https://files.pythonhosted.org/packages/37/57/ee347af605d867f712be7029bb94c8c071732a4b44792e3176fa3c612d39/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfb5862016acc9b869bb57284e6cb35fdf8e22fe59f7548858e2f971d045f150", size = 595506, upload-time = "2025-10-14T15:05:06.906Z" }, + { url = "https://files.pythonhosted.org/packages/a8/78/cc5ab0b86c122047f75e8fc471c67a04dee395daf847d3e59381996c8707/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:319b27255aacd9923b8a276bb14d21a5f7ff82564c744235fc5eae58d95422ae", size = 474936, upload-time = "2025-10-14T15:05:07.906Z" }, + { url = "https://files.pythonhosted.org/packages/62/da/def65b170a3815af7bd40a3e7010bf6ab53089ef1b75d05dd5385b87cf08/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c755367e51db90e75b19454b680903631d41f9e3607fbd941d296a020c2d752d", size = 456147, upload-time = "2025-10-14T15:05:09.138Z" }, + { url = "https://files.pythonhosted.org/packages/57/99/da6573ba71166e82d288d4df0839128004c67d2778d3b566c138695f5c0b/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c22c776292a23bfc7237a98f791b9ad3144b02116ff10d820829ce62dff46d0b", size = 630007, upload-time = "2025-10-14T15:05:10.117Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/51/7439c4dd39511368849eb1e53279cd3454b4a4dbace80bab88feeb83c6b5/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3a476189be23c3686bc2f4321dd501cb329c0a0469e77b7b534ee10129ae6374", size = 622280, upload-time = "2025-10-14T15:05:11.146Z" }, + { url = "https://files.pythonhosted.org/packages/95/9c/8ed97d4bba5db6fdcdb2b298d3898f2dd5c20f6b73aee04eabe56c59677e/watchfiles-1.1.1-cp313-cp313-win32.whl", hash = "sha256:bf0a91bfb5574a2f7fc223cf95eeea79abfefa404bf1ea5e339c0c1560ae99a0", size = 272056, upload-time = "2025-10-14T15:05:12.156Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f3/c14e28429f744a260d8ceae18bf58c1d5fa56b50d006a7a9f80e1882cb0d/watchfiles-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:52e06553899e11e8074503c8e716d574adeeb7e68913115c4b3653c53f9bae42", size = 288162, upload-time = "2025-10-14T15:05:13.208Z" }, + { url = "https://files.pythonhosted.org/packages/dc/61/fe0e56c40d5cd29523e398d31153218718c5786b5e636d9ae8ae79453d27/watchfiles-1.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:ac3cc5759570cd02662b15fbcd9d917f7ecd47efe0d6b40474eafd246f91ea18", size = 277909, upload-time = "2025-10-14T15:05:14.49Z" }, + { url = "https://files.pythonhosted.org/packages/79/42/e0a7d749626f1e28c7108a99fb9bf524b501bbbeb9b261ceecde644d5a07/watchfiles-1.1.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:563b116874a9a7ce6f96f87cd0b94f7faf92d08d0021e837796f0a14318ef8da", size = 403389, upload-time = "2025-10-14T15:05:15.777Z" }, + { url = "https://files.pythonhosted.org/packages/15/49/08732f90ce0fbbc13913f9f215c689cfc9ced345fb1bcd8829a50007cc8d/watchfiles-1.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3ad9fe1dae4ab4212d8c91e80b832425e24f421703b5a42ef2e4a1e215aff051", size = 389964, upload-time = "2025-10-14T15:05:16.85Z" }, + { url = 
"https://files.pythonhosted.org/packages/27/0d/7c315d4bd5f2538910491a0393c56bf70d333d51bc5b34bee8e68e8cea19/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce70f96a46b894b36eba678f153f052967a0d06d5b5a19b336ab0dbbd029f73e", size = 448114, upload-time = "2025-10-14T15:05:17.876Z" }, + { url = "https://files.pythonhosted.org/packages/c3/24/9e096de47a4d11bc4df41e9d1e61776393eac4cb6eb11b3e23315b78b2cc/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cb467c999c2eff23a6417e58d75e5828716f42ed8289fe6b77a7e5a91036ca70", size = 460264, upload-time = "2025-10-14T15:05:18.962Z" }, + { url = "https://files.pythonhosted.org/packages/cc/0f/e8dea6375f1d3ba5fcb0b3583e2b493e77379834c74fd5a22d66d85d6540/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:836398932192dae4146c8f6f737d74baeac8b70ce14831a239bdb1ca882fc261", size = 487877, upload-time = "2025-10-14T15:05:20.094Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5b/df24cfc6424a12deb41503b64d42fbea6b8cb357ec62ca84a5a3476f654a/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:743185e7372b7bc7c389e1badcc606931a827112fbbd37f14c537320fca08620", size = 595176, upload-time = "2025-10-14T15:05:21.134Z" }, + { url = "https://files.pythonhosted.org/packages/8f/b5/853b6757f7347de4e9b37e8cc3289283fb983cba1ab4d2d7144694871d9c/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afaeff7696e0ad9f02cbb8f56365ff4686ab205fcf9c4c5b6fdfaaa16549dd04", size = 473577, upload-time = "2025-10-14T15:05:22.306Z" }, + { url = "https://files.pythonhosted.org/packages/e1/f7/0a4467be0a56e80447c8529c9fce5b38eab4f513cb3d9bf82e7392a5696b/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7eb7da0eb23aa2ba036d4f616d46906013a68caf61b7fdbe42fc8b25132e77", size = 455425, upload-time = 
"2025-10-14T15:05:23.348Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e0/82583485ea00137ddf69bc84a2db88bd92ab4a6e3c405e5fb878ead8d0e7/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:831a62658609f0e5c64178211c942ace999517f5770fe9436be4c2faeba0c0ef", size = 628826, upload-time = "2025-10-14T15:05:24.398Z" }, + { url = "https://files.pythonhosted.org/packages/28/9a/a785356fccf9fae84c0cc90570f11702ae9571036fb25932f1242c82191c/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:f9a2ae5c91cecc9edd47e041a930490c31c3afb1f5e6d71de3dc671bfaca02bf", size = 622208, upload-time = "2025-10-14T15:05:25.45Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f4/0872229324ef69b2c3edec35e84bd57a1289e7d3fe74588048ed8947a323/watchfiles-1.1.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:d1715143123baeeaeadec0528bb7441103979a1d5f6fd0e1f915383fea7ea6d5", size = 404315, upload-time = "2025-10-14T15:05:26.501Z" }, + { url = "https://files.pythonhosted.org/packages/7b/22/16d5331eaed1cb107b873f6ae1b69e9ced582fcf0c59a50cd84f403b1c32/watchfiles-1.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:39574d6370c4579d7f5d0ad940ce5b20db0e4117444e39b6d8f99db5676c52fd", size = 390869, upload-time = "2025-10-14T15:05:27.649Z" }, + { url = "https://files.pythonhosted.org/packages/b2/7e/5643bfff5acb6539b18483128fdc0ef2cccc94a5b8fbda130c823e8ed636/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7365b92c2e69ee952902e8f70f3ba6360d0d596d9299d55d7d386df84b6941fb", size = 449919, upload-time = "2025-10-14T15:05:28.701Z" }, + { url = "https://files.pythonhosted.org/packages/51/2e/c410993ba5025a9f9357c376f48976ef0e1b1aefb73b97a5ae01a5972755/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfff9740c69c0e4ed32416f013f3c45e2ae42ccedd1167ef2d805c000b6c71a5", size = 460845, upload-time = "2025-10-14T15:05:30.064Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/a4/2df3b404469122e8680f0fcd06079317e48db58a2da2950fb45020947734/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b27cf2eb1dda37b2089e3907d8ea92922b673c0c427886d4edc6b94d8dfe5db3", size = 489027, upload-time = "2025-10-14T15:05:31.064Z" }, + { url = "https://files.pythonhosted.org/packages/ea/84/4587ba5b1f267167ee715b7f66e6382cca6938e0a4b870adad93e44747e6/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526e86aced14a65a5b0ec50827c745597c782ff46b571dbfe46192ab9e0b3c33", size = 595615, upload-time = "2025-10-14T15:05:32.074Z" }, + { url = "https://files.pythonhosted.org/packages/6a/0f/c6988c91d06e93cd0bb3d4a808bcf32375ca1904609835c3031799e3ecae/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04e78dd0b6352db95507fd8cb46f39d185cf8c74e4cf1e4fbad1d3df96faf510", size = 474836, upload-time = "2025-10-14T15:05:33.209Z" }, + { url = "https://files.pythonhosted.org/packages/b4/36/ded8aebea91919485b7bbabbd14f5f359326cb5ec218cd67074d1e426d74/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c85794a4cfa094714fb9c08d4a218375b2b95b8ed1666e8677c349906246c05", size = 455099, upload-time = "2025-10-14T15:05:34.189Z" }, + { url = "https://files.pythonhosted.org/packages/98/e0/8c9bdba88af756a2fce230dd365fab2baf927ba42cd47521ee7498fd5211/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:74d5012b7630714b66be7b7b7a78855ef7ad58e8650c73afc4c076a1f480a8d6", size = 630626, upload-time = "2025-10-14T15:05:35.216Z" }, + { url = "https://files.pythonhosted.org/packages/2a/84/a95db05354bf2d19e438520d92a8ca475e578c647f78f53197f5a2f17aaf/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:8fbe85cb3201c7d380d3d0b90e63d520f15d6afe217165d7f98c9c649654db81", size = 622519, upload-time = "2025-10-14T15:05:36.259Z" }, + { url = 
"https://files.pythonhosted.org/packages/1d/ce/d8acdc8de545de995c339be67711e474c77d643555a9bb74a9334252bd55/watchfiles-1.1.1-cp314-cp314-win32.whl", hash = "sha256:3fa0b59c92278b5a7800d3ee7733da9d096d4aabcfabb9a928918bd276ef9b9b", size = 272078, upload-time = "2025-10-14T15:05:37.63Z" }, + { url = "https://files.pythonhosted.org/packages/c4/c9/a74487f72d0451524be827e8edec251da0cc1fcf111646a511ae752e1a3d/watchfiles-1.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:c2047d0b6cea13b3316bdbafbfa0c4228ae593d995030fda39089d36e64fc03a", size = 287664, upload-time = "2025-10-14T15:05:38.95Z" }, + { url = "https://files.pythonhosted.org/packages/df/b8/8ac000702cdd496cdce998c6f4ee0ca1f15977bba51bdf07d872ebdfc34c/watchfiles-1.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:842178b126593addc05acf6fce960d28bc5fae7afbaa2c6c1b3a7b9460e5be02", size = 277154, upload-time = "2025-10-14T15:05:39.954Z" }, + { url = "https://files.pythonhosted.org/packages/47/a8/e3af2184707c29f0f14b1963c0aace6529f9d1b8582d5b99f31bbf42f59e/watchfiles-1.1.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:88863fbbc1a7312972f1c511f202eb30866370ebb8493aef2812b9ff28156a21", size = 403820, upload-time = "2025-10-14T15:05:40.932Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/e47e307c2f4bd75f9f9e8afbe3876679b18e1bcec449beca132a1c5ffb2d/watchfiles-1.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:55c7475190662e202c08c6c0f4d9e345a29367438cf8e8037f3155e10a88d5a5", size = 390510, upload-time = "2025-10-14T15:05:41.945Z" }, + { url = "https://files.pythonhosted.org/packages/d5/a0/ad235642118090f66e7b2f18fd5c42082418404a79205cdfca50b6309c13/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f53fa183d53a1d7a8852277c92b967ae99c2d4dcee2bfacff8868e6e30b15f7", size = 448408, upload-time = "2025-10-14T15:05:43.385Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/85/97fa10fd5ff3332ae17e7e40e20784e419e28521549780869f1413742e9d/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6aae418a8b323732fa89721d86f39ec8f092fc2af67f4217a2b07fd3e93c6101", size = 458968, upload-time = "2025-10-14T15:05:44.404Z" }, + { url = "https://files.pythonhosted.org/packages/47/c2/9059c2e8966ea5ce678166617a7f75ecba6164375f3b288e50a40dc6d489/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f096076119da54a6080e8920cbdaac3dbee667eb91dcc5e5b78840b87415bd44", size = 488096, upload-time = "2025-10-14T15:05:45.398Z" }, + { url = "https://files.pythonhosted.org/packages/94/44/d90a9ec8ac309bc26db808a13e7bfc0e4e78b6fc051078a554e132e80160/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00485f441d183717038ed2e887a7c868154f216877653121068107b227a2f64c", size = 596040, upload-time = "2025-10-14T15:05:46.502Z" }, + { url = "https://files.pythonhosted.org/packages/95/68/4e3479b20ca305cfc561db3ed207a8a1c745ee32bf24f2026a129d0ddb6e/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a55f3e9e493158d7bfdb60a1165035f1cf7d320914e7b7ea83fe22c6023b58fc", size = 473847, upload-time = "2025-10-14T15:05:47.484Z" }, + { url = "https://files.pythonhosted.org/packages/4f/55/2af26693fd15165c4ff7857e38330e1b61ab8c37d15dc79118cdba115b7a/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c91ed27800188c2ae96d16e3149f199d62f86c7af5f5f4d2c61a3ed8cd3666c", size = 455072, upload-time = "2025-10-14T15:05:48.928Z" }, + { url = "https://files.pythonhosted.org/packages/66/1d/d0d200b10c9311ec25d2273f8aad8c3ef7cc7ea11808022501811208a750/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:311ff15a0bae3714ffb603e6ba6dbfba4065ab60865d15a6ec544133bdb21099", size = 629104, upload-time = "2025-10-14T15:05:49.908Z" }, + { 
url = "https://files.pythonhosted.org/packages/e3/bd/fa9bb053192491b3867ba07d2343d9f2252e00811567d30ae8d0f78136fe/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01", size = 622112, upload-time = "2025-10-14T15:05:50.941Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8e/e500f8b0b77be4ff753ac94dc06b33d8f0d839377fee1b78e8c8d8f031bf/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db476ab59b6765134de1d4fe96a1a9c96ddf091683599be0f26147ea1b2e4b88", size = 408250, upload-time = "2025-10-14T15:06:10.264Z" }, + { url = "https://files.pythonhosted.org/packages/bd/95/615e72cd27b85b61eec764a5ca51bd94d40b5adea5ff47567d9ebc4d275a/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89eef07eee5e9d1fda06e38822ad167a044153457e6fd997f8a858ab7564a336", size = 396117, upload-time = "2025-10-14T15:06:11.28Z" }, + { url = "https://files.pythonhosted.org/packages/c9/81/e7fe958ce8a7fb5c73cc9fb07f5aeaf755e6aa72498c57d760af760c91f8/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce19e06cbda693e9e7686358af9cd6f5d61312ab8b00488bc36f5aabbaf77e24", size = 450493, upload-time = "2025-10-14T15:06:12.321Z" }, + { url = "https://files.pythonhosted.org/packages/6e/d4/ed38dd3b1767193de971e694aa544356e63353c33a85d948166b5ff58b9e/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e6f39af2eab0118338902798b5aa6664f46ff66bc0280de76fca67a7f262a49", size = 457546, upload-time = "2025-10-14T15:06:13.372Z" }, +] + +[[package]] +name = "wcwidth" +version = "0.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/35/a2/8e3becb46433538a38726c948d3399905a4c7cabd0df578ede5dc51f0ec2/wcwidth-0.6.0.tar.gz", hash = "sha256:cdc4e4262d6ef9a1a57e018384cbeb1208d8abbc64176027e2c2455c81313159", size = 159684, 
upload-time = "2026-02-06T19:19:40.919Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/5a/199c59e0a824a3db2b89c5d2dade7ab5f9624dbf6448dc291b46d5ec94d3/wcwidth-0.6.0-py3-none-any.whl", hash = "sha256:1a3a1e510b553315f8e146c54764f4fb6264ffad731b3d78088cdb1478ffbdad", size = 94189, upload-time = "2026-02-06T19:19:39.646Z" }, +] + +[[package]] +name = "weasyprint" +version = "68.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi" }, + { name = "cssselect2" }, + { name = "fonttools", extra = ["woff"] }, + { name = "pillow" }, + { name = "pydyf" }, + { name = "pyphen" }, + { name = "tinycss2" }, + { name = "tinyhtml5" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/db/3e/65c0f176e6fb5c2b0a1ac13185b366f727d9723541babfa7fa4309998169/weasyprint-68.1.tar.gz", hash = "sha256:d3b752049b453a5c95edb27ce78d69e9319af5a34f257fa0f4c738c701b4184e", size = 1542379, upload-time = "2026-02-06T15:04:11.203Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dd/dd/14eb73cea481ad8162d3b18a4850d4a84d6e804a22840cca207648532265/weasyprint-68.1-py3-none-any.whl", hash = "sha256:4dc3ba63c68bbbce3e9617cb2226251c372f5ee90a8a484503b1c099da9cf5be", size = 319789, upload-time = "2026-02-06T15:04:09.189Z" }, +] + +[[package]] +name = "webencodings" +version = "0.5.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721, upload-time = "2017-04-05T20:21:34.189Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774, upload-time = "2017-04-05T20:21:32.581Z" }, +] 
+ +[[package]] +name = "websockets" +version = "16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/24/4b2031d72e840ce4c1ccb255f693b15c334757fc50023e4db9537080b8c4/websockets-16.0.tar.gz", hash = "sha256:5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5", size = 179346, upload-time = "2026-01-10T09:23:47.181Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f2/db/de907251b4ff46ae804ad0409809504153b3f30984daf82a1d84a9875830/websockets-16.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:31a52addea25187bde0797a97d6fc3d2f92b6f72a9370792d65a6e84615ac8a8", size = 177340, upload-time = "2026-01-10T09:22:34.539Z" }, + { url = "https://files.pythonhosted.org/packages/f3/fa/abe89019d8d8815c8781e90d697dec52523fb8ebe308bf11664e8de1877e/websockets-16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:417b28978cdccab24f46400586d128366313e8a96312e4b9362a4af504f3bbad", size = 175022, upload-time = "2026-01-10T09:22:36.332Z" }, + { url = "https://files.pythonhosted.org/packages/58/5d/88ea17ed1ded2079358b40d31d48abe90a73c9e5819dbcde1606e991e2ad/websockets-16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:af80d74d4edfa3cb9ed973a0a5ba2b2a549371f8a741e0800cb07becdd20f23d", size = 175319, upload-time = "2026-01-10T09:22:37.602Z" }, + { url = "https://files.pythonhosted.org/packages/d2/ae/0ee92b33087a33632f37a635e11e1d99d429d3d323329675a6022312aac2/websockets-16.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:08d7af67b64d29823fed316505a89b86705f2b7981c07848fb5e3ea3020c1abe", size = 184631, upload-time = "2026-01-10T09:22:38.789Z" }, + { url = "https://files.pythonhosted.org/packages/c8/c5/27178df583b6c5b31b29f526ba2da5e2f864ecc79c99dae630a85d68c304/websockets-16.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:7be95cfb0a4dae143eaed2bcba8ac23f4892d8971311f1b06f3c6b78952ee70b", size = 185870, upload-time = "2026-01-10T09:22:39.893Z" }, + { url = "https://files.pythonhosted.org/packages/87/05/536652aa84ddc1c018dbb7e2c4cbcd0db884580bf8e95aece7593fde526f/websockets-16.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d6297ce39ce5c2e6feb13c1a996a2ded3b6832155fcfc920265c76f24c7cceb5", size = 185361, upload-time = "2026-01-10T09:22:41.016Z" }, + { url = "https://files.pythonhosted.org/packages/6d/e2/d5332c90da12b1e01f06fb1b85c50cfc489783076547415bf9f0a659ec19/websockets-16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c1b30e4f497b0b354057f3467f56244c603a79c0d1dafce1d16c283c25f6e64", size = 184615, upload-time = "2026-01-10T09:22:42.442Z" }, + { url = "https://files.pythonhosted.org/packages/77/fb/d3f9576691cae9253b51555f841bc6600bf0a983a461c79500ace5a5b364/websockets-16.0-cp311-cp311-win32.whl", hash = "sha256:5f451484aeb5cafee1ccf789b1b66f535409d038c56966d6101740c1614b86c6", size = 178246, upload-time = "2026-01-10T09:22:43.654Z" }, + { url = "https://files.pythonhosted.org/packages/54/67/eaff76b3dbaf18dcddabc3b8c1dba50b483761cccff67793897945b37408/websockets-16.0-cp311-cp311-win_amd64.whl", hash = "sha256:8d7f0659570eefb578dacde98e24fb60af35350193e4f56e11190787bee77dac", size = 178684, upload-time = "2026-01-10T09:22:44.941Z" }, + { url = "https://files.pythonhosted.org/packages/84/7b/bac442e6b96c9d25092695578dda82403c77936104b5682307bd4deb1ad4/websockets-16.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:71c989cbf3254fbd5e84d3bff31e4da39c43f884e64f2551d14bb3c186230f00", size = 177365, upload-time = "2026-01-10T09:22:46.787Z" }, + { url = "https://files.pythonhosted.org/packages/b0/fe/136ccece61bd690d9c1f715baaeefd953bb2360134de73519d5df19d29ca/websockets-16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8b6e209ffee39ff1b6d0fa7bfef6de950c60dfb91b8fcead17da4ee539121a79", size = 175038, upload-time = "2026-01-10T09:22:47.999Z" }, + { 
url = "https://files.pythonhosted.org/packages/40/1e/9771421ac2286eaab95b8575b0cb701ae3663abf8b5e1f64f1fd90d0a673/websockets-16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86890e837d61574c92a97496d590968b23c2ef0aeb8a9bc9421d174cd378ae39", size = 175328, upload-time = "2026-01-10T09:22:49.809Z" }, + { url = "https://files.pythonhosted.org/packages/18/29/71729b4671f21e1eaa5d6573031ab810ad2936c8175f03f97f3ff164c802/websockets-16.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9b5aca38b67492ef518a8ab76851862488a478602229112c4b0d58d63a7a4d5c", size = 184915, upload-time = "2026-01-10T09:22:51.071Z" }, + { url = "https://files.pythonhosted.org/packages/97/bb/21c36b7dbbafc85d2d480cd65df02a1dc93bf76d97147605a8e27ff9409d/websockets-16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e0334872c0a37b606418ac52f6ab9cfd17317ac26365f7f65e203e2d0d0d359f", size = 186152, upload-time = "2026-01-10T09:22:52.224Z" }, + { url = "https://files.pythonhosted.org/packages/4a/34/9bf8df0c0cf88fa7bfe36678dc7b02970c9a7d5e065a3099292db87b1be2/websockets-16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a0b31e0b424cc6b5a04b8838bbaec1688834b2383256688cf47eb97412531da1", size = 185583, upload-time = "2026-01-10T09:22:53.443Z" }, + { url = "https://files.pythonhosted.org/packages/47/88/4dd516068e1a3d6ab3c7c183288404cd424a9a02d585efbac226cb61ff2d/websockets-16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:485c49116d0af10ac698623c513c1cc01c9446c058a4e61e3bf6c19dff7335a2", size = 184880, upload-time = "2026-01-10T09:22:55.033Z" }, + { url = "https://files.pythonhosted.org/packages/91/d6/7d4553ad4bf1c0421e1ebd4b18de5d9098383b5caa1d937b63df8d04b565/websockets-16.0-cp312-cp312-win32.whl", hash = "sha256:eaded469f5e5b7294e2bdca0ab06becb6756ea86894a47806456089298813c89", size = 178261, upload-time = "2026-01-10T09:22:56.251Z" }, + { url = 
"https://files.pythonhosted.org/packages/c3/f0/f3a17365441ed1c27f850a80b2bc680a0fa9505d733fe152fdf5e98c1c0b/websockets-16.0-cp312-cp312-win_amd64.whl", hash = "sha256:5569417dc80977fc8c2d43a86f78e0a5a22fee17565d78621b6bb264a115d4ea", size = 178693, upload-time = "2026-01-10T09:22:57.478Z" }, + { url = "https://files.pythonhosted.org/packages/cc/9c/baa8456050d1c1b08dd0ec7346026668cbc6f145ab4e314d707bb845bf0d/websockets-16.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:878b336ac47938b474c8f982ac2f7266a540adc3fa4ad74ae96fea9823a02cc9", size = 177364, upload-time = "2026-01-10T09:22:59.333Z" }, + { url = "https://files.pythonhosted.org/packages/7e/0c/8811fc53e9bcff68fe7de2bcbe75116a8d959ac699a3200f4847a8925210/websockets-16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52a0fec0e6c8d9a784c2c78276a48a2bdf099e4ccc2a4cad53b27718dbfd0230", size = 175039, upload-time = "2026-01-10T09:23:01.171Z" }, + { url = "https://files.pythonhosted.org/packages/aa/82/39a5f910cb99ec0b59e482971238c845af9220d3ab9fa76dd9162cda9d62/websockets-16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e6578ed5b6981005df1860a56e3617f14a6c307e6a71b4fff8c48fdc50f3ed2c", size = 175323, upload-time = "2026-01-10T09:23:02.341Z" }, + { url = "https://files.pythonhosted.org/packages/bd/28/0a25ee5342eb5d5f297d992a77e56892ecb65e7854c7898fb7d35e9b33bd/websockets-16.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:95724e638f0f9c350bb1c2b0a7ad0e83d9cc0c9259f3ea94e40d7b02a2179ae5", size = 184975, upload-time = "2026-01-10T09:23:03.756Z" }, + { url = "https://files.pythonhosted.org/packages/f9/66/27ea52741752f5107c2e41fda05e8395a682a1e11c4e592a809a90c6a506/websockets-16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0204dc62a89dc9d50d682412c10b3542d748260d743500a85c13cd1ee4bde82", size = 186203, upload-time = "2026-01-10T09:23:05.01Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/e5/8e32857371406a757816a2b471939d51c463509be73fa538216ea52b792a/websockets-16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52ac480f44d32970d66763115edea932f1c5b1312de36df06d6b219f6741eed8", size = 185653, upload-time = "2026-01-10T09:23:06.301Z" }, + { url = "https://files.pythonhosted.org/packages/9b/67/f926bac29882894669368dc73f4da900fcdf47955d0a0185d60103df5737/websockets-16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6e5a82b677f8f6f59e8dfc34ec06ca6b5b48bc4fcda346acd093694cc2c24d8f", size = 184920, upload-time = "2026-01-10T09:23:07.492Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a1/3d6ccdcd125b0a42a311bcd15a7f705d688f73b2a22d8cf1c0875d35d34a/websockets-16.0-cp313-cp313-win32.whl", hash = "sha256:abf050a199613f64c886ea10f38b47770a65154dc37181bfaff70c160f45315a", size = 178255, upload-time = "2026-01-10T09:23:09.245Z" }, + { url = "https://files.pythonhosted.org/packages/6b/ae/90366304d7c2ce80f9b826096a9e9048b4bb760e44d3b873bb272cba696b/websockets-16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3425ac5cf448801335d6fdc7ae1eb22072055417a96cc6b31b3861f455fbc156", size = 178689, upload-time = "2026-01-10T09:23:10.483Z" }, + { url = "https://files.pythonhosted.org/packages/f3/1d/e88022630271f5bd349ed82417136281931e558d628dd52c4d8621b4a0b2/websockets-16.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8cc451a50f2aee53042ac52d2d053d08bf89bcb31ae799cb4487587661c038a0", size = 177406, upload-time = "2026-01-10T09:23:12.178Z" }, + { url = "https://files.pythonhosted.org/packages/f2/78/e63be1bf0724eeb4616efb1ae1c9044f7c3953b7957799abb5915bffd38e/websockets-16.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:daa3b6ff70a9241cf6c7fc9e949d41232d9d7d26fd3522b1ad2b4d62487e9904", size = 175085, upload-time = "2026-01-10T09:23:13.511Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/f4/d3c9220d818ee955ae390cf319a7c7a467beceb24f05ee7aaaa2414345ba/websockets-16.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:fd3cb4adb94a2a6e2b7c0d8d05cb94e6f1c81a0cf9dc2694fb65c7e8d94c42e4", size = 175328, upload-time = "2026-01-10T09:23:14.727Z" }, + { url = "https://files.pythonhosted.org/packages/63/bc/d3e208028de777087e6fb2b122051a6ff7bbcca0d6df9d9c2bf1dd869ae9/websockets-16.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:781caf5e8eee67f663126490c2f96f40906594cb86b408a703630f95550a8c3e", size = 185044, upload-time = "2026-01-10T09:23:15.939Z" }, + { url = "https://files.pythonhosted.org/packages/ad/6e/9a0927ac24bd33a0a9af834d89e0abc7cfd8e13bed17a86407a66773cc0e/websockets-16.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:caab51a72c51973ca21fa8a18bd8165e1a0183f1ac7066a182ff27107b71e1a4", size = 186279, upload-time = "2026-01-10T09:23:17.148Z" }, + { url = "https://files.pythonhosted.org/packages/b9/ca/bf1c68440d7a868180e11be653c85959502efd3a709323230314fda6e0b3/websockets-16.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19c4dc84098e523fd63711e563077d39e90ec6702aff4b5d9e344a60cb3c0cb1", size = 185711, upload-time = "2026-01-10T09:23:18.372Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f8/fdc34643a989561f217bb477cbc47a3a07212cbda91c0e4389c43c296ebf/websockets-16.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a5e18a238a2b2249c9a9235466b90e96ae4795672598a58772dd806edc7ac6d3", size = 184982, upload-time = "2026-01-10T09:23:19.652Z" }, + { url = "https://files.pythonhosted.org/packages/dd/d1/574fa27e233764dbac9c52730d63fcf2823b16f0856b3329fc6268d6ae4f/websockets-16.0-cp314-cp314-win32.whl", hash = "sha256:a069d734c4a043182729edd3e9f247c3b2a4035415a9172fd0f1b71658a320a8", size = 177915, upload-time = "2026-01-10T09:23:21.458Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/f1/ae6b937bf3126b5134ce1f482365fde31a357c784ac51852978768b5eff4/websockets-16.0-cp314-cp314-win_amd64.whl", hash = "sha256:c0ee0e63f23914732c6d7e0cce24915c48f3f1512ec1d079ed01fc629dab269d", size = 178381, upload-time = "2026-01-10T09:23:22.715Z" }, + { url = "https://files.pythonhosted.org/packages/06/9b/f791d1db48403e1f0a27577a6beb37afae94254a8c6f08be4a23e4930bc0/websockets-16.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:a35539cacc3febb22b8f4d4a99cc79b104226a756aa7400adc722e83b0d03244", size = 177737, upload-time = "2026-01-10T09:23:24.523Z" }, + { url = "https://files.pythonhosted.org/packages/bd/40/53ad02341fa33b3ce489023f635367a4ac98b73570102ad2cdd770dacc9a/websockets-16.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b784ca5de850f4ce93ec85d3269d24d4c82f22b7212023c974c401d4980ebc5e", size = 175268, upload-time = "2026-01-10T09:23:25.781Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/6158d4e459b984f949dcbbb0c5d270154c7618e11c01029b9bbd1bb4c4f9/websockets-16.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:569d01a4e7fba956c5ae4fc988f0d4e187900f5497ce46339c996dbf24f17641", size = 175486, upload-time = "2026-01-10T09:23:27.033Z" }, + { url = "https://files.pythonhosted.org/packages/e5/2d/7583b30208b639c8090206f95073646c2c9ffd66f44df967981a64f849ad/websockets-16.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50f23cdd8343b984957e4077839841146f67a3d31ab0d00e6b824e74c5b2f6e8", size = 185331, upload-time = "2026-01-10T09:23:28.259Z" }, + { url = "https://files.pythonhosted.org/packages/45/b0/cce3784eb519b7b5ad680d14b9673a31ab8dcb7aad8b64d81709d2430aa8/websockets-16.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:152284a83a00c59b759697b7f9e9cddf4e3c7861dd0d964b472b70f78f89e80e", size = 186501, upload-time = "2026-01-10T09:23:29.449Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/60/b8ebe4c7e89fb5f6cdf080623c9d92789a53636950f7abacfc33fe2b3135/websockets-16.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bc59589ab64b0022385f429b94697348a6a234e8ce22544e3681b2e9331b5944", size = 186062, upload-time = "2026-01-10T09:23:31.368Z" }, + { url = "https://files.pythonhosted.org/packages/88/a8/a080593f89b0138b6cba1b28f8df5673b5506f72879322288b031337c0b8/websockets-16.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:32da954ffa2814258030e5a57bc73a3635463238e797c7375dc8091327434206", size = 185356, upload-time = "2026-01-10T09:23:32.627Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b6/b9afed2afadddaf5ebb2afa801abf4b0868f42f8539bfe4b071b5266c9fe/websockets-16.0-cp314-cp314t-win32.whl", hash = "sha256:5a4b4cc550cb665dd8a47f868c8d04c8230f857363ad3c9caf7a0c3bf8c61ca6", size = 178085, upload-time = "2026-01-10T09:23:33.816Z" }, + { url = "https://files.pythonhosted.org/packages/9f/3e/28135a24e384493fa804216b79a6a6759a38cc4ff59118787b9fb693df93/websockets-16.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b14dc141ed6d2dde437cddb216004bcac6a1df0935d79656387bd41632ba0bbd", size = 178531, upload-time = "2026-01-10T09:23:35.016Z" }, + { url = "https://files.pythonhosted.org/packages/72/07/c98a68571dcf256e74f1f816b8cc5eae6eb2d3d5cfa44d37f801619d9166/websockets-16.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:349f83cd6c9a415428ee1005cadb5c2c56f4389bc06a9af16103c3bc3dcc8b7d", size = 174947, upload-time = "2026-01-10T09:23:36.166Z" }, + { url = "https://files.pythonhosted.org/packages/7e/52/93e166a81e0305b33fe416338be92ae863563fe7bce446b0f687b9df5aea/websockets-16.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:4a1aba3340a8dca8db6eb5a7986157f52eb9e436b74813764241981ca4888f03", size = 175260, upload-time = "2026-01-10T09:23:37.409Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/0c/2dbf513bafd24889d33de2ff0368190a0e69f37bcfa19009ef819fe4d507/websockets-16.0-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f4a32d1bd841d4bcbffdcb3d2ce50c09c3909fbead375ab28d0181af89fd04da", size = 176071, upload-time = "2026-01-10T09:23:39.158Z" }, + { url = "https://files.pythonhosted.org/packages/a5/8f/aea9c71cc92bf9b6cc0f7f70df8f0b420636b6c96ef4feee1e16f80f75dd/websockets-16.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0298d07ee155e2e9fda5be8a9042200dd2e3bb0b8a38482156576f863a9d457c", size = 176968, upload-time = "2026-01-10T09:23:41.031Z" }, + { url = "https://files.pythonhosted.org/packages/9a/3f/f70e03f40ffc9a30d817eef7da1be72ee4956ba8d7255c399a01b135902a/websockets-16.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a653aea902e0324b52f1613332ddf50b00c06fdaf7e92624fbf8c77c78fa5767", size = 178735, upload-time = "2026-01-10T09:23:42.259Z" }, + { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" }, +] + +[[package]] +name = "wrapt" +version = "2.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2e/64/925f213fdcbb9baeb1530449ac71a4d57fc361c053d06bf78d0c5c7cd80c/wrapt-2.1.2.tar.gz", hash = "sha256:3996a67eecc2c68fd47b4e3c564405a5777367adfd9b8abb58387b63ee83b21e", size = 81678, upload-time = "2026-03-06T02:53:25.134Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/81/60c4471fce95afa5922ca09b88a25f03c93343f759aae0f31fb4412a85c7/wrapt-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:96159a0ee2b0277d44201c3b5be479a9979cf154e8c82fa5df49586a8e7679bb", size = 60666, upload-time 
= "2026-03-06T02:52:58.934Z" }, + { url = "https://files.pythonhosted.org/packages/6b/be/80e80e39e7cb90b006a0eaf11c73ac3a62bbfb3068469aec15cc0bc795de/wrapt-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98ba61833a77b747901e9012072f038795de7fc77849f1faa965464f3f87ff2d", size = 61601, upload-time = "2026-03-06T02:53:00.487Z" }, + { url = "https://files.pythonhosted.org/packages/b0/be/d7c88cd9293c859fc74b232abdc65a229bb953997995d6912fc85af18323/wrapt-2.1.2-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:767c0dbbe76cae2a60dd2b235ac0c87c9cccf4898aef8062e57bead46b5f6894", size = 114057, upload-time = "2026-03-06T02:52:44.08Z" }, + { url = "https://files.pythonhosted.org/packages/ea/25/36c04602831a4d685d45a93b3abea61eca7fe35dab6c842d6f5d570ef94a/wrapt-2.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c691a6bc752c0cc4711cc0c00896fcd0f116abc253609ef64ef930032821842", size = 116099, upload-time = "2026-03-06T02:54:56.74Z" }, + { url = "https://files.pythonhosted.org/packages/5c/4e/98a6eb417ef551dc277bec1253d5246b25003cf36fdf3913b65cb7657a56/wrapt-2.1.2-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f3b7d73012ea75aee5844de58c88f44cf62d0d62711e39da5a82824a7c4626a8", size = 112457, upload-time = "2026-03-06T02:53:52.842Z" }, + { url = "https://files.pythonhosted.org/packages/cb/a6/a6f7186a5297cad8ec53fd7578533b28f795fdf5372368c74bd7e6e9841c/wrapt-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:577dff354e7acd9d411eaf4bfe76b724c89c89c8fc9b7e127ee28c5f7bcb25b6", size = 115351, upload-time = "2026-03-06T02:53:32.684Z" }, + { url = "https://files.pythonhosted.org/packages/97/6f/06e66189e721dbebd5cf20e138acc4d1150288ce118462f2fcbff92d38db/wrapt-2.1.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:3d7b6fd105f8b24e5bd23ccf41cb1d1099796524bcc6f7fbb8fe576c44befbc9", size = 111748, upload-time = "2026-03-06T02:53:08.455Z" }, 
+ { url = "https://files.pythonhosted.org/packages/ef/43/4808b86f499a51370fbdbdfa6cb91e9b9169e762716456471b619fca7a70/wrapt-2.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:866abdbf4612e0b34764922ef8b1c5668867610a718d3053d59e24a5e5fcfc15", size = 113783, upload-time = "2026-03-06T02:53:02.02Z" }, + { url = "https://files.pythonhosted.org/packages/91/2c/a3f28b8fa7ac2cefa01cfcaca3471f9b0460608d012b693998cd61ef43df/wrapt-2.1.2-cp311-cp311-win32.whl", hash = "sha256:5a0a0a3a882393095573344075189eb2d566e0fd205a2b6414e9997b1b800a8b", size = 57977, upload-time = "2026-03-06T02:53:27.844Z" }, + { url = "https://files.pythonhosted.org/packages/3f/c3/2b1c7bd07a27b1db885a2fab469b707bdd35bddf30a113b4917a7e2139d2/wrapt-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:64a07a71d2730ba56f11d1a4b91f7817dc79bc134c11516b75d1921a7c6fcda1", size = 60336, upload-time = "2026-03-06T02:54:28.104Z" }, + { url = "https://files.pythonhosted.org/packages/ec/5c/76ece7b401b088daa6503d6264dd80f9a727df3e6042802de9a223084ea2/wrapt-2.1.2-cp311-cp311-win_arm64.whl", hash = "sha256:b89f095fe98bc12107f82a9f7d570dc83a0870291aeb6b1d7a7d35575f55d98a", size = 58756, upload-time = "2026-03-06T02:53:16.319Z" }, + { url = "https://files.pythonhosted.org/packages/4c/b6/1db817582c49c7fcbb7df6809d0f515af29d7c2fbf57eb44c36e98fb1492/wrapt-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ff2aad9c4cda28a8f0653fc2d487596458c2a3f475e56ba02909e950a9efa6a9", size = 61255, upload-time = "2026-03-06T02:52:45.663Z" }, + { url = "https://files.pythonhosted.org/packages/a2/16/9b02a6b99c09227c93cd4b73acc3678114154ec38da53043c0ddc1fba0dc/wrapt-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6433ea84e1cfacf32021d2a4ee909554ade7fd392caa6f7c13f1f4bf7b8e8748", size = 61848, upload-time = "2026-03-06T02:53:48.728Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/aa/ead46a88f9ec3a432a4832dfedb84092fc35af2d0ba40cd04aea3889f247/wrapt-2.1.2-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c20b757c268d30d6215916a5fa8461048d023865d888e437fab451139cad6c8e", size = 121433, upload-time = "2026-03-06T02:54:40.328Z" }, + { url = "https://files.pythonhosted.org/packages/3a/9f/742c7c7cdf58b59085a1ee4b6c37b013f66ac33673a7ef4aaed5e992bc33/wrapt-2.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79847b83eb38e70d93dc392c7c5b587efe65b3e7afcc167aa8abd5d60e8761c8", size = 123013, upload-time = "2026-03-06T02:53:26.58Z" }, + { url = "https://files.pythonhosted.org/packages/e8/44/2c3dd45d53236b7ed7c646fcf212251dc19e48e599debd3926b52310fafb/wrapt-2.1.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f8fba1bae256186a83d1875b2b1f4e2d1242e8fac0f58ec0d7e41b26967b965c", size = 117326, upload-time = "2026-03-06T02:53:11.547Z" }, + { url = "https://files.pythonhosted.org/packages/74/e2/b17d66abc26bd96f89dec0ecd0ef03da4a1286e6ff793839ec431b9fae57/wrapt-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e3d3b35eedcf5f7d022291ecd7533321c4775f7b9cd0050a31a68499ba45757c", size = 121444, upload-time = "2026-03-06T02:54:09.5Z" }, + { url = "https://files.pythonhosted.org/packages/3c/62/e2977843fdf9f03daf1586a0ff49060b1b2fc7ff85a7ea82b6217c1ae36e/wrapt-2.1.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:6f2c5390460de57fa9582bc8a1b7a6c86e1a41dfad74c5225fc07044c15cc8d1", size = 116237, upload-time = "2026-03-06T02:54:03.884Z" }, + { url = "https://files.pythonhosted.org/packages/88/dd/27fc67914e68d740bce512f11734aec08696e6b17641fef8867c00c949fc/wrapt-2.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7dfa9f2cf65d027b951d05c662cc99ee3bd01f6e4691ed39848a7a5fffc902b2", size = 120563, upload-time = "2026-03-06T02:53:20.412Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/9f/b750b3692ed2ef4705cb305bd68858e73010492b80e43d2a4faa5573cbe7/wrapt-2.1.2-cp312-cp312-win32.whl", hash = "sha256:eba8155747eb2cae4a0b913d9ebd12a1db4d860fc4c829d7578c7b989bd3f2f0", size = 58198, upload-time = "2026-03-06T02:53:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/8e/b2/feecfe29f28483d888d76a48f03c4c4d8afea944dbee2b0cd3380f9df032/wrapt-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:1c51c738d7d9faa0b3601708e7e2eda9bf779e1b601dce6c77411f2a1b324a63", size = 60441, upload-time = "2026-03-06T02:52:47.138Z" }, + { url = "https://files.pythonhosted.org/packages/44/e1/e328f605d6e208547ea9fd120804fcdec68536ac748987a68c47c606eea8/wrapt-2.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:c8e46ae8e4032792eb2f677dbd0d557170a8e5524d22acc55199f43efedd39bf", size = 58836, upload-time = "2026-03-06T02:53:22.053Z" }, + { url = "https://files.pythonhosted.org/packages/4c/7a/d936840735c828b38d26a854e85d5338894cda544cb7a85a9d5b8b9c4df7/wrapt-2.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787fd6f4d67befa6fe2abdffcbd3de2d82dfc6fb8a6d850407c53332709d030b", size = 61259, upload-time = "2026-03-06T02:53:41.922Z" }, + { url = "https://files.pythonhosted.org/packages/5e/88/9a9b9a90ac8ca11c2fdb6a286cb3a1fc7dd774c00ed70929a6434f6bc634/wrapt-2.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4bdf26e03e6d0da3f0e9422fd36bcebf7bc0eeb55fdf9c727a09abc6b9fe472e", size = 61851, upload-time = "2026-03-06T02:52:48.672Z" }, + { url = "https://files.pythonhosted.org/packages/03/a9/5b7d6a16fd6533fed2756900fc8fc923f678179aea62ada6d65c92718c00/wrapt-2.1.2-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bbac24d879aa22998e87f6b3f481a5216311e7d53c7db87f189a7a0266dafffb", size = 121446, upload-time = "2026-03-06T02:54:14.013Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/bb/34c443690c847835cfe9f892be78c533d4f32366ad2888972c094a897e39/wrapt-2.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:16997dfb9d67addc2e3f41b62a104341e80cac52f91110dece393923c0ebd5ca", size = 123056, upload-time = "2026-03-06T02:54:10.829Z" }, + { url = "https://files.pythonhosted.org/packages/93/b9/ff205f391cb708f67f41ea148545f2b53ff543a7ac293b30d178af4d2271/wrapt-2.1.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:162e4e2ba7542da9027821cb6e7c5e068d64f9a10b5f15512ea28e954893a267", size = 117359, upload-time = "2026-03-06T02:53:03.623Z" }, + { url = "https://files.pythonhosted.org/packages/1f/3d/1ea04d7747825119c3c9a5e0874a40b33594ada92e5649347c457d982805/wrapt-2.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f29c827a8d9936ac320746747a016c4bc66ef639f5cd0d32df24f5eacbf9c69f", size = 121479, upload-time = "2026-03-06T02:53:45.844Z" }, + { url = "https://files.pythonhosted.org/packages/78/cc/ee3a011920c7a023b25e8df26f306b2484a531ab84ca5c96260a73de76c0/wrapt-2.1.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:a9dd9813825f7ecb018c17fd147a01845eb330254dff86d3b5816f20f4d6aaf8", size = 116271, upload-time = "2026-03-06T02:54:46.356Z" }, + { url = "https://files.pythonhosted.org/packages/98/fd/e5ff7ded41b76d802cf1191288473e850d24ba2e39a6ec540f21ae3b57cb/wrapt-2.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6f8dbdd3719e534860d6a78526aafc220e0241f981367018c2875178cf83a413", size = 120573, upload-time = "2026-03-06T02:52:50.163Z" }, + { url = "https://files.pythonhosted.org/packages/47/c5/242cae3b5b080cd09bacef0591691ba1879739050cc7c801ff35c8886b66/wrapt-2.1.2-cp313-cp313-win32.whl", hash = "sha256:5c35b5d82b16a3bc6e0a04349b606a0582bc29f573786aebe98e0c159bc48db6", size = 58205, upload-time = "2026-03-06T02:53:47.494Z" }, + { url = 
"https://files.pythonhosted.org/packages/12/69/c358c61e7a50f290958809b3c61ebe8b3838ea3e070d7aac9814f95a0528/wrapt-2.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:f8bc1c264d8d1cf5b3560a87bbdd31131573eb25f9f9447bb6252b8d4c44a3a1", size = 60452, upload-time = "2026-03-06T02:53:30.038Z" }, + { url = "https://files.pythonhosted.org/packages/8e/66/c8a6fcfe321295fd8c0ab1bd685b5a01462a9b3aa2f597254462fc2bc975/wrapt-2.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:3beb22f674550d5634642c645aba4c72a2c66fb185ae1aebe1e955fae5a13baf", size = 58842, upload-time = "2026-03-06T02:52:52.114Z" }, + { url = "https://files.pythonhosted.org/packages/da/55/9c7052c349106e0b3f17ae8db4b23a691a963c334de7f9dbd60f8f74a831/wrapt-2.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0fc04bc8664a8bc4c8e00b37b5355cffca2535209fba1abb09ae2b7c76ddf82b", size = 63075, upload-time = "2026-03-06T02:53:19.108Z" }, + { url = "https://files.pythonhosted.org/packages/09/a8/ce7b4006f7218248dd71b7b2b732d0710845a0e49213b18faef64811ffef/wrapt-2.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a9b9d50c9af998875a1482a038eb05755dfd6fe303a313f6a940bb53a83c3f18", size = 63719, upload-time = "2026-03-06T02:54:33.452Z" }, + { url = "https://files.pythonhosted.org/packages/e4/e5/2ca472e80b9e2b7a17f106bb8f9df1db11e62101652ce210f66935c6af67/wrapt-2.1.2-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2d3ff4f0024dd224290c0eabf0240f1bfc1f26363431505fb1b0283d3b08f11d", size = 152643, upload-time = "2026-03-06T02:52:42.721Z" }, + { url = "https://files.pythonhosted.org/packages/36/42/30f0f2cefca9d9cbf6835f544d825064570203c3e70aa873d8ae12e23791/wrapt-2.1.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3278c471f4468ad544a691b31bb856374fbdefb7fee1a152153e64019379f015", size = 158805, upload-time = "2026-03-06T02:54:25.441Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/67/d08672f801f604889dcf58f1a0b424fe3808860ede9e03affc1876b295af/wrapt-2.1.2-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a8914c754d3134a3032601c6984db1c576e6abaf3fc68094bb8ab1379d75ff92", size = 145990, upload-time = "2026-03-06T02:53:57.456Z" }, + { url = "https://files.pythonhosted.org/packages/68/a7/fd371b02e73babec1de6ade596e8cd9691051058cfdadbfd62a5898f3295/wrapt-2.1.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:ff95d4264e55839be37bafe1536db2ab2de19da6b65f9244f01f332b5286cfbf", size = 155670, upload-time = "2026-03-06T02:54:55.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/2d/9fe0095dfdb621009f40117dcebf41d7396c2c22dca6eac779f4c007b86c/wrapt-2.1.2-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:76405518ca4e1b76fbb1b9f686cff93aebae03920cc55ceeec48ff9f719c5f67", size = 144357, upload-time = "2026-03-06T02:54:24.092Z" }, + { url = "https://files.pythonhosted.org/packages/0e/b6/ec7b4a254abbe4cde9fa15c5d2cca4518f6b07d0f1b77d4ee9655e30280e/wrapt-2.1.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c0be8b5a74c5824e9359b53e7e58bef71a729bacc82e16587db1c4ebc91f7c5a", size = 150269, upload-time = "2026-03-06T02:53:31.268Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6b/2fabe8ebf148f4ee3c782aae86a795cc68ffe7d432ef550f234025ce0cfa/wrapt-2.1.2-cp313-cp313t-win32.whl", hash = "sha256:f01277d9a5fc1862f26f7626da9cf443bebc0abd2f303f41c5e995b15887dabd", size = 59894, upload-time = "2026-03-06T02:54:15.391Z" }, + { url = "https://files.pythonhosted.org/packages/ca/fb/9ba66fc2dedc936de5f8073c0217b5d4484e966d87723415cc8262c5d9c2/wrapt-2.1.2-cp313-cp313t-win_amd64.whl", hash = "sha256:84ce8f1c2104d2f6daa912b1b5b039f331febfeee74f8042ad4e04992bd95c8f", size = 63197, upload-time = "2026-03-06T02:54:41.943Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/1c/012d7423c95d0e337117723eb8ecf73c622ce15a97847e84cf3f8f26cd7e/wrapt-2.1.2-cp313-cp313t-win_arm64.whl", hash = "sha256:a93cd767e37faeddbe07d8fc4212d5cba660af59bdb0f6372c93faaa13e6e679", size = 60363, upload-time = "2026-03-06T02:54:48.093Z" }, + { url = "https://files.pythonhosted.org/packages/39/25/e7ea0b417db02bb796182a5316398a75792cd9a22528783d868755e1f669/wrapt-2.1.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:1370e516598854e5b4366e09ce81e08bfe94d42b0fd569b88ec46cc56d9164a9", size = 61418, upload-time = "2026-03-06T02:53:55.706Z" }, + { url = "https://files.pythonhosted.org/packages/ec/0f/fa539e2f6a770249907757eaeb9a5ff4deb41c026f8466c1c6d799088a9b/wrapt-2.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6de1a3851c27e0bd6a04ca993ea6f80fc53e6c742ee1601f486c08e9f9b900a9", size = 61914, upload-time = "2026-03-06T02:52:53.37Z" }, + { url = "https://files.pythonhosted.org/packages/53/37/02af1867f5b1441aaeda9c82deed061b7cd1372572ddcd717f6df90b5e93/wrapt-2.1.2-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:de9f1a2bbc5ac7f6012ec24525bdd444765a2ff64b5985ac6e0692144838542e", size = 120417, upload-time = "2026-03-06T02:54:30.74Z" }, + { url = "https://files.pythonhosted.org/packages/c3/b7/0138a6238c8ba7476c77cf786a807f871672b37f37a422970342308276e7/wrapt-2.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:970d57ed83fa040d8b20c52fe74a6ae7e3775ae8cff5efd6a81e06b19078484c", size = 122797, upload-time = "2026-03-06T02:54:51.539Z" }, + { url = "https://files.pythonhosted.org/packages/e1/ad/819ae558036d6a15b7ed290d5b14e209ca795dd4da9c58e50c067d5927b0/wrapt-2.1.2-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3969c56e4563c375861c8df14fa55146e81ac11c8db49ea6fb7f2ba58bc1ff9a", size = 117350, upload-time = "2026-03-06T02:54:37.651Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/2d/afc18dc57a4600a6e594f77a9ae09db54f55ba455440a54886694a84c71b/wrapt-2.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:57d7c0c980abdc5f1d98b11a2aa3bb159790add80258c717fa49a99921456d90", size = 121223, upload-time = "2026-03-06T02:54:35.221Z" }, + { url = "https://files.pythonhosted.org/packages/b9/5b/5ec189b22205697bc56eb3b62aed87a1e0423e9c8285d0781c7a83170d15/wrapt-2.1.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:776867878e83130c7a04237010463372e877c1c994d449ca6aaafeab6aab2586", size = 116287, upload-time = "2026-03-06T02:54:19.654Z" }, + { url = "https://files.pythonhosted.org/packages/f7/2d/f84939a7c9b5e6cdd8a8d0f6a26cabf36a0f7e468b967720e8b0cd2bdf69/wrapt-2.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:fab036efe5464ec3291411fabb80a7a39e2dd80bae9bcbeeca5087fdfa891e19", size = 119593, upload-time = "2026-03-06T02:54:16.697Z" }, + { url = "https://files.pythonhosted.org/packages/0b/fe/ccd22a1263159c4ac811ab9374c061bcb4a702773f6e06e38de5f81a1bdc/wrapt-2.1.2-cp314-cp314-win32.whl", hash = "sha256:e6ed62c82ddf58d001096ae84ce7f833db97ae2263bff31c9b336ba8cfe3f508", size = 58631, upload-time = "2026-03-06T02:53:06.498Z" }, + { url = "https://files.pythonhosted.org/packages/65/0a/6bd83be7bff2e7efaac7b4ac9748da9d75a34634bbbbc8ad077d527146df/wrapt-2.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:467e7c76315390331c67073073d00662015bb730c566820c9ca9b54e4d67fd04", size = 60875, upload-time = "2026-03-06T02:53:50.252Z" }, + { url = "https://files.pythonhosted.org/packages/6c/c0/0b3056397fe02ff80e5a5d72d627c11eb885d1ca78e71b1a5c1e8c7d45de/wrapt-2.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:da1f00a557c66225d53b095a97eace0fc5349e3bfda28fa34ffae238978ee575", size = 59164, upload-time = "2026-03-06T02:53:59.128Z" }, + { url = "https://files.pythonhosted.org/packages/71/ed/5d89c798741993b2371396eb9d4634f009ff1ad8a6c78d366fe2883ea7a6/wrapt-2.1.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = 
"sha256:62503ffbc2d3a69891cf29beeaccdb4d5e0a126e2b6a851688d4777e01428dbb", size = 63163, upload-time = "2026-03-06T02:52:54.873Z" }, + { url = "https://files.pythonhosted.org/packages/c6/8c/05d277d182bf36b0a13d6bd393ed1dec3468a25b59d01fba2dd70fe4d6ae/wrapt-2.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c7e6cd120ef837d5b6f860a6ea3745f8763805c418bb2f12eeb1fa6e25f22d22", size = 63723, upload-time = "2026-03-06T02:52:56.374Z" }, + { url = "https://files.pythonhosted.org/packages/f4/27/6c51ec1eff4413c57e72d6106bb8dec6f0c7cdba6503d78f0fa98767bcc9/wrapt-2.1.2-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3769a77df8e756d65fbc050333f423c01ae012b4f6731aaf70cf2bef61b34596", size = 152652, upload-time = "2026-03-06T02:53:23.79Z" }, + { url = "https://files.pythonhosted.org/packages/db/4c/d7dd662d6963fc7335bfe29d512b02b71cdfa23eeca7ab3ac74a67505deb/wrapt-2.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a76d61a2e851996150ba0f80582dd92a870643fa481f3b3846f229de88caf044", size = 158807, upload-time = "2026-03-06T02:53:35.742Z" }, + { url = "https://files.pythonhosted.org/packages/b4/4d/1e5eea1a78d539d346765727422976676615814029522c76b87a95f6bcdd/wrapt-2.1.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6f97edc9842cf215312b75fe737ee7c8adda75a89979f8e11558dfff6343cc4b", size = 146061, upload-time = "2026-03-06T02:52:57.574Z" }, + { url = "https://files.pythonhosted.org/packages/89/bc/62cabea7695cd12a288023251eeefdcb8465056ddaab6227cb78a2de005b/wrapt-2.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:4006c351de6d5007aa33a551f600404ba44228a89e833d2fadc5caa5de8edfbf", size = 155667, upload-time = "2026-03-06T02:53:39.422Z" }, + { url = "https://files.pythonhosted.org/packages/e9/99/6f2888cd68588f24df3a76572c69c2de28287acb9e1972bf0c83ce97dbc1/wrapt-2.1.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = 
"sha256:a9372fc3639a878c8e7d87e1556fa209091b0a66e912c611e3f833e2c4202be2", size = 144392, upload-time = "2026-03-06T02:54:22.41Z" }, + { url = "https://files.pythonhosted.org/packages/40/51/1dfc783a6c57971614c48e361a82ca3b6da9055879952587bc99fe1a7171/wrapt-2.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3144b027ff30cbd2fca07c0a87e67011adb717eb5f5bd8496325c17e454257a3", size = 150296, upload-time = "2026-03-06T02:54:07.848Z" }, + { url = "https://files.pythonhosted.org/packages/6c/38/cbb8b933a0201076c1f64fc42883b0023002bdc14a4964219154e6ff3350/wrapt-2.1.2-cp314-cp314t-win32.whl", hash = "sha256:3b8d15e52e195813efe5db8cec156eebe339aaf84222f4f4f051a6c01f237ed7", size = 60539, upload-time = "2026-03-06T02:54:00.594Z" }, + { url = "https://files.pythonhosted.org/packages/82/dd/e5176e4b241c9f528402cebb238a36785a628179d7d8b71091154b3e4c9e/wrapt-2.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:08ffa54146a7559f5b8df4b289b46d963a8e74ed16ba3687f99896101a3990c5", size = 63969, upload-time = "2026-03-06T02:54:39Z" }, + { url = "https://files.pythonhosted.org/packages/5c/99/79f17046cf67e4a95b9987ea129632ba8bcec0bc81f3fb3d19bdb0bd60cd/wrapt-2.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:72aaa9d0d8e4ed0e2e98019cea47a21f823c9dd4b43c7b77bba6679ffcca6a00", size = 60554, upload-time = "2026-03-06T02:53:14.132Z" }, + { url = "https://files.pythonhosted.org/packages/1a/c7/8528ac2dfa2c1e6708f647df7ae144ead13f0a31146f43c7264b4942bf12/wrapt-2.1.2-py3-none-any.whl", hash = "sha256:b8fd6fa2b2c4e7621808f8c62e8317f4aae56e59721ad933bac5239d913cf0e8", size = 43993, upload-time = "2026-03-06T02:53:12.905Z" }, +] + +[[package]] +name = "zopfli" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/4d/a8cc1768b2eda3c0c7470bf8059dcb94ef96d45dd91fc6edd29430d44072/zopfli-0.4.1.tar.gz", hash = "sha256:07a5cdc5d1aaa6c288c5d9f5a5383042ba743641abf8e2fd898dcad622d8a38e", size = 179001, upload-time = 
"2026-02-13T14:17:27.156Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e1/2f/1a7082e9163ae3703b27d571720bf3c954a02a9cf1fdce47c51e70639256/zopfli-0.4.1-cp310-abi3-macosx_10_9_universal2.whl", hash = "sha256:4238d4d746d1095e29c9125490985e0c12ffd3654f54a24af551e2391e936d54", size = 291570, upload-time = "2026-02-13T14:17:12.556Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6f/4a1a88edf9fa0ce102703f38ab4dfb285b7cd2dde5389184264ec759e06e/zopfli-0.4.1-cp310-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fdfb7ce9f5de37a5b2f75dd2642fd7717956ef2a72e0387302a36d382440db07", size = 829437, upload-time = "2026-02-13T14:17:14.431Z" }, + { url = "https://files.pythonhosted.org/packages/e3/77/d231012ddcaac9d2e184bd7808e106a8a0048855912e2e1c902b3f383413/zopfli-0.4.1-cp310-abi3-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d7bcee1b189d64ec33d1e05cfa1b6a1268c29329c382f6ca1bd6245b04925c57", size = 818542, upload-time = "2026-02-13T14:17:16.353Z" }, + { url = "https://files.pythonhosted.org/packages/0d/4e/9b23690c4ca14fbeae2a8f7f6b2006611bf4cd7d5bcb2d9e6c718bd4b0e9/zopfli-0.4.1-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:27823dc1161a4031d1c25925fd45d9868ec0cbc7692341830a7dcfa25063662c", size = 1778034, upload-time = "2026-02-13T14:17:17.509Z" }, + { url = "https://files.pythonhosted.org/packages/e3/1b/51f7c28d4cde639cac4f5d47ff615548c1d9809f43cbacdd66eba5cd679d/zopfli-0.4.1-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5a4c22b6161f47f5bd34637dbaee6735abd287cd64e0d1ce28ef1871bf625f4b", size = 1863957, upload-time = "2026-02-13T14:17:19.259Z" }, + { url = "https://files.pythonhosted.org/packages/ae/4d/1ef17017d38eabe7ae28f18ef0f16d48966cc23a5657e4555fff61704539/zopfli-0.4.1-cp310-abi3-win32.whl", hash = "sha256:a899eca405662a23ae75054affa3517a060362eae1185d3d791c86a50153c4dd", size = 82314, upload-time = "2026-02-13T14:17:20.795Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/94/806bc84b389c7d70051d7c9a0179cff52de8b9f8dc2fc25bcf0bca302986/zopfli-0.4.1-cp310-abi3-win_amd64.whl", hash = "sha256:84a31ba9edc921b1d3a4449929394a993888f32d70de3a3617800c428a947b9b", size = 102186, upload-time = "2026-02-13T14:17:21.622Z" }, + { url = "https://files.pythonhosted.org/packages/15/53/0afc94574553bad50d7add81f54eed1a864e13f91c3a342c99775a947ff9/zopfli-0.4.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:02086247dd12fda929f9bfe8b3962b6bcdbfc8c82e99255aebcf367867cf0760", size = 147127, upload-time = "2026-02-13T14:17:22.995Z" }, + { url = "https://files.pythonhosted.org/packages/45/2b/0d9e4bdfd3d646a36b8516a01dec4ccd2967554603801e7c2d6c72fede3d/zopfli-0.4.1-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a93c2ecafff372de6c0aa2212eff18a75f6c71a100372fee7b4b129cc0b6f9a7", size = 127349, upload-time = "2026-02-13T14:17:24.107Z" }, + { url = "https://files.pythonhosted.org/packages/23/f0/ad6e26aa06943ce9f1be4ae6738513a7b69d8ea1f3b13e46009a249a3f73/zopfli-0.4.1-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cb136a74d14a4ecfae29cb0fdecece58a6c115abc9a74c12bc6ac62e80f229d7", size = 124371, upload-time = "2026-02-13T14:17:24.976Z" }, + { url = "https://files.pythonhosted.org/packages/7b/36/3c15d564db6dfdd740919b205bdb69be75113e9919c422cde658e6d013c0/zopfli-0.4.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:2f992ac7d83cbddd889e1813ace576cbc91a05d5d7a0a21b366e2e5f492e7707", size = 102199, upload-time = "2026-02-13T14:17:26.246Z" }, +] diff --git a/docker-compose.yml b/docker-compose.yml index 3b0ac77..5e16244 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -50,6 +50,8 @@ services: dockerfile: Dockerfile command: /start.sh environment: + - PYTHONDONTWRITEBYTECODE=${PYTHONDONTWRITEBYTECODE:-1} + - PYTHONPYCACHEPREFIX=${PYTHONPYCACHEPREFIX:-/tmp/pycache} - POSTGRES_DB=${POSTGRES_DB:-hartomat} - 
POSTGRES_USER=${POSTGRES_USER:-hartomat} - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-hartomat} @@ -89,8 +91,12 @@ services: build: context: ./backend dockerfile: Dockerfile + user: "${APP_UID:-1000}:0" command: celery -A app.tasks.celery_app worker --loglevel=info -Q step_processing,ai_validation --autoscale=${MAX_CONCURRENCY:-8},${MIN_CONCURRENCY:-2} --concurrency=${MIN_CONCURRENCY:-2} environment: + - PYTHONDONTWRITEBYTECODE=${PYTHONDONTWRITEBYTECODE:-1} + - PYTHONPYCACHEPREFIX=${PYTHONPYCACHEPREFIX:-/tmp/pycache} + - HOME=/tmp - POSTGRES_DB=${POSTGRES_DB:-hartomat} - POSTGRES_USER=${POSTGRES_USER:-hartomat} - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-hartomat} @@ -123,8 +129,15 @@ services: dockerfile: render-worker/Dockerfile args: - BLENDER_VERSION=${BLENDER_VERSION:-5.0.1} + user: "${APP_UID:-1000}:0" + group_add: + - "44" + - "110" command: bash -c "python3 /check_version.py && celery -A app.tasks.celery_app worker --loglevel=info -Q asset_pipeline --autoscale=1,1 --concurrency=1" environment: + - PYTHONDONTWRITEBYTECODE=${PYTHONDONTWRITEBYTECODE:-1} + - PYTHONPYCACHEPREFIX=${PYTHONPYCACHEPREFIX:-/tmp/pycache} + - HOME=/tmp - POSTGRES_DB=${POSTGRES_DB:-hartomat} - POSTGRES_USER=${POSTGRES_USER:-hartomat} - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-hartomat} @@ -135,13 +148,16 @@ services: - UPLOAD_DIR=/app/uploads - BLENDER_BIN=/opt/blender/blender - RENDER_SCRIPTS_DIR=/render-scripts - - CYCLES_DEVICE=${CYCLES_DEVICE:-auto} + - CYCLES_DEVICE=${CYCLES_DEVICE:-gpu} + - NVIDIA_VISIBLE_DEVICES=all + - NVIDIA_DRIVER_CAPABILITIES=compute,utility,graphics - MINIO_URL=${MINIO_URL:-http://minio:9000} - MINIO_USER=${MINIO_USER:-minioadmin} - MINIO_PASSWORD=${MINIO_PASSWORD:-minioadmin} - MINIO_BUCKET=${MINIO_BUCKET:-uploads} volumes: - ./backend:/app + - ./render-worker/scripts:/render-scripts - uploads:/app/uploads - /opt/blender:/opt/blender:ro - optix-cache:/var/tmp/OptixCache_root # persist OptiX kernel cache across container restarts @@ -165,8 +181,15 @@ services: 
dockerfile: render-worker/Dockerfile args: - BLENDER_VERSION=${BLENDER_VERSION:-5.0.1} + user: "${APP_UID:-1000}:0" + group_add: + - "44" + - "110" command: bash -c "python3 /check_version.py && celery -A app.tasks.celery_app worker --loglevel=info -Q asset_pipeline_light --autoscale=2,2 --concurrency=2" environment: + - PYTHONDONTWRITEBYTECODE=${PYTHONDONTWRITEBYTECODE:-1} + - PYTHONPYCACHEPREFIX=${PYTHONPYCACHEPREFIX:-/tmp/pycache} + - HOME=/tmp - POSTGRES_DB=${POSTGRES_DB:-hartomat} - POSTGRES_USER=${POSTGRES_USER:-hartomat} - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-hartomat} @@ -177,13 +200,16 @@ services: - UPLOAD_DIR=/app/uploads - BLENDER_BIN=/opt/blender/blender - RENDER_SCRIPTS_DIR=/render-scripts - - CYCLES_DEVICE=${CYCLES_DEVICE:-auto} + - CYCLES_DEVICE=${CYCLES_DEVICE:-gpu} + - NVIDIA_VISIBLE_DEVICES=all + - NVIDIA_DRIVER_CAPABILITIES=compute,utility,graphics - MINIO_URL=${MINIO_URL:-http://minio:9000} - MINIO_USER=${MINIO_USER:-minioadmin} - MINIO_PASSWORD=${MINIO_PASSWORD:-minioadmin} - MINIO_BUCKET=${MINIO_BUCKET:-uploads} volumes: - ./backend:/app + - ./render-worker/scripts:/render-scripts - uploads:/app/uploads - /opt/blender:/opt/blender:ro - optix-cache:/var/tmp/OptixCache_root @@ -204,8 +230,12 @@ services: build: context: ./backend dockerfile: Dockerfile - command: celery -A app.tasks.celery_app beat --loglevel=info + user: "${APP_UID:-1000}:0" + command: celery -A app.tasks.celery_app beat --loglevel=info --schedule=/tmp/celerybeat-schedule environment: + - PYTHONDONTWRITEBYTECODE=${PYTHONDONTWRITEBYTECODE:-1} + - PYTHONPYCACHEPREFIX=${PYTHONPYCACHEPREFIX:-/tmp/pycache} + - HOME=/tmp - POSTGRES_DB=${POSTGRES_DB:-hartomat} - POSTGRES_USER=${POSTGRES_USER:-hartomat} - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-hartomat} diff --git a/docs/workflows/CURRENT_EXECUTION_BATCH.md b/docs/workflows/CURRENT_EXECUTION_BATCH.md index f790a6b..02f3a37 100644 --- a/docs/workflows/CURRENT_EXECUTION_BATCH.md +++ b/docs/workflows/CURRENT_EXECUTION_BATCH.md @@ 
-147,11 +147,101 @@ Ergebnis: - Abgeschlossen: Block 6 - Abgeschlossen: Block 7 - Abgeschlossen: Block 8 -- In Arbeit: Block 9 -- Nächster geplanter Folgeblock: Block 9 +- Abgeschlossen: Block 9 +- Parallel in Arbeit: Block 11 +- Vorbereitet: Block 12 +- Nächster geplanter Folgeblock: Block 11 + +## Nächste Orchestrierte Batch-Wellen + +Diese Wellen priorisieren Root-Cause-Arbeit vor weiterer UI-Politur und halten Legacy jederzeit parallel funktionsfähig. + +### Welle P1: Vertrags- und Produktionsmodell-Schließung + +Muss zuerst laufen: + +- `P1-A` Node-Contract Closure + - Backend-Registry und Schema als harte Source of Truth schließen + - Fokus: Family-Konsistenz, param-key-Validierung, vollständige node settings contracts +- `P1-B` Output-Type / Invocation Model Closure + - Output Types als sauberes Workflow-Invocation-Modell abschließen + - Fokus: artifact/family compatibility, editor/API contract clarity, sichere Erstellung neuer Output Types +- `P1-C` Render-Template- und Asset-Library-Inputs als echte Produktionsinputs modellieren + - Fokus: template/material-library/input contracts statt versteckter Defaults + +Warum zuerst: + +- diese drei Blöcke definieren die autoritativen Verträge, an denen Editor, Runtime und Golden-Gates hängen +- weitere Runtime- und E2E-Arbeit bleibt sonst drift-anfällig + +### Welle P2: Runtime-Parität und Graph/Legacy-Unifikation + +Parallelisierbar nach P1-A: + +- `P2-A` Legacy/Graph Module Unification + - gleiche Produktionsmodule in Legacy- und Graph-Pfad verwenden + - Fokus: template resolution, samples/defaults, dispatch parity +- `P2-B` Canonical Graphs / Starter Blueprints / Seed Normalization + - eine kanonische Graph-Quelle statt Drift zwischen backend blueprints, bundles und frontend starters +- `P2-C` Run Inspection Completion + - Preflight, dispatch, comparison und node outputs operativ debugbar machen + +### Welle P3: CAD-/Material-Parität und Editor-Führung + +Parallelisierbar nach P2-A: + +- `P3-A` CAD / Material 
Parity + - instance-aware part/material truth zwischen exporter, viewer und render path schließen +- `P3-B` Editor Organization Around Modules / Families / Input Paths + - gemeinsame Authoring-Surface weiter auf modulare Produktionspfade zuschneiden +- `P3-C` Context Flow Simplification Follow-up + - Kontextauswahl und Output-Type-Einstieg auf die neuen Contracts ausrichten + +### Welle P4: Operative Freigabe und Hygiene + +Nach P2 und P3: + +- `P4-A` Shadow / Graph Rollout Hardening + - pro workflow / output type steuerbar, mit klarem Rückfallpfad +- `P4-B` Sequential Golden / Smoke / E2E Gates + - echte Produktionsfälle mit Templates, Varianten und Output-Types sequenziell absichern +- `P4-C` Test-Infrastruktur / Low-RAM Gates + - reproduzierbare sequentielle Verifikation +- `P4-D` Repo Hygiene / Generated Artifact Root Cause + - Ownership-, Pycache- und generated-file-Ursachen bereinigen + +## Sofort Nächste Disjunkte Arbeitsblöcke + +Für die aktuelle nächste Ausführung werden diese drei Blöcke als kleinste sinnvolle Parallel-Batch vorbereitet: + +- Batch `NB-1`: Node-Contract Closure + - Status: verifiziert + - Fokus: Registry- und Schema-Contracts als harte Source of Truth + - Verifikation: `backend/tests/domains/test_workflow_schema.py`, `backend/tests/domains/test_workflow_node_registry.py` +- Batch `NB-2`: Output-Type / Invocation Model Closure + - Status: verifiziert + - Fokus: Artifact-/Family-/Invocation-Contracts fuer Output Types + - Verifikation: `backend/tests/domains/test_output_types_api.py`, `frontend/src/__tests__/api/outputTypes.test.ts` +- Batch `NB-3`: CAD / Material parity root-cause closure + - Status: verifiziert + - Fokus: part-key-/instance-stabile Materialidentitaet zwischen Exporter, Manifest und Viewer + - Teilfortschritt April 10, 2026: scene-manifest aliasiert jetzt auch exporter-variant keys wie `_af6` auf ihren kanonischen semantischen part key; der Viewer kann damit dieselbe autoritative Materialidentitaet konsumieren wie der 
Manifest-Pfad + - Verifikation: gezielte low-RAM CAD-/Viewer-Tests nach Root-Cause-Fix + - Abschluss April 11, 2026: live HartOMat-Export fuer `7c214057-9982-4d6e-aa87-43bfabfdb709` liefert jetzt `146` Manifest-Parts, `146` Mesh-Nodes, `146` eindeutige `partKey`s, `0` fehlende und `0` duplizierte Zuordnungen; Root Cause war die Kombination aus stale GLB cache plus nicht-atomarem OCC-Overwrite beim Re-Export + +Merge-Reihenfolge: + +1. `NB-1` +2. `NB-2` +3. `NB-3` +4. danach erst weitere Runtime-/Editor-Folgeblöcke ## Letzte Verifikation +- `./scripts/repo_hygiene.sh` +- Ergebnis: Dry-run listet bereinigbare Cache-/Bytecode-Artefakte plus nicht dem aktuellen Nutzer gehörende Generated Files; die Repo-Hygiene deckt jetzt auch `render-worker/scripts/__pycache__` explizit ab +- `find . \! -user "$USER" -not -path './.git/*' -ls | sed -n '1,120p'` +- Ergebnis: verbleibende Ownership-Reste liegen im `render-worker`-Pycache; Compose-Härtung wird nun über `PYTHONPYCACHEPREFIX=/tmp/pycache` auf die Ursache angesetzt - `backend/.venv/bin/pytest backend/tests/test_config_runtime_resolution.py -q` - Ergebnis: 3 Tests grün; Host-Runtime normalisiert Docker-Service-Aliase (`postgres`, `redis`) außerhalb von Containern nun automatisch auf `localhost`, Container-Runtime bleibt unverändert - `backend/.venv/bin/pytest backend/tests/domains/test_workflow_runtime_services.py -q -x` @@ -166,5 +256,17 @@ Ergebnis: - Ergebnis: 5 Tests grün; autoritative Scene-Manifest-Zuweisungen werden nun im Workflow-Renderpfad auf `part_key` und `source_name` gespiegelt, Legacy-Fallback bleibt unverändert - `./backend/.venv/bin/pytest backend/tests/test_part_key_service.py -q` - Ergebnis: 1 Test grün; part-key-basierte Manifest-Auflösung bleibt konsistent +- `python3 scripts/compare_live_cad_parity.py --cad-id 7c214057-9982-4d6e-aa87-43bfabfdb709` +- Ergebnis: Live-CAD-Parität grün; Manifest, ausgeliefertes GLB und Viewer-`partKey`-Grundlage stimmen für alle 146 renderbaren Teile exakt überein - `cd 
frontend && npx vitest run src/__tests__/components/workflowEditorUi.test.tsx src/__tests__/api/outputTypes.test.ts --pool forks --poolOptions.forks.singleFork=true` - Ergebnis: 20 Tests grün, sequenziell ausgeführt +- `cd frontend && npm test -- src/__tests__/components/workflowEditorUi.test.tsx src/__tests__/components/workflowAuthoringGuidance.test.ts` +- Ergebnis: 17 Tests grün; die gemeinsame Authoring-Surface bleibt nach dem jüngsten Wiring-Refactor stabil +- `cd frontend && npm run build` +- Ergebnis: Build grün; `/workflows` bleibt kompilierbar nach dem Authoring-Refactor +- `./backend/.venv/bin/pytest backend/tests/test_part_key_service.py -q` +- Ergebnis: 6 Tests grün; scene-manifest deckt jetzt neben `_2`/`_3` auch exporter-style `_af*`-Varianten ueber semantische Alias-Keys ab +- `./backend/.venv/bin/pytest backend/tests/test_export_step_to_gltf.py -q` +- Ergebnis: 3 Tests grün; GLB partKey stamping bleibt mit semantic-sibling-Aufloesung stabil +- `cd frontend && npm test -- src/__tests__/components/cadUtils.test.ts` +- Ergebnis: 11 Tests grün; Viewer-seitige part-key-Aufloesung bleibt nach dem Manifest-Alias-Fix konsistent diff --git a/docs/workflows/FIRST_WAVE_EXECUTION.md b/docs/workflows/FIRST_WAVE_EXECUTION.md new file mode 100644 index 0000000..039848e --- /dev/null +++ b/docs/workflows/FIRST_WAVE_EXECUTION.md @@ -0,0 +1,252 @@ +# First Wave Execution Plan + +## Goal + +Translate the worker orchestration into concrete first patch slices that can be executed in parallel without breaking legacy rendering. 
+ +This first wave is intentionally conservative: + +- close contract holes before broad UI refactors +- avoid cross-cutting runtime rewrites in the same slice +- keep all graph changes legacy-safe + +## Current Codebase Snapshot + +### Block A: Node Contracts + +Already in place: + +- node definitions already expose `family`, `module_key`, `execution_kind`, `input_contract`, `output_contract`, `artifact_roles_*`, and `legacy_source` +- frontend already consumes node definitions and uses family-aware palette grouping +- `glb_bbox` already gained a real editor field for `glb_path` + +Still missing: + +- backend workflow schema validation is still DAG-structural only +- backend does not yet validate graph family consistency against the registry +- backend does not yet validate node params against registry-owned field definitions +- editor-visible nodes with weak or absent settings still need an explicit audit, especially export/CAD bridge nodes +- status note: the canonical still-path bridge nodes have now been expanded with real settings for template override, material override/disable, auto-populate persistence controls, GLB source preference, output-save artifact expectations, and notify arming. The remaining audit emphasis is export/CAD parity rather than still-path hidden defaults. + +### Block D: Output-Type Invocation Contracts + +Already in place: + +- `workflow_family`, `artifact_kind`, and `invocation_overrides` exist +- backend already blocks mixed-family workflow links and direct family mismatches +- frontend admin UI already exposes family, artifact kind, and workflow selection + +Still missing: + +- workflow selection can still communicate contract semantics more clearly +- status note: backend now rejects workflow-family mismatches, mixed-family workflow links, and workflow/artifact mismatches against the linked workflow graph. 
The admin form has also been re-ordered into workflow-contract and invocation-profile sections so renderer details no longer dominate the first screenful. + +### Block E: Editor Organization + +Already in place: + +- right-click canvas insertion exists +- searchable node command menu exists +- split of `legacy` / `bridge` / `graph` node groupings exists +- auto-align exists +- edge deletion exists via button, Delete key, right-click, and double-click +- the workflow toolbar has been compressed so context, mode, authoring actions, and run actions sit in one compact canvas-adjacent strip + +Still missing: + +- the editor is feature-rich but still structurally busy +- node insertion, run inspection, and inspector controls need clearer hierarchy +- UX cleanup should follow contract cleanup so the editor does not encode unstable assumptions + +### Block I: Rollout And Regression Gates + +Already in place: + +- `legacy`, `graph`, and `shadow` dispatch modes exist +- graph mode falls back to legacy on failure +- shadow mode keeps legacy authoritative +- workflow comparison endpoint exists with hash, dimensions, and mean pixel delta reporting +- there is meaningful backend coverage for shadow dispatch and comparison behavior +- the live rollout harness now exposes `--workflow-still-smoke` and `--workflow-golden-suite` paths for canonical still and representative graph cases + +Still missing: + +- per-workflow and per-output-type rollout enablement is still an operational step, not yet a guided product workflow +- smoke and golden harnesses still depend on live stack health and seeded render fixtures, so parity coverage is not yet CI-grade deterministic + +## Parallel Patch Slices + +### Slice A1: Registry-Backed Schema Validation + +Owner: + +- Block A worker + +Files: + +- `backend/app/domains/rendering/workflow_schema.py` +- `backend/tests/domains/test_workflow_schema.py` +- `backend/tests/domains/test_workflow_node_registry.py` + +Patch scope: + +- add backend 
validation that all nodes in a graph belong to one family unless explicitly allowed for migration +- validate that node params only use keys declared by the node registry +- return clear validation errors that name the offending node id, step, and param key + +Why this slice first: + +- it closes the largest backend contract gap without changing runtime execution +- it gives Block D and Block E a stable source of truth to build on + +Acceptance: + +- a mixed-family graph without migration exemption is rejected +- unknown node param keys are rejected +- current canonical still graph remains valid + +### Slice A2: Export/CAD Contract Audit + +Owner: + +- Block A worker + +Files: + +- `backend/app/domains/rendering/workflow_node_registry.py` +- `backend/tests/domains/test_workflow_node_registry.py` + +Patch scope: + +- audit `occ_glb_export`, `export_blend`, `thumbnail_save`, and `stl_cache_generate` +- add real field definitions only where runtime behavior genuinely supports editable inputs +- avoid fake settings just to make the editor look complete + +Acceptance: + +- each audited node either has a justified field schema or an explicit no-settings contract backed by tests + +### Slice D1: Artifact/Family Validation Tightening + +Owner: + +- Block D worker + +Files: + +- `backend/app/domains/rendering/output_type_contracts.py` +- `backend/app/api/routers/output_types.py` +- `backend/tests/domains/test_output_types_api.py` + +Patch scope: + +- define artifact-kind compatibility rules per workflow family +- reject impossible combinations early in create/edit APIs +- keep legacy output types renderable if they predate strict linkage + +Acceptance: + +- `cad_file` output types cannot declare order-line-only artifact kinds +- invalid create/edit payloads fail with actionable errors +- existing valid output types still load and render + +### Slice D2: Workflow-First Output-Type Form Cleanup + +Owner: + +- Block D worker with light coordination from Block E + +Files: + +- 
`frontend/src/components/admin/OutputTypeTable.tsx` +- `frontend/src/api/outputTypes.ts` + +Patch scope: + +- reorder the form to lead with family, workflow, artifact kind, then invocation overrides +- visually separate invocation profile fields from legacy compatibility fields +- preserve current API payload shape + +Acceptance: + +- a new output type can be created from top to bottom as a workflow invocation profile +- renderer-specific detail fields no longer dominate the first screenful +- status: completed with a four-section admin form (`Workflow Contract`, `Invocation Profile`, `Renderer Compatibility / Legacy Details`, `Catalog / Business`) while preserving the existing API payload and legacy fallback behavior + +### Slice E1: Workflow Editor Header Simplification + +Owner: + +- Block E worker + +Files: + +- `frontend/src/components/workflows/WorkflowCanvasToolbar.tsx` +- `frontend/src/pages/WorkflowEditor.tsx` +- `frontend/src/__tests__/components/workflowEditorUi.test.tsx` + +Patch scope: + +- compress the oversized top area +- move non-primary metadata into secondary badges or sidebar context +- keep node insertion, align, save, dry-run, and run controls near the canvas + +Acceptance: + +- above-the-fold editor space is materially reduced +- primary actions remain visible without scrolling +- existing right-click and edge deletion behaviors remain intact + +### Slice I1: Golden-Case Rollout Harness + +Owner: + +- Block I worker + +Files: + +- `scripts/test_render_pipeline.py` +- `backend/tests/domains/test_workflow_dispatch_service.py` +- `docs/workflows/WORKFLOW_DELIVERY_CHECKLIST.md` + +Patch scope: + +- add a canonical non-legacy still workflow smoke path to the render test script +- record whether the run was legacy, graph, or shadow and surface comparison output when shadow is used +- document the rollout gate needed before enabling graph mode on real output types + +Acceptance: + +- one command can exercise the canonical graph still path end to end 
+- the script clearly reports whether rollout conditions were met or blocked + +## Merge Order Inside First Wave + +1. Slice A1 +2. Slice D1 +3. Slice A2 +4. Slice D2 +5. Slice E1 +6. Slice I1 + +## Integration Gates + +### Gate FW-1 + +- Slice A1 merged +- schema validation errors are deterministic and test-covered + +### Gate FW-2 + +- Slice D1 merged +- output-type API rules align with the tightened workflow contracts + +### Gate FW-3 + +- Slice E1 merged +- editor remains functionally complete after toolbar simplification + +### Gate FW-4 + +- Slice I1 merged +- canonical graph still smoke path is runnable and documented diff --git a/docs/workflows/NEXT_BATCH_ORCHESTRATION.md b/docs/workflows/NEXT_BATCH_ORCHESTRATION.md new file mode 100644 index 0000000..4fc1269 --- /dev/null +++ b/docs/workflows/NEXT_BATCH_ORCHESTRATION.md @@ -0,0 +1,399 @@ +# Next Batch Orchestration + +## Goal + +Define the next sensible implementation batch after the export/CAD contract audit, with work split for parallel execution and an integration order that keeps the legacy workflow operational. + +## Current Batch + +### Batch B1: Test Infrastructure Recovery + +Purpose: +Restore deterministic backend test execution so workflow parity work can be validated against real DB-backed tests again. 
+ +Why now: + +- current targeted DB-backed tests fail on missing tables such as `users` and `cad_files` +- this blocks trustworthy validation for further workflow runtime work + +Primary ownership: + +- `backend/tests/**` +- `backend/app/database.py` +- `backend/app/config.py` only if required for test bootstrapping + +Acceptance: + +- targeted workflow tests create their schema reliably +- DB-backed pytest runs do not fail due to missing core tables +- no production runtime behavior changes unless strictly required for test setup correctness + +### Batch B2: Workflow Editor Authoring Organization + +Purpose: +Reduce authoring friction in `/workflows` by tightening node organization around family, module, and execution role while reclaiming canvas space. + +Why now: + +- the editor already has the needed primitives +- the remaining gap is structural clarity, not missing mechanics + +Primary ownership: + +- `frontend/src/components/workflows/**` +- `frontend/src/pages/WorkflowEditor.tsx` +- `frontend/src/__tests__/components/workflowEditorUi.test.tsx` + +Acceptance: + +- top-area clutter is reduced +- node discovery is cleaner by family/module grouping +- right-click insertion, edge deletion, align, dry-run, and run inspection still work + +### Batch B3: Canonical Still Path Smoke-Harness Closure + +Purpose: +Move the non-legacy still workflow closer to a runnable, documented smoke path without weakening legacy fallback. 
+ +Why now: + +- backend graph/runtime coverage is already substantial +- the next risk is proving that the canonical still graph can be exercised as a stable rollout candidate + +Primary ownership: + +- `backend/app/domains/rendering/**` +- `backend/app/domains/pipeline/tasks/**` +- `backend/tests/domains/test_workflow_*.py` +- `scripts/test_render_pipeline.py` +- `docs/workflows/**` where needed + +Acceptance: + +- canonical still graph path has a bounded smoke route +- graph-vs-legacy safety remains explicit +- remaining blockers are documented as concrete runtime or fixture issues, not vague parity claims + +## Updated Immediate Next Batch + +The next implementation batch should now be cut along contract and root-cause boundaries instead of UI-only slices. + +### Batch N1: Node-Contract Closure + +Purpose: +Make the backend node registry and workflow schema the authoritative source for graph family safety, parameter validity, and editor-visible node settings. + +Why now: + +- authoring UX is already good enough to build on +- remaining parity work depends on trustworthy backend-owned contracts + +Primary ownership: + +- `backend/app/domains/rendering/workflow_node_registry.py` +- `backend/app/domains/rendering/workflow_schema.py` +- `backend/tests/domains/test_workflow_node_registry.py` +- `backend/tests/domains/test_workflow_schema.py` + +Acceptance: + +- unknown node param keys are rejected deterministically +- family drift is blocked by schema validation +- every production-facing node has either a justified field schema or an explicit no-settings contract +- existing canonical still graph remains valid + +### Batch N2: Output-Type / Invocation Model Closure + +Purpose: +Finish the shift from legacy renderer flags to a real workflow invocation model with explicit family, artifact, and override semantics. 
+ +Why now: + +- new output types and workflow-linked variants still depend on this contract being airtight +- this is the clean boundary between product configuration and runtime dispatch + +Primary ownership: + +- `backend/app/domains/rendering/output_type_contracts.py` +- `backend/app/api/routers/output_types.py` +- `backend/app/domains/rendering/schemas.py` +- `backend/app/domains/rendering/models.py` +- `frontend/src/api/outputTypes.ts` +- `frontend/src/components/admin/OutputTypeTable.tsx` + +Acceptance: + +- impossible workflow/artifact/family combinations are rejected early +- new output types can be created top-to-bottom as invocation profiles +- legacy-safe output types continue to render + +### Batch N3: CAD / Material Parity Root-Cause Closure + +Purpose: +Close the remaining instance- and part-key-related drift between CAD exporter, GLTF viewer, preview, and downstream render consumption. + +Why now: + +- this is still a real production blocker, not a polish item +- workflow parity stays superficial until geometry/material identity is stable + +Primary ownership: + +- `backend/app/domains/pipeline/tasks/export_glb.py` +- `backend/app/domains/pipeline/tasks/extract_metadata.py` +- `backend/app/services/part_key_service.py` +- `frontend/src/components/cad/cadUtils.ts` +- `frontend/src/components/cad/ThreeDViewer.tsx` +- `frontend/src/components/cad/InlineCadViewer.tsx` + +Acceptance: + +- viewer and manifest resolve the same authoritative material identity +- unresolved nodes are surfaced explicitly instead of silently using pseudo keys +- legacy preview and render behavior remain intact + +## Parallelization Rule + +These three blocks should be prepared in parallel, but merged in order: + +1. `N1` first, because it establishes the source of truth for the other two. +2. `N2` second, because it builds directly on those contracts. +3. `N3` can be investigated in parallel, but should merge after `N1` unless it proves fully isolated. 
+ +## Gate For The Following Batch + +For the updated immediate next batch, the following gate applies: + +- `N1` has deterministic backend validation and focused tests +- `N2` preserves legacy-safe output types while tightening impossible combinations +- `N3` proves the authoritative part/material identity chain with a focused low-RAM verification sequence + +## Current Execution Status + +- `N1` verified + - focused checks green on April 10, 2026: + - `backend/tests/domains/test_workflow_schema.py` + - `backend/tests/domains/test_workflow_node_registry.py` +- `N2` verified + - focused checks green on April 10, 2026: + - `backend/tests/domains/test_output_types_api.py` + - `frontend/src/__tests__/api/outputTypes.test.ts` +- `N3` in progress + - April 10, 2026: + - scene-manifest alias coverage expanded for exporter `_af*` suffix keys + - order-line runtime now prefers authoritative scene-manifest assignments where manifest metadata exists + - inline and fullscreen viewers now share the same manifest-plus-fallback merge contract + - unresolved meshes are now surfaced explicitly in both viewers instead of silently disappearing behind pseudo keys + - output-type authoring now consumes a backend-authored contract catalog for family/artifact/format/override constraints + - next action: manual product-level parity check plus B04 node/module contract completion + +## Executable Block List + +Die naechste sinnvolle Abarbeitung ist nicht mehr nach einzelnen Features, sondern nach stabilen Vertrags- und Produktionsgrenzen geschnitten. 
+ +### Batch Wave W1: Identity And Contract Closure + +- `B01` CAD manifest alias closure + - Ziel: scene-manifest und viewer auf denselben kanonischen semantischen part keys bringen + - Fokus: exporter-style suffixe wie `_af6`, `_af0_asm`, dedup keys, alias inheritance + - Status: abgeschlossen am April 10, 2026 + - Gate: `backend/tests/test_part_key_service.py`, `backend/tests/test_export_step_to_gltf.py`, `frontend/src/__tests__/components/cadUtils.test.ts` +- `B02` CAD viewer/manifest convergence + - Ziel: unresolved parts explizit sichtbar halten, aber alle autoritativ aufloesbaren parts im Viewer korrekt materialisieren + - Fokus: `ThreeDViewer`, `InlineCadViewer`, logical part keys, reconciliation UX + - Parallel zu: `B03`, `B04` + - Status: abgeschlossen am April 10, 2026 + - April 10, 2026: + - inline und fullscreen viewer auf denselben `buildEffectiveViewerMaterials(...)` contract gezogen + - unresolved meshes werden explizit gezaehlt und sichtbar angezeigt statt pseudo-keys zu synthetisieren + - focused gates gruen: + - `frontend/src/__tests__/components/cadUtils.test.ts` + - `npm run build` + - `./scripts/workflow_sequential_gates.sh` + - Gate: manueller Produktcheck gegen reales CAD-Beispiel +- `B03` Output-type authoring closure + - Ziel: neue output types workflow-first und ohne hidden legacy assumptions anlegbar machen + - Fokus: form-state, invocation overrides, artifact/family guards, defaults + - Parallel zu: `B02`, `B04` + - Status: abgeschlossen am April 10, 2026 + - April 10, 2026: + - backend publiziert `GET /api/output-types/contract-catalog` als read-only Source of Truth fuer Family-, Artifact-, Format- und Override-Regeln + - `frontend/src/api/outputTypes.ts` nutzt den Catalog mit lokalem Fallback statt Hardcode-Regeltabellen als primaere Truth + - `OutputTypeTable` speist Family-, Artifact- und Rollout-Auswahl jetzt aus dem Backend-Catalog + - focused gates gruen: + - `backend/tests/domains/test_output_types_api.py` + - 
`frontend/src/__tests__/api/outputTypes.test.ts` + - frontend `npm run build` + - `./scripts/workflow_sequential_gates.sh` + - Gate: `backend/tests/domains/test_output_types_api.py`, `frontend/src/__tests__/api/outputTypes.test.ts` +- `B04` Node/module contract completion + - Ziel: jede produktionsrelevante node hat einen klaren backend-owned settings/input/output contract + - Fokus: registry metadata, no-settings contracts, family-safe validation + - Gate: `backend/tests/domains/test_workflow_schema.py`, `backend/tests/domains/test_workflow_node_registry.py` + +### Batch Wave W2: Canonical Authoring Surface + +- `B05` Family-scoped node organization + - Ziel: CAD, Bridge und Graph nodes im Editor klar trennen und suchbar halten + - Fokus: family/module grouping, right-click search, low-noise discovery + - Abhaengigkeit: `B04` + - Status: abgeschlossen am April 10, 2026 + - April 10, 2026: + - raw node catalog ist jetzt family-first organisiert: family -> module -> stage -> category + - family/module runtime chips und stage scopes bleiben sichtbar, ohne zur stage-first Navigation zurueckzufallen + - focused gates gruen: + - `frontend/src/__tests__/components/workflowNodeCatalog.test.ts` + - `frontend/src/__tests__/components/workflowEditorUi.test.tsx` + - frontend `npm run build` +- `B06` Authoring surface simplification + - Ziel: eine gemeinsame authoring surface fuer canvas-menu, sidebar und starter paths + - Fokus: shared controller/model statt mehrfacher UI-Sonderlogik + - Abhaengigkeit: `B04` + - Status: abgeschlossen am April 10, 2026 + - April 10, 2026: + - `workflowAuthoringSurface.ts` kapselt jetzt shared section resolution, active-section validity und insert bindings als gemeinsame Surface-Controller-Logik + - `NodeCommandMenu` und `NodeDefinitionsPanel` nutzen denselben Controller statt paralleler lokaler Section-/Insert-State-Implementierungen + - focused gates gruen: + - `frontend/src/__tests__/components/workflowEditorUi.test.tsx` + - 
`frontend/src/__tests__/components/workflowAuthoringGuidance.test.ts` + - frontend `npm run build` +- `B07` Canvas ergonomics closure + - Ziel: edge deletion, auto-align, reduced top-area clutter, faster insertion paths sauber zusammenziehen + - Fokus: interaction consistency statt punktuelle UX-Patches + - Abhaengigkeit: `B06` + - Status: in Arbeit am April 10, 2026 + - April 10, 2026: + - `WorkflowCanvasToolbar` auf kompaktere, wiederverwendbare Action-/Field-/Badge-Bausteine gezogen + - Top-Area auf zwei dichtere Informationsreihen reduziert: Identitaet/Status oben, Context/Hint-Rail unten + - focused gates gruen: + - `frontend/src/__tests__/components/workflowEditorUi.test.tsx` + - frontend `npm run build` +- `B08` Starter graph and module bundle normalization + - Ziel: starter blueprints, reference bundles und seed workflows auf dieselben kanonischen module paths ziehen + - Fokus: still graph, CAD intake graph, bundle drift verhindern + - Parallel zu: `B07` + +### Batch Wave W3: Runtime And Production Parity + +- `B09` Template-aware runtime unification + - Ziel: legacy und graph nutzen dieselbe template/material/output orchestration + - Fokus: template resolution, samples/transparency, publish semantics + - Abhaengigkeit: `B03`, `B04`, `B08` +- `B10` Non-legacy still smoke closure + - Ziel: der kanonische still graph wird als wiederholbarer smoke path belastbar + - Fokus: preflight, dispatch, authoritative output_save, failure visibility + - Abhaengigkeit: `B09` +- `B11` Template parity matrix + - Ziel: graph vs legacy mit echten render-templates, output-varianten und alpha/sample settings vergleichen + - Fokus: echte parity-beweise statt pillow-only checks + - Abhaengigkeit: `B09` +- `B12` CAD intake moduleization + - Ziel: CAD import/extract/export/bbox/material-steps als echte workflow-module verfuegbar machen + - Fokus: node-based production fuer intake workflows + - Abhaengigkeit: `B04`, `B08` + +### Batch Wave W4: Operational Rollout + +- `B13` Rollout 
and fallback controls + - Ziel: graph/shadow/legacy pro workflow und pro output type sicher steuerbar halten + - Fokus: rollout mode, immediate rollback, operator clarity + - Abhaengigkeit: `B10`, `B11` +- `B14` Sequential E2E gates + - Ziel: low-RAM, reproduzierbare smoke/golden/browser gates fuer `/workflows` + - Fokus: sequenzielle statt parallele Verifikation + - Abhaengigkeit: `B10`, `B11`, `B13` +- `B15` Repo hygiene root-cause closure + - Ziel: generated artifacts, root-owned caches und compose-side effects ursachenseitig schliessen + - Fokus: ownership, pycache, build artifacts, helper script cleanup + - Kann parallel laufen zu: `B10` bis `B14` + +## Recommended Immediate Parallel Batch + +Die naechste sinnvolle Batch mit minimalen Konflikten ist: + +1. `B02` lokal auf explizite unresolved-state-Fuehrung und viewer-level parity checks ziehen +2. `B03` parallel als contract-catalog dedup zwischen Backend und Admin-UI bearbeiten +3. `B15` parallel als Hygiene-Nebenstrang treiben + +Danach: + +1. `B04` +2. `B05` bis `B08` als Authoring-Welle +3. `B09` bis `B12` als Runtime-/Produktionswelle + +## Latest Verification + +- April 10, 2026: + - `./scripts/workflow_sequential_gates.sh` gruen + - backend runtime gates: `34 passed` + - frontend workflow/editor gates: `23 passed` + +## Executable Next Queue + +Die naechsten 12 Blöcke werden ab jetzt als eine gemeinsame Queue gefahren. Parallel bedeutet hier: Analyse, Vorbereitung und isolierte Teilpatches koennen parallel laufen. Merge und Verifikation bleiben bewusst sequentiell. + +### Queue Q2: Merge Order + +1. `B04-a` Node text-contract validation + - Ziel: unvalidierte produktionsrelevante Text-Inputs im Registry-/Schema-Layer schliessen + - Scope: `workflow_node_registry.py`, `workflow_schema.py`, `test_workflow_schema.py` + - Gate: neue Schema-Tests fuer UUID, absolute Pfade, float-string, hex-color, suffix-format + - Status: abgeschlossen am April 10, 2026 +2. 
`B04-b` Node registry invariants + - Ziel: defaults/fields/module_key/input-output-contracts als Registry-Invarianten pruefen + - Scope: `test_workflow_node_registry.py` + - Gate: registryweite Invariant-Tests gruen + - Status: abgeschlossen am April 10, 2026 +3. `B06` Shared authoring surface + - Ziel: gemeinsame Authoring-Schicht fuer Canvas-Menu, Sidebar und Starter-Aktionen + - Why now: verhindert doppelte UI-Logik in `NodeCommandMenu` und `NodeDefinitionsPanel` + - Status: abgeschlossen am April 10, 2026 +4. `B05` Family-scoped node organization + - Ziel: modul-/family-basierte Node-Organisation auf der gemeinsamen Authoring-Schicht + - Status: abgeschlossen am April 10, 2026 +5. `B07` Canvas ergonomics closure + - Ziel: reduzierte Top-Area, konsistente Edge-/Insert-Interaktionen, Auto-Align sauber abschliessen +6. `B08` Starter graph and module bundle normalization + - Ziel: Blueprints, Bundles und New-Workflow-Einstieg auf dieselben kanonischen Pfade ziehen +7. `B09` Template-aware runtime unification + - Ziel: Graph und Legacy durch dieselbe Template-/Output-Orchestrierung fuehren +8. `B10` Non-legacy still smoke closure + - Ziel: kanonischer Still-Graph als wiederholbarer Smoke-Pfad mit klarer Fehlerflaeche +9. `B11` Template parity matrix + - Ziel: echte Graph-vs-Legacy-Vergleiche mit Render-Templates, Varianten und Alpha/Samples +10. `B12` CAD intake moduleization + - Ziel: CAD-Import/Extract/Export/BBox als echte Production-Module im Editor und in der Runtime +11. `B13` Rollout and fallback controls + - Ziel: Graph/Shadow/Legacy pro Workflow und Output-Type operativ steuerbar halten +12. `B14` Sequential E2E gates + - Ziel: Low-RAM Golden-/Smoke-/Browser-Gates fuer `/workflows` +13. 
`B15` Repo hygiene root-cause closure + - Ziel: generated artifacts, root-owned caches und compose side effects ursachenseitig schliessen + - Parallel vorbereitbar zu `B09` bis `B14` + +### Queue Q2: Parallel Preparation Tracks + +- Track A: Backend contracts + - aktiv: `B04-a`, danach `B04-b` + - Merge-Blocker fuer fast alle Folgearbeiten +- Track B: Frontend authoring refactor + - Vorbereitung jetzt, Merge erst nach `B04` + - Reihenfolge laut Analyse: `B06` -> `B05` -> `B07` -> `B08` +- Track C: Runtime and parity + - Investigation parallel moeglich + - Merge-Reihenfolge: `B09` -> `B10` -> `B11` -> `B12` -> `B13` -> `B14` +- Track D: Hygiene + - Root-cause Sammlung und Script-Haertung parallel + - Merge spaet, solange keine produktionskritische Blockade sichtbar wird + +### Queue Q2: Immediate Start + +- Aktiver Implementierungsblock: `B07` +- Bereits abgeschlossene Merge-Slices in dieser Queue: + - `B04-a` + - `B04-b` + - `B06` + - `B05` +- Vorbereitete Folgeblöcke: + - `B07` + - `B08` diff --git a/docs/workflows/NODE_BASED_PRODUCTION_ARCHITECTURE.md b/docs/workflows/NODE_BASED_PRODUCTION_ARCHITECTURE.md new file mode 100644 index 0000000..4a7cc03 --- /dev/null +++ b/docs/workflows/NODE_BASED_PRODUCTION_ARCHITECTURE.md @@ -0,0 +1,245 @@ +# Node-Based Production Architecture + +## Purpose + +Define the target model for a reusable, node-based production system where workflow steps are backend-owned modules, the editor is schema-driven, and legacy rendering stays operational during migration. + +## Problem Statement + +The current workflow system already has meaningful extraction work: + +- bridge/runtime services exist for setup, template resolution, material mapping, bbox resolution, publish, and notify +- graph execution can already orchestrate still, turntable, and blend export flows +- the editor already consumes backend node definitions + +What is still missing is a clean production model. 
+ +Today, three different concerns are still partially collapsed into each other: + +1. `OutputType` as user-facing commercial/render choice +2. workflow graph as orchestration definition +3. legacy/internal render settings as implicit execution contract + +That makes it hard to: + +- reuse a process step like CAD import as a true module +- expose all node settings coherently in the editor +- bind output types to workflows without fragile implicit assumptions +- preserve legacy behavior while enabling graph-native production + +## Target Model + +### 1. Production Module + +A production module is the canonical backend capability unit. + +Examples: + +- `cad.resolve_step_path` +- `cad.extract_objects` +- `cad.export_glb` +- `cad.compute_bbox` +- `materials.resolve_map` +- `materials.auto_populate` +- `render.resolve_template` +- `render.blender_still` +- `render.blender_turntable` +- `output.publish_asset` +- `output.notify` + +Rules: + +- modules are backend-owned +- modules define typed input contract, output contract, defaults, and execution semantics +- modules are reusable from legacy code, graph runtime, shadow mode, and tests +- modules must not depend on editor-only UI metadata + +### 2. Workflow Node + +A workflow node is an orchestration wrapper around a production module. + +It adds: + +- node id +- graph connectivity +- per-instance parameter overrides +- editor UI metadata +- retry/failure policy + +It must not redefine business behavior that belongs to the production module itself. + +Implication: + +- the node registry should evolve from "palette metadata + field definitions" to "module-backed node definitions" +- `step` should remain the stable runtime key, but internally map to a reusable module contract + +### 3. 
Workflow Family + +Workflows must be separated into runtime families, not inferred ad hoc: + +- `cad_file` +- `order_line` + +Family drives: + +- valid entry context +- allowed node palette +- validation rules +- available output contracts +- preflight expectations + +Mixed-family graphs may still exist temporarily for migration visibility, but must not be the target authoring model. + +### 4. Output Type as Invocation Profile + +`OutputType` should no longer be treated as a loose bag of renderer flags. + +It should be the product-facing invocation profile for a workflow: + +- commercial name and visibility +- compatible categories +- pricing tier binding +- workflow family +- linked workflow definition +- invocation-level parameter overrides +- output artifact contract + +Examples of invocation-level overrides: + +- resolution +- samples +- engine +- transparency +- animation timing +- material override + +Examples of artifact contract: + +- still image +- turntable video +- production `.blend` +- preview thumbnail +- future exported package types + +This keeps the responsibility split clean: + +- workflow definition answers: "what steps run and in what order?" +- output type answers: "what productized variant of that workflow do we sell and with which defaults/constraints?" + +## Required Refactor Direction + +### A. Formalize Node Contracts + +Extend the node registry so each definition exposes: + +- `family` +- `module_key` +- `input_contract` +- `output_contract` +- `param_schema` +- `artifact_roles_produced` +- `artifact_roles_consumed` +- `legacy_source` + +Current definitions already cover labels, categories, defaults, and fields. They do not yet fully express machine-usable production contracts. + +### B. Promote Runtime Services to Module Layer + +The extracted bridge/runtime services are the right foundation. They now need a clear module boundary so both legacy and graph runtimes call the same backend operation layer. 
+ +Desired shape: + +- legacy pipeline calls module layer directly +- graph runtime calls module layer directly or via async task adapters +- Celery task mapping becomes transport/adaptation, not the primary execution model + +### C. Split Graph Authoring by Family + +The editor should author against family-scoped graphs: + +- CAD Intake graph +- Order Rendering graph + +That includes: + +- family-specific starter templates +- family-specific node palette groups +- validation that rejects wrong-family entry nodes early +- cleaner organization than a single mixed library + +### D. Reframe Output Type Creation + +Output type creation is currently too close to legacy render settings and too far from workflow invocation. + +Create/edit flow should become: + +1. choose family +2. choose or create workflow +3. choose artifact kind +4. set invocation overrides +5. bind pricing/category/material constraints + +The current `workflow_definition_id` field is directionally correct, but too weak on its own because there is no explicit invocation contract or family validation around it yet. 
+ +## Compatibility Rules + +### Legacy Safety + +- legacy dispatch remains the fallback path +- existing output types without workflow linkage remain valid +- graph rollout must be opt-in per output type/workflow + +### Migration Safety + +- old output types may continue to store render settings in their current shape +- a compatibility adapter should map legacy render settings into invocation overrides +- workflow definitions must remain canonical JSON DAGs + +## Recommended Implementation Sequence + +### Phase A: Stabilize Broken Contracts + +- align frontend/backend `OutputType` defaults and allowed values +- add backend validation for output-type family/workflow compatibility +- make output type creation/editing reflect current real backend constraints + +### Phase B: Contract-First Registry + +- add `family`, contracts, and module metadata to node definitions +- expose them over `/api/workflows/node-definitions` +- move editor grouping/validation to registry-owned family metadata + +### Phase C: Invocation Profiles + +- extend `OutputType` into a workflow invocation profile +- add explicit artifact kind and workflow family +- separate invocation overrides from raw render settings + +### Phase D: Module Unification + +- route legacy and graph execution through the same module layer +- keep Celery as transport where async work is needed +- reduce duplicate logic in tasks and runtime adapters + +### Phase E: Full Parity Authoring + +- ship family-specific starter workflows +- expose all module settings in editor +- support end-to-end preflight, dispatch, run inspection, and parity verification + +## Immediate Code Implications + +- `workflow_node_registry.py` is the correct extension point for module contracts +- `workflow_schema.py` will need stronger family- and contract-aware validation +- `OutputType` needs a clearer model than raw renderer/backend defaults plus optional workflow id +- editor UX should follow model cleanup, not lead it + +## Decision + +We 
should simplify and refactor before doing more isolated workflow-editor UX work. + +The next implementation blocks should prioritize: + +1. fixing the output-type/workflow contract +2. formalizing node/module contracts +3. only then expanding editor affordances on top of the cleaned model diff --git a/docs/workflows/TEMPLATE_INPUT_AUDIT.md b/docs/workflows/TEMPLATE_INPUT_AUDIT.md new file mode 100644 index 0000000..ff7a748 --- /dev/null +++ b/docs/workflows/TEMPLATE_INPUT_AUDIT.md @@ -0,0 +1,52 @@ +# Template Input Audit + +Stand: 12. April 2026 + +## Befund + +Die Transportkette fuer `workflow_input_schema` und `template_inputs` funktioniert inzwischen end-to-end im Graph-Workflow. Der aktuelle Engpass liegt in den live hinterlegten `.blend`-Templates selbst: + +- Alle produktiven Render-Templates haben aktuell `workflow_input_schema = []`. +- Die live hochgeladenen `.blend`-Dateien enthalten keine erkennbaren Template-Input-Marker auf Collections, Objekten oder Worlds. +- Damit gibt es derzeit keine realen, template-spezifischen Dropdown-/Options-Felder, die wir ehrlich in `resolve_template` exponieren koennen. + +## Gepruefte Live-Templates + +- `Blender_Studio_Schadowcatcher_Anim_RotOBJ` +- `Default` +- `Blender_Studio_Schadowcatcher_Anim` +- `BlenderStudio_Shadowcatcher` +- `BlenderStudio` + +## Beobachtete Blend-Struktur + +- `BlenderStudio`: `Collection`, `Export`, `Lighting`, `World` +- `BlenderStudio_Shadowcatcher`: `Collection`, `Export`, `Lighting`, `Shadowcatcher`, `World` +- `Blender_Studio_Schadowcatcher_Anim`: gleiche Struktur wie Shadowcatcher-Template +- `Blender_Studio_Schadowcatcher_Anim_RotOBJ`: gleiche Struktur wie Shadowcatcher-Template +- `Default`: nur `Export`, keine World, keine Marker + +## Konsequenz + +Der naechste saubere Schritt ist nicht ein blinder Schema-Backfill, sondern Template-Authoring: + +1. Marker oder Scene-Property-gesteuerte Varianten in den `.blend`-Dateien anlegen. +2. Daraus eine echte `workflow_input_schema` ableiten. 
+3. Danach die Felder im Admin pflegen oder per Script backfillen. + +## Tooling + +Fuer diese Authoring-Arbeit gibt es jetzt ein reproduzierbares Audit-Script: + +```bash +python3 scripts/audit_render_templates.py --json +python3 scripts/audit_render_templates.py --write-markdown docs/workflows/TEMPLATE_INPUT_AUDIT.generated.md +``` + +Das Script: + +- loggt sich gegen die lokale API ein, +- kopiert die live verwendeten `.blend`-Dateien aus dem Backend-Container, +- inspiziert sie mit host-Blender, +- erkennt Marker gemaess der HartOMat-Konventionen, +- und erzeugt daraus ggf. Schema-Vorschlaege. diff --git a/docs/workflows/WORKFLOW_DELIVERY_CHECKLIST.md b/docs/workflows/WORKFLOW_DELIVERY_CHECKLIST.md index e2ee62d..b2390d8 100644 --- a/docs/workflows/WORKFLOW_DELIVERY_CHECKLIST.md +++ b/docs/workflows/WORKFLOW_DELIVERY_CHECKLIST.md @@ -39,23 +39,29 @@ Parallel execution ownership and stage gates are defined in [`docs/workflows/WOR - [ ] Editor saves nodes and edges - [ ] Editor roundtrip preserves workflow configs - [ ] All node settings are editable -- [ ] Validate, dry-run, and dispatch are available -- [ ] Runs are visible with node-level status and logs +- [x] Validate, dry-run, and dispatch are available +- [x] Runs are visible with node-level status and logs - [ ] Editor authoring follows family-safe module contracts instead of ad hoc node metadata +- Progress: The workflow canvas header has been compressed into a single canvas-adjacent control strip, preserving right-click node insertion, auto-align, edge deletion, preflight, dispatch, and save actions while reducing top-of-page bloat. +- Progress: The canonical still-path bridge nodes now expose editor-visible, runtime-backed settings for template overrides, material resolution, auto-population behavior, GLB source preference, output-save artifact expectations, and notify arming. The remaining Phase 5 work is authoring hierarchy and end-to-end editor verification, not hidden backend-only params. 
### Phase 7 - [x] Output-type create defaults match current backend constraints -- [ ] Output types model workflow invocation profiles -- [ ] Output types validate against workflow family and artifact contract -- [ ] Admin create/edit flow is workflow-first instead of renderer-first +- [x] Output types model workflow invocation profiles +- [x] Output types validate against workflow family and artifact contract +- [x] Admin create/edit flow is workflow-first instead of renderer-first +- Progress: The admin output-type form now opens with a dedicated workflow-contract section, keeps invocation-profile inputs ahead of renderer compatibility knobs, and moves catalog/business fields into a separate closing section so legacy Blender details no longer dominate the primary authoring flow. +- Progress: Output-type contract helpers now expose family-safe format lists, `blend` is treated as a first-class `blend_asset` contract in both frontend and backend, and the admin form no longer steers users into obviously invalid `cad_file`/video or `order_line`/model-export combinations. +- Progress: API responses now serialize the invocation profile through the typed schema, and create/edit validation rejects mixed-family workflows plus workflow/artifact mismatches before dispatch time. ### Phase 6 - [x] Shadow mode parity execution dispatches real graph observer runs alongside authoritative legacy dispatch - Progress: Workflow runs now expose a comparison endpoint that resolves authoritative legacy outputs and matching shadow artifacts, including file hashes, image dimensions, and mean pixel delta for parity inspection. - Progress: `scripts/test_render_pipeline.py --workflow-still-smoke --execution-mode shadow` now provisions the canonical still smoke contract, runs preflight, dispatches via the real order/output-type workflow linkage, resolves the resulting workflow run, and prints the shadow comparison verdict. 
-- [ ] Golden cases pass against legacy outputs +- [x] Golden cases pass against legacy outputs +- Progress: On April 8, 2026, the live `--workflow-golden-suite` passed end to end for `still_legacy`, `still_graph`, `still_shadow`, `turntable_graph`, and `blend_graph`. The blend export contract now completes the order line, persists the primary `.blend` result, and links the resulting `blend_production` media asset back to the workflow run. - [ ] Rollout can be enabled per workflow or output type - [ ] Rollback to legacy is immediate @@ -100,12 +106,14 @@ Parallel execution ownership and stage gates are defined in [`docs/workflows/WOR - Invalid graphs are blocked before dispatch. - All node settings needed for parity are present in the editor. - Family-specific authoring prevents invalid `cad_file`/`order_line` graph composition. +- Progress: Backend-owned still bridge modules now declare the settings required for parity; the next gate is browser-level confirmation that the inspector presents them cleanly enough for real authoring. ### QG-7: Invocation Gate - Output type creation and editing use valid backend defaults. - Output types bind to workflows through an explicit invocation contract. - Legacy output types remain renderable during migration. +- Progress: This gate is functionally green at the API layer. Remaining rollout work is operational adoption, not missing contract primitives. 
### QG-6: Rollout Gate @@ -116,11 +124,21 @@ Parallel execution ownership and stage gates are defined in [`docs/workflows/WOR - `python scripts/test_render_pipeline.py --workflow-still-smoke --execution-mode legacy` - `python scripts/test_render_pipeline.py --workflow-still-smoke --execution-mode graph` - `python scripts/test_render_pipeline.py --workflow-still-smoke --execution-mode shadow` +- Sequential low-RAM gate wrapper: + - `./scripts/workflow_sequential_gates.sh` + - `./scripts/workflow_sequential_gates.sh --with-cad-parity` + - `./scripts/workflow_sequential_gates.sh --with-live-shadow` + - `./scripts/workflow_sequential_gates.sh --with-cad-parity --with-live-shadow --with-golden` +- CAD/Viewer parity smoke for repeated-instance products: + - `python3 scripts/compare_live_cad_parity.py --cad-id 7c214057-9982-4d6e-aa87-43bfabfdb709` +- Progress: The repeated-instance CAD regression now has a dedicated live gate. The current reference case passes with `146` manifest parts, `146` mesh nodes, `146` unique `partKey`s, and no missing or duplicate assignments. - Rollout approval rule for the canonical still workflow: - `shadow` must finish with a successful order line and a comparison verdict of `pass` - `warn` or `fail` means legacy remains authoritative - `graph` may only be enabled on real output types after the shadow command passes cleanly -- Progress: the canonical still smoke flow now passes live in `legacy` and `graph`; `shadow` stabilizes after a short observer-output lag and currently reports `warn` because the observer image differs slightly, so legacy remains authoritative for rollout decisions. +- Progress: the canonical still smoke flow now passes live in `legacy`, `graph`, and `shadow`. Shadow observer output may arrive slightly later than the authoritative legacy file, but the rollout gate now treats proven de-minimis Blender drift as `pass` instead of a rollout-blocking `warn`. 
+- Progress: the live smoke and golden harnesses now provision explicit `workflow_rollout_mode` values when linking non-legacy output types, so `graph` and `shadow` exercises no longer depend on whatever rollout state happened to be left behind in the database. +- Progress: the broader golden suite is green in live graph execution, and the remaining rollout work is operational enablement per workflow/output type rather than still-shadow image drift. ## Definition of Done diff --git a/docs/workflows/WORKFLOW_IMPLEMENTATION_BACKLOG.md b/docs/workflows/WORKFLOW_IMPLEMENTATION_BACKLOG.md index f108ed8..ccd664f 100644 --- a/docs/workflows/WORKFLOW_IMPLEMENTATION_BACKLOG.md +++ b/docs/workflows/WORKFLOW_IMPLEMENTATION_BACKLOG.md @@ -1,5 +1,7 @@ # Workflow Implementation Backlog +Execution orchestration, ownership split, and merge order are tracked in [`docs/workflows/WORKERS.md`](/home/hartmut/Documents/Copilot/schaefflerautomat/docs/workflows/WORKERS.md). + ## Epic 1: Canonical Workflow Model ### Tickets @@ -25,6 +27,7 @@ - `E2-T3` Add `GET /api/workflows/node-definitions`. - `E2-T4` Provide schema-driven defaults and editor field groups. - `E2-T5` Add composite bridge nodes for safe migration. +- `E2-T6` Extend node definitions with family, module key, input contract, output contract, and artifact roles. ### Required Node Coverage @@ -76,6 +79,11 @@ - `E4-T4` Support retry and failure policies. `completed` - `E4-T5` Add execution mode switch: `legacy`, `graph`, `shadow`. `completed` - `E4-T6` Add hard fallback to legacy dispatch on graph failure. `completed` +- `E4-T7` Make `output_save` graph-authoritative for still renders by disabling render-task self-publish whenever a downstream `output_save` node is present. `completed` +- `E4-T8` Persist authoritative still output metadata back into `WorkflowNodeResult` rows and keep shadow mode non-authoritative. 
`completed` +- `E4-T9` Extend runtime, dispatch, and task tests for graph-authoritative still persistence and legacy-safe notify handoff. `completed` +- `E4-T10` Extend graph-authoritative `output_save` semantics to `export_blend`, including asset persistence and node-result updates without mutating the primary order-line render output. `completed` +- `E4-T11` Extend graph-authoritative `output_save` semantics to `blender_turntable`, including graph/legacy-safe task argument handling, authoritative persistence, and node-result updates for video outputs. `completed` ## Epic 5: Editor Parity @@ -87,6 +95,20 @@ - `E5-T4` Add graph validation in the editor. - `E5-T5` Add dry-run and dispatch from the editor. - `E5-T6` Add workflow run inspection UI. +- `E5-T7` Reorganize authoring around family-specific starter graphs and family-safe palettes. +- Progress: The editor already supports right-click node insertion, searchable family-aware palettes, auto-align, edge deletion, dry-run/dispatch, and run inspection. The current remaining parity slice is authoring polish and validation around the canonical non-legacy still graph, not missing basic editor mechanics. + +## Epic 7: Output-Type Invocation Profiles + +### Tickets + +- `E7-T1` Align frontend/backend defaults and allowed values for output-type creation. `completed` +- `E7-T2` Define explicit workflow family on output types. `completed` +- `E7-T3` Separate invocation overrides from legacy raw render settings. `completed` +- `E7-T4` Add artifact-kind contract to output types. `completed` +- `E7-T5` Validate output-type family/workflow compatibility in backend APIs. `completed` +- `E7-T6` Redesign admin create/edit flow around workflow invocation instead of legacy renderer flags. 
`completed` +- Progress: Output types now persist an explicit workflow family, artifact kind, and invocation override set; backend APIs reject family/artifact mismatches against linked workflows; serializer output exposes a typed invocation profile instead of leaking raw dict payloads. ## Epic 6: Rollout and Quality @@ -94,6 +116,8 @@ - `E6-T1` Add shadow mode parity execution. `completed` - `E6-T2` Build output comparison tooling. `completed` -- `E6-T3` Define golden test cases. +- `E6-T3` Define golden test cases. `completed` +- `E6-T3a` Harden the golden-suite harness against transient backend disconnects and 502/503/504 responses. `completed` +- `E6-T3b` Close the primary `.blend` graph-authoritative persistence gap so graph blend exports complete the order, persist the order-line output, and publish a `blend_production` media asset. `completed` - `E6-T4` Roll out per workflow or output type. - `E6-T5` Keep legacy fallback after rollout. diff --git a/docs/workflows/WORKFLOW_MIGRATION_PLAN.md b/docs/workflows/WORKFLOW_MIGRATION_PLAN.md index 8e36c7d..3fe02aa 100644 --- a/docs/workflows/WORKFLOW_MIGRATION_PLAN.md +++ b/docs/workflows/WORKFLOW_MIGRATION_PLAN.md @@ -9,6 +9,12 @@ Bring `/workflows` to full production parity with the existing legacy render pip - Phase 1 completed on canonical config storage, preset migration, and legacy-safe runtime extraction. - Phase 2 completed on backend node registry, node definitions API, and schema-driven editor palette/settings. - Phase 3 completed: `order_line_setup`, `resolve_template`, `material_map_resolve`, `auto_populate_materials`, `glb_bbox`, `output_save`, and `notify` are extracted behind the legacy task boundary, validated with targeted backend tests, and covered by workflow executor dispatch tests. 
+- Phase 4 is partially completed: graph runtime dispatch now treats downstream `output_save` as the authoritative persistence boundary for still renders, turntable/video renders, and `.blend` exports, updates node results after persistence, and keeps shadow executions non-authoritative. +- The canonical still bridge path now exposes runtime-backed node settings for template overrides, material resolution, auto-population rules, GLB source selection, output-save artifact filtering, and notify arming, so `/workflows` can author the real still-render contract instead of relying on hidden legacy defaults. +- Output types now behave as explicit invocation profiles in both API and admin UI: workflow family, artifact kind, and invocation overrides are first-class, and linked workflow compatibility is enforced before dispatch. +- Canonical reference workflows now need to be managed as at least two families, not one mixed graph: + - `CAD Intake` + - `Order Rendering` ## Non-Negotiables @@ -46,6 +52,16 @@ Notes: - `ui` is editor-only metadata and must not change runtime semantics. - `edges` are mandatory for graph persistence and validation. +## Architecture Direction + +- Workflow nodes are orchestration wrappers, not the canonical implementation unit. +- The canonical implementation unit is a backend-owned production module with typed inputs, outputs, defaults, and execution semantics. +- Workflow definitions answer "what runs"; output types answer "which productized invocation profile of that workflow is offered". +- Workflow authoring remains split by family: + - `cad_file` + - `order_line` +- The detailed target model is captured in [`docs/workflows/NODE_BASED_PRODUCTION_ARCHITECTURE.md`](/home/hartmut/Documents/Copilot/schaefflerautomat/docs/workflows/NODE_BASED_PRODUCTION_ARCHITECTURE.md). + ## Phases ### Phase 1: Canonical Model and Migration Base @@ -81,12 +97,17 @@ Notes: - Support node outputs and artifact handoff across edges. 
- Keep `legacy`, `graph`, and `shadow` execution modes. - Current slice: graph dispatch executes extracted bridge nodes for order-line setup, template/material resolution, auto-material population, and bounding-box resolution before queueing render/export tasks. +- Current slice completed: still-render graphs, turntable/video graphs, and `.blend` export graphs with downstream `output_save` now disable task-level self-publish, persist authoritative output/media metadata through shared runtime services, and write the result back into `WorkflowNodeResult`. +- Next Phase 4 slice: extend the same authoritative `output_save` contract to any remaining legacy export variants and close the remaining notify/editor parity gaps. ### Phase 5: Workflow Editor Parity - Persist and load `nodes`, `edges`, `step`, `params`, and `ui`. - Render all node settings dynamically from backend schemas. - Add validation, dry-run, dispatch, and run inspection. +- Organize workflows and node palette by family so the editor reflects the runtime split between `cad_file` and `order_line` contexts. +- Current state: right-click insertion, searchable family-aware palettes, auto-align, edge deletion, dry-run, dispatch, and run inspection are already in place; the remaining work is authoring clarity, node organization, and browser-verified end-to-end usability for the non-legacy still graph. +- Do not add further editor-only UX surface before node/module contracts and output-type invocation profiles are stabilized. ### Phase 6: Shadow Mode and Rollout diff --git a/docs/workflows/template-inputs.md b/docs/workflows/template-inputs.md new file mode 100644 index 0000000..a35b6b6 --- /dev/null +++ b/docs/workflows/template-inputs.md @@ -0,0 +1,82 @@ +# Template Inputs For Graph Workflows + +`resolve_template` can now expose template-specific inputs into the graph workflow without breaking legacy rendering. 
+ +## Where the inputs come from + +Render templates can define a `workflow_input_schema` JSON array in Admin. + +Example: + +```json +[ + { + "key": "studio_variant", + "label": "Studio Variant", + "type": "select", + "section": "Template Inputs", + "default": "default", + "options": [ + { "value": "default", "label": "Default" }, + { "value": "warm", "label": "Warm" } + ] + }, + { + "key": "camera_profile", + "label": "Camera Profile", + "type": "text", + "section": "Template Inputs", + "default": "macro" + } +] +``` + +These fields appear in the workflow editor on the `resolve_template` node after a template override is selected. + +## Runtime behavior + +At runtime the graph path resolves these values into `template_inputs` and forwards them through: + +1. `resolve_template` +2. workflow runtime invocation +3. still / turntable / cinematic Blender services +4. Blender worker CLI via `--template-inputs` +5. template scene setup after opening the `.blend` + +Legacy workflows continue to work if no template input schema is defined. + +## Blender-side conventions + +Every resolved template input is always written onto the active scene as: + +- `template_input__` +- `hartomat_template_input__` + +Example: + +- `template_input__studio_variant = "warm"` +- `hartomat_template_input__camera_profile = "macro"` + +Templates can optionally react to these values using markers on collections, objects, or worlds. + +Supported marker styles: + +- custom property value: `template_input=studio_variant=warm` +- object or collection name: `template-input:studio_variant=warm` +- object or collection name: `ti::studio_variant::warm` + +If a marker matches the resolved input value, the target is enabled. Non-matching variants are hidden. + +## Practical authoring pattern + +For a lighting setup with two variants: + +1. Put warm lights into a collection named `template-input:studio_variant=warm` +2. Put cool lights into a collection named `template-input:studio_variant=cool` +3. 
Define `studio_variant` in `workflow_input_schema` +4. Select the template in `resolve_template` +5. Pick the wanted variant in the node inspector + +## Important limitation + +The graph runtime now transports template inputs end-to-end, but existing `.blend` files only change visually if they use one of the conventions above or read the scene custom properties themselves. diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index 67a64bd..1ea0fa5 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -1,30 +1,40 @@ +import { Suspense, lazy } from 'react' import { BrowserRouter, Routes, Route, Navigate, useLocation } from 'react-router-dom' import { useAuthStore, isPrivileged as checkIsPrivileged } from './store/auth' import { WebSocketProvider } from './contexts/WebSocketContext' import Layout from './components/layout/Layout' -import LoginPage from './pages/Login' -import NotFoundPage from './pages/NotFound' -import DashboardPage from './pages/Dashboard' -import OrdersPage from './pages/Orders' -import OrderDetailPage from './pages/OrderDetail' -import NewOrderPage from './pages/NewOrder' -import UploadPage from './pages/Upload' -import AdminPage from './pages/Admin' -import CadPreviewPage from './pages/CadPreview' -import MaterialsPage from './pages/Materials' -import WorkerActivityPage from './pages/WorkerActivity' -import ProductLibraryPage from './pages/ProductLibrary' -import ProductDetailPage from './pages/ProductDetail' -import NewProductOrderPage from './pages/NewProductOrder' -import NotificationsPage from './pages/Notifications' -import NotificationSettingsPage from './pages/NotificationSettings' -import PreferencesPage from './pages/Preferences' -import TenantsPage from './pages/Tenants' -import WorkflowEditorPage from './pages/WorkflowEditor' -import MediaBrowserPage from './pages/MediaBrowser' -import BillingPage from './pages/Billing' -import WorkerManagementPage from './pages/WorkerManagement' -import AssetLibraryPage from 
'./pages/AssetLibrary' + +const LoginPage = lazy(() => import('./pages/Login')) +const NotFoundPage = lazy(() => import('./pages/NotFound')) +const DashboardPage = lazy(() => import('./pages/Dashboard')) +const OrdersPage = lazy(() => import('./pages/Orders')) +const OrderDetailPage = lazy(() => import('./pages/OrderDetail')) +const NewOrderPage = lazy(() => import('./pages/NewOrder')) +const UploadPage = lazy(() => import('./pages/Upload')) +const AdminPage = lazy(() => import('./pages/Admin')) +const CadPreviewPage = lazy(() => import('./pages/CadPreview')) +const MaterialsPage = lazy(() => import('./pages/Materials')) +const WorkerActivityPage = lazy(() => import('./pages/WorkerActivity')) +const ProductLibraryPage = lazy(() => import('./pages/ProductLibrary')) +const ProductDetailPage = lazy(() => import('./pages/ProductDetail')) +const NewProductOrderPage = lazy(() => import('./pages/NewProductOrder')) +const NotificationsPage = lazy(() => import('./pages/Notifications')) +const NotificationSettingsPage = lazy(() => import('./pages/NotificationSettings')) +const PreferencesPage = lazy(() => import('./pages/Preferences')) +const TenantsPage = lazy(() => import('./pages/Tenants')) +const WorkflowEditorPage = lazy(() => import('./pages/WorkflowEditor')) +const MediaBrowserPage = lazy(() => import('./pages/MediaBrowser')) +const BillingPage = lazy(() => import('./pages/Billing')) +const WorkerManagementPage = lazy(() => import('./pages/WorkerManagement')) +const AssetLibraryPage = lazy(() => import('./pages/AssetLibrary')) + +function RouteFallback() { + return ( +
+ Loading... +
+ ) +} function ProtectedRoute({ children }: { children: React.ReactNode }) { const token = useAuthStore((s) => s.token) @@ -42,91 +52,93 @@ function AdminRoute({ children }: { children: React.ReactNode }) { export default function App() { return ( - + - - } /> - - - - } - > - } /> - } /> - } /> - } /> - } /> - } /> - - - - } - /> - - - - } - /> - - - - } - /> - } /> - } /> - } /> - } /> - } /> - } /> - } /> - } /> - - - - } - /> - - - - } - /> - - - - } - /> - - - - } - /> - - } /> - + }> + + } /> + + + + } + > + } /> + } /> + } /> + } /> + } /> + } /> + + + + } + /> + + + + } + /> + + + + } + /> + } /> + } /> + } /> + } /> + } /> + } /> + } /> + } /> + + + + } + /> + + + + } + /> + + + + } + /> + + + + } + /> + + } /> + + ) diff --git a/frontend/src/__tests__/api/outputTypes.test.ts b/frontend/src/__tests__/api/outputTypes.test.ts new file mode 100644 index 0000000..7c87003 --- /dev/null +++ b/frontend/src/__tests__/api/outputTypes.test.ts @@ -0,0 +1,253 @@ +import { describe, expect, test } from 'vitest' + +import { + getCachedOutputTypeContractCatalog, + getCompatibleWorkflowsForOutputTypeContract, + getOutputTypeInvocationOverrides, + getOutputTypeWorkflowContractIssues, + getDefaultOutputFormatForArtifactKind, + inferArtifactKind, + listAllowedInvocationOverrideKeysForArtifactKind, + listAllowedOutputFormatsForFamily, + type OutputType, +} from '../../api/outputTypes' + +describe('output type contract helpers', () => { + test('lists family-safe output formats', () => { + const contractCatalog = getCachedOutputTypeContractCatalog() + + expect(listAllowedOutputFormatsForFamily('cad_file', contractCatalog)).toEqual([ + 'png', + 'jpg', + 'jpeg', + 'webp', + 'gltf', + 'glb', + 'stl', + 'obj', + 'usd', + 'usdz', + ]) + + expect(listAllowedOutputFormatsForFamily('order_line', contractCatalog)).toEqual([ + 'png', + 'jpg', + 'jpeg', + 'webp', + 'mp4', + 'webm', + 'mov', + 'blend', + ]) + }) + + test('infers artifact kinds for family-safe formats', () => { + 
expect(inferArtifactKind('order_line', 'blend', false)).toBe('blend_asset') + expect(inferArtifactKind('order_line', 'mp4', true)).toBe('turntable_video') + expect(inferArtifactKind('cad_file', 'gltf', false)).toBe('model_export') + expect(inferArtifactKind('cad_file', 'png', false)).toBe('thumbnail_image') + }) + + test('returns defaults that match artifact contracts', () => { + const contractCatalog = getCachedOutputTypeContractCatalog() + + expect(getDefaultOutputFormatForArtifactKind('still_image', contractCatalog)).toBe('png') + expect(getDefaultOutputFormatForArtifactKind('thumbnail_image', contractCatalog)).toBe('png') + expect(getDefaultOutputFormatForArtifactKind('turntable_video', contractCatalog)).toBe('mp4') + expect(getDefaultOutputFormatForArtifactKind('model_export', contractCatalog)).toBe('gltf') + expect(getDefaultOutputFormatForArtifactKind('blend_asset', contractCatalog)).toBe('blend') + }) + + test('exposes invocation override keys by artifact contract', () => { + const contractCatalog = getCachedOutputTypeContractCatalog() + + expect(listAllowedInvocationOverrideKeysForArtifactKind('turntable_video', contractCatalog)).toEqual([ + 'width', + 'height', + 'engine', + 'samples', + 'bg_color', + 'noise_threshold', + 'denoiser', + 'denoising_input_passes', + 'denoising_prefilter', + 'denoising_quality', + 'denoising_use_gpu', + 'frame_count', + 'fps', + 'turntable_axis', + ]) + expect(listAllowedInvocationOverrideKeysForArtifactKind('blend_asset', contractCatalog)).toEqual([]) + }) + + test('respects backend-authored contract catalog when provided', () => { + const contractCatalog = { + ...getCachedOutputTypeContractCatalog(), + allowed_output_formats_by_family: { + order_line: ['png', 'heic'], + cad_file: ['jpg'], + }, + default_output_format_by_artifact_kind: { + ...getCachedOutputTypeContractCatalog().default_output_format_by_artifact_kind, + still_image: 'heic', + }, + } + + expect(listAllowedOutputFormatsForFamily('order_line', 
contractCatalog)).toEqual(['png', 'heic']) + expect(getDefaultOutputFormatForArtifactKind('still_image', contractCatalog)).toBe('heic') + }) + + test('exposes parameter ownership boundaries in the contract catalog', () => { + const contractCatalog = getCachedOutputTypeContractCatalog() + + expect(contractCatalog.parameter_ownership.output_type_profile_keys).toEqual([ + 'transparent_bg', + 'cycles_device', + 'material_override', + ]) + expect(contractCatalog.parameter_ownership.template_runtime_keys).toEqual([ + 'target_collection', + 'lighting_only', + 'shadow_catcher', + 'camera_orbit', + 'template_inputs', + ]) + expect(contractCatalog.parameter_ownership.workflow_node_keys_by_step.resolve_template).toContain('target_collection') + expect(contractCatalog.parameter_ownership.workflow_node_keys_by_step.blender_still).toContain('material_override') + expect(contractCatalog.parameter_ownership.workflow_node_keys_by_step.blender_turntable).toContain('camera_orbit') + }) + + test('prefers invocation_profile overrides over legacy render settings', () => { + const outputType = { + id: 'ot-1', + name: 'Still', + description: null, + renderer: 'blender', + render_settings: { width: 4096, frame_count: 180 }, + invocation_overrides: { width: 2048, frame_count: 120 }, + output_format: 'png', + sort_order: 0, + compatible_categories: [], + render_backend: 'celery', + is_animation: false, + transparent_bg: false, + workflow_family: 'order_line', + artifact_kind: 'still_image', + cycles_device: null, + pricing_tier_id: null, + pricing_tier_name: null, + price_per_item: null, + workflow_definition_id: null, + workflow_rollout_mode: 'legacy_only', + workflow_name: null, + material_override: null, + invocation_profile: { + renderer: 'blender', + render_backend: 'celery', + workflow_family: 'order_line', + artifact_kind: 'still_image', + output_format: 'png', + is_animation: false, + workflow_definition_id: null, + workflow_rollout_mode: 'legacy_only', + transparent_bg: false, + 
cycles_device: null, + material_override: null, + allowed_override_keys: ['width', 'height', 'engine', 'samples', 'bg_color', 'noise_threshold', 'denoiser', 'denoising_input_passes', 'denoising_prefilter', 'denoising_quality', 'denoising_use_gpu'], + invocation_overrides: { width: 2048 }, + }, + is_active: true, + created_at: '', + updated_at: '', + } satisfies OutputType + + expect(getOutputTypeInvocationOverrides(outputType)).toEqual({ width: 2048 }) + }) + + test('flags rollout modes without a linked workflow', () => { + expect(getOutputTypeWorkflowContractIssues({ + workflowFamily: 'order_line', + artifactKind: 'still_image', + outputFormat: 'png', + isAnimation: false, + workflowDefinitionId: '', + workflowRolloutMode: 'graph', + workflows: [], + })).toEqual([ + expect.objectContaining({ + code: 'rollout_requires_workflow', + severity: 'error', + }), + ]) + }) + + test('flags workflow family and artifact mismatches', () => { + const issues = getOutputTypeWorkflowContractIssues({ + workflowFamily: 'order_line', + artifactKind: 'still_image', + outputFormat: 'png', + isAnimation: false, + workflowDefinitionId: 'wf-cad', + workflowRolloutMode: 'shadow', + workflows: [ + { + id: 'wf-cad', + name: 'CAD Intake', + family: 'cad_file', + supported_artifact_kinds: ['thumbnail_image'], + }, + ], + }) + + expect(issues).toEqual([ + expect.objectContaining({ code: 'workflow_family_mismatch', severity: 'error' }), + expect.objectContaining({ code: 'workflow_artifact_mismatch', severity: 'error' }), + ]) + }) + + test('flags output formats that are incompatible with the workflow family', () => { + expect(getOutputTypeWorkflowContractIssues({ + workflowFamily: 'order_line', + artifactKind: 'custom', + outputFormat: 'gltf', + isAnimation: false, + workflowDefinitionId: '', + workflowRolloutMode: 'legacy_only', + workflows: [], + })).toEqual([ + expect.objectContaining({ + code: 'format_family_mismatch', + severity: 'error', + }), + ]) + }) + + test('returns only workflows 
that satisfy family and artifact contract', () => { + expect(getCompatibleWorkflowsForOutputTypeContract( + [ + { + id: 'wf-1', + name: 'Still Graph', + family: 'order_line', + supported_artifact_kinds: ['still_image', 'turntable_video'], + }, + { + id: 'wf-2', + name: 'CAD Intake', + family: 'cad_file', + supported_artifact_kinds: ['thumbnail_image'], + }, + { + id: 'wf-3', + name: 'Mixed Graph', + family: 'mixed', + supported_artifact_kinds: ['still_image'], + }, + ], + 'order_line', + 'still_image', + )).toEqual([ + expect.objectContaining({ id: 'wf-1' }), + ]) + }) +}) diff --git a/frontend/src/__tests__/api/workflows.test.ts b/frontend/src/__tests__/api/workflows.test.ts index 8c7da67..2b3a528 100644 --- a/frontend/src/__tests__/api/workflows.test.ts +++ b/frontend/src/__tests__/api/workflows.test.ts @@ -1,6 +1,18 @@ -import { describe, expect, test } from 'vitest' +import { describe, expect, test, vi } from 'vitest' -import { createPresetWorkflowConfig, createStarterWorkflowConfig, normalizeWorkflowConfig } from '../../api/workflows' +import { + createPresetWorkflowConfig, + createStarterWorkflowConfig, + normalizeWorkflowConfig, + getWorkflows, +} from '../../api/workflows' +import api from '../../api/client' + +vi.mock('../../api/client', () => ({ + default: { + get: vi.fn(), + }, +})) describe('workflow preset config builders', () => { test('builds a non-legacy still graph preset', () => { @@ -24,7 +36,7 @@ describe('workflow preset config builders', () => { 'notify', ]) expect(config.nodes.find(node => node.step === 'blender_still')?.params).toMatchObject({ - use_custom_render_settings: true, + use_custom_render_settings: false, render_engine: 'cycles', samples: 128, width: 1600, @@ -62,4 +74,107 @@ describe('workflow preset config builders', () => { expect(config.ui?.family).toBe('order_line') expect(config.ui?.execution_mode).toBe('shadow') }) + + test('rebuilds canonical reference blueprints during normalization', () => { + const config = 
normalizeWorkflowConfig({ + version: 1, + ui: { + preset: 'custom', + execution_mode: 'legacy', + blueprint: 'cad_intake', + }, + nodes: [ + { id: 'resolve_step', step: 'resolve_step_path', params: {} }, + ], + edges: [], + }) + + expect(config.ui?.blueprint).toBe('cad_intake') + expect(config.ui?.family).toBe('cad_file') + expect(config.nodes.map(node => node.step)).toEqual([ + 'resolve_step_path', + 'occ_object_extract', + 'occ_glb_export', + 'glb_bbox', + 'stl_cache_generate', + 'blender_render', + 'threejs_render', + 'thumbnail_save', + 'thumbnail_save', + ]) + expect(config.edges).toEqual( + expect.arrayContaining([ + { from: 'export_glb', to: 'bbox' }, + { from: 'bbox', to: 'threejs_thumb' }, + ]), + ) + }) + + test('rebuilds canonical starter blueprints during normalization', () => { + const config = normalizeWorkflowConfig({ + version: 1, + ui: { + preset: 'custom', + execution_mode: 'legacy', + blueprint: 'starter_order_rendering', + }, + nodes: [], + edges: [], + }) + + expect(config.ui?.blueprint).toBe('starter_order_rendering') + expect(config.ui?.family).toBe('order_line') + expect(config.nodes.map(node => node.step)).toEqual(['order_line_setup']) + }) + + test('normalizes workflow rollout summary from the API payload', async () => { + vi.mocked(api.get).mockResolvedValueOnce({ + data: [ + { + id: 'wf-1', + name: 'Still Graph', + output_type_id: null, + config: createPresetWorkflowConfig('still_graph'), + family: 'order_line', + supported_artifact_kinds: ['still_image'], + rollout_summary: { + linked_output_type_count: 2, + active_output_type_count: 1, + linked_output_type_names: ['Still Render', 'Still Render Shadow'], + rollout_modes: ['shadow'], + has_blocking_contracts: false, + blocking_reasons: [], + latest_run: { + workflow_run_id: 'run-1', + execution_mode: 'graph', + status: 'completed', + created_at: '2026-04-11T10:00:00Z', + completed_at: '2026-04-11T10:01:00Z', + }, + latest_shadow_run: { + workflow_run_id: 'run-shadow-1', + execution_mode: 
'shadow', + status: 'completed', + created_at: '2026-04-11T09:00:00Z', + completed_at: '2026-04-11T09:01:00Z', + }, + latest_rollout_gate_verdict: 'pass', + latest_rollout_ready: true, + latest_rollout_status: 'ready_for_rollout', + latest_rollout_reasons: ['Observer output matches the authoritative legacy output byte-for-byte.'], + }, + is_active: true, + created_at: '2026-04-11T08:00:00Z', + }, + ], + }) + + const [workflow] = await getWorkflows() + + expect(workflow.rollout_summary.linked_output_type_count).toBe(2) + expect(workflow.rollout_summary.rollout_modes).toEqual(['shadow']) + expect(workflow.rollout_summary.latest_shadow_run?.execution_mode).toBe('shadow') + expect(workflow.rollout_summary.latest_rollout_gate_verdict).toBe('pass') + expect(workflow.rollout_summary.latest_rollout_ready).toBe(true) + }) }) diff --git a/frontend/src/__tests__/components/WorkflowNodeInspector.test.tsx b/frontend/src/__tests__/components/WorkflowNodeInspector.test.tsx new file mode 100644 index 0000000..5aef837 --- /dev/null +++ b/frontend/src/__tests__/components/WorkflowNodeInspector.test.tsx @@ -0,0 +1,307 @@ +import { useState } from 'react' +import { QueryClient, QueryClientProvider } from '@tanstack/react-query' +import { render, screen, waitFor, within } from '@testing-library/react' +import userEvent from '@testing-library/user-event' +import { beforeEach, describe, expect, test, vi } from 'vitest' + +import type { RenderTemplate } from '../../api/renderTemplates' +import type { WorkflowNodeDefinition, WorkflowParams } from '../../api/workflows' +import { WorkflowNodeInspector } from '../../components/workflows/WorkflowNodeInspector' + +const listRenderTemplates = vi.fn<() => Promise>() + +vi.mock('../../api/renderTemplates', () => ({ + listRenderTemplates: () => listRenderTemplates(), +})) + +const resolveTemplateDefinition: WorkflowNodeDefinition = { + step: 'resolve_template', + label: 'Resolve Template', + family: 'order_line', + module_key: 
'rendering.resolve_template', + category: 'processing', + description: 'Resolve a template for order-line rendering.', + node_type: 'processNode', + icon: 'layers', + defaults: {}, + fields: [ + { + key: 'template_id_override', + label: 'Template Override', + type: 'text', + description: 'Manual template override.', + section: 'General', + default: '', + min: null, + max: null, + step: null, + unit: null, + options: [], + allow_blank: true, + max_length: null, + text_format: 'uuid', + }, + ], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'order_line', requires: ['order_line_context'] }, + output_contract: { context: 'order_line', provides: ['render_template'] }, + artifact_roles_consumed: ['order_line_context'], + artifact_roles_produced: ['render_template'], + legacy_source: 'legacy.resolve_template', +} + +const notifyDefinition: WorkflowNodeDefinition = { + step: 'notify', + label: 'Notify', + family: 'order_line', + module_key: 'notifications.emit', + category: 'output', + description: 'Emit the workflow result.', + node_type: 'outputNode', + icon: 'bell', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'order_line', requires: ['workflow_result'] }, + output_contract: { context: 'order_line', provides: ['notification_event'] }, + artifact_roles_consumed: ['workflow_result'], + artifact_roles_produced: ['notification_event'], + legacy_source: 'legacy.notify', +} + +function createRenderTemplate(overrides: Partial = {}): RenderTemplate { + return { + id: '0d87b85f-c454-4d61-a124-d5b59e6a43a2', + name: 'Bearing Studio', + category_key: 'bearings', + output_type_id: null, + output_type_name: null, + output_type_ids: [], + output_type_names: ['Still'], + blend_file_path: '/templates/bearing.blend', + original_filename: 'bearing.blend', + target_collection: 'Product', + material_replace_enabled: true, + lighting_only: false, + shadow_catcher_enabled: false, + 
camera_orbit: true, + workflow_input_schema: [], + is_active: true, + created_at: '2026-04-11T00:00:00Z', + updated_at: '2026-04-11T00:00:00Z', + ...overrides, + } +} + +function renderInspector( + params: WorkflowParams, + onChange = vi.fn(), + options: { stateful?: boolean } = {}, +) { + const queryClient = new QueryClient({ + defaultOptions: { + queries: { + retry: false, + }, + }, + }) + + function StatefulInspector() { + const [currentParams, setCurrentParams] = useState(params) + + return ( + { + setCurrentParams(nextParams) + onChange(nextParams) + }} + nodeDefinition={resolveTemplateDefinition} + step="resolve_template" + nodeDefinitions={[resolveTemplateDefinition]} + graphFamily="order_line" + onStepChange={vi.fn()} + /> + ) + } + + render( + + {options.stateful ? ( + + ) : ( + + )} + , + ) + + return { onChange, queryClient } +} + +describe('WorkflowNodeInspector', () => { + beforeEach(() => { + listRenderTemplates.mockReset() + }) + + test('renders template-defined workflow input fields for resolve_template nodes', async () => { + listRenderTemplates.mockResolvedValue([ + createRenderTemplate({ + workflow_input_schema: [ + { + key: 'studio_variant', + label: 'Studio Variant', + type: 'select', + section: 'Template Inputs', + description: 'Choose the blend lighting preset.', + default: 'default', + min: null, + max: null, + step: null, + unit: null, + options: [ + { value: 'default', label: 'Default' }, + { value: 'warm', label: 'Warm' }, + ], + allow_blank: false, + }, + ], + }), + ]) + + renderInspector({ + template_id_override: '0d87b85f-c454-4d61-a124-d5b59e6a43a2', + template_input__studio_variant: 'warm', + }) + + expect(await screen.findByLabelText('Template Override')).toHaveValue( + '0d87b85f-c454-4d61-a124-d5b59e6a43a2', + ) + expect(await screen.findByLabelText('Studio Variant')).toHaveValue('warm') + expect(screen.getByText('Bearing Studio')).toBeInTheDocument() + }) + + test('clears dynamic template inputs when template override is removed', 
async () => { + listRenderTemplates.mockResolvedValue([ + createRenderTemplate({ + workflow_input_schema: [ + { + key: 'studio_variant', + label: 'Studio Variant', + type: 'select', + section: 'Template Inputs', + description: 'Choose the blend lighting preset.', + default: 'default', + min: null, + max: null, + step: null, + unit: null, + options: [ + { value: 'default', label: 'Default' }, + { value: 'warm', label: 'Warm' }, + ], + allow_blank: false, + }, + ], + }), + ]) + + const user = userEvent.setup() + const { onChange } = renderInspector({ + template_id_override: '0d87b85f-c454-4d61-a124-d5b59e6a43a2', + template_input__studio_variant: 'warm', + template_input__camera_profile: 'macro', + }) + + const templateOverride = await screen.findByLabelText('Template Override') + await user.selectOptions(templateOverride, '') + + await waitFor(() => { + expect(onChange).toHaveBeenCalledWith({}) + }) + }) + + test('explains connection-driven nodes when no editor fields are available', async () => { + listRenderTemplates.mockResolvedValue([]) + + const queryClient = new QueryClient({ + defaultOptions: { + queries: { + retry: false, + }, + }, + }) + + render( + + + , + ) + + expect(screen.getByText('This node has no editor settings.')).toBeInTheDocument() + expect(screen.getByText(/each required upstream input gets its own socket/i)).toBeInTheDocument() + expect(screen.getByText(/0 local variables by design/i)).toBeInTheDocument() + expect(screen.getByText('Socket 1')).toBeInTheDocument() + expect( + screen.getAllByText('Any of: Rendered Image / Rendered Frames / Rendered Video / Workflow Result / Blend Asset').length, + ).toBeGreaterThan(0) + }) + + test('summarizes wired inputs and inspector variables separately', async () => { + listRenderTemplates.mockResolvedValue([ + createRenderTemplate({ + workflow_input_schema: [ + { + key: 'studio_variant', + label: 'Studio Variant', + type: 'select', + section: 'Template Inputs', + description: 'Choose the blend lighting 
preset.', + default: 'default', + min: null, + max: null, + step: null, + unit: null, + options: [ + { value: 'default', label: 'Default' }, + { value: 'warm', label: 'Warm' }, + ], + allow_blank: false, + }, + ], + }), + ]) + + const user = userEvent.setup() + renderInspector({}, vi.fn(), { stateful: true }) + + const templateOverride = await screen.findByLabelText('Template Override') + await waitFor(() => { + expect(within(templateOverride).getByRole('option', { name: /bearing studio/i })).toBeInTheDocument() + }) + await user.selectOptions(templateOverride, '0d87b85f-c454-4d61-a124-d5b59e6a43a2') + + expect(await screen.findByText(/1 canvas socket is required/i)).toBeInTheDocument() + expect(screen.getByText('Socket 1')).toBeInTheDocument() + expect(await screen.findByText(/2 local variables are edited in the inspector/i)).toBeInTheDocument() + expect(screen.getByText(/Static: Template Override/i)).toBeInTheDocument() + expect(screen.getByText(/Template-driven: Studio Variant/i)).toBeInTheDocument() + }) +}) diff --git a/frontend/src/__tests__/components/cadUtils.test.ts b/frontend/src/__tests__/components/cadUtils.test.ts index 4e2d26e..aa5080c 100644 --- a/frontend/src/__tests__/components/cadUtils.test.ts +++ b/frontend/src/__tests__/components/cadUtils.test.ts @@ -1,7 +1,7 @@ import { describe, expect, test } from 'vitest' import * as THREE from 'three' -import { buildScenePartRegistry, convertSceneManifestMaterials, remapToPartKeys, resolveObjectPartKey } from '../../components/cad/cadUtils' +import { alignSceneManifestToLogicalPartKeys, buildEffectiveViewerMaterials, buildScenePartRegistry, convertSceneManifestMaterials, mergeViewerMaterialSources, remapToPartKeys, resolveObjectPartKey } from '../../components/cad/cadUtils' describe('cadUtils scene manifest conversion', () => { test('uses scene manifest part keys as authoritative viewer material map', () => { @@ -31,6 +31,55 @@ describe('cadUtils scene manifest conversion', () => { }, }) }) + + test('adds 
viewer logical keys when manifest stores repeated leaf instances under deduplicated part keys', () => { + const materials = alignSceneManifestToLogicalPartKeys( + convertSceneManifestMaterials([ + { + part_key: 'kero_z_575693_qp_drh_isb_1_1', + effective_material: 'HARTOMAT_010101_Steel-Bare', + }, + { + part_key: 'kero_z_575693_qp_drh_isb_1_1_2', + effective_material: 'HARTOMAT_010101_Steel-Bare', + }, + ]), + new Set(['kero_z_575693_qp_drh_isb_1']), + ) + + expect(materials.kero_z_575693_qp_drh_isb_1).toEqual({ + type: 'library', + value: 'HARTOMAT_010101_Steel-Bare', + }) + }) + + test('backfills helper and af-instance logical keys through legacy fuzzy lookup when manifest keys differ', () => { + const materials = alignSceneManifestToLogicalPartKeys( + convertSceneManifestMaterials([ + { + part_key: 'kero_z_575693_qp_drh_isb', + effective_material: 'HARTOMAT_010101_Steel-Bare', + }, + { + part_key: 'kero_z_575693_qp_drh_isb_1_1', + effective_material: 'HARTOMAT_010101_Steel-Bare', + }, + ]), + new Set([ + 'kero_z_575693_qp_drh_isb_1', + 'kero_z_575693_qp_drh_isb_1_af0', + ]), + ) + + expect(materials.kero_z_575693_qp_drh_isb_1).toEqual({ + type: 'library', + value: 'HARTOMAT_010101_Steel-Bare', + }) + expect(materials.kero_z_575693_qp_drh_isb_1_af0).toEqual({ + type: 'library', + value: 'HARTOMAT_010101_Steel-Bare', + }) + }) }) describe('cadUtils legacy fallback remapping', () => { @@ -113,8 +162,82 @@ describe('cadUtils legacy fallback remapping', () => { }) }) +describe('cadUtils viewer material source merge', () => { + test('keeps fallback assignments authoritative while filling manifest-only gaps', () => { + const merged = mergeViewerMaterialSources( + { + rwdr_b_f_802044_tr4_h122bk: { + type: 'library', + value: 'HARTOMAT_010101_Steel-Bare', + }, + usd_only_part: { + type: 'library', + value: 'HARTOMAT_050101_Elastomer-Black', + }, + }, + { + rwdr_b_f_802044_tr4_h122bk: { + type: 'library', + value: 'Steel--Stahl', + }, + }, + ) + + 
expect(merged).toEqual({ + rwdr_b_f_802044_tr4_h122bk: { + type: 'library', + value: 'Steel--Stahl', + }, + usd_only_part: { + type: 'library', + value: 'HARTOMAT_050101_Elastomer-Black', + }, + }) + }) + + test('applies manual overrides last on top of fallback-authoritative merged sources', () => { + const merged = buildEffectiveViewerMaterials( + { + rwdr_b_f_802044_tr4_h122bk: { + type: 'library', + value: 'HARTOMAT_010101_Steel-Bare', + }, + usd_only_part: { + type: 'library', + value: 'HARTOMAT_050101_Elastomer-Black', + }, + }, + { + rwdr_b_f_802044_tr4_h122bk: { + type: 'library', + value: 'Steel--Stahl', + }, + }, + { + rwdr_b_f_802044_tr4_h122bk: '#123456', + manual_only_part: 'HARTOMAT_070707_Test-Material', + }, + ) + + expect(merged).toEqual({ + rwdr_b_f_802044_tr4_h122bk: { + type: 'hex', + value: '#123456', + }, + usd_only_part: { + type: 'library', + value: 'HARTOMAT_050101_Elastomer-Black', + }, + manual_only_part: { + type: 'library', + value: 'HARTOMAT_070707_Test-Material', + }, + }) + }) +}) + describe('cadUtils scene graph part-key registry', () => { - test('inherits instance part keys from ancestor nodes and keeps logical keys from scene metadata', () => { + test('inherits instance part keys from ancestor nodes while excluding helper-only logical keys from renderable counts', () => { const scene = new THREE.Group() const instanceGroup = new THREE.Group() @@ -139,14 +262,10 @@ describe('cadUtils scene graph part-key registry', () => { expect(meshRegistry).toHaveLength(1) expect(meshRegistry[0].partKey).toBe('kero_z_575693_qp_drh_isb_1') expect(resolveObjectPartKey(mesh, {})).toBe('kero_z_575693_qp_drh_isb_1') - expect(logicalPartKeys).toEqual(new Set([ - 'kero_z_575693_qp_drh_isb_1', - 'rwdr_skel_f_802044_tr4_h122bk', - 'f_802044_tr4_h122bk_04', - ])) + expect(logicalPartKeys).toEqual(new Set(['kero_z_575693_qp_drh_isb_1'])) }) - test('prefers sibling semantic instance nodes over mesh-local exporter keys when transforms match', () => { + 
test('prefers explicit mesh-local exporter keys over sibling semantic instance nodes when transforms match', () => { const scene = new THREE.Group() const assembly = new THREE.Group() @@ -169,12 +288,12 @@ describe('cadUtils scene graph part-key registry', () => { const { meshRegistry } = buildScenePartRegistry(scene, {}) expect(meshRegistry).toHaveLength(1) - expect(meshRegistry[0].partKey).toBe('kero_z_575693_qp_drh_isb_1') - expect(mesh.userData.partKey).toBe('kero_z_575693_qp_drh_isb_1') - expect(resolveObjectPartKey(mesh, {})).toBe('kero_z_575693_qp_drh_isb_1') + expect(meshRegistry[0].partKey).toBe('kero_z_575693_qp_drh_isb_1_1') + expect(mesh.userData.partKey).toBe('kero_z_575693_qp_drh_isb_1_1') + expect(resolveObjectPartKey(mesh, {})).toBe('kero_z_575693_qp_drh_isb_1_1') }) - test('prefers sibling semantic instance nodes even when transforms do not match', () => { + test('falls back to sibling semantic instance nodes when explicit mesh keys are absent, even when transforms do not match', () => { const scene = new THREE.Group() const assembly = new THREE.Group() @@ -185,7 +304,6 @@ describe('cadUtils scene graph part-key registry', () => { const mesh = new THREE.Mesh(new THREE.BufferGeometry(), new THREE.MeshStandardMaterial()) mesh.name = 'KERO_Z-575693-QP-DRH_ISB_1_1' - mesh.userData.partKey = 'kero_z_575693_qp_drh_isb_1_1' mesh.position.set(0.2422435981345, 0.06134441033723, 0.2109037401181) assembly.add(semanticSibling) @@ -199,4 +317,19 @@ describe('cadUtils scene graph part-key registry', () => { expect(mesh.userData.partKey).toBe('kero_z_575693_qp_drh_isb_1') expect(resolveObjectPartKey(mesh, {})).toBe('kero_z_575693_qp_drh_isb_1') }) + + test('does not synthesize pseudo part keys from normalized mesh names when no authoritative mapping exists', () => { + const scene = new THREE.Group() + const mesh = new THREE.Mesh(new THREE.BufferGeometry(), new THREE.MeshStandardMaterial()) + mesh.name = 'RWDR_B_F-802044_TR4_H122BK_AF0' + scene.add(mesh) + + const 
{ meshRegistry, logicalPartKeys, unresolvedMeshNames } = buildScenePartRegistry(scene, {}) + + expect(resolveObjectPartKey(mesh, {})).toBe('') + expect(mesh.userData.partKey).toBeUndefined() + expect(meshRegistry).toHaveLength(0) + expect(logicalPartKeys).toEqual(new Set()) + expect(unresolvedMeshNames).toEqual(new Set(['RWDR_B_F-802044_TR4_H122BK'])) + }) }) diff --git a/frontend/src/__tests__/components/outputTypeRolloutPresentation.test.ts b/frontend/src/__tests__/components/outputTypeRolloutPresentation.test.ts new file mode 100644 index 0000000..09187fd --- /dev/null +++ b/frontend/src/__tests__/components/outputTypeRolloutPresentation.test.ts @@ -0,0 +1,49 @@ +import { describe, expect, test } from 'vitest' + +import { getOutputTypeRolloutPresentation } from '../../components/admin/outputTypeRolloutPresentation' + +describe('outputTypeRolloutPresentation', () => { + test('describes unlinked output types as fully legacy', () => { + expect(getOutputTypeRolloutPresentation({ + hasWorkflowLink: false, + workflowRolloutMode: 'legacy_only', + })).toEqual(expect.objectContaining({ + badgeLabel: 'Legacy Only', + statusLabel: 'Production: Legacy', + rowSummary: 'No linked graph workflow.', + })) + }) + + test('describes shadow rollout as legacy-authoritative observer mode', () => { + expect(getOutputTypeRolloutPresentation({ + hasWorkflowLink: true, + workflowRolloutMode: 'shadow', + })).toEqual(expect.objectContaining({ + badgeLabel: 'Shadow', + statusLabel: 'Production: Legacy', + rowSummary: 'Graph observes only; legacy remains authoritative.', + })) + }) + + test('describes graph rollout as production-graph with legacy fallback', () => { + expect(getOutputTypeRolloutPresentation({ + hasWorkflowLink: true, + workflowRolloutMode: 'graph', + })).toEqual(expect.objectContaining({ + badgeLabel: 'Graph Authoritative', + statusLabel: 'Production: Graph', + rowSummary: 'Graph drives production with legacy fallback armed.', + })) + }) + + test('elevates blocking issues 
above rollout mode', () => { + expect(getOutputTypeRolloutPresentation({ + hasWorkflowLink: true, + workflowRolloutMode: 'graph', + hasBlockingIssues: true, + })).toEqual(expect.objectContaining({ + badgeLabel: 'Contract Blocked', + statusLabel: 'Do Not Promote', + })) + }) +}) diff --git a/frontend/src/__tests__/components/workflowAuthoringActions.test.ts b/frontend/src/__tests__/components/workflowAuthoringActions.test.ts new file mode 100644 index 0000000..5cbe3c1 --- /dev/null +++ b/frontend/src/__tests__/components/workflowAuthoringActions.test.ts @@ -0,0 +1,50 @@ +import { describe, expect, test, vi } from 'vitest' + +import { + bindWorkflowAuthoringInsertActions, + type WorkflowAuthoringActions, +} from '../../components/workflows/workflowAuthoringActions' + +describe('bindWorkflowAuthoringInsertActions', () => { + test('binds preferred canvas position into insert actions', () => { + const insertNode = vi.fn() + const insertModule = vi.fn() + const insertReferencePath = vi.fn() + const actions: WorkflowAuthoringActions = { + insertNode, + insertModule, + insertReferencePath, + } + + const bindings = bindWorkflowAuthoringInsertActions(actions, { + preferredPosition: { x: 120, y: 240 }, + }) + + bindings.onSelectStep?.('blender_still') + bindings.onInsertModule?.('still_render_core') + bindings.onInsertReferencePath?.('still_render_reference') + + expect(insertNode).toHaveBeenCalledWith('blender_still', { x: 120, y: 240 }) + expect(insertModule).toHaveBeenCalledWith('still_render_core', { x: 120, y: 240 }) + expect(insertReferencePath).toHaveBeenCalledWith('still_render_reference', { x: 120, y: 240 }) + }) + + test('runs the after-insert callback after successful bound actions', () => { + const afterInsert = vi.fn() + const insertNode = vi.fn() + + const bindings = bindWorkflowAuthoringInsertActions( + { + insertNode, + }, + { + onAfterInsert: afterInsert, + }, + ) + + bindings.onSelectStep?.('resolve_template') + + 
expect(insertNode).toHaveBeenCalledWith('resolve_template', undefined) + expect(afterInsert).toHaveBeenCalledOnce() + }) +}) diff --git a/frontend/src/__tests__/components/workflowAuthoringGuidance.test.ts b/frontend/src/__tests__/components/workflowAuthoringGuidance.test.ts new file mode 100644 index 0000000..b3a8b34 --- /dev/null +++ b/frontend/src/__tests__/components/workflowAuthoringGuidance.test.ts @@ -0,0 +1,227 @@ +import { describe, expect, test } from 'vitest' + +import type { WorkflowNodeDefinition } from '../../api/workflows' +import { getWorkflowAuthoringPlan } from '../../components/workflows/workflowAuthoringGuidance' +import { + getWorkflowAuthoringSurfaceModel, + resolveWorkflowAuthoringSection, +} from '../../components/workflows/workflowAuthoringSurface' + +const definitions: WorkflowNodeDefinition[] = [ + { + step: 'order_line_setup', + label: 'Order Line Setup', + family: 'order_line', + module_key: 'order_line.prepare_render_context', + category: 'input', + description: 'Prepare order line.', + node_type: 'inputNode', + icon: 'refresh-cw', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'order_line' }, + output_contract: { context: 'order_line', provides: ['order_line'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: 'legacy.order_line_setup', + }, + { + step: 'resolve_template', + label: 'Resolve Template', + family: 'order_line', + module_key: 'rendering.resolve_template', + category: 'processing', + description: 'Resolve template.', + node_type: 'processNode', + icon: 'layers', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'order_line', requires: ['order_line'] }, + output_contract: { context: 'order_line', provides: ['render_template'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: 'legacy.resolve_template', + }, + { + step: 
'auto_populate_materials', + label: 'Auto Populate Materials', + family: 'order_line', + module_key: 'materials.auto_populate', + category: 'processing', + description: 'Populate materials.', + node_type: 'processNode', + icon: 'layers', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'order_line', requires: ['order_line'] }, + output_contract: { context: 'order_line', provides: ['cad_materials'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: 'legacy.auto_populate_materials', + }, + { + step: 'glb_bbox', + label: 'Compute Bounding Box', + family: 'order_line', + module_key: 'geometry.compute_bbox', + category: 'processing', + description: 'Compute bbox.', + node_type: 'processNode', + icon: 'box', + defaults: {}, + fields: [], + execution_kind: 'native', + legacy_compatible: false, + input_contract: { context: 'order_line', requires: ['order_line'] }, + output_contract: { context: 'order_line', provides: ['bbox'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: null, + }, + { + step: 'material_map_resolve', + label: 'Resolve Material Map', + family: 'order_line', + module_key: 'materials.resolve_map', + category: 'processing', + description: 'Resolve material map.', + node_type: 'processNode', + icon: 'layers', + defaults: {}, + fields: [], + execution_kind: 'native', + legacy_compatible: false, + input_contract: { context: 'order_line', requires: ['order_line'] }, + output_contract: { context: 'order_line', provides: ['material_map'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: null, + }, + { + step: 'blender_still', + label: 'Blender Still', + family: 'order_line', + module_key: 'render.production.still', + category: 'rendering', + description: 'Render still.', + node_type: 'renderNode', + icon: 'camera', + defaults: {}, + fields: [], + execution_kind: 'native', + legacy_compatible: false, + 
input_contract: { context: 'order_line', requires: ['order_line'] }, + output_contract: { context: 'order_line', provides: ['render_image'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: null, + }, + { + step: 'output_save', + label: 'Save Output', + family: 'order_line', + module_key: 'media.save_output', + category: 'output', + description: 'Persist output.', + node_type: 'outputNode', + icon: 'download', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'order_line', requires: ['render_image'] }, + output_contract: { context: 'order_line', provides: ['saved_output'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: 'legacy.output_save', + }, + { + step: 'notify', + label: 'Notify Result', + family: 'order_line', + module_key: 'notifications.emit', + category: 'output', + description: 'Notify completion.', + node_type: 'outputNode', + icon: 'bell', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'order_line', requires: ['saved_output'] }, + output_contract: { context: 'order_line', provides: ['notification'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: 'legacy.notify', + }, +] + +describe('workflow authoring guidance', () => { + test('derives a single shared order-line authoring plan', () => { + const plan = getWorkflowAuthoringPlan(definitions, 'order_line', ['blender_still']) + + expect(plan.referenceBundles).toHaveLength(1) + expect(plan.moduleBundles).toHaveLength(2) + expect(plan.referenceBundles[0]?.presentCount).toBe(1) + expect(plan.moduleBundles.find(bundle => bundle.id === 'still_render_core')?.presentCount).toBe(1) + expect(plan.stageProgress.map(stage => stage.id)).toEqual([ + 'still_render_reference', + 'still_render_core', + 'output_publish_notify', + 'order_line_setup', + ]) + 
expect(plan.gapFillDefinitions.map(definition => definition.step)).toEqual([ + 'order_line_setup', + 'resolve_template', + 'auto_populate_materials', + ]) + }) + + test('hides starter-only guidance for mixed graphs', () => { + const plan = getWorkflowAuthoringPlan(definitions, 'mixed', ['blender_still']) + + expect(plan.title).toBe('Guided Authoring') + expect(plan.starterItems).toEqual([]) + expect(plan.stageProgress).toEqual([]) + expect(plan.authoringFlow[2]?.title).toBe('Starter Path') + }) + + test('derives one shared authoring surface model from the same plan', () => { + const surface = getWorkflowAuthoringSurfaceModel({ + definitions, + graphFamily: 'order_line', + activeSteps: ['order_line_setup'], + }) + + expect(surface.defaultSection).toBe('overview') + expect(surface.sections.map(section => section.key)).toEqual([ + 'overview', + 'paths', + 'modules', + 'starter', + 'nodes', + ]) + expect(surface.plan.referenceBundles[0]?.id).toBe('still_render_reference') + expect(surface.plan.moduleBundles.map(bundle => bundle.id)).toEqual([ + 'still_render_core', + 'output_publish_notify', + ]) + }) + + test('falls back to a valid section when the requested section is unavailable', () => { + const surface = getWorkflowAuthoringSurfaceModel({ + definitions, + graphFamily: 'mixed', + activeSteps: ['order_line_setup'], + }) + + expect(resolveWorkflowAuthoringSection('overview', surface.sections, surface.defaultSection)).toBe('nodes') + expect(resolveWorkflowAuthoringSection('nodes', surface.sections, surface.defaultSection)).toBe('nodes') + }) +}) diff --git a/frontend/src/__tests__/components/workflowEditorUi.test.tsx b/frontend/src/__tests__/components/workflowEditorUi.test.tsx index 49ef1ac..27f104e 100644 --- a/frontend/src/__tests__/components/workflowEditorUi.test.tsx +++ b/frontend/src/__tests__/components/workflowEditorUi.test.tsx @@ -1,4 +1,4 @@ -import { render, screen } from '@testing-library/react' +import { render, screen, within } from 
'@testing-library/react' import userEvent from '@testing-library/user-event' import { describe, expect, test, vi } from 'vitest' @@ -14,6 +14,11 @@ import { WorkflowCanvasToolbar } from '../../components/workflows/WorkflowCanvas import { WorkflowNodeContractCard } from '../../components/workflows/WorkflowNodeContractCard' import { WorkflowPreflightPanel } from '../../components/workflows/WorkflowPreflightPanel' import { WorkflowRunsPanel } from '../../components/workflows/WorkflowRunsPanel' +import { + bindWorkflowAuthoringInsertActions, + getWorkflowAuthoringEntryAction, +} from '../../components/workflows/workflowAuthoringActions' +import { getWorkflowAuthoringSurfaceModel } from '../../components/workflows/workflowAuthoringSurface' const nodeDefinitions: WorkflowNodeDefinition[] = [ { @@ -35,6 +40,177 @@ const nodeDefinitions: WorkflowNodeDefinition[] = [ artifact_roles_consumed: [], legacy_source: 'legacy.resolve_step_path', }, + { + step: 'occ_object_extract', + label: 'Extract OCC Objects', + family: 'cad_file', + module_key: 'cad.intake', + category: 'processing', + description: 'Extract assembly objects from CAD input.', + node_type: 'processNode', + icon: 'boxes', + defaults: {}, + fields: [], + execution_kind: 'native', + legacy_compatible: false, + input_contract: { context: 'cad_file', requires: ['step_path'] }, + output_contract: { context: 'cad_file', provides: ['cad_objects'] }, + artifact_roles_produced: [], + artifact_roles_consumed: ['step_file'], + legacy_source: null, + }, + { + step: 'occ_glb_export', + label: 'Export GLB', + family: 'cad_file', + module_key: 'cad.intake', + category: 'processing', + description: 'Export preview GLB from CAD geometry.', + node_type: 'processNode', + icon: 'box', + defaults: {}, + fields: [], + execution_kind: 'native', + legacy_compatible: false, + input_contract: { context: 'cad_file', requires: ['cad_objects'] }, + output_contract: { context: 'cad_file', provides: ['cad_preview'] }, + artifact_roles_produced: 
['cad_preview'], + artifact_roles_consumed: [], + legacy_source: null, + }, + { + step: 'stl_cache_generate', + label: 'Generate STL Cache', + family: 'cad_file', + module_key: 'cad.intake', + category: 'processing', + description: 'Build STL cache for downstream consumers.', + node_type: 'processNode', + icon: 'database', + defaults: {}, + fields: [], + execution_kind: 'native', + legacy_compatible: false, + input_contract: { context: 'cad_file', requires: ['cad_objects'] }, + output_contract: { context: 'cad_file', provides: ['stl_cache'] }, + artifact_roles_produced: ['stl_cache'], + artifact_roles_consumed: [], + legacy_source: null, + }, + { + step: 'thumbnail_save', + label: 'Publish Thumbnail', + family: 'cad_file', + module_key: 'cad.intake', + category: 'output', + description: 'Persist preview thumbnail output.', + node_type: 'outputNode', + icon: 'image', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'cad_file', requires: ['cad_preview'] }, + output_contract: { context: 'cad_file', provides: ['thumbnail'] }, + artifact_roles_produced: ['thumbnail'], + artifact_roles_consumed: ['cad_preview'], + legacy_source: 'legacy.thumbnail_save', + }, + { + step: 'order_line_setup', + label: 'Order Line Setup', + family: 'order_line', + module_key: 'order_line.prepare_render_context', + category: 'input', + description: 'Prepare order-line render context.', + node_type: 'inputNode', + icon: 'refresh-cw', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'order_line' }, + output_contract: { context: 'order_line', provides: ['order_line'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: 'legacy.order_line_setup', + }, + { + step: 'resolve_template', + label: 'Resolve Template', + family: 'order_line', + module_key: 'rendering.resolve_template', + category: 'processing', + description: 'Resolve render 
template.', + node_type: 'processNode', + icon: 'layers', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'order_line', requires: ['order_line'] }, + output_contract: { context: 'order_line', provides: ['render_template'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: 'legacy.resolve_template', + }, + { + step: 'auto_populate_materials', + label: 'Auto Populate Materials', + family: 'order_line', + module_key: 'materials.auto_populate', + category: 'processing', + description: 'Populate materials automatically.', + node_type: 'processNode', + icon: 'layers', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'order_line', requires: ['order_line'] }, + output_contract: { context: 'order_line', provides: ['cad_materials'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: 'legacy.auto_populate_materials', + }, + { + step: 'glb_bbox', + label: 'Compute Bounding Box', + family: 'shared', + module_key: 'geometry.compute_bbox', + category: 'processing', + description: 'Compute GLB bounding box.', + node_type: 'processNode', + icon: 'layers', + defaults: {}, + fields: [], + execution_kind: 'native', + legacy_compatible: false, + input_contract: { requires: ['cad_preview'] }, + output_contract: { provides: ['bbox'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: null, + }, + { + step: 'material_map_resolve', + label: 'Resolve Material Map', + family: 'order_line', + module_key: 'materials.resolve_map', + category: 'processing', + description: 'Resolve material mapping.', + node_type: 'processNode', + icon: 'layers', + defaults: {}, + fields: [], + execution_kind: 'native', + legacy_compatible: false, + input_contract: { context: 'order_line', requires: ['order_line'] }, + output_contract: { context: 'order_line', provides: ['material_map'] }, + 
artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: null, + }, { step: 'blender_still', label: 'Blender Still', @@ -54,6 +230,44 @@ const nodeDefinitions: WorkflowNodeDefinition[] = [ artifact_roles_consumed: ['cad_preview'], legacy_source: null, }, + { + step: 'output_save', + label: 'Save Output', + family: 'order_line', + module_key: 'media.save_output', + category: 'output', + description: 'Save rendered output.', + node_type: 'outputNode', + icon: 'download', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'order_line', requires: ['rendered_image'] }, + output_contract: { context: 'order_line', provides: ['saved_output'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: 'legacy.output_save', + }, + { + step: 'notify', + label: 'Notify Result', + family: 'order_line', + module_key: 'notifications.emit', + category: 'output', + description: 'Emit completion notification.', + node_type: 'outputNode', + icon: 'bell', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'order_line', requires: ['saved_output'] }, + output_contract: { context: 'order_line', provides: ['notification'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: 'legacy.notify', + }, ] const shadowRun: WorkflowRun = { @@ -72,7 +286,10 @@ const shadowRun: WorkflowRun = { id: 'node-result-1', node_name: 'Blender Still', status: 'completed', - output: null, + output: { + image_path: '/tmp/render.png', + artifact_role: 'png_output', + }, log: 'Rendered successfully.', duration_s: 2.3, created_at: '2026-04-08T10:02:00Z', @@ -87,6 +304,16 @@ const shadowComparison: WorkflowRunComparison = { execution_mode: 'shadow', status: 'matched', summary: 'Observer output matches authoritative output.', + rollout_gate_verdict: 'pass', + workflow_rollout_ready: true, + workflow_rollout_status: 
'ready_for_rollout', + rollout_reasons: [ + 'Observer output matches the authoritative legacy output byte-for-byte.', + ], + rollout_thresholds: { + pass_max_mean_pixel_delta: 0.000001, + warn_max_mean_pixel_delta: 0.02, + }, authoritative_output: { path: null, storage_key: null, @@ -122,7 +349,7 @@ const preflightResponse: WorkflowPreflightResponse = { summary: 'Graph requires one missing upstream artifact.', resolved_order_line_id: 'ol-1', resolved_cad_file_id: null, - unsupported_node_ids: [], + unsupported_node_ids: ['node-legacy-1'], issues: [ { severity: 'warning', @@ -168,6 +395,7 @@ describe('WorkflowNodeContractCard', () => { inputContextLabel="Order Rendering" outputContextLabel="Order Rendering" requiredInputs={['order_line', 'render_template']} + requiredAnyInputs={[['rendered_image', 'rendered_frames']]} consumedArtifacts={['cad_preview']} providedOutputs={['render_image']} producedArtifacts={['png_output']} @@ -180,7 +408,8 @@ describe('WorkflowNodeContractCard', () => { expect(screen.getByText('legacy.still_render')).toBeInTheDocument() expect(screen.getByText('Order Line')).toBeInTheDocument() expect(screen.getByText('Render Template')).toBeInTheDocument() - expect(screen.getByText('Cad Preview')).toBeInTheDocument() + expect(screen.getByText('Any of: Rendered Image / Rendered Frames')).toBeInTheDocument() + expect(screen.getByText('CAD Preview')).toBeInTheDocument() expect(screen.getByText('Render Image')).toBeInTheDocument() expect(screen.getByText('Png Output')).toBeInTheDocument() }) @@ -197,19 +426,58 @@ describe('WorkflowCanvasToolbar', () => { const onPreflight = vi.fn() const onDispatch = vi.fn() const onSave = vi.fn() + const onRollbackOutputType = vi.fn() render( { ]} selectedEdgeCount={2} canAutoLayout + canPreflight + canDispatch hasValidationErrors={false} isPreflightPending={false} isDispatchPending={false} + isContextOptionsLoading={false} isSaving={false} + rollbackPendingOutputTypeId={null} + preflightState="ready" + 
authoringActions={{ openNodeMenu: onOpenNodeMenu }} + authoringEntryAction={{ + label: 'Author', + title: 'Open guided workflow authoring browser', + helper: 'Open reference paths, production modules, starter steps, and raw nodes.', + icon: () => null, + }} onDispatchContextIdChange={onDispatchContextIdChange} onExecutionModeChange={onExecutionModeChange} - onOpenNodeMenu={onOpenNodeMenu} onAutoLayout={onAutoLayout} onDeleteSelectedEdges={onDeleteSelectedEdges} onPreflight={onPreflight} onDispatch={onDispatch} onSave={onSave} + onRollbackOutputType={onRollbackOutputType} />, ) expect(screen.getByText('Workflow Canvas')).toBeInTheDocument() expect(screen.getByText('Still Image - Graph')).toBeInTheDocument() expect(screen.getByText('Still Graph')).toBeInTheDocument() + expect(screen.getAllByText('Shadow').length).toBeGreaterThan(0) + expect(screen.getByText('Legacy Authoritative')).toBeInTheDocument() + expect(screen.getByText(/2 linked output types/i)).toBeInTheDocument() + expect(screen.getByText('Rollout Controls')).toBeInTheDocument() + expect(screen.getByText('Shadow Still Output')).toBeInTheDocument() + expect(screen.getByText('Legacy Archive Output')).toBeInTheDocument() + expect(screen.getAllByText('Order Line').length).toBeGreaterThan(0) + expect(screen.getAllByText('Product A · Still').length).toBeGreaterThan(0) expect(screen.getByText('Right-click to add')).toBeInTheDocument() + expect(screen.getByText('Preflight ready')).toBeInTheDocument() expect(screen.getByRole('button', { name: 'Delete (2)' })).toBeEnabled() + const rollbackButtons = screen.getAllByRole('button', { name: 'Set Legacy' }) + expect(rollbackButtons.length).toBe(2) + expect(rollbackButtons[0]?.getAttribute('title')).toContain('legacy_only') + expect(rollbackButtons[1]).toBeDisabled() - await user.click(screen.getByRole('button', { name: 'Node' })) + await user.click(screen.getByRole('button', { name: 'Author' })) await user.click(screen.getByRole('button', { name: 'Align' })) await 
user.click(screen.getByRole('button', { name: 'Delete (2)' })) + await user.click(rollbackButtons[0] as HTMLElement) await user.click(screen.getByRole('button', { name: 'Dry Run' })) await user.click(screen.getByRole('button', { name: 'Run' })) await user.click(screen.getByRole('button', { name: 'Save' })) - await user.type(screen.getByPlaceholderText('context id'), 'order-123') - await user.selectOptions(screen.getByRole('combobox'), 'shadow') + await user.selectOptions(screen.getByRole('combobox', { name: 'Order line context' }), 'line-2') + await user.selectOptions(screen.getByRole('combobox', { name: 'Mode' }), 'shadow') expect(onOpenNodeMenu).toHaveBeenCalledOnce() expect(onAutoLayout).toHaveBeenCalledOnce() expect(onDeleteSelectedEdges).toHaveBeenCalledOnce() + expect(onRollbackOutputType).toHaveBeenCalledWith('ot-1') expect(onPreflight).toHaveBeenCalledOnce() expect(onDispatch).toHaveBeenCalledOnce() expect(onSave).toHaveBeenCalledOnce() @@ -263,28 +558,54 @@ describe('WorkflowCanvasToolbar', () => { workflowName="CAD Intake" blueprintLabel={null} blueprintDescription={null} + authoringFamilyLabel="CAD Intake" + authoringFamilyClassName="bg-sky-100 text-sky-700" graphFamilyLabel="CAD Intake" graphFamilyClassName="bg-sky-100 text-sky-700" executionMode="legacy" executionModeLabel="Legacy" executionModeClassName="bg-slate-100 text-slate-700" executionModeHint="Legacy dispatcher stays authoritative." + rolloutBadgeLabel="Unlinked" + rolloutBadgeClassName="bg-surface-muted text-content-muted" + rolloutStatusLabel="Legacy Only" + rolloutStatusClassName="bg-slate-100 text-slate-700" + rolloutSummary="No output types are linked to this workflow yet." 
+ linkedOutputTypeCount={0} + linkedOutputTypes={[]} + dispatchContextKind="cad_file" + dispatchContextLabel="CAD File" dispatchContextId="" + dispatchContextSummary={null} + dispatchContextMeta={null} + orderLineContextGroups={[]} executionModes={[{ value: 'legacy', label: 'Legacy' }]} selectedEdgeCount={0} canAutoLayout={false} + canPreflight={false} + canDispatch={false} hasValidationErrors isPreflightPending={false} isDispatchPending={false} + isContextOptionsLoading={false} isSaving={false} + rollbackPendingOutputTypeId={null} + preflightState="required" + authoringActions={{ openNodeMenu: vi.fn() }} + authoringEntryAction={{ + label: 'Node', + title: 'Open raw node browser', + helper: 'Open the searchable node catalog directly on the canvas.', + icon: () => null, + }} onDispatchContextIdChange={vi.fn()} onExecutionModeChange={vi.fn()} - onOpenNodeMenu={vi.fn()} onAutoLayout={vi.fn()} onDeleteSelectedEdges={vi.fn()} onPreflight={vi.fn()} onDispatch={vi.fn()} onSave={vi.fn()} + onRollbackOutputType={vi.fn()} />, ) @@ -305,7 +626,8 @@ describe('NodeCommandMenu', () => { onSelectStep(step) }} onClose={vi.fn()} renderIcon={iconName => {iconName}} />, @@ -315,22 +637,187 @@ describe('NodeCommandMenu', () => { await user.type(screen.getByPlaceholderText('Search nodes'), 'blender{enter}') expect(onSelectStep).toHaveBeenCalledWith('blender_still') + expect(screen.getByRole('button', { name: 'All Categories' })).toBeInTheDocument() + expect(screen.getByText('Quick Insert')).toBeInTheDocument() expect(screen.getByText('Graph Nodes')).toBeInTheDocument() }) + + test('supports module insertion directly from the canvas authoring menu', async () => { + const user = userEvent.setup() + const onInsertReferencePath = vi.fn() + const onInsertModule = vi.fn() + const onSelectStep = vi.fn() + + render( + onSelectStep(step), + insertReferencePath: bundleId => onInsertReferencePath(bundleId), + insertModule: bundleId => onInsertModule(bundleId), + }} + onClose={vi.fn()} + 
renderIcon={iconName => {iconName}} + />, + ) + + expect(screen.getByRole('button', { name: 'Overview' })).toBeInTheDocument() + expect(screen.getByRole('button', { name: 'Paths' })).toBeInTheDocument() + expect(screen.getByRole('button', { name: 'Modules' })).toBeInTheDocument() + expect(screen.getByRole('button', { name: 'Starter' })).toBeInTheDocument() + expect(screen.getByRole('button', { name: 'Nodes' })).toBeInTheDocument() + expect(screen.getByText('Recommended Path')).toBeInTheDocument() + expect(screen.getByRole('button', { name: 'Insert Still Reference' })).toBeInTheDocument() + + await user.click(screen.getByRole('button', { name: 'Insert Still Reference' })) + + expect(onInsertReferencePath).toHaveBeenCalledWith('still_render_reference') + + await user.click(screen.getByRole('button', { name: 'Paths' })) + expect(screen.getByText('Reference Paths')).toBeInTheDocument() + expect(screen.getByRole('button', { name: 'Insert Still Render Reference' })).toBeInTheDocument() + + await user.click(screen.getByRole('button', { name: 'Insert Still Render Reference' })) + + expect(onInsertReferencePath).toHaveBeenNthCalledWith(2, 'still_render_reference') + + await user.click(screen.getByRole('button', { name: 'Modules' })) + expect(screen.getByText('Production Modules')).toBeInTheDocument() + + await user.click(screen.getByRole('button', { name: 'Insert Still Render Core' })) + + expect(onInsertModule).toHaveBeenCalledWith('still_render_core') + }) }) describe('NodeDefinitionsPanel', () => { - test('groups nodes by runtime bucket and module in the utility rail library view', () => { + test('organizes library authoring into overview and focused browser sections', async () => { + const user = userEvent.setup() render() expect(screen.getByText('Node Library')).toBeInTheDocument() + expect(screen.getByText('Authoring Browser')).toBeInTheDocument() + expect(screen.getByText('Authoring Flow')).toBeInTheDocument() + expect(screen.getByRole('button', { name: 'Paths' 
})).toBeInTheDocument() + expect(screen.getByRole('button', { name: 'Modules' })).toBeInTheDocument() + expect(screen.getByRole('button', { name: 'Nodes' })).toBeInTheDocument() + + await user.click(screen.getByRole('button', { name: 'Paths' })) + expect(screen.getByText('Reference Paths')).toBeInTheDocument() + expect(screen.getByText('Still Render Reference')).toBeInTheDocument() + + await user.click(screen.getByRole('button', { name: 'Modules' })) + expect(screen.getByText('Production Modules')).toBeInTheDocument() + expect(screen.getByText('Still Render Core')).toBeInTheDocument() + + await user.click(screen.getByRole('button', { name: 'Nodes' })) + expect(screen.getAllByText('Raw Node Catalog').length).toBeGreaterThan(0) + expect(screen.getByText('Quick Insert')).toBeInTheDocument() + expect(screen.getByText('Runtime')).toBeInTheDocument() + expect(screen.getByText('Family')).toBeInTheDocument() + expect(screen.getByText('Category')).toBeInTheDocument() + expect(screen.getByPlaceholderText('Search modules')).toBeInTheDocument() expect(screen.getAllByText('CAD Intake').length).toBeGreaterThan(0) expect(screen.getAllByText('Order Rendering').length).toBeGreaterThan(0) expect(screen.getByText('Legacy Nodes')).toBeInTheDocument() expect(screen.getByText('Graph Nodes')).toBeInTheDocument() - expect(screen.getByText('Blender Still')).toBeInTheDocument() + expect(screen.getAllByText('Blender Still').length).toBeGreaterThan(0) expect(screen.getAllByText('Graph').length).toBeGreaterThan(0) expect(screen.getByRole('button', { name: 'All Modules' })).toBeInTheDocument() + expect(screen.getAllByText('Cad').length).toBeGreaterThan(0) + expect(screen.getAllByText('Notifications').length).toBeGreaterThan(0) + }) + + test('shows starter-path progress for still-render authoring flows', async () => { + const user = userEvent.setup() + const onInsertReferencePath = vi.fn() + const onInsertModule = vi.fn() + const onSelectStep = vi.fn() + render( + 
onInsertReferencePath(bundleId), + insertModule: bundleId => onInsertModule(bundleId), + insertNode: step => onSelectStep(step), + }} + />, + ) + + const stageStatusHeading = screen.getByText('Stage Status') + const recommendedPathHeading = screen.getByText('Recommended Path') + + expect(stageStatusHeading).toBeInTheDocument() + expect(recommendedPathHeading).toBeInTheDocument() + expect( + stageStatusHeading.compareDocumentPosition(recommendedPathHeading) & + Node.DOCUMENT_POSITION_FOLLOWING, + ).toBeTruthy() + expect(screen.getByText('Still Render Reference')).toBeInTheDocument() + expect(screen.getByRole('button', { name: 'Reapply Still Reference' })).toBeInTheDocument() + expect(screen.getAllByRole('button', { name: 'Insert Publish' }).length).toBeGreaterThan(0) + expect(screen.getAllByRole('button', { name: 'Add Order Line Setup' }).length).toBeGreaterThan(0) + + await user.click(screen.getByRole('button', { name: 'Reapply Still Reference' })) + await user.click(screen.getAllByRole('button', { name: 'Insert Publish' })[0] as HTMLElement) + await user.click(screen.getAllByRole('button', { name: 'Add Order Line Setup' })[0] as HTMLElement) + + expect(onInsertReferencePath).toHaveBeenCalledWith('still_render_reference') + expect(onInsertModule).toHaveBeenCalledWith('output_publish_notify') + expect(onSelectStep).toHaveBeenCalledWith('order_line_setup') + + await user.click(screen.getByRole('button', { name: 'Starter' })) + expect(screen.getByText('Starter Path')).toBeInTheDocument() + expect(screen.getAllByText('Still-render assembly').length).toBeGreaterThan(0) + expect(screen.getAllByText('1/8 present').length).toBeGreaterThan(0) + expect(screen.getAllByText('Present').length).toBeGreaterThan(0) + }) + + test('gives CAD authoring the same guided reference-path flow without duplicate intake stages', async () => { + const user = userEvent.setup() + const onInsertReferencePath = vi.fn() + const onInsertModule = vi.fn() + const onSelectStep = vi.fn() + + render( + 
onInsertReferencePath(bundleId), + insertModule: bundleId => onInsertModule(bundleId), + insertNode: step => onSelectStep(step), + }} + />, + ) + + expect(screen.getByText('Stage Status')).toBeInTheDocument() + expect(screen.getByText('Start with the CAD intake assembly')).toBeInTheDocument() + expect(screen.getByText('CAD Intake Reference')).toBeInTheDocument() + expect(screen.getByRole('button', { name: 'Reapply CAD Intake Reference' })).toBeInTheDocument() + expect(screen.queryByRole('button', { name: 'Complete CAD Intake' })).not.toBeInTheDocument() + expect(screen.getAllByRole('button', { name: 'Add Extract OCC Objects' }).length).toBeGreaterThan(0) + + await user.click(screen.getByRole('button', { name: 'Reapply CAD Intake Reference' })) + await user.click(screen.getAllByRole('button', { name: 'Add Extract OCC Objects' })[0] as HTMLElement) + + expect(onInsertReferencePath).toHaveBeenCalledWith('cad_intake_reference') + expect(onInsertModule).not.toHaveBeenCalled() + expect(onSelectStep).toHaveBeenCalledWith('occ_object_extract') + + await user.click(screen.getByRole('button', { name: 'Paths' })) + expect(screen.getByText('Reference Paths')).toBeInTheDocument() + expect(screen.getByRole('button', { name: 'Insert CAD Intake Reference' })).toBeInTheDocument() + + await user.click(screen.getByRole('button', { name: 'Starter' })) + expect(screen.getByText('Starter Path')).toBeInTheDocument() + expect(screen.getAllByText('CAD intake assembly').length).toBeGreaterThan(0) + expect(screen.getAllByText('1/5 present').length).toBeGreaterThan(0) }) test('supports direct node insertion from the library sidebar', async () => { @@ -341,15 +828,89 @@ describe('NodeDefinitionsPanel', () => { onSelectStep(step) }} renderIcon={iconName => {iconName}} />, ) - await user.click(screen.getAllByRole('button', { name: 'Insert' })[1]) + await user.click(screen.getByRole('button', { name: 'Nodes' })) + const blenderCard = screen.getAllByText('Blender 
Still')[0]?.closest('div.rounded-lg') + expect(blenderCard).not.toBeNull() + await user.click(within(blenderCard as HTMLElement).getByRole('button', { name: 'Insert Blender Still' })) expect(onSelectStep).toHaveBeenCalledWith('blender_still') }) + + test('supports direct workflow-module insertion from the library sidebar', async () => { + const user = userEvent.setup() + const onInsertModule = vi.fn() + + render( + onInsertModule(bundleId) }} + />, + ) + + await user.click(screen.getByRole('button', { name: 'Modules' })) + await user.click(screen.getByRole('button', { name: 'Insert Still Render Core' })) + + expect(onInsertModule).toHaveBeenCalledWith('still_render_core') + }) +}) + +describe('workflowAuthoringActions', () => { + test('binds preferred position and after-insert callback once for every insert action', () => { + const insertNode = vi.fn() + const insertModule = vi.fn() + const insertReferencePath = vi.fn() + const onAfterInsert = vi.fn() + + const bindings = bindWorkflowAuthoringInsertActions( + { + insertNode, + insertModule, + insertReferencePath, + }, + { + preferredPosition: { x: 240, y: 180 }, + onAfterInsert, + }, + ) + + bindings.onSelectStep?.('blender_still') + bindings.onInsertModule?.('still_render_core') + bindings.onInsertReferencePath?.('still_render_reference') + + expect(insertNode).toHaveBeenCalledWith('blender_still', { x: 240, y: 180 }) + expect(insertModule).toHaveBeenCalledWith('still_render_core', { x: 240, y: 180 }) + expect(insertReferencePath).toHaveBeenCalledWith('still_render_reference', { x: 240, y: 180 }) + expect(onAfterInsert).toHaveBeenCalledTimes(3) + }) + + test('derives the primary authoring entry from the shared surface model', () => { + const guidedEntry = getWorkflowAuthoringEntryAction( + getWorkflowAuthoringSurfaceModel({ + definitions: nodeDefinitions, + graphFamily: 'order_line', + activeSteps: [], + }), + ) + const rawEntry = getWorkflowAuthoringEntryAction( + getWorkflowAuthoringSurfaceModel({ + definitions: 
nodeDefinitions, + graphFamily: 'mixed', + activeSteps: [], + }), + ) + + expect(guidedEntry.label).toBe('Author') + expect(guidedEntry.title).toContain('guided workflow authoring') + expect(rawEntry.label).toBe('Node') + expect(rawEntry.title).toContain('raw node browser') + }) }) describe('WorkflowRunsPanel', () => { @@ -370,6 +931,15 @@ describe('WorkflowRunsPanel', () => { expect(screen.getByText('Workflow Runs')).toBeInTheDocument() expect(screen.getByText('Shadow Comparison')).toBeInTheDocument() expect(screen.getByText('Observer output matches authoritative output.')).toBeInTheDocument() + expect(screen.getByText('Execution Mode')).toBeInTheDocument() + expect(screen.getByText('Celery Task')).toBeInTheDocument() + expect(screen.getByText('Duration: 2.3 s')).toBeInTheDocument() + expect(screen.getByText('Rollout Gate: pass')).toBeInTheDocument() + expect(screen.getByText('Ready For Rollout')).toBeInTheDocument() + expect(screen.getByText('Operator Decision')).toBeInTheDocument() + expect(screen.getByText('Observer output matches the authoritative legacy output byte-for-byte.')).toBeInTheDocument() + expect(screen.getByText('Exact Match: yes')).toBeInTheDocument() + expect(screen.getByText('Dimensions: match')).toBeInTheDocument() await user.click(screen.getByRole('button', { name: /run-shad/i })) @@ -384,6 +954,12 @@ describe('WorkflowPreflightPanel', () => { expect(screen.getByText('Graph Preflight')).toBeInTheDocument() expect(screen.getByText('Graph requires one missing upstream artifact.')).toBeInTheDocument() expect(screen.getByText('Missing cad_preview artifact.')).toBeInTheDocument() + expect(screen.getByText('Mode: graph')).toBeInTheDocument() + expect(screen.getByText('Unsupported Node IDs')).toBeInTheDocument() + expect(screen.getByText('node-legacy-1')).toBeInTheDocument() + expect(screen.getByText('Code: missing-artifact')).toBeInTheDocument() + expect(screen.getByText('Runtime: native')).toBeInTheDocument() + expect(screen.getByText('Supported: 
yes')).toBeInTheDocument() expect(screen.getByText('cad_preview must be produced upstream.')).toBeInTheDocument() expect(screen.getByText('blocked')).toBeInTheDocument() }) diff --git a/frontend/src/__tests__/components/workflowGraphDraft.test.ts b/frontend/src/__tests__/components/workflowGraphDraft.test.ts index 488506d..ece37e5 100644 --- a/frontend/src/__tests__/components/workflowGraphDraft.test.ts +++ b/frontend/src/__tests__/components/workflowGraphDraft.test.ts @@ -2,7 +2,16 @@ import type { Edge, Node } from '@xyflow/react' import { describe, expect, test } from 'vitest' import type { WorkflowNodeDefinition } from '../../api/workflows' -import { resolveParamsForStepChange, validateWorkflowDraft } from '../../components/workflows/workflowGraphDraft' +import { + buildWorkflowCanvasNodeData, + graphNeedsAutoLayout, + resolveParamsForStepChange, + resolveNodeCollisions, + validateWorkflowDraft, + WORKFLOW_NODE_MIN_HEIGHT, + WORKFLOW_NODE_VERTICAL_GAP, + workflowToGraph, +} from '../../components/workflows/workflowGraphDraft' function createNode(id: string, step: string, label = step): Node { return { @@ -17,6 +26,13 @@ function createNode(id: string, step: string, label = step): Node { } as Node } +function createPositionedNode(id: string, step: string, x: number, y: number, label = step): Node { + return { + ...createNode(id, step, label), + position: { x, y }, + } +} + function createEdge(source: string, target: string): Edge { return { id: `${source}-${target}`, @@ -105,7 +121,7 @@ const definitions: Record = { glb_bbox: { step: 'glb_bbox', label: 'Compute Bounding Box', - family: 'order_line', + family: 'shared', module_key: 'geometry.compute_bbox', category: 'processing', description: 'Compute bbox.', @@ -115,8 +131,8 @@ const definitions: Record = { fields: [], execution_kind: 'bridge', legacy_compatible: true, - input_contract: { context: 'order_line', requires: ['glb_preview'] }, - output_contract: { context: 'order_line', provides: ['bbox'] }, + 
input_contract: { requires: ['glb_preview'] }, + output_contract: { provides: ['bbox'] }, artifact_roles_consumed: ['glb_preview'], artifact_roles_produced: ['bbox'], legacy_source: 'legacy.glb_bbox', @@ -160,7 +176,7 @@ const definitions: Record = { }, ], execution_kind: 'native', - legacy_compatible: false, + legacy_compatible: true, input_contract: { context: 'order_line', requires: ['order_line_context', 'render_template', 'material_assignments', 'bbox'], @@ -208,6 +224,25 @@ const definitions: Record = { artifact_roles_produced: ['notification_event'], legacy_source: 'legacy.notify', }, + export_blend: { + step: 'export_blend', + label: 'Export Blend', + family: 'order_line', + module_key: 'rendering.export_blend', + category: 'output', + description: 'Export blend asset.', + node_type: 'outputNode', + icon: 'download', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'order_line', requires: ['render_template'] }, + output_contract: { context: 'order_line', provides: ['blend_asset'] }, + artifact_roles_consumed: ['render_template'], + artifact_roles_produced: ['blend_asset'], + legacy_source: 'legacy.export_blend', + }, resolve_step_path: { step: 'resolve_step_path', label: 'Resolve STEP Path', @@ -299,6 +334,46 @@ describe('validateWorkflowDraft', () => { expect(result.errors).toEqual([]) }) + test('accepts legacy-compatible still render chains without an explicit material map resolver', () => { + const nodes = [ + createNode('setup', 'order_line_setup', 'Order Line Setup'), + createNode('template', 'resolve_template', 'Resolve Template'), + createNode('bbox', 'glb_bbox', 'Compute Bounding Box'), + createNode('render', 'blender_still', 'Render Still'), + createNode('save', 'output_save', 'Save Output'), + ] + const edges = [ + createEdge('setup', 'template'), + createEdge('setup', 'bbox'), + createEdge('setup', 'render'), + createEdge('template', 'render'), + createEdge('bbox', 'render'), + 
createEdge('render', 'save'), + ] + + const result = validateWorkflowDraft(nodes, edges, definitions, true) + + expect(result.errors).toEqual([]) + }) + + test('accepts notify nodes fed by blend-export outputs', () => { + const nodes = [ + createNode('setup', 'order_line_setup', 'Order Line Setup'), + createNode('template', 'resolve_template', 'Resolve Template'), + createNode('blend', 'export_blend', 'Export Blend'), + createNode('notify', 'notify', 'Notify'), + ] + const edges = [ + createEdge('setup', 'template'), + createEdge('template', 'blend'), + createEdge('blend', 'notify'), + ] + + const result = validateWorkflowDraft(nodes, edges, definitions, true) + + expect(result.errors).toEqual([]) + }) + test('blocks mixed CAD-file and order-line graphs', () => { const result = validateWorkflowDraft( [createNode('cad', 'resolve_step_path', 'Resolve STEP Path'), createNode('render', 'blender_still', 'Render Still')], @@ -309,6 +384,92 @@ describe('validateWorkflowDraft', () => { expect(result.errors).toContain('Workflow mixes CAD-file and order-line nodes. 
Split them into separate workflows.') }) + + test('accepts a CAD intake graph that feeds shared bbox into threejs thumbnail render', () => { + const threejsDefinition: WorkflowNodeDefinition = { + step: 'threejs_render', + label: 'Render Thumbnail', + family: 'cad_file', + module_key: 'render.thumbnail.threejs', + category: 'rendering', + description: 'Render a thumbnail from the GLB preview.', + node_type: 'renderNode', + icon: 'camera', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'cad_file', requires: ['glb_preview', 'bbox'] }, + output_contract: { context: 'cad_file', provides: ['rendered_image'] }, + artifact_roles_consumed: ['glb_preview', 'bbox'], + artifact_roles_produced: ['rendered_image'], + legacy_source: 'legacy.threejs_render', + } + const thumbnailSaveDefinition: WorkflowNodeDefinition = { + step: 'thumbnail_save', + label: 'Save Thumbnail', + family: 'cad_file', + module_key: 'media.save_thumbnail', + category: 'output', + description: 'Persist the thumbnail.', + node_type: 'outputNode', + icon: 'download', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'cad_file', requires: ['rendered_image'] }, + output_contract: { context: 'cad_file', provides: ['cad_thumbnail_media'] }, + artifact_roles_consumed: ['rendered_image'], + artifact_roles_produced: ['cad_thumbnail_media'], + legacy_source: 'legacy.thumbnail_save', + } + const occGlbDefinition: WorkflowNodeDefinition = { + step: 'occ_glb_export', + label: 'Export GLB', + family: 'cad_file', + module_key: 'cad.export_glb', + category: 'processing', + description: 'Export a GLB preview.', + node_type: 'processNode', + icon: 'layers', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'cad_file', requires: ['step_path'] }, + output_contract: { context: 'cad_file', provides: ['glb_preview'] }, + 
artifact_roles_consumed: ['step_path'], + artifact_roles_produced: ['glb_preview'], + legacy_source: 'legacy.occ_glb_export', + } + + const result = validateWorkflowDraft( + [ + createNode('resolve', 'resolve_step_path', 'Resolve STEP Path'), + createNode('glb', 'occ_glb_export', 'Export GLB'), + createNode('bbox', 'glb_bbox', 'Compute Bounding Box'), + createNode('thumb', 'threejs_render', 'Render Thumbnail'), + createNode('save', 'thumbnail_save', 'Save Thumbnail'), + ], + [ + createEdge('resolve', 'glb'), + createEdge('glb', 'bbox'), + createEdge('glb', 'thumb'), + createEdge('bbox', 'thumb'), + createEdge('thumb', 'save'), + ], + { + ...definitions, + occ_glb_export: occGlbDefinition, + threejs_render: threejsDefinition, + thumbnail_save: thumbnailSaveDefinition, + }, + true, + ) + + expect(result.errors).toEqual([]) + }) }) describe('resolveParamsForStepChange', () => { @@ -325,4 +486,103 @@ describe('resolveParamsForStepChange', () => { width: 1024, }) }) + + test('preserves dynamic template input overrides for resolve_template nodes', () => { + const next = resolveParamsForStepChange(definitions.resolve_template, { + template_id_override: 'd7d7a1bb-2f14-4d83-99d1-7d7e36eb05d9', + template_input__studio_variant: 'warm', + template_input__camera_profile: 'macro', + stale_key: 'drop-me', + }) + + expect(next).toEqual({ + template_input__studio_variant: 'warm', + template_input__camera_profile: 'macro', + }) + }) +}) + +describe('resolveNodeCollisions', () => { + test('pushes overlapping nodes away from a settled anchor without moving the anchor', () => { + const nodes = [ + createPositionedNode('anchor', 'order_line_setup', 56, 48, 'Anchor'), + createPositionedNode('overlap', 'resolve_template', 88, 76, 'Overlap'), + ] + + const resolved = resolveNodeCollisions(nodes, ['anchor']) + + expect(resolved.find(node => node.id === 'anchor')?.position).toEqual({ x: 56, y: 48 }) + expect(graphNeedsAutoLayout(resolved)).toBe(false) + expect(resolved.find(node => node.id 
=== 'overlap')?.position.y).toBeGreaterThanOrEqual( + 48 + WORKFLOW_NODE_MIN_HEIGHT + WORKFLOW_NODE_VERTICAL_GAP, + ) + }) + + test('cascades pushed nodes so stacked collisions are fully resolved', () => { + const nodes = [ + createPositionedNode('anchor', 'order_line_setup', 56, 48, 'Anchor'), + createPositionedNode('middle', 'resolve_template', 56, 48, 'Middle'), + createPositionedNode('tail', 'blender_still', 64, 56, 'Tail'), + ] + + const resolved = resolveNodeCollisions(nodes, ['anchor']) + const middle = resolved.find(node => node.id === 'middle') + const tail = resolved.find(node => node.id === 'tail') + + expect(middle).toBeTruthy() + expect(tail).toBeTruthy() + expect(graphNeedsAutoLayout(resolved)).toBe(false) + expect(middle?.position).not.toEqual({ x: 56, y: 48 }) + expect(tail?.position).not.toEqual({ x: 64, y: 56 }) + }) +}) + +describe('workflowToGraph', () => { + test('derives explicit input and output ports from the node contract', () => { + const data = buildWorkflowCanvasNodeData('blender_still', {}, definitions.blender_still) + + expect(data.inputPorts?.map(port => port.label)).toEqual([ + 'Order Line Context', + 'Render Template', + 'Material Assignments', + 'Bounding Box', + ]) + expect(data.outputPorts?.map(port => port.label)).toEqual(['Rendered Image']) + expect(data.editableFieldCount).toBe(2) + }) + + test('assigns semantic handle ids to edges based on matching contracts', () => { + const graph = workflowToGraph( + { + version: 1, + nodes: [ + { id: 'setup', step: 'order_line_setup', params: {} }, + { id: 'template', step: 'resolve_template', params: {} }, + { id: 'materials', step: 'material_map_resolve', params: {} }, + { id: 'bbox', step: 'glb_bbox', params: {} }, + { id: 'render', step: 'blender_still', params: {} }, + ], + edges: [ + { from: 'setup', to: 'render' }, + { from: 'template', to: 'render' }, + { from: 'materials', to: 'render' }, + { from: 'bbox', to: 'render' }, + ], + }, + definitions, + ) + + 
expect(graph.edges.find(edge => edge.source === 'setup' && edge.target === 'render')?.targetHandle).toBe( + 'input:order_line_context', + ) + expect(graph.edges.find(edge => edge.source === 'template' && edge.target === 'render')?.targetHandle).toBe( + 'input:render_template', + ) + expect(graph.edges.find(edge => edge.source === 'materials' && edge.target === 'render')?.targetHandle).toBe( + 'input:material_assignments', + ) + expect(graph.edges.find(edge => edge.source === 'bbox' && edge.target === 'render')?.targetHandle).toBe( + 'input:bbox', + ) + }) }) diff --git a/frontend/src/__tests__/components/workflowModuleBundles.test.ts b/frontend/src/__tests__/components/workflowModuleBundles.test.ts new file mode 100644 index 0000000..1b0a98a --- /dev/null +++ b/frontend/src/__tests__/components/workflowModuleBundles.test.ts @@ -0,0 +1,414 @@ +import { describe, expect, test } from 'vitest' + +import type { WorkflowNodeDefinition } from '../../api/workflows' +import { createWorkflowModuleBundleInsertion, getWorkflowModuleBundles } from '../../components/workflows/workflowModuleBundles' +import { + createWorkflowReferenceBundleInsertion, + getWorkflowReferenceBundles, +} from '../../components/workflows/workflowReferenceBundles' + +const definitions: WorkflowNodeDefinition[] = [ + { + step: 'order_line_setup', + label: 'Order Line Setup', + family: 'order_line', + module_key: 'order_line.prepare_render_context', + category: 'input', + description: 'Prepare render context.', + node_type: 'inputNode', + icon: 'refresh-cw', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'order_line' }, + output_contract: { context: 'order_line', provides: ['order_line'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: 'legacy.order_line_setup', + }, + { + step: 'resolve_template', + label: 'Resolve Template', + family: 'order_line', + module_key: 'rendering.resolve_template', + category: 
'processing', + description: 'Resolve template.', + node_type: 'processNode', + icon: 'layers', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'order_line', requires: ['order_line'] }, + output_contract: { context: 'order_line', provides: ['render_template'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: 'legacy.resolve_template', + }, + { + step: 'auto_populate_materials', + label: 'Auto Populate Materials', + family: 'order_line', + module_key: 'materials.auto_populate', + category: 'processing', + description: 'Populate materials.', + node_type: 'processNode', + icon: 'layers', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'order_line', requires: ['order_line'] }, + output_contract: { context: 'order_line', provides: ['cad_materials'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: 'legacy.auto_populate_materials', + }, + { + step: 'glb_bbox', + label: 'Compute Bounding Box', + family: 'order_line', + module_key: 'geometry.compute_bbox', + category: 'processing', + description: 'Compute bbox.', + node_type: 'processNode', + icon: 'layers', + defaults: {}, + fields: [], + execution_kind: 'native', + legacy_compatible: false, + input_contract: { context: 'order_line', requires: ['order_line'] }, + output_contract: { context: 'order_line', provides: ['bbox'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: null, + }, + { + step: 'material_map_resolve', + label: 'Resolve Material Map', + family: 'order_line', + module_key: 'materials.resolve_map', + category: 'processing', + description: 'Resolve material map.', + node_type: 'processNode', + icon: 'layers', + defaults: {}, + fields: [], + execution_kind: 'native', + legacy_compatible: false, + input_contract: { context: 'order_line', requires: ['order_line'] }, + output_contract: { 
context: 'order_line', provides: ['material_map'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: null, + }, + { + step: 'blender_still', + label: 'Blender Still', + family: 'order_line', + module_key: 'render.production.still', + category: 'rendering', + description: 'Render still image.', + node_type: 'renderNode', + icon: 'camera', + defaults: {}, + fields: [], + execution_kind: 'native', + legacy_compatible: false, + input_contract: { context: 'order_line', requires: ['order_line'] }, + output_contract: { context: 'order_line', provides: ['rendered_image'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: null, + }, + { + step: 'output_save', + label: 'Save Output', + family: 'order_line', + module_key: 'media.save_output', + category: 'output', + description: 'Save output.', + node_type: 'outputNode', + icon: 'download', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'order_line', requires: ['rendered_image'] }, + output_contract: { context: 'order_line', provides: ['saved_output'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: 'legacy.output_save', + }, + { + step: 'notify', + label: 'Notify Result', + family: 'order_line', + module_key: 'notifications.emit', + category: 'output', + description: 'Notify result.', + node_type: 'outputNode', + icon: 'bell', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'order_line', requires: ['saved_output'] }, + output_contract: { context: 'order_line', provides: ['notification'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: 'legacy.notify', + }, +] + +const cadDefinitions: WorkflowNodeDefinition[] = [ + { + step: 'resolve_step_path', + label: 'Resolve STEP Path', + family: 'cad_file', + module_key: 'cad.resolve_step_path', + category: 'input', + description: 
'Resolve the STEP path.', + node_type: 'inputNode', + icon: 'file-code-2', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'cad_file' }, + output_contract: { context: 'cad_file', provides: ['cad_file_record'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: 'legacy.resolve_step_path', + }, + { + step: 'occ_object_extract', + label: 'Extract STEP Objects', + family: 'cad_file', + module_key: 'cad.extract_objects', + category: 'processing', + description: 'Extract objects.', + node_type: 'processNode', + icon: 'boxes', + defaults: {}, + fields: [], + execution_kind: 'native', + legacy_compatible: false, + input_contract: { context: 'cad_file', requires: ['cad_file_record'] }, + output_contract: { context: 'cad_file', provides: ['occ_scene'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: null, + }, + { + step: 'occ_glb_export', + label: 'Export GLB', + family: 'cad_file', + module_key: 'cad.export_glb', + category: 'processing', + description: 'Export GLB.', + node_type: 'processNode', + icon: 'package', + defaults: {}, + fields: [], + execution_kind: 'native', + legacy_compatible: false, + input_contract: { context: 'cad_file', requires: ['occ_scene'] }, + output_contract: { context: 'cad_file', provides: ['glb_preview'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: null, + }, + { + step: 'stl_cache_generate', + label: 'Generate STL Cache', + family: 'cad_file', + module_key: 'cad.generate_stl_cache', + category: 'processing', + description: 'Generate STL cache.', + node_type: 'processNode', + icon: 'database', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'cad_file', requires: ['glb_preview'] }, + output_contract: { context: 'cad_file', provides: ['stl_cache'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + 
legacy_source: 'legacy.stl_cache_generate', + }, + { + step: 'blender_render', + label: 'Render Thumbnail (Blender)', + family: 'cad_file', + module_key: 'cad.thumbnail.blender', + category: 'rendering', + description: 'Render Blender thumbnail.', + node_type: 'renderNode', + icon: 'camera', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'cad_file', requires: ['glb_preview'] }, + output_contract: { context: 'cad_file', provides: ['rendered_image'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: 'legacy.blender_render', + }, + { + step: 'threejs_render', + label: 'Render Thumbnail (Three.js)', + family: 'cad_file', + module_key: 'cad.thumbnail.threejs', + category: 'rendering', + description: 'Render Three.js thumbnail.', + node_type: 'renderNode', + icon: 'box', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'cad_file', requires: ['glb_preview'] }, + output_contract: { context: 'cad_file', provides: ['rendered_image'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: 'legacy.threejs_render', + }, + { + step: 'thumbnail_save', + label: 'Save Thumbnail', + family: 'cad_file', + module_key: 'cad.thumbnail.save', + category: 'output', + description: 'Persist thumbnail.', + node_type: 'outputNode', + icon: 'download', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'cad_file', requires: ['rendered_image'] }, + output_contract: { context: 'cad_file', provides: ['saved_thumbnail'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: 'legacy.thumbnail_save', + }, +] + +describe('workflowModuleBundles', () => { + test('exposes family-scoped bundles when required steps exist', () => { + const bundles = getWorkflowModuleBundles(definitions, 'order_line') + + expect(bundles.map(bundle => 
bundle.id)).toEqual(['still_render_core', 'output_publish_notify']) + }) + + test('creates a connected bundle insertion graph for still-render authoring', () => { + const insertion = createWorkflowModuleBundleInsertion({ + bundleId: 'still_render_core', + graphFamily: 'order_line', + nodeDefinitionsByStep: Object.fromEntries(definitions.map(definition => [definition.step, definition])), + existingNodes: [], + preferredPosition: { x: 200, y: 320 }, + }) + + expect(insertion.ok).toBe(true) + if (!insertion.ok) return + + expect(insertion.nodes).toHaveLength(6) + expect(insertion.edges).toHaveLength(5) + expect(insertion.nodes[0].position).toEqual({ x: 200, y: 320 }) + expect(insertion.nodes[1].position).toEqual({ x: 420, y: 320 }) + expect(insertion.nodes[0].data).toMatchObject({ step: 'order_line_setup', label: 'Order Line Setup' }) + expect(insertion.nodes[5].data).toMatchObject({ step: 'blender_still', label: 'Blender Still' }) + expect(insertion.edges[0]).toMatchObject({ + source: insertion.nodes[0].id, + target: insertion.nodes[1].id, + }) + }) + + test('exposes full reference paths for complete non-legacy authoring flows', () => { + const bundles = getWorkflowReferenceBundles(definitions, 'order_line') + + expect(bundles.map(bundle => bundle.id)).toEqual(['still_render_reference']) + }) + + test('creates the canonical still-render reference graph with branched edges', () => { + const insertion = createWorkflowReferenceBundleInsertion({ + bundleId: 'still_render_reference', + graphFamily: 'order_line', + nodeDefinitionsByStep: Object.fromEntries(definitions.map(definition => [definition.step, definition])), + existingNodes: [], + preferredPosition: { x: 200, y: 320 }, + }) + + expect(insertion.ok).toBe(true) + if (!insertion.ok) return + + expect(insertion.nodes).toHaveLength(8) + expect(insertion.edges).toHaveLength(10) + expect(insertion.nodes[0].position).toEqual({ x: 200, y: 440 }) + expect(insertion.nodes[1].position).toEqual({ x: 420, y: 440 }) + 
expect(insertion.nodes[5].data).toMatchObject({ + step: 'blender_still', + label: 'Still Render', + params: { use_custom_render_settings: true }, + }) + expect(insertion.edges).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + source: insertion.nodes[0].id, + target: insertion.nodes[1].id, + }), + expect.objectContaining({ + source: insertion.nodes[5].id, + target: insertion.nodes[6].id, + }), + expect.objectContaining({ + source: insertion.nodes[5].id, + target: insertion.nodes[7].id, + }), + ]), + ) + }) + + test('creates the canonical CAD intake reference graph from the shared blueprint', () => { + const insertion = createWorkflowReferenceBundleInsertion({ + bundleId: 'cad_intake_reference', + graphFamily: 'cad_file', + nodeDefinitionsByStep: Object.fromEntries(cadDefinitions.map(definition => [definition.step, definition])), + existingNodes: [], + preferredPosition: { x: 120, y: 240 }, + }) + + expect(insertion.ok).toBe(true) + if (!insertion.ok) return + + expect(insertion.nodes).toHaveLength(8) + expect(insertion.edges).toHaveLength(7) + expect(insertion.nodes.map(node => node.data.step)).toEqual([ + 'resolve_step_path', + 'occ_object_extract', + 'occ_glb_export', + 'stl_cache_generate', + 'blender_render', + 'threejs_render', + 'thumbnail_save', + 'thumbnail_save', + ]) + expect(insertion.edges).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + source: insertion.nodes[2].id, + target: insertion.nodes[4].id, + }), + expect.objectContaining({ + source: insertion.nodes[2].id, + target: insertion.nodes[5].id, + }), + ]), + ) + }) +}) diff --git a/frontend/src/__tests__/components/workflowNodeCatalog.test.ts b/frontend/src/__tests__/components/workflowNodeCatalog.test.ts new file mode 100644 index 0000000..b9e2fbc --- /dev/null +++ b/frontend/src/__tests__/components/workflowNodeCatalog.test.ts @@ -0,0 +1,158 @@ +import { describe, expect, test } from 'vitest' + +import type { WorkflowNodeDefinition } from '../../api/workflows' 
+import { buildWorkflowNodeCatalogModel } from '../../components/workflows/workflowNodeCatalog' +import { getDefinitionAuthoringStage } from '../../components/workflows/workflowNodeLibrary' + +const definitions: WorkflowNodeDefinition[] = [ + { + step: 'resolve_step_path', + label: 'Resolve STEP Path', + family: 'cad_file', + module_key: 'cad.resolve_step_path', + category: 'input', + description: 'Resolve CAD path.', + node_type: 'inputNode', + icon: 'file', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'cad_file' }, + output_contract: { context: 'cad_file', provides: ['cad_file'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: 'legacy.resolve_step_path', + }, + { + step: 'order_line_setup', + label: 'Order Line Setup', + family: 'order_line', + module_key: 'order_line.prepare_render_context', + category: 'input', + description: 'Prepare order line.', + node_type: 'inputNode', + icon: 'file', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'order_line' }, + output_contract: { context: 'order_line', provides: ['order_line'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: 'legacy.order_line_setup', + }, + { + step: 'material_map_resolve', + label: 'Resolve Material Map', + family: 'order_line', + module_key: 'materials.resolve_map', + category: 'processing', + description: 'Resolve materials.', + node_type: 'processNode', + icon: 'palette', + defaults: {}, + fields: [], + execution_kind: 'native', + legacy_compatible: false, + input_contract: { context: 'order_line', requires: ['order_line'] }, + output_contract: { context: 'order_line', provides: ['material_map'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: null, + }, + { + step: 'blender_still', + label: 'Blender Still', + family: 'order_line', + module_key: 
'render.production.still', + category: 'rendering', + description: 'Render image.', + node_type: 'renderNode', + icon: 'camera', + defaults: {}, + fields: [], + execution_kind: 'native', + legacy_compatible: false, + input_contract: { context: 'order_line', requires: ['order_line'] }, + output_contract: { context: 'order_line', provides: ['rendered_image'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: null, + }, + { + step: 'output_save', + label: 'Save Output', + family: 'order_line', + module_key: 'media.save_output', + category: 'output', + description: 'Save output.', + node_type: 'outputNode', + icon: 'download', + defaults: {}, + fields: [], + execution_kind: 'bridge', + legacy_compatible: true, + input_contract: { context: 'order_line', requires: ['rendered_image'] }, + output_contract: { context: 'order_line', provides: ['saved_output'] }, + artifact_roles_produced: [], + artifact_roles_consumed: [], + legacy_source: 'legacy.output_save', + }, +] + +describe('workflow node organization', () => { + test('assigns authoring stages from module namespaces and categories', () => { + expect(getDefinitionAuthoringStage(definitions[0])).toBe('cad_intake') + expect(getDefinitionAuthoringStage(definitions[1])).toBe('scene_prep') + expect(getDefinitionAuthoringStage(definitions[2])).toBe('materials') + expect(getDefinitionAuthoringStage(definitions[3])).toBe('render') + expect(getDefinitionAuthoringStage(definitions[4])).toBe('publish') + }) + + test('builds stage and family catalog models with module and runtime counts', () => { + const model = buildWorkflowNodeCatalogModel(definitions) + + expect(model.moduleFilters.map(module => module.label)).toEqual([ + 'Cad', + 'Materials', + 'Media', + 'Order Line', + 'Render', + ]) + expect(model.runtimeCounts).toEqual({ + legacy: 3, + bridge: 0, + graph: 2, + }) + + const stageIds = model.stageSections.map(section => section.stage) + expect(stageIds).toEqual(['cad_intake', 'scene_prep', 
'materials', 'render', 'publish']) + + const renderSection = model.stageSections.find(section => section.stage === 'render') + expect(renderSection?.modules[0]?.namespace).toBe('render') + expect(renderSection?.runtimeCounts.graph).toBe(1) + + const publishSection = model.stageSections.find(section => section.stage === 'publish') + expect(publishSection?.modules[0]?.runtimeCounts.legacy).toBe(1) + + expect(model.familySections.map(section => section.family)).toEqual(['cad_file', 'order_line']) + + const cadFamilySection = model.familySections.find(section => section.family === 'cad_file') + expect(cadFamilySection?.modules.map(module => module.namespace)).toEqual(['cad']) + expect(cadFamilySection?.modules[0]?.stageSections.map(section => section.stage)).toEqual(['cad_intake']) + + const orderFamilySection = model.familySections.find(section => section.family === 'order_line') + expect(orderFamilySection?.modules.map(module => module.namespace)).toEqual([ + 'materials', + 'media', + 'order_line', + 'render', + ]) + expect(orderFamilySection?.runtimeCounts.graph).toBe(2) + expect(orderFamilySection?.modules.find(module => module.namespace === 'materials')?.stageSections[0]?.stage).toBe( + 'materials', + ) + }) +}) diff --git a/frontend/src/__tests__/components/workflowNodePresentation.test.ts b/frontend/src/__tests__/components/workflowNodePresentation.test.ts new file mode 100644 index 0000000..7c78a83 --- /dev/null +++ b/frontend/src/__tests__/components/workflowNodePresentation.test.ts @@ -0,0 +1,37 @@ +import { describe, expect, test } from 'vitest' + +import type { WorkflowCanvasPort } from '../../components/workflows/workflowGraphDraft' +import { + getWorkflowNodePortBadgeLabel, + getWorkflowNodePortTitle, +} from '../../components/workflows/workflowNodePresentation' + +describe('workflowNodePresentation', () => { + test('renders explicit labels for direct required inputs', () => { + const port: WorkflowCanvasPort = { + id: 'input:material_assignments', + 
label: 'Material Assignments', + roles: ['material_assignments'], + kind: 'required', + } + + expect(getWorkflowNodePortBadgeLabel(port)).toBe('Material Assignments') + expect(getWorkflowNodePortTitle(port)).toBe('Material Assignments') + }) + + test('renders alternative sockets as explicit role choices', () => { + const port: WorkflowCanvasPort = { + id: 'input-any:rendered_image|rendered_frames|rendered_video|workflow_result|blend_asset', + label: 'Any of Rendered Image / Rendered Frames / Rendered Video / Workflow Result / Blend Asset', + roles: ['rendered_image', 'rendered_frames', 'rendered_video', 'workflow_result', 'blend_asset'], + kind: 'alternative', + } + + expect(getWorkflowNodePortBadgeLabel(port)).toBe( + 'Any: Image / Frames / Video / Workflow Result / Blend Asset', + ) + expect(getWorkflowNodePortTitle(port)).toBe( + 'Accepts any of: Rendered Image / Rendered Frames / Rendered Video / Workflow Result / Blend Asset', + ) + }) +}) diff --git a/frontend/src/api/outputTypes.ts b/frontend/src/api/outputTypes.ts index ffd6b2c..dba754f 100644 --- a/frontend/src/api/outputTypes.ts +++ b/frontend/src/api/outputTypes.ts @@ -1,6 +1,7 @@ import api from './client' export type OutputTypeWorkflowFamily = 'cad_file' | 'order_line' +export type OutputTypeWorkflowRolloutMode = 'legacy_only' | 'shadow' | 'graph' export type OutputTypeArtifactKind = | 'still_image' | 'turntable_video' @@ -10,6 +11,17 @@ export type OutputTypeArtifactKind = | 'package' | 'custom' +export type OutputTypeInvocationOverrideKey = typeof OUTPUT_TYPE_INVOCATION_OVERRIDE_KEYS[number] +export type OutputTypeInvocationOverrides = Partial> +export type OutputTypeContractIssueSeverity = 'error' | 'warning' +export type OutputTypeContractCatalogMap = Record + +export interface OutputTypeParameterOwnershipCatalog { + output_type_profile_keys: string[] + template_runtime_keys: string[] + workflow_node_keys_by_step: Record +} + export const OUTPUT_TYPE_INVOCATION_OVERRIDE_KEYS = [ 'width', 'height', 
@@ -27,8 +39,171 @@ export const OUTPUT_TYPE_INVOCATION_OVERRIDE_KEYS = [ 'denoising_use_gpu', ] as const +export const IMAGE_OUTPUT_FORMATS = ['png', 'jpg', 'jpeg', 'webp'] as const +export const VIDEO_OUTPUT_FORMATS = ['mp4', 'webm', 'mov'] as const +export const MODEL_OUTPUT_FORMATS = ['gltf', 'glb', 'stl', 'obj', 'usd', 'usdz'] as const +export const BLEND_OUTPUT_FORMATS = ['blend'] as const + const CAD_FILE_ARTIFACT_KINDS: OutputTypeArtifactKind[] = ['thumbnail_image', 'model_export', 'package', 'custom'] const ORDER_LINE_ARTIFACT_KINDS: OutputTypeArtifactKind[] = ['still_image', 'turntable_video', 'blend_asset', 'package', 'custom'] +const STATIC_RENDER_OVERRIDE_KEYS: OutputTypeInvocationOverrideKey[] = [ + 'width', + 'height', + 'engine', + 'samples', + 'bg_color', + 'noise_threshold', + 'denoiser', + 'denoising_input_passes', + 'denoising_prefilter', + 'denoising_quality', + 'denoising_use_gpu', +] +const ANIMATION_OVERRIDE_KEYS: OutputTypeInvocationOverrideKey[] = ['frame_count', 'fps', 'turntable_axis'] + +export interface OutputTypeContractCatalog { + workflow_families: OutputTypeWorkflowFamily[] + workflow_rollout_modes: OutputTypeWorkflowRolloutMode[] + artifact_kinds: OutputTypeArtifactKind[] + allowed_artifact_kinds_by_family: OutputTypeContractCatalogMap + allowed_output_formats_by_family: OutputTypeContractCatalogMap + allowed_invocation_override_keys_by_artifact_kind: OutputTypeContractCatalogMap< + OutputTypeArtifactKind, + OutputTypeInvocationOverrideKey[] + > + default_output_format_by_artifact_kind: OutputTypeContractCatalogMap + parameter_ownership: OutputTypeParameterOwnershipCatalog +} + +const FALLBACK_OUTPUT_TYPE_CONTRACT_CATALOG: OutputTypeContractCatalog = { + workflow_families: ['order_line', 'cad_file'], + workflow_rollout_modes: ['legacy_only', 'shadow', 'graph'], + artifact_kinds: ['still_image', 'turntable_video', 'model_export', 'thumbnail_image', 'blend_asset', 'package', 'custom'], + allowed_artifact_kinds_by_family: { + 
cad_file: [...CAD_FILE_ARTIFACT_KINDS], + order_line: [...ORDER_LINE_ARTIFACT_KINDS], + }, + allowed_output_formats_by_family: { + cad_file: [...IMAGE_OUTPUT_FORMATS, ...MODEL_OUTPUT_FORMATS], + order_line: [...IMAGE_OUTPUT_FORMATS, ...VIDEO_OUTPUT_FORMATS, ...BLEND_OUTPUT_FORMATS], + }, + allowed_invocation_override_keys_by_artifact_kind: { + still_image: [...STATIC_RENDER_OVERRIDE_KEYS], + thumbnail_image: [...STATIC_RENDER_OVERRIDE_KEYS], + turntable_video: [...STATIC_RENDER_OVERRIDE_KEYS, ...ANIMATION_OVERRIDE_KEYS], + model_export: [], + blend_asset: [], + package: [...OUTPUT_TYPE_INVOCATION_OVERRIDE_KEYS], + custom: [...OUTPUT_TYPE_INVOCATION_OVERRIDE_KEYS], + }, + default_output_format_by_artifact_kind: { + still_image: 'png', + thumbnail_image: 'png', + turntable_video: 'mp4', + model_export: 'gltf', + blend_asset: 'blend', + package: 'png', + custom: 'png', + }, + parameter_ownership: { + output_type_profile_keys: ['transparent_bg', 'cycles_device', 'material_override'], + template_runtime_keys: ['target_collection', 'lighting_only', 'shadow_catcher', 'camera_orbit', 'template_inputs'], + workflow_node_keys_by_step: { + resolve_template: [ + 'template_id_override', + 'require_template', + 'material_library_path', + 'disable_materials', + 'target_collection', + 'material_replace_mode', + 'lighting_only_mode', + 'shadow_catcher_mode', + 'camera_orbit_mode', + ], + blender_still: [ + 'use_custom_render_settings', + 'render_engine', + 'cycles_device', + 'samples', + 'width', + 'height', + 'transparent_bg', + 'noise_threshold', + 'denoiser', + 'denoising_input_passes', + 'denoising_prefilter', + 'denoising_quality', + 'denoising_use_gpu', + 'target_collection', + 'lighting_only', + 'shadow_catcher', + 'rotation_x', + 'rotation_y', + 'rotation_z', + 'focal_length_mm', + 'sensor_width_mm', + 'material_override', + ], + blender_turntable: [ + 'use_custom_render_settings', + 'render_engine', + 'cycles_device', + 'samples', + 'width', + 'height', + 'transparent_bg', 
+ 'bg_color', + 'fps', + 'frame_count', + 'duration_s', + 'turntable_degrees', + 'turntable_axis', + 'camera_orbit', + 'target_collection', + 'lighting_only', + 'shadow_catcher', + 'rotation_x', + 'rotation_y', + 'rotation_z', + 'focal_length_mm', + 'sensor_width_mm', + 'material_override', + ], + export_blend: ['output_name_suffix'], + }, + }, +} + +let cachedOutputTypeContractCatalog: OutputTypeContractCatalog = FALLBACK_OUTPUT_TYPE_CONTRACT_CATALOG + +export interface OutputTypeWorkflowContractWorkflowLike { + id: string + name: string + family: OutputTypeWorkflowFamily | 'mixed' | null + supported_artifact_kinds?: OutputTypeArtifactKind[] +} + +export interface OutputTypeWorkflowContractIssue { + code: string + severity: OutputTypeContractIssueSeverity + message: string +} + +export interface OutputTypeInvocationProfile { + renderer: string + render_backend: string + workflow_family: OutputTypeWorkflowFamily + artifact_kind: OutputTypeArtifactKind + output_format: string + is_animation: boolean + workflow_definition_id: string | null + workflow_rollout_mode: OutputTypeWorkflowRolloutMode + transparent_bg: boolean + cycles_device: string | null + material_override: string | null + allowed_override_keys: OutputTypeInvocationOverrideKey[] + invocation_overrides: OutputTypeInvocationOverrides +} export interface OutputType { id: string @@ -36,7 +211,7 @@ export interface OutputType { description: string | null renderer: string render_settings: Record - invocation_overrides: Record + invocation_overrides: OutputTypeInvocationOverrides output_format: string sort_order: number compatible_categories: string[] @@ -50,13 +225,141 @@ export interface OutputType { pricing_tier_name: string | null price_per_item: number | null workflow_definition_id: string | null + workflow_rollout_mode: OutputTypeWorkflowRolloutMode workflow_name?: string | null material_override: string | null + invocation_profile: OutputTypeInvocationProfile | null is_active: boolean created_at: string 
updated_at: string } +function isWorkflowFamily(value: unknown): value is OutputTypeWorkflowFamily { + return value === 'cad_file' || value === 'order_line' +} + +function isWorkflowRolloutMode(value: unknown): value is OutputTypeWorkflowRolloutMode { + return value === 'legacy_only' || value === 'shadow' || value === 'graph' +} + +function isArtifactKind(value: unknown): value is OutputTypeArtifactKind { + return FALLBACK_OUTPUT_TYPE_CONTRACT_CATALOG.artifact_kinds.includes(value as OutputTypeArtifactKind) +} + +function isInvocationOverrideKey(value: unknown): value is OutputTypeInvocationOverrideKey { + return OUTPUT_TYPE_INVOCATION_OVERRIDE_KEYS.includes(value as OutputTypeInvocationOverrideKey) +} + +function normalizeOrderedStrings( + values: unknown, + fallback: readonly T[], + predicate: (value: unknown) => value is T, +): T[] { + const provided = Array.isArray(values) ? values.filter(predicate) : [] + const usable = provided.length > 0 ? provided : [...fallback] + const usableSet = new Set(usable) + return fallback.filter(value => usableSet.has(value)) +} + +function normalizeStringList(values: unknown): string[] { + return Array.isArray(values) ? 
values.filter((value): value is string => typeof value === 'string' && value.trim().length > 0) : [] +} + +function normalizeRecordOfStringLists(values: unknown): Record { + if (!values || typeof values !== 'object' || Array.isArray(values)) return {} + return Object.fromEntries( + Object.entries(values).map(([key, value]) => [key, normalizeStringList(value)]), + ) +} + +function normalizeOutputTypeContractCatalog( + catalog: Partial | undefined | null, +): OutputTypeContractCatalog { + const workflowFamilies = normalizeOrderedStrings( + catalog?.workflow_families, + FALLBACK_OUTPUT_TYPE_CONTRACT_CATALOG.workflow_families, + isWorkflowFamily, + ) + const workflowRolloutModes = normalizeOrderedStrings( + catalog?.workflow_rollout_modes, + FALLBACK_OUTPUT_TYPE_CONTRACT_CATALOG.workflow_rollout_modes, + isWorkflowRolloutMode, + ) + const artifactKinds = normalizeOrderedStrings( + catalog?.artifact_kinds, + FALLBACK_OUTPUT_TYPE_CONTRACT_CATALOG.artifact_kinds, + isArtifactKind, + ) + + const allowedArtifactKindsByFamily = Object.fromEntries( + workflowFamilies.map(family => [ + family, + normalizeOrderedStrings( + catalog?.allowed_artifact_kinds_by_family?.[family], + FALLBACK_OUTPUT_TYPE_CONTRACT_CATALOG.allowed_artifact_kinds_by_family[family], + isArtifactKind, + ), + ]), + ) as OutputTypeContractCatalog['allowed_artifact_kinds_by_family'] + + const allowedOutputFormatsByFamily = Object.fromEntries( + workflowFamilies.map(family => [ + family, + normalizeStringList(catalog?.allowed_output_formats_by_family?.[family]).length > 0 + ? 
normalizeStringList(catalog?.allowed_output_formats_by_family?.[family]) + : [...FALLBACK_OUTPUT_TYPE_CONTRACT_CATALOG.allowed_output_formats_by_family[family]], + ]), + ) as OutputTypeContractCatalog['allowed_output_formats_by_family'] + + const allowedInvocationOverrideKeysByArtifactKind = Object.fromEntries( + artifactKinds.map(artifactKind => [ + artifactKind, + normalizeOrderedStrings( + catalog?.allowed_invocation_override_keys_by_artifact_kind?.[artifactKind], + FALLBACK_OUTPUT_TYPE_CONTRACT_CATALOG.allowed_invocation_override_keys_by_artifact_kind[artifactKind], + isInvocationOverrideKey, + ), + ]), + ) as OutputTypeContractCatalog['allowed_invocation_override_keys_by_artifact_kind'] + + const defaultOutputFormatByArtifactKind = Object.fromEntries( + artifactKinds.map(artifactKind => [ + artifactKind, + typeof catalog?.default_output_format_by_artifact_kind?.[artifactKind] === 'string' && + catalog.default_output_format_by_artifact_kind[artifactKind].trim().length > 0 + ? catalog.default_output_format_by_artifact_kind[artifactKind] + : FALLBACK_OUTPUT_TYPE_CONTRACT_CATALOG.default_output_format_by_artifact_kind[artifactKind], + ]), + ) as OutputTypeContractCatalog['default_output_format_by_artifact_kind'] + + const normalizedNodeOwnership = normalizeRecordOfStringLists(catalog?.parameter_ownership?.workflow_node_keys_by_step) + const parameterOwnership: OutputTypeParameterOwnershipCatalog = { + output_type_profile_keys: + normalizeStringList(catalog?.parameter_ownership?.output_type_profile_keys).length > 0 + ? normalizeStringList(catalog?.parameter_ownership?.output_type_profile_keys) + : [...FALLBACK_OUTPUT_TYPE_CONTRACT_CATALOG.parameter_ownership.output_type_profile_keys], + template_runtime_keys: + normalizeStringList(catalog?.parameter_ownership?.template_runtime_keys).length > 0 + ? 
normalizeStringList(catalog?.parameter_ownership?.template_runtime_keys) + : [...FALLBACK_OUTPUT_TYPE_CONTRACT_CATALOG.parameter_ownership.template_runtime_keys], + workflow_node_keys_by_step: + Object.keys(normalizedNodeOwnership).length > 0 + ? normalizedNodeOwnership + : { ...FALLBACK_OUTPUT_TYPE_CONTRACT_CATALOG.parameter_ownership.workflow_node_keys_by_step }, + } + + return { + workflow_families: workflowFamilies, + workflow_rollout_modes: workflowRolloutModes, + artifact_kinds: artifactKinds, + allowed_artifact_kinds_by_family: allowedArtifactKindsByFamily, + allowed_output_formats_by_family: allowedOutputFormatsByFamily, + allowed_invocation_override_keys_by_artifact_kind: allowedInvocationOverrideKeysByArtifactKind, + default_output_format_by_artifact_kind: defaultOutputFormatByArtifactKind, + parameter_ownership: parameterOwnership, + } +} + export async function listOutputTypes( includeInactive = false, category?: string, @@ -64,25 +367,151 @@ export async function listOutputTypes( const params: Record = { include_inactive: includeInactive } if (category) params.category = category const res = await api.get('/output-types', { params }) - return res.data + return res.data.map(normalizeOutputType) } export async function createOutputType(data: Partial): Promise { const res = await api.post('/output-types', data) - return res.data + return normalizeOutputType(res.data) } export async function updateOutputType(id: string, data: Partial): Promise { const res = await api.patch(`/output-types/${id}`, data) - return res.data + return normalizeOutputType(res.data) } export async function deleteOutputType(id: string): Promise { await api.delete(`/output-types/${id}`) } -export function listAllowedArtifactKindsForFamily(family: OutputTypeWorkflowFamily): OutputTypeArtifactKind[] { - return family === 'cad_file' ? 
[...CAD_FILE_ARTIFACT_KINDS] : [...ORDER_LINE_ARTIFACT_KINDS] +export async function getOutputTypeContractCatalog(): Promise { + const res = await api.get('/output-types/contract-catalog') + cachedOutputTypeContractCatalog = normalizeOutputTypeContractCatalog(res.data) + return cachedOutputTypeContractCatalog +} + +export function getCachedOutputTypeContractCatalog(): OutputTypeContractCatalog { + return cachedOutputTypeContractCatalog +} + +export function listAllowedArtifactKindsForFamily( + family: OutputTypeWorkflowFamily, + contractCatalog: OutputTypeContractCatalog = cachedOutputTypeContractCatalog, +): OutputTypeArtifactKind[] { + return [...(contractCatalog.allowed_artifact_kinds_by_family[family] ?? FALLBACK_OUTPUT_TYPE_CONTRACT_CATALOG.allowed_artifact_kinds_by_family[family])] +} + +export function listAllowedOutputFormatsForFamily( + family: OutputTypeWorkflowFamily, + contractCatalog: OutputTypeContractCatalog = cachedOutputTypeContractCatalog, +): string[] { + return [...(contractCatalog.allowed_output_formats_by_family[family] ?? FALLBACK_OUTPUT_TYPE_CONTRACT_CATALOG.allowed_output_formats_by_family[family])] +} + +export function getDefaultOutputFormatForArtifactKind( + artifactKind: OutputTypeArtifactKind, + contractCatalog: OutputTypeContractCatalog = cachedOutputTypeContractCatalog, +): string { + return contractCatalog.default_output_format_by_artifact_kind[artifactKind] + ?? 
FALLBACK_OUTPUT_TYPE_CONTRACT_CATALOG.default_output_format_by_artifact_kind[artifactKind] +} + +export function workflowSupportsArtifactKindForOutputTypeContract( + workflow: OutputTypeWorkflowContractWorkflowLike, + artifactKind: OutputTypeArtifactKind, +): boolean { + return Array.isArray(workflow.supported_artifact_kinds) && workflow.supported_artifact_kinds.includes(artifactKind) +} + +export function getCompatibleWorkflowsForOutputTypeContract( + workflows: OutputTypeWorkflowContractWorkflowLike[], + workflowFamily: OutputTypeWorkflowFamily, + artifactKind: OutputTypeArtifactKind, +): OutputTypeWorkflowContractWorkflowLike[] { + return workflows.filter(workflow => + workflow.family === workflowFamily && + workflowSupportsArtifactKindForOutputTypeContract(workflow, artifactKind), + ) +} + +export function listAllowedInvocationOverrideKeysForArtifactKind( + artifactKind: OutputTypeArtifactKind, + contractCatalog: OutputTypeContractCatalog = cachedOutputTypeContractCatalog, +): OutputTypeInvocationOverrideKey[] { + return [ + ...(contractCatalog.allowed_invocation_override_keys_by_artifact_kind[artifactKind] + ?? 
FALLBACK_OUTPUT_TYPE_CONTRACT_CATALOG.allowed_invocation_override_keys_by_artifact_kind[artifactKind]), + ] +} + +function isInvocationOverrideValue(value: unknown): value is string | number | boolean { + return typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean' +} + +function sanitizeInvocationOverrides( + artifactKind: OutputTypeArtifactKind, + overrides: Record | undefined | null, + contractCatalog: OutputTypeContractCatalog = cachedOutputTypeContractCatalog, +): OutputTypeInvocationOverrides { + const normalized: OutputTypeInvocationOverrides = {} + const allowed = new Set(listAllowedInvocationOverrideKeysForArtifactKind(artifactKind, contractCatalog)) + for (const key of OUTPUT_TYPE_INVOCATION_OVERRIDE_KEYS) { + if (!allowed.has(key)) continue + const value = overrides?.[key] + if (value !== undefined && value !== null && value !== '' && isInvocationOverrideValue(value)) { + normalized[key] = value + } + } + return normalized +} + +function buildFallbackInvocationProfile(outputType: OutputType): OutputTypeInvocationProfile { + const artifactKind = outputType.artifact_kind ?? inferArtifactKind( + outputType.workflow_family, + outputType.output_format, + outputType.is_animation, + ) + return { + renderer: outputType.renderer, + render_backend: outputType.render_backend, + workflow_family: outputType.workflow_family, + artifact_kind: artifactKind, + output_format: outputType.output_format, + is_animation: outputType.is_animation, + workflow_definition_id: outputType.workflow_definition_id, + workflow_rollout_mode: outputType.workflow_rollout_mode ?? 
'legacy_only', + transparent_bg: outputType.transparent_bg, + cycles_device: outputType.cycles_device, + material_override: outputType.material_override, + allowed_override_keys: listAllowedInvocationOverrideKeysForArtifactKind(artifactKind), + invocation_overrides: sanitizeInvocationOverrides(artifactKind, { + ...outputType.render_settings, + ...outputType.invocation_overrides, + }), + } +} + +function normalizeOutputType(outputType: OutputType): OutputType { + const invocationProfile = outputType.invocation_profile + ? { + ...outputType.invocation_profile, + workflow_rollout_mode: outputType.invocation_profile.workflow_rollout_mode ?? outputType.workflow_rollout_mode ?? 'legacy_only', + allowed_override_keys: outputType.invocation_profile.allowed_override_keys + ?.filter((key): key is OutputTypeInvocationOverrideKey => OUTPUT_TYPE_INVOCATION_OVERRIDE_KEYS.includes(key as OutputTypeInvocationOverrideKey)) + ?? listAllowedInvocationOverrideKeysForArtifactKind(outputType.artifact_kind), + invocation_overrides: sanitizeInvocationOverrides( + outputType.invocation_profile.artifact_kind ?? outputType.artifact_kind, + outputType.invocation_profile.invocation_overrides, + ), + } + : buildFallbackInvocationProfile(outputType) + + return { + ...outputType, + workflow_rollout_mode: outputType.workflow_rollout_mode ?? 
'legacy_only', + invocation_overrides: invocationProfile.invocation_overrides, + invocation_profile: invocationProfile, + } } export function inferArtifactKind( @@ -92,10 +521,13 @@ export function inferArtifactKind( ): OutputTypeArtifactKind { const normalizedFormat = outputFormat.trim().toLowerCase() - if (isAnimation || ['mp4', 'webm', 'mov'].includes(normalizedFormat)) { + if (BLEND_OUTPUT_FORMATS.includes(normalizedFormat as (typeof BLEND_OUTPUT_FORMATS)[number])) { + return 'blend_asset' + } + if (isAnimation || VIDEO_OUTPUT_FORMATS.includes(normalizedFormat as (typeof VIDEO_OUTPUT_FORMATS)[number])) { return 'turntable_video' } - if (['gltf', 'glb', 'stl', 'obj', 'usd', 'usdz'].includes(normalizedFormat)) { + if (MODEL_OUTPUT_FORMATS.includes(normalizedFormat as (typeof MODEL_OUTPUT_FORMATS)[number])) { return 'model_export' } if (workflowFamily === 'cad_file') { @@ -107,19 +539,179 @@ export function inferArtifactKind( export function isArtifactKindAllowedForFamily( workflowFamily: OutputTypeWorkflowFamily, artifactKind: OutputTypeArtifactKind, + contractCatalog: OutputTypeContractCatalog = cachedOutputTypeContractCatalog, ): boolean { - return listAllowedArtifactKindsForFamily(workflowFamily).includes(artifactKind) + return listAllowedArtifactKindsForFamily(workflowFamily, contractCatalog).includes(artifactKind) +} + +export function getOutputTypeWorkflowContractIssues(args: { + workflowFamily: OutputTypeWorkflowFamily + artifactKind: OutputTypeArtifactKind + outputFormat: string + isAnimation: boolean + workflowDefinitionId?: string | null + workflowRolloutMode: OutputTypeWorkflowRolloutMode + workflows?: OutputTypeWorkflowContractWorkflowLike[] + contractCatalog?: OutputTypeContractCatalog +}): OutputTypeWorkflowContractIssue[] { + const { + workflowFamily, + artifactKind, + outputFormat, + isAnimation, + workflowDefinitionId, + workflowRolloutMode, + workflows = [], + contractCatalog = cachedOutputTypeContractCatalog, + } = args + + const issues: 
OutputTypeWorkflowContractIssue[] = [] + const normalizedFormat = outputFormat.trim().toLowerCase() + const selectedWorkflowId = workflowDefinitionId?.trim() ?? '' + const selectedWorkflow = selectedWorkflowId + ? workflows.find(workflow => workflow.id === selectedWorkflowId) ?? null + : null + + if (!isArtifactKindAllowedForFamily(workflowFamily, artifactKind, contractCatalog)) { + issues.push({ + code: 'artifact_family_mismatch', + severity: 'error', + message: `${artifactKind} is not allowed for the ${workflowFamily} workflow family.`, + }) + } + + if (normalizedFormat && !listAllowedOutputFormatsForFamily(workflowFamily, contractCatalog).includes(normalizedFormat)) { + issues.push({ + code: 'format_family_mismatch', + severity: 'error', + message: `${normalizedFormat} is not allowed for the ${workflowFamily} workflow family.`, + }) + } + + if (workflowFamily === 'cad_file' && isAnimation) { + issues.push({ + code: 'cad_animation_unsupported', + severity: 'error', + message: 'CAD intake workflows do not support animated output types.', + }) + } + + if (artifactKind === 'turntable_video') { + if (!isAnimation) { + issues.push({ + code: 'turntable_requires_animation', + severity: 'error', + message: 'Turntable Video requires animation to be enabled.', + }) + } + if (normalizedFormat && !VIDEO_OUTPUT_FORMATS.includes(normalizedFormat as (typeof VIDEO_OUTPUT_FORMATS)[number])) { + issues.push({ + code: 'turntable_requires_video_format', + severity: 'error', + message: 'Turntable Video requires a video output format.', + }) + } + } + + if ( + (artifactKind === 'still_image' || artifactKind === 'thumbnail_image') && + VIDEO_OUTPUT_FORMATS.includes(normalizedFormat as (typeof VIDEO_OUTPUT_FORMATS)[number]) + ) { + issues.push({ + code: 'image_artifact_with_video_format', + severity: 'error', + message: `${artifactKind} cannot use a video output format.`, + }) + } + + if ( + artifactKind === 'model_export' && + normalizedFormat && + 
!MODEL_OUTPUT_FORMATS.includes(normalizedFormat as (typeof MODEL_OUTPUT_FORMATS)[number]) + ) { + issues.push({ + code: 'model_export_requires_model_format', + severity: 'error', + message: 'Model Export requires a 3D export format such as gltf, glb, stl, obj, usd, or usdz.', + }) + } + + if (artifactKind === 'blend_asset') { + if (isAnimation) { + issues.push({ + code: 'blend_asset_animation_unsupported', + severity: 'error', + message: 'Blend Asset does not support animation output.', + }) + } + if (normalizedFormat && !BLEND_OUTPUT_FORMATS.includes(normalizedFormat as (typeof BLEND_OUTPUT_FORMATS)[number])) { + issues.push({ + code: 'blend_asset_requires_blend_format', + severity: 'error', + message: 'Blend Asset requires the blend output format.', + }) + } + } + + if ( + BLEND_OUTPUT_FORMATS.includes(normalizedFormat as (typeof BLEND_OUTPUT_FORMATS)[number]) && + artifactKind !== 'blend_asset' + ) { + issues.push({ + code: 'blend_format_requires_blend_asset', + severity: 'error', + message: 'The blend output format requires the Blend Asset artifact kind.', + }) + } + + if (!selectedWorkflowId) { + if (workflowRolloutMode !== 'legacy_only') { + issues.push({ + code: 'rollout_requires_workflow', + severity: 'error', + message: 'Shadow or graph rollout requires a linked workflow definition.', + }) + } + return issues + } + + if (selectedWorkflow == null) { + issues.push({ + code: 'workflow_missing', + severity: 'error', + message: 'The selected workflow definition could not be resolved.', + }) + return issues + } + + if (selectedWorkflow.family === 'mixed') { + issues.push({ + code: 'workflow_family_mixed', + severity: 'error', + message: `Workflow "${selectedWorkflow.name}" mixes CAD and order-line nodes and cannot be linked to an output type.`, + }) + } else if (selectedWorkflow.family !== workflowFamily) { + issues.push({ + code: 'workflow_family_mismatch', + severity: 'error', + message: `Workflow "${selectedWorkflow.name}" belongs to ${selectedWorkflow.family 
?? 'an unknown'} family and does not match ${workflowFamily}.`, + }) + } + + if (!workflowSupportsArtifactKindForOutputTypeContract(selectedWorkflow, artifactKind)) { + issues.push({ + code: 'workflow_artifact_mismatch', + severity: 'error', + message: `Workflow "${selectedWorkflow.name}" does not produce the ${artifactKind} artifact contract.`, + }) + } + + return issues } export function getOutputTypeInvocationOverrides(outputType: OutputType): Record { - const normalized: Record = {} - for (const key of OUTPUT_TYPE_INVOCATION_OVERRIDE_KEYS) { - const explicitValue = outputType.invocation_overrides?.[key] - const legacyValue = outputType.render_settings?.[key] - const value = explicitValue ?? legacyValue - if (value !== undefined && value !== null && value !== '') { - normalized[key] = value - } + if (outputType.invocation_profile?.invocation_overrides) { + return outputType.invocation_profile.invocation_overrides } - return normalized + return buildFallbackInvocationProfile(outputType).invocation_overrides } diff --git a/frontend/src/api/renderTemplates.ts b/frontend/src/api/renderTemplates.ts index 9031aa7..7022565 100644 --- a/frontend/src/api/renderTemplates.ts +++ b/frontend/src/api/renderTemplates.ts @@ -1,4 +1,5 @@ import api from './client'; +import type { WorkflowNodeFieldDefinition } from './workflows' export interface RenderTemplate { id: string; @@ -15,6 +16,7 @@ export interface RenderTemplate { lighting_only: boolean; shadow_catcher_enabled: boolean; camera_orbit: boolean; + workflow_input_schema: WorkflowNodeFieldDefinition[]; is_active: boolean; created_at: string; updated_at: string; @@ -41,7 +43,7 @@ export async function createRenderTemplate(formData: FormData): Promise> & { output_type_ids?: string[] }, + overrides: Partial> & { output_type_ids?: string[] }, ): Promise { const fd = new FormData(); fd.append('name', overrides.name || 'Untitled (copy)'); @@ -53,6 +55,7 @@ export async function duplicateRenderTemplate( fd.append('lighting_only', 
String(overrides.lighting_only ?? false)); fd.append('shadow_catcher_enabled', String(overrides.shadow_catcher_enabled ?? false)); fd.append('camera_orbit', String(overrides.camera_orbit ?? true)); + fd.append('workflow_input_schema', JSON.stringify(overrides.workflow_input_schema ?? [])); const { data } = await api.post('/render-templates', fd, { headers: { 'Content-Type': 'multipart/form-data' }, }); @@ -61,7 +64,7 @@ export async function duplicateRenderTemplate( export async function updateRenderTemplate( id: string, - updates: Partial>, + updates: Partial>, ): Promise { const { data } = await api.patch(`/render-templates/${id}`, updates); return data; diff --git a/frontend/src/api/workflows.ts b/frontend/src/api/workflows.ts index 705b9fa..2b3a361 100644 --- a/frontend/src/api/workflows.ts +++ b/frontend/src/api/workflows.ts @@ -1,8 +1,43 @@ import api from './client' +import type { OutputTypeArtifactKind, OutputTypeWorkflowRolloutMode } from './outputTypes' export type WorkflowPresetType = 'still' | 'still_graph' | 'turntable' | 'multi_angle' | 'still_with_exports' | 'custom' export type WorkflowExecutionMode = 'legacy' | 'graph' | 'shadow' export type WorkflowStarterFamily = 'cad_file' | 'order_line' +export type WorkflowBlueprintType = 'cad_intake' | 'order_rendering' | 'still_graph_reference' +export type WorkflowCanonicalBlueprintType = WorkflowBlueprintType | 'starter_cad_intake' | 'starter_order_rendering' + +export interface WorkflowRolloutLatestRun { + workflow_run_id: string + execution_mode: WorkflowExecutionMode + status: string + created_at: string + completed_at: string | null +} + +export interface WorkflowRolloutLinkedOutputType { + id: string + name: string + is_active: boolean + artifact_kind: OutputTypeArtifactKind + workflow_rollout_mode: OutputTypeWorkflowRolloutMode +} + +export interface WorkflowRolloutSummary { + linked_output_type_count: number + active_output_type_count: number + linked_output_type_names: string[] + 
linked_output_types: WorkflowRolloutLinkedOutputType[] + rollout_modes: ('legacy_only' | 'shadow' | 'graph' | string)[] + has_blocking_contracts: boolean + blocking_reasons: string[] + latest_run: WorkflowRolloutLatestRun | null + latest_shadow_run: WorkflowRolloutLatestRun | null + latest_rollout_gate_verdict: 'pass' | 'warn' | 'fail' | null + latest_rollout_ready: boolean | null + latest_rollout_status: 'ready_for_rollout' | 'hold_legacy_authoritative' | string | null + latest_rollout_reasons: string[] +} export interface WorkflowDefinition { id: string @@ -10,6 +45,8 @@ export interface WorkflowDefinition { output_type_id: string | null config: WorkflowConfig family: WorkflowNodeFamily | 'mixed' | null + supported_artifact_kinds?: OutputTypeArtifactKind[] + rollout_summary: WorkflowRolloutSummary is_active: boolean created_at: string } @@ -132,6 +169,18 @@ export interface WorkflowPreflightResponse { nodes: WorkflowPreflightNode[] } +export interface WorkflowOrderLineContextOption { + value: string + label: string + meta: string +} + +export interface WorkflowOrderLineContextGroup { + order_id: string + order_label: string + options: WorkflowOrderLineContextOption[] +} + export interface WorkflowDraftPreflightRequest { workflow_id?: string | null context_id: string @@ -162,6 +211,11 @@ export interface WorkflowRunComparison { execution_mode: WorkflowExecutionMode status: string summary: string + rollout_gate_verdict: 'pass' | 'warn' | 'fail' + workflow_rollout_ready: boolean + workflow_rollout_status: 'ready_for_rollout' | 'hold_legacy_authoritative' + rollout_reasons: string[] + rollout_thresholds: Record authoritative_output: WorkflowComparisonArtifact observer_output: WorkflowComparisonArtifact exact_match: boolean | null @@ -209,6 +263,9 @@ export const preflightWorkflowDraft = ( ): Promise => api.post('/workflows/preflight', data).then(r => r.data) +export const getWorkflowOrderLineContexts = (limit = 50): Promise => + 
api.get('/workflows/contexts/order-lines', { params: { limit } }).then(r => r.data) + export const getWorkflowRunComparison = (runId: string): Promise => api.get(`/workflows/runs/${runId}/comparison`).then(r => r.data) @@ -235,9 +292,12 @@ export interface WorkflowNodeFieldDefinition { step: number | null unit: string | null options: WorkflowNodeFieldOption[] + allow_blank?: boolean + max_length?: number | null + text_format?: string } -export type WorkflowNodeFamily = 'cad_file' | 'order_line' +export type WorkflowNodeFamily = 'cad_file' | 'order_line' | 'shared' export interface WorkflowNodeDefinition { step: string @@ -280,32 +340,75 @@ export const getNodeDefinitions = (): Promise = export const getPipelineSteps = (): Promise => api.get('/workflows/pipeline-steps').then(r => r.data) -function buildStillGraphNodes(renderParams: WorkflowParams): { nodes: WorkflowNode[]; edges: WorkflowEdge[] } { +function normalizeRenderParams(params: WorkflowParams = {}): WorkflowParams { + const normalized = { ...params } + const resolution = Array.isArray(normalized.resolution) ? normalized.resolution : undefined + if (resolution && resolution.length === 2) { + normalized.width = Number(resolution[0]) + normalized.height = Number(resolution[1]) + delete normalized.resolution + } + return normalized +} + +function buildWorkflowNode( + id: string, + step: string, + x: number, + y: number, + options: { + label: string + type?: string + params?: WorkflowParams + }, +): WorkflowNode { + return { + id, + step, + params: { ...(options.params ?? {}) }, + ui: { + type: options.type, + label: options.label, + position: { x, y }, + }, + } +} + +function extractRenderParamsFromNodes(nodes: WorkflowNode[], step: string): WorkflowParams { + const match = nodes.find(node => node.step === step) + return normalizeRenderParams(match?.params ?? 
{}) +} + +function buildOrderLineStillGraphNodes(renderParams: WorkflowParams): { nodes: WorkflowNode[]; edges: WorkflowEdge[] } { return { nodes: [ - { id: 'setup', step: 'order_line_setup', params: {}, ui: { label: 'Order Line Setup', position: { x: 0, y: 160 } } }, - { id: 'template', step: 'resolve_template', params: {}, ui: { label: 'Resolve Template', position: { x: 220, y: 160 } } }, - { - id: 'populate_materials', - step: 'auto_populate_materials', - params: {}, - ui: { type: 'processNode', label: 'Auto Populate Materials', position: { x: 220, y: 320 } }, - }, - { id: 'bbox', step: 'glb_bbox', params: {}, ui: { type: 'processNode', label: 'Compute Bounding Box', position: { x: 220, y: 40 } } }, - { - id: 'resolve_materials', - step: 'material_map_resolve', - params: {}, - ui: { type: 'processNode', label: 'Resolve Material Map', position: { x: 440, y: 200 } }, - }, - { - id: 'render', - step: 'blender_still', - params: { use_custom_render_settings: true, ...renderParams }, - ui: { type: 'renderNode', label: 'Still Render', position: { x: 680, y: 160 } }, - }, - { id: 'output', step: 'output_save', params: {}, ui: { type: 'outputNode', label: 'Save Output', position: { x: 920, y: 120 } } }, - { id: 'notify', step: 'notify', params: {}, ui: { type: 'outputNode', label: 'Notify Result', position: { x: 920, y: 220 } } }, + buildWorkflowNode('setup', 'order_line_setup', 0, 160, { label: 'Order Line Setup' }), + buildWorkflowNode('template', 'resolve_template', 220, 160, { label: 'Resolve Template' }), + buildWorkflowNode('populate_materials', 'auto_populate_materials', 220, 320, { + label: 'Auto Populate Materials', + type: 'processNode', + }), + buildWorkflowNode('bbox', 'glb_bbox', 220, 40, { + label: 'Compute Bounding Box', + type: 'processNode', + }), + buildWorkflowNode('resolve_materials', 'material_map_resolve', 440, 200, { + label: 'Resolve Material Map', + type: 'processNode', + }), + buildWorkflowNode('render', 'blender_still', 680, 160, { + label: 
'Still Render', + type: 'renderNode', + params: { use_custom_render_settings: false, ...renderParams }, + }), + buildWorkflowNode('output', 'output_save', 920, 120, { + label: 'Save Output', + type: 'outputNode', + }), + buildWorkflowNode('notify', 'notify', 920, 220, { + label: 'Notify Result', + type: 'outputNode', + }), ], edges: [ { from: 'setup', to: 'template' }, @@ -322,24 +425,25 @@ function buildStillGraphNodes(renderParams: WorkflowParams): { nodes: WorkflowNo } } -function migratePresetConfig(type: WorkflowPresetType, params: WorkflowParams = {}): WorkflowConfig { - const renderParams = { ...params } - const resolution = Array.isArray(renderParams.resolution) ? renderParams.resolution : undefined - if (resolution && resolution.length === 2) { - renderParams.width = Number(resolution[0]) - renderParams.height = Number(resolution[1]) - delete renderParams.resolution - } +function buildPresetWorkflowConfigInternal(type: WorkflowPresetType, params: WorkflowParams = {}): WorkflowConfig { + const renderParams = normalizeRenderParams(params) if (type === 'still') { return { version: 1, ui: { preset: type, execution_mode: 'legacy', family: 'order_line' }, nodes: [ - { id: 'setup', step: 'order_line_setup', params: {}, ui: { label: 'Order Line Setup', position: { x: 0, y: 100 } } }, - { id: 'template', step: 'resolve_template', params: {}, ui: { label: 'Resolve Template', position: { x: 220, y: 100 } } }, - { id: 'render', step: 'blender_still', params: renderParams, ui: { type: 'renderNode', label: 'Still Render', position: { x: 440, y: 100 } } }, - { id: 'output', step: 'output_save', params: {}, ui: { type: 'outputNode', label: 'Save Output', position: { x: 660, y: 100 } } }, + buildWorkflowNode('setup', 'order_line_setup', 0, 100, { label: 'Order Line Setup' }), + buildWorkflowNode('template', 'resolve_template', 220, 100, { label: 'Resolve Template' }), + buildWorkflowNode('render', 'blender_still', 440, 100, { + label: 'Still Render', + type: 'renderNode', 
+ params: renderParams, + }), + buildWorkflowNode('output', 'output_save', 660, 100, { + label: 'Save Output', + type: 'outputNode', + }), ], edges: [ { from: 'setup', to: 'template' }, @@ -350,7 +454,7 @@ function migratePresetConfig(type: WorkflowPresetType, params: WorkflowParams = } if (type === 'still_graph') { - const { nodes, edges } = buildStillGraphNodes(renderParams) + const { nodes, edges } = buildOrderLineStillGraphNodes(renderParams) return { version: 1, ui: { preset: type, execution_mode: 'graph', family: 'order_line' }, @@ -364,10 +468,17 @@ function migratePresetConfig(type: WorkflowPresetType, params: WorkflowParams = version: 1, ui: { preset: type, execution_mode: 'legacy', family: 'order_line' }, nodes: [ - { id: 'setup', step: 'order_line_setup', params: {}, ui: { label: 'Order Line Setup', position: { x: 0, y: 100 } } }, - { id: 'template', step: 'resolve_template', params: {}, ui: { label: 'Resolve Template', position: { x: 220, y: 100 } } }, - { id: 'turntable', step: 'blender_turntable', params: renderParams, ui: { type: 'renderFramesNode', label: 'Turntable Render', position: { x: 440, y: 100 } } }, - { id: 'output', step: 'output_save', params: {}, ui: { type: 'outputNode', label: 'Save Output', position: { x: 660, y: 100 } } }, + buildWorkflowNode('setup', 'order_line_setup', 0, 100, { label: 'Order Line Setup' }), + buildWorkflowNode('template', 'resolve_template', 220, 100, { label: 'Resolve Template' }), + buildWorkflowNode('turntable', 'blender_turntable', 440, 100, { + label: 'Turntable Render', + type: 'renderFramesNode', + params: renderParams, + }), + buildWorkflowNode('output', 'output_save', 660, 100, { + label: 'Save Output', + type: 'outputNode', + }), ], edges: [ { from: 'setup', to: 'template' }, @@ -385,15 +496,19 @@ function migratePresetConfig(type: WorkflowPresetType, params: WorkflowParams = version: 1, ui: { preset: type, execution_mode: 'legacy', family: 'order_line' }, nodes: [ - { id: 'setup', step: 
'order_line_setup', params: {}, ui: { label: 'Order Line Setup', position: { x: 0, y: 195 } } }, - { id: 'template', step: 'resolve_template', params: {}, ui: { label: 'Resolve Template', position: { x: 220, y: 195 } } }, - ...angles.map((angle, index) => ({ - id: `render_${index}`, - step: 'blender_still', - params: { ...sharedParams, rotation_z: angle }, - ui: { type: 'renderNode', label: `Render ${angle}°`, position: { x: 440, y: index * 130 } }, - })), - { id: 'output', step: 'output_save', params: {}, ui: { type: 'outputNode', label: 'Save Output', position: { x: 700, y: 195 } } }, + buildWorkflowNode('setup', 'order_line_setup', 0, 195, { label: 'Order Line Setup' }), + buildWorkflowNode('template', 'resolve_template', 220, 195, { label: 'Resolve Template' }), + ...angles.map((angle, index) => + buildWorkflowNode(`render_${index}`, 'blender_still', 440, index * 130, { + label: `Render ${angle}°`, + type: 'renderNode', + params: { ...sharedParams, rotation_z: angle }, + }), + ), + buildWorkflowNode('output', 'output_save', 700, 195, { + label: 'Save Output', + type: 'outputNode', + }), ], edges: [ { from: 'setup', to: 'template' }, @@ -408,11 +523,21 @@ function migratePresetConfig(type: WorkflowPresetType, params: WorkflowParams = version: 1, ui: { preset: type, execution_mode: 'legacy', family: 'order_line' }, nodes: [ - { id: 'setup', step: 'order_line_setup', params: {}, ui: { label: 'Order Line Setup', position: { x: 0, y: 100 } } }, - { id: 'template', step: 'resolve_template', params: {}, ui: { label: 'Resolve Template', position: { x: 220, y: 100 } } }, - { id: 'render', step: 'blender_still', params: renderParams, ui: { type: 'renderNode', label: 'Still Render', position: { x: 440, y: 100 } } }, - { id: 'output', step: 'output_save', params: {}, ui: { type: 'outputNode', label: 'Save Output', position: { x: 660, y: 70 } } }, - { id: 'blend', step: 'export_blend', params: {}, ui: { type: 'outputNode', label: 'Export Blend', position: { x: 660, y: 160 } 
} }, + buildWorkflowNode('setup', 'order_line_setup', 0, 100, { label: 'Order Line Setup' }), + buildWorkflowNode('template', 'resolve_template', 220, 100, { label: 'Resolve Template' }), + buildWorkflowNode('render', 'blender_still', 440, 100, { + label: 'Still Render', + type: 'renderNode', + params: renderParams, + }), + buildWorkflowNode('output', 'output_save', 660, 70, { + label: 'Save Output', + type: 'outputNode', + }), + buildWorkflowNode('blend', 'export_blend', 660, 160, { + label: 'Export Blend', + type: 'outputNode', + }), ], edges: [ { from: 'setup', to: 'template' }, @@ -427,22 +552,245 @@ function migratePresetConfig(type: WorkflowPresetType, params: WorkflowParams = version: 1, ui: { preset: 'custom', execution_mode: 'legacy', family: 'order_line' }, nodes: [ - { - id: 'setup', - step: 'order_line_setup', - params: {}, - ui: { label: 'Order Line Setup', position: { x: 120, y: 140 } }, - }, + buildWorkflowNode('setup', 'order_line_setup', 120, 140, { + label: 'Order Line Setup', + type: 'processNode', + }), ], edges: [], } } +export function buildWorkflowBlueprintConfig(blueprint: WorkflowBlueprintType): WorkflowConfig { + if (blueprint === 'cad_intake') { + return { + version: 1, + ui: { preset: 'custom', execution_mode: 'legacy', family: 'cad_file', blueprint }, + nodes: [ + buildWorkflowNode('resolve_step', 'resolve_step_path', 0, 180, { label: 'Resolve STEP Path' }), + buildWorkflowNode('extract_objects', 'occ_object_extract', 220, 180, { + label: 'Extract STEP Objects', + }), + buildWorkflowNode('export_glb', 'occ_glb_export', 440, 180, { label: 'Export GLB' }), + buildWorkflowNode('bbox', 'glb_bbox', 660, 120, { + label: 'Compute Bounding Box', + type: 'processNode', + }), + buildWorkflowNode('stl_cache', 'stl_cache_generate', 660, 300, { label: 'Generate STL Cache' }), + buildWorkflowNode('blender_thumb', 'blender_render', 880, 120, { + label: 'Render Thumbnail (Blender)', + type: 'renderNode', + params: { render_engine: 'cycles', samples: 
64, width: 512, height: 512 }, + }), + buildWorkflowNode('threejs_thumb', 'threejs_render', 880, 320, { + label: 'Render Thumbnail (Three.js)', + type: 'renderNode', + params: { width: 512, height: 512, transparent_bg: true }, + }), + buildWorkflowNode('save_blender_thumb', 'thumbnail_save', 1100, 120, { + label: 'Save Blender Thumbnail', + type: 'outputNode', + }), + buildWorkflowNode('save_threejs_thumb', 'thumbnail_save', 1100, 320, { + label: 'Save Three.js Thumbnail', + type: 'outputNode', + }), + ], + edges: [ + { from: 'resolve_step', to: 'extract_objects' }, + { from: 'extract_objects', to: 'export_glb' }, + { from: 'export_glb', to: 'bbox' }, + { from: 'export_glb', to: 'stl_cache' }, + { from: 'export_glb', to: 'blender_thumb' }, + { from: 'export_glb', to: 'threejs_thumb' }, + { from: 'bbox', to: 'threejs_thumb' }, + { from: 'blender_thumb', to: 'save_blender_thumb' }, + { from: 'threejs_thumb', to: 'save_threejs_thumb' }, + ], + } + } + + if (blueprint === 'order_rendering') { + return { + version: 1, + ui: { preset: 'custom', execution_mode: 'legacy', family: 'order_line', blueprint }, + nodes: [ + buildWorkflowNode('setup', 'order_line_setup', 0, 220, { label: 'Order Line Setup' }), + buildWorkflowNode('template', 'resolve_template', 220, 220, { label: 'Resolve Template' }), + buildWorkflowNode('populate_materials', 'auto_populate_materials', 220, 360, { + label: 'Auto Populate Materials', + }), + buildWorkflowNode('bbox', 'glb_bbox', 220, 80, { label: 'Compute Bounding Box' }), + buildWorkflowNode('resolve_materials', 'material_map_resolve', 440, 220, { + label: 'Resolve Material Map', + }), + buildWorkflowNode('still_render', 'blender_still', 680, 80, { + label: 'Render Still', + type: 'renderNode', + params: { rotation_z: 0 }, + }), + buildWorkflowNode('turntable_render', 'blender_turntable', 680, 220, { + label: 'Render Turntable', + type: 'renderFramesNode', + params: { fps: 24, duration_s: 5 }, + }), + buildWorkflowNode('blend_export', 
'export_blend', 680, 360, { + label: 'Export Blend', + type: 'outputNode', + }), + buildWorkflowNode('save_still', 'output_save', 920, 80, { + label: 'Save Still Output', + type: 'outputNode', + }), + buildWorkflowNode('save_turntable', 'output_save', 920, 220, { + label: 'Save Turntable Output', + type: 'outputNode', + }), + buildWorkflowNode('notify_still', 'notify', 920, 140, { + label: 'Notify Still Result', + type: 'outputNode', + }), + buildWorkflowNode('notify_turntable', 'notify', 920, 280, { + label: 'Notify Turntable Result', + type: 'outputNode', + }), + buildWorkflowNode('notify_export', 'notify', 920, 360, { + label: 'Notify Blend Export', + type: 'outputNode', + }), + ], + edges: [ + { from: 'setup', to: 'template' }, + { from: 'setup', to: 'populate_materials' }, + { from: 'setup', to: 'bbox' }, + { from: 'template', to: 'resolve_materials' }, + { from: 'populate_materials', to: 'resolve_materials' }, + { from: 'resolve_materials', to: 'still_render' }, + { from: 'resolve_materials', to: 'turntable_render' }, + { from: 'bbox', to: 'still_render' }, + { from: 'bbox', to: 'turntable_render' }, + { from: 'template', to: 'still_render' }, + { from: 'template', to: 'turntable_render' }, + { from: 'template', to: 'blend_export' }, + { from: 'still_render', to: 'save_still' }, + { from: 'still_render', to: 'notify_still' }, + { from: 'turntable_render', to: 'save_turntable' }, + { from: 'turntable_render', to: 'notify_turntable' }, + { from: 'blend_export', to: 'notify_export' }, + ], + } + } + + const { nodes, edges } = buildOrderLineStillGraphNodes({ + render_engine: 'cycles', + samples: 256, + width: 1920, + height: 1080, + }) + + return { + version: 1, + ui: { preset: 'custom', execution_mode: 'graph', family: 'order_line', blueprint }, + nodes, + edges, + } +} + +function buildStarterWorkflowConfigInternal(family: WorkflowStarterFamily = 'order_line'): WorkflowConfig { + if (family === 'cad_file') { + return { + version: 1, + ui: { + preset: 'custom', 
+ execution_mode: 'legacy', + family: 'cad_file', + blueprint: 'starter_cad_intake', + }, + nodes: [ + buildWorkflowNode('resolve_step', 'resolve_step_path', 120, 140, { + label: 'Resolve STEP Path', + type: 'inputNode', + }), + ], + edges: [], + } + } + + return { + version: 1, + ui: { + preset: 'custom', + execution_mode: 'legacy', + family: 'order_line', + blueprint: 'starter_order_rendering', + }, + nodes: [ + buildWorkflowNode('setup', 'order_line_setup', 120, 140, { + label: 'Order Line Setup', + type: 'processNode', + }), + ], + edges: [], + } +} + +export function buildStillGraphNodes(renderParams: WorkflowParams): { nodes: WorkflowNode[]; edges: WorkflowEdge[] } { + return buildOrderLineStillGraphNodes(normalizeRenderParams(renderParams)) +} + +function migratePresetConfig(type: WorkflowPresetType, params: WorkflowParams = {}): WorkflowConfig { + return buildPresetWorkflowConfigInternal(type, params) +} + function normalizeWorkflowDefinition(raw: WorkflowDefinition): WorkflowDefinition { const config = normalizeWorkflowConfig(raw.config as unknown as Record) return { ...raw, family: raw.family ?? inferWorkflowFamily(config), + supported_artifact_kinds: Array.isArray(raw.supported_artifact_kinds) + ? raw.supported_artifact_kinds + : [], + rollout_summary: { + linked_output_type_count: Number(raw.rollout_summary?.linked_output_type_count ?? 0), + active_output_type_count: Number(raw.rollout_summary?.active_output_type_count ?? 0), + linked_output_type_names: Array.isArray(raw.rollout_summary?.linked_output_type_names) + ? raw.rollout_summary.linked_output_type_names + : [], + linked_output_types: Array.isArray(raw.rollout_summary?.linked_output_types) + ? 
raw.rollout_summary.linked_output_types + .filter((outputType): outputType is WorkflowRolloutLinkedOutputType => ( + outputType != null + && typeof outputType === 'object' + && typeof outputType.id === 'string' + && typeof outputType.name === 'string' + )) + .map(outputType => ({ + id: outputType.id, + name: outputType.name, + is_active: Boolean(outputType.is_active), + artifact_kind: outputType.artifact_kind, + workflow_rollout_mode: outputType.workflow_rollout_mode ?? 'legacy_only', + })) + : [], + rollout_modes: Array.isArray(raw.rollout_summary?.rollout_modes) + ? raw.rollout_summary.rollout_modes + : [], + has_blocking_contracts: Boolean(raw.rollout_summary?.has_blocking_contracts), + blocking_reasons: Array.isArray(raw.rollout_summary?.blocking_reasons) + ? raw.rollout_summary.blocking_reasons + : [], + latest_run: raw.rollout_summary?.latest_run ?? null, + latest_shadow_run: raw.rollout_summary?.latest_shadow_run ?? null, + latest_rollout_gate_verdict: raw.rollout_summary?.latest_rollout_gate_verdict ?? null, + latest_rollout_ready: + typeof raw.rollout_summary?.latest_rollout_ready === 'boolean' + ? raw.rollout_summary.latest_rollout_ready + : null, + latest_rollout_status: raw.rollout_summary?.latest_rollout_status ?? null, + latest_rollout_reasons: Array.isArray(raw.rollout_summary?.latest_rollout_reasons) + ? raw.rollout_summary.latest_rollout_reasons + : [], + }, config, } } @@ -455,14 +803,51 @@ export function normalizeWorkflowConfig(raw: Record): WorkflowC params: { ...(node.params ?? {}) }, })) const edges = Array.isArray(raw.edges) ? (raw.edges as WorkflowEdge[]) : [] + const mergedUi = { + ...rawUi, + execution_mode: rawUi.execution_mode ?? 
'legacy', + } + + if (rawUi.preset === 'still_graph') { + const canonical = buildPresetWorkflowConfigInternal('still_graph', extractRenderParamsFromNodes(nodes, 'blender_still')) + return { + ...canonical, + ui: { + ...canonical.ui, + ...mergedUi, + }, + } + } + + if (rawUi.blueprint === 'cad_intake' || rawUi.blueprint === 'order_rendering' || rawUi.blueprint === 'still_graph_reference') { + const canonical = buildWorkflowBlueprintConfig(rawUi.blueprint) + return { + ...canonical, + ui: { + ...canonical.ui, + ...mergedUi, + }, + } + } + + if (rawUi.blueprint === 'starter_cad_intake' || rawUi.blueprint === 'starter_order_rendering') { + const canonical = buildStarterWorkflowConfigInternal(rawUi.blueprint === 'starter_cad_intake' ? 'cad_file' : 'order_line') + return { + ...canonical, + ui: { + ...canonical.ui, + ...mergedUi, + }, + } + } + return { version: Number(raw.version ?? 1), nodes, edges, ui: { - ...rawUi, - execution_mode: rawUi.execution_mode ?? 'legacy', - family: rawUi.family ?? inferWorkflowFamily({ version: Number(raw.version ?? 1), nodes, edges }), + ...mergedUi, + family: rawUi.family ?? inferWorkflowFamily({ version: Number(raw.version ?? 1), nodes, edges }) ?? 
undefined, }, } } @@ -480,49 +865,11 @@ export function normalizeWorkflowConfig(raw: Record): WorkflowC } export function createPresetWorkflowConfig(type: WorkflowPresetType, params: WorkflowParams = {}): WorkflowConfig { - return migratePresetConfig(type, params) + return buildPresetWorkflowConfigInternal(type, params) } export function createStarterWorkflowConfig(family: WorkflowStarterFamily = 'order_line'): WorkflowConfig { - if (family === 'cad_file') { - return { - version: 1, - ui: { - preset: 'custom', - execution_mode: 'legacy', - family: 'cad_file', - blueprint: 'starter_cad_intake', - }, - nodes: [ - { - id: 'resolve_step', - step: 'resolve_step_path', - params: {}, - ui: { type: 'inputNode', label: 'Resolve STEP Path', position: { x: 120, y: 140 } }, - }, - ], - edges: [], - } - } - - return { - version: 1, - ui: { - preset: 'custom', - execution_mode: 'legacy', - family: 'order_line', - blueprint: 'starter_order_rendering', - }, - nodes: [ - { - id: 'setup', - step: 'order_line_setup', - params: {}, - ui: { type: 'processNode', label: 'Order Line Setup', position: { x: 120, y: 140 } }, - }, - ], - edges: [], - } + return buildStarterWorkflowConfigInternal(family) } export function getWorkflowPresetType(config: WorkflowConfig): WorkflowPresetType { @@ -542,11 +889,12 @@ export function inferWorkflowFamily(config: WorkflowConfig): WorkflowNodeFamily case 'threejs_render': case 'thumbnail_save': return 'cad_file' + case 'glb_bbox': + return null case 'order_line_setup': case 'resolve_template': case 'material_map_resolve': case 'auto_populate_materials': - case 'glb_bbox': case 'blender_still': case 'blender_turntable': case 'output_save': @@ -557,7 +905,7 @@ export function inferWorkflowFamily(config: WorkflowConfig): WorkflowNodeFamily return null } }) - .filter((family): family is WorkflowNodeFamily => family !== null), + .filter((family): family is Exclude => family !== null), ) if (families.size === 0) return null if (families.size > 1) return 
'mixed' diff --git a/frontend/src/components/admin/OutputTypeTable.tsx b/frontend/src/components/admin/OutputTypeTable.tsx index a037e12..bdde829 100644 --- a/frontend/src/components/admin/OutputTypeTable.tsx +++ b/frontend/src/components/admin/OutputTypeTable.tsx @@ -1,37 +1,62 @@ -import React, { useState, useRef, useEffect } from 'react' +import React, { useState, useRef, useEffect, useMemo } from 'react' import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query' import { Pencil, Trash2, Plus, Check, X, ChevronDown, Copy } from 'lucide-react' import { toast } from 'sonner' import { listOutputTypes, createOutputType, updateOutputType, deleteOutputType, + getCachedOutputTypeContractCatalog, + getOutputTypeContractCatalog, + getCompatibleWorkflowsForOutputTypeContract, + getDefaultOutputFormatForArtifactKind, getOutputTypeInvocationOverrides, + getOutputTypeWorkflowContractIssues, inferArtifactKind, isArtifactKindAllowedForFamily, listAllowedArtifactKindsForFamily, + listAllowedInvocationOverrideKeysForArtifactKind, + listAllowedOutputFormatsForFamily, +} from '../../api/outputTypes' +import type { + OutputType, + OutputTypeContractCatalog, + OutputTypeArtifactKind, + OutputTypeWorkflowContractIssue, + OutputTypeWorkflowContractWorkflowLike, + OutputTypeWorkflowFamily, + OutputTypeWorkflowRolloutMode, } from '../../api/outputTypes' -import type { OutputType, OutputTypeArtifactKind, OutputTypeWorkflowFamily } from '../../api/outputTypes' import { listMaterials } from '../../api/materials' import type { Material } from '../../api/materials' import { listPricingTiers } from '../../api/pricing' import type { PricingTier } from '../../api/pricing' import { getWorkflows, inferWorkflowFamily as inferWorkflowFamilyFromConfig } from '../../api/workflows' import type { WorkflowDefinition } from '../../api/workflows' +import { getOutputTypeRolloutPresentation } from './outputTypeRolloutPresentation' const RENDERERS = ['blender'] -const FORMATS = ['png', 
'jpg', 'gltf', 'stl', 'mp4', 'webm'] -const WORKFLOW_FAMILIES = [ - { value: 'order_line', label: 'Order Rendering' }, - { value: 'cad_file', label: 'CAD Intake' }, -] as const -const ARTIFACT_KINDS = [ - { value: 'still_image', label: 'Still Image' }, - { value: 'turntable_video', label: 'Turntable Video' }, - { value: 'model_export', label: 'Model Export' }, - { value: 'thumbnail_image', label: 'Thumbnail Image' }, - { value: 'blend_asset', label: 'Blend Asset' }, - { value: 'package', label: 'Package' }, - { value: 'custom', label: 'Custom' }, -] as const +const WORKFLOW_FAMILY_LABELS: Record = { + order_line: 'Order Rendering', + cad_file: 'CAD Intake', +} +const ARTIFACT_KIND_LABELS: Record = { + still_image: 'Still Image', + turntable_video: 'Turntable Video', + model_export: 'Model Export', + thumbnail_image: 'Thumbnail Image', + blend_asset: 'Blend Asset', + package: 'Package', + custom: 'Custom', +} +const WORKFLOW_ROLLOUT_LABELS: Record = { + legacy_only: 'Legacy Only', + shadow: 'Shadow', + graph: 'Graph Authoritative', +} +const WORKFLOW_ROLLOUT_HINTS: Record = { + legacy_only: 'Graph workflow stays linked but production remains on legacy.', + shadow: 'Legacy stays authoritative while the graph runs as observer.', + graph: 'Production dispatch uses the linked workflow graph.', +} const ALL_CATEGORIES = [ { key: 'TRB', label: 'TRB' }, { key: 'Kugellager', label: 'Kugellager' }, @@ -41,31 +66,93 @@ const ALL_CATEGORIES = [ { key: 'Linear_schiene', label: 'Linear' }, { key: 'Anschlagplatten', label: 'Anschlag' }, ] -const EMPTY_FORM ={ name: '', description: '', renderer: 'blender', output_format: 'png', sort_order: 0, compatible_categories: [] as string[], render_backend: 'celery', is_animation: false, transparent_bg: false, workflow_family: 'order_line' as OutputTypeWorkflowFamily, artifact_kind: 'still_image' as OutputTypeArtifactKind, workflow_definition_id: '' as string, cycles_device: '' as string, pricing_tier_id: null as number | null, 
material_override: '' as string, width: '', height: '', engine: '', samples: '', frame_count: '', fps: '', turntable_axis: 'world_z', bg_color: '', noise_threshold: '', denoiser: '', denoising_input_passes: '', denoising_prefilter: '', denoising_quality: '', denoising_use_gpu: '' } +const EMPTY_FORM ={ name: '', description: '', renderer: 'blender', output_format: 'png', sort_order: 0, compatible_categories: [] as string[], render_backend: 'celery', is_animation: false, transparent_bg: false, workflow_family: 'order_line' as OutputTypeWorkflowFamily, artifact_kind: 'still_image' as OutputTypeArtifactKind, workflow_definition_id: '' as string, workflow_rollout_mode: 'legacy_only' as OutputTypeWorkflowRolloutMode, cycles_device: '' as string, pricing_tier_id: null as number | null, material_override: '' as string, width: '', height: '', engine: '', samples: '', frame_count: '', fps: '', turntable_axis: 'world_z', bg_color: '', noise_threshold: '', denoiser: '', denoising_input_passes: '', denoising_prefilter: '', denoising_quality: '', denoising_use_gpu: '' } -function getWorkflowFamily(workflow: WorkflowDefinition): 'cad_file' | 'order_line' | 'mixed' | null { - return workflow.family ?? inferWorkflowFamilyFromConfig(workflow.config) +type OutputTypeEditorContractValues = Pick< + typeof EMPTY_FORM, + 'workflow_family' | 'artifact_kind' | 'output_format' | 'is_animation' | 'workflow_definition_id' | 'workflow_rollout_mode' +> + +function formatWorkflowRolloutLabel(mode: OutputTypeWorkflowRolloutMode): string { + return WORKFLOW_ROLLOUT_LABELS[mode] ?? 
mode } -function buildInvocationOverridesFromValues(values: Record): Record { +function getWorkflowFamily(workflow: WorkflowDefinition): 'cad_file' | 'order_line' | 'mixed' | null { + const inferredFamily = inferWorkflowFamilyFromConfig(workflow.config) + if (inferredFamily && inferredFamily !== 'shared') return inferredFamily + if (workflow.family && workflow.family !== 'shared') return workflow.family + return null +} + +function getWorkflowSupportedArtifactKinds(workflow: WorkflowDefinition): OutputTypeArtifactKind[] { + return Array.isArray(workflow.supported_artifact_kinds) + ? workflow.supported_artifact_kinds + : [] +} + +function workflowSupportsArtifactKind( + workflow: WorkflowDefinition, + artifactKind: OutputTypeArtifactKind, +): boolean { + return getWorkflowSupportedArtifactKinds(workflow).includes(artifactKind) +} + +function formatArtifactKindLabel(artifactKind: OutputTypeArtifactKind | string): string { + return ARTIFACT_KIND_LABELS[artifactKind as OutputTypeArtifactKind] ?? 
artifactKind +} + +function isArtifactKindFormatInferred(artifactKind: OutputTypeArtifactKind): boolean { + return ['still_image', 'thumbnail_image', 'turntable_video', 'model_export', 'blend_asset'].includes(artifactKind) +} + +function buildInvocationOverridesFromValues( + artifactKind: OutputTypeArtifactKind, + values: Record, + contractCatalog: OutputTypeContractCatalog, +): Record { + const allowedKeys = new Set(listAllowedInvocationOverrideKeysForArtifactKind(artifactKind, contractCatalog)) const overrides: Record = {} - if (values.width) overrides.width = Number(values.width) - if (values.height) overrides.height = Number(values.height) - if (values.engine) overrides.engine = values.engine - if (values.samples) overrides.samples = Number(values.samples) - if (values.frame_count) overrides.frame_count = Number(values.frame_count) - if (values.fps) overrides.fps = Number(values.fps) - if (values.turntable_axis) overrides.turntable_axis = values.turntable_axis - if (values.bg_color) overrides.bg_color = values.bg_color - if (values.noise_threshold) overrides.noise_threshold = values.noise_threshold - if (values.denoiser) overrides.denoiser = values.denoiser - if (values.denoising_input_passes) overrides.denoising_input_passes = values.denoising_input_passes - if (values.denoising_prefilter) overrides.denoising_prefilter = values.denoising_prefilter - if (values.denoising_quality) overrides.denoising_quality = values.denoising_quality - if (values.denoising_use_gpu) overrides.denoising_use_gpu = values.denoising_use_gpu + if (allowedKeys.has('width') && values.width) overrides.width = Number(values.width) + if (allowedKeys.has('height') && values.height) overrides.height = Number(values.height) + if (allowedKeys.has('engine') && values.engine) overrides.engine = values.engine + if (allowedKeys.has('samples') && values.samples) overrides.samples = Number(values.samples) + if (allowedKeys.has('frame_count') && values.frame_count) overrides.frame_count = 
Number(values.frame_count) + if (allowedKeys.has('fps') && values.fps) overrides.fps = Number(values.fps) + if (allowedKeys.has('turntable_axis') && values.turntable_axis) overrides.turntable_axis = values.turntable_axis + if (allowedKeys.has('bg_color') && values.bg_color) overrides.bg_color = values.bg_color + if (allowedKeys.has('noise_threshold') && values.noise_threshold) overrides.noise_threshold = values.noise_threshold + if (allowedKeys.has('denoiser') && values.denoiser) overrides.denoiser = values.denoiser + if (allowedKeys.has('denoising_input_passes') && values.denoising_input_passes) overrides.denoising_input_passes = values.denoising_input_passes + if (allowedKeys.has('denoising_prefilter') && values.denoising_prefilter) overrides.denoising_prefilter = values.denoising_prefilter + if (allowedKeys.has('denoising_quality') && values.denoising_quality) overrides.denoising_quality = values.denoising_quality + if (allowedKeys.has('denoising_use_gpu') && values.denoising_use_gpu) overrides.denoising_use_gpu = values.denoising_use_gpu return overrides } +function buildContractValuesFromOutputType( + outputType: OutputType, + editDraft?: Partial, +): OutputTypeEditorContractValues { + return { + workflow_family: editDraft?.workflow_family ?? outputType.workflow_family, + artifact_kind: editDraft?.artifact_kind ?? outputType.artifact_kind, + output_format: editDraft?.output_format ?? outputType.output_format, + is_animation: editDraft?.is_animation ?? outputType.is_animation, + workflow_definition_id: editDraft?.workflow_definition_id ?? outputType.workflow_definition_id ?? '', + workflow_rollout_mode: editDraft?.workflow_rollout_mode ?? outputType.workflow_rollout_mode ?? 
'legacy_only', + } +} + +function hasContractErrors(issues: OutputTypeWorkflowContractIssue[]): boolean { + return issues.some(issue => issue.severity === 'error') +} + +function getContractCalloutClassName(severity: OutputTypeWorkflowContractIssue['severity']): string { + return severity === 'error' + ? 'border-red-200 bg-red-50 text-red-800' + : 'border-amber-200 bg-amber-50 text-amber-800' +} + export default function OutputTypeTable() { const qc = useQueryClient() const [showAdd, setShowAdd] = useState(false) @@ -78,6 +165,12 @@ export default function OutputTypeTable() { queryFn: () => listOutputTypes(true), }) + const { data: contractCatalogData } = useQuery({ + queryKey: ['output-type-contract-catalog'], + queryFn: getOutputTypeContractCatalog, + staleTime: 5 * 60 * 1000, + }) + const { data: pricingTiers } = useQuery({ queryKey: ['pricing-tiers'], queryFn: listPricingTiers, @@ -94,6 +187,36 @@ export default function OutputTypeTable() { queryFn: getWorkflows, }) + const contractCatalog = contractCatalogData ?? getCachedOutputTypeContractCatalog() + const workflowFamilyOptions = useMemo( + () => contractCatalog.workflow_families.map(value => ({ value, label: WORKFLOW_FAMILY_LABELS[value] ?? value })), + [contractCatalog], + ) + const artifactKindOptions = useMemo( + () => contractCatalog.artifact_kinds.map(value => ({ value, label: ARTIFACT_KIND_LABELS[value] ?? value })), + [contractCatalog], + ) + const workflowRolloutOptions = useMemo( + () => + contractCatalog.workflow_rollout_modes.map(value => ({ + value, + label: WORKFLOW_ROLLOUT_LABELS[value] ?? value, + hint: WORKFLOW_ROLLOUT_HINTS[value] ?? value, + })), + [contractCatalog], + ) + + const workflowContractCatalog = useMemo( + () => + (workflows ?? []).map(workflow => ({ + id: workflow.id, + name: workflow.name, + family: getWorkflowFamily(workflow), + supported_artifact_kinds: getWorkflowSupportedArtifactKinds(workflow), + })), + [workflows], + ) + const workflowsByFamily = (workflows ?? 
[]).filter(w => w.is_active).reduce>((acc, workflow) => { const family = getWorkflowFamily(workflow) if (family === null) return acc @@ -102,6 +225,21 @@ export default function OutputTypeTable() { return acc }, {}) + const getContractIssues = (values: OutputTypeEditorContractValues) => + getOutputTypeWorkflowContractIssues({ + workflowFamily: values.workflow_family, + artifactKind: values.artifact_kind, + outputFormat: values.output_format, + isAnimation: values.is_animation, + workflowDefinitionId: values.workflow_definition_id, + workflowRolloutMode: values.workflow_rollout_mode, + workflows: workflowContractCatalog, + contractCatalog, + }) + + const addContractIssues = getContractIssues(form) + const addContractBlocked = hasContractErrors(addContractIssues) + const updateWorkflowMut = useMutation({ mutationFn: ({ id, workflow_definition_id }: { id: string; workflow_definition_id: string | null }) => updateOutputType(id, { workflow_definition_id }), @@ -115,7 +253,11 @@ export default function OutputTypeTable() { const createMut = useMutation({ mutationFn: () => { - const invocationOverrides = buildInvocationOverridesFromValues({ + const contractIssues = getContractIssues(form) + if (hasContractErrors(contractIssues)) { + throw new Error(contractIssues[0].message) + } + const invocationOverrides = buildInvocationOverridesFromValues(form.artifact_kind, { width: form.width, height: form.height, engine: form.engine, @@ -130,7 +272,7 @@ export default function OutputTypeTable() { denoising_prefilter: form.denoising_prefilter, denoising_quality: form.denoising_quality, denoising_use_gpu: form.denoising_use_gpu, - }) + }, contractCatalog) return createOutputType({ name: form.name.trim(), description: form.description.trim() || undefined, @@ -145,6 +287,7 @@ export default function OutputTypeTable() { artifact_kind: form.artifact_kind, invocation_overrides: invocationOverrides, workflow_definition_id: form.workflow_definition_id || null, + workflow_rollout_mode: 
form.workflow_rollout_mode, cycles_device: form.cycles_device || null, pricing_tier_id: form.pricing_tier_id, material_override: form.material_override || null, @@ -162,9 +305,16 @@ export default function OutputTypeTable() { const updateMut = useMutation({ mutationFn: ({ id, data }: { id: string; data: Partial & { _width?: string; _height?: string; _engine?: string; _samples?: string; _frame_count?: string; _fps?: string; _turntable_axis?: string; _bg_color?: string; _noise_threshold?: string; _denoiser?: string; _denoising_input_passes?: string; _denoising_prefilter?: string; _denoising_quality?: string; _denoising_use_gpu?: string } }) => { + const ot = types?.find((t) => t.id === id) + if (!ot) { + throw new Error('Output type could not be resolved for update.') + } + const contractIssues = getContractIssues(buildContractValuesFromOutputType(ot, data)) + if (hasContractErrors(contractIssues)) { + throw new Error(contractIssues[0].message) + } const { _width, _height, _engine, _samples, _frame_count, _fps, _turntable_axis, _bg_color, _noise_threshold, _denoiser, _denoising_input_passes, _denoising_prefilter, _denoising_quality, _denoising_use_gpu, ...rest } = data if (_width !== undefined || _height !== undefined || _engine !== undefined || _samples !== undefined || _frame_count !== undefined || _fps !== undefined || _turntable_axis !== undefined || _bg_color !== undefined || _noise_threshold !== undefined || _denoiser !== undefined || _denoising_input_passes !== undefined || _denoising_prefilter !== undefined || _denoising_quality !== undefined || _denoising_use_gpu !== undefined) { - const ot = types?.find((t) => t.id === id) const existing = ot ? 
getOutputTypeInvocationOverrides(ot) : {} const rs = { ...existing } if (_width !== undefined) { @@ -209,7 +359,8 @@ export default function OutputTypeTable() { if (_denoising_use_gpu !== undefined) { if (_denoising_use_gpu) rs.denoising_use_gpu = _denoising_use_gpu; else delete rs.denoising_use_gpu } - rest.invocation_overrides = rs + const nextArtifactKind = (rest.artifact_kind ?? ot?.artifact_kind ?? 'still_image') as OutputTypeArtifactKind + rest.invocation_overrides = buildInvocationOverridesFromValues(nextArtifactKind, rs, contractCatalog) } return updateOutputType(id, rest) }, @@ -249,6 +400,7 @@ export default function OutputTypeTable() { cycles_device: ot.cycles_device, pricing_tier_id: ot.pricing_tier_id, workflow_definition_id: ot.workflow_definition_id, + workflow_rollout_mode: ot.workflow_rollout_mode, workflow_family: ot.workflow_family, artifact_kind: ot.artifact_kind, is_active: ot.is_active, @@ -291,6 +443,7 @@ export default function OutputTypeTable() { if (field === 'workflow_family') return editDraft.workflow_family ?? ot!.workflow_family if (field === 'artifact_kind') return editDraft.artifact_kind ?? ot!.artifact_kind if (field === 'workflow_definition_id') return editDraft.workflow_definition_id ?? ot!.workflow_definition_id ?? '' + if (field === 'workflow_rollout_mode') return editDraft.workflow_rollout_mode ?? ot!.workflow_rollout_mode if (field === 'cycles_device') return editDraft.cycles_device ?? (ot!.cycles_device || '') if (field === 'sort_order') return editDraft.sort_order ?? ot!.sort_order if (field === 'pricing_tier_id') return editDraft.pricing_tier_id ?? ot!.pricing_tier_id ?? 
'' @@ -340,358 +493,597 @@ export default function OutputTypeTable() { const currentArtifactKind = val('artifact_kind') as OutputTypeArtifactKind const isBlender = showBlenderSettings(currentRenderer) const showBg = showTransparentBg(currentRenderer, currentFormat) + const allowedInvocationKeys = new Set( + listAllowedInvocationOverrideKeysForArtifactKind(currentArtifactKind, contractCatalog), + ) + const supportsResolution = allowedInvocationKeys.has('width') || allowedInvocationKeys.has('height') + const supportsTurntable = allowedInvocationKeys.has('frame_count') || allowedInvocationKeys.has('fps') || allowedInvocationKeys.has('turntable_axis') + const supportsBgColor = allowedInvocationKeys.has('bg_color') + const supportsEngine = allowedInvocationKeys.has('engine') + const supportsSamples = allowedInvocationKeys.has('samples') + const supportsDenoising = [ + 'noise_threshold', + 'denoiser', + 'denoising_input_passes', + 'denoising_prefilter', + 'denoising_quality', + 'denoising_use_gpu', + ].some((key) => allowedInvocationKeys.has(key as any)) const bgColor = val('bg_color') as string const bgEnabled = bgColor !== '' const workflowById = new Map((workflows ?? []).map(workflow => [workflow.id, workflow])) - const selectableWorkflows = (workflowsByFamily[currentFamily] ?? []).filter(workflow => getWorkflowFamily(workflow) !== 'mixed') - const artifactOptions = ARTIFACT_KINDS.filter(kind => listAllowedArtifactKindsForFamily(currentFamily).includes(kind.value)) + const contractValues: OutputTypeEditorContractValues = { + workflow_family: currentFamily, + artifact_kind: currentArtifactKind, + output_format: currentFormat, + is_animation: currentIsAnimation, + workflow_definition_id: val('workflow_definition_id') as string, + workflow_rollout_mode: val('workflow_rollout_mode') as OutputTypeWorkflowRolloutMode, + } + const contractIssues = getContractIssues(contractValues) + const familySelectableWorkflows = (workflowsByFamily[currentFamily] ?? 
[]).filter( + workflow => getWorkflowFamily(workflow) !== 'mixed', + ) + const compatibleWorkflowIds = new Set( + getCompatibleWorkflowsForOutputTypeContract(workflowContractCatalog, currentFamily, currentArtifactKind) + .map(workflow => workflow.id), + ) + const selectableWorkflows = familySelectableWorkflows.filter(workflow => compatibleWorkflowIds.has(workflow.id)) + const selectedWorkflowId = contractValues.workflow_definition_id + const currentRolloutMode = contractValues.workflow_rollout_mode + const selectedWorkflow = workflowById.get(selectedWorkflowId) + const selectedWorkflowMismatch = selectedWorkflow != null && !workflowSupportsArtifactKind(selectedWorkflow, currentArtifactKind) + const rolloutPresentation = getOutputTypeRolloutPresentation({ + hasWorkflowLink: Boolean(selectedWorkflowId), + workflowRolloutMode: currentRolloutMode, + hasBlockingIssues: hasContractErrors(contractIssues), + }) + const profileOwnershipKeys = contractCatalog.parameter_ownership.output_type_profile_keys + const templateRuntimeKeys = contractCatalog.parameter_ownership.template_runtime_keys + const resolveTemplateKeys = contractCatalog.parameter_ownership.workflow_node_keys_by_step.resolve_template ?? [] + const renderNodeOwnershipKeys = + currentArtifactKind === 'turntable_video' + ? contractCatalog.parameter_ownership.workflow_node_keys_by_step.blender_turntable ?? [] + : contractCatalog.parameter_ownership.workflow_node_keys_by_step.blender_still ?? [] + const artifactOptions = artifactKindOptions.filter(kind => + listAllowedArtifactKindsForFamily(currentFamily, contractCatalog).includes(kind.value), + ) + const allowedFormats = listAllowedOutputFormatsForFamily(currentFamily, contractCatalog) + const formatOptions = allowedFormats.includes(currentFormat) + ? allowedFormats + : [...allowedFormats, currentFormat] + const currentFormatSupported = allowedFormats.includes(currentFormat) const categoriesValue = isEdit ? (editDraft.compatible_categories ?? 
ot!.compatible_categories) || [] : form.compatible_categories + const sectionClassName = 'mt-4 rounded-2xl border border-border-light bg-surface px-4 py-4' + const sectionHeadingClassName = 'text-xs font-semibold uppercase tracking-[0.12em] text-content-muted' + const sectionBodyClassName = 'mt-3 grid grid-cols-2 gap-4 md:grid-cols-4' return ( <> - {/* Row 1: Name | Family | Artifact | Workflow */} -
-
- - set('name', e.target.value)} - /> +
+
+
+
Workflow Contract
+

+ Output types should bind to a workflow family, artifact contract, and optional graph workflow before any renderer-specific tuning is applied. +

+
+
+ Legacy remains available when no workflow is assigned +
-
- - { const nextFamily = e.target.value as OutputTypeWorkflowFamily - const nextArtifact = isArtifactKindAllowedForFamily(nextFamily, currentArtifactKind) + const nextAllowedFormats = listAllowedOutputFormatsForFamily(nextFamily, contractCatalog) + const nextIsAnimation = nextFamily === 'cad_file' ? false : currentIsAnimation + const nextArtifactSeed = isArtifactKindAllowedForFamily(nextFamily, currentArtifactKind, contractCatalog) ? currentArtifactKind - : inferArtifactKind(nextFamily, currentFormat, currentIsAnimation) + : inferArtifactKind(nextFamily, currentFormat, nextIsAnimation) + const nextFormat = nextAllowedFormats.includes(currentFormat) + ? currentFormat + : getDefaultOutputFormatForArtifactKind(nextArtifactSeed, contractCatalog) + const nextArtifact = isArtifactKindAllowedForFamily(nextFamily, nextArtifactSeed, contractCatalog) + ? ( + isArtifactKindFormatInferred(nextArtifactSeed) + ? inferArtifactKind(nextFamily, nextFormat, nextIsAnimation) + : nextArtifactSeed + ) + : inferArtifactKind(nextFamily, nextFormat, nextIsAnimation) const currentWorkflowId = val('workflow_definition_id') as string const currentWorkflow = workflowById.get(currentWorkflowId) set('workflow_family', nextFamily) - set('artifact_kind', nextArtifact) - if (currentWorkflow && getWorkflowFamily(currentWorkflow) !== nextFamily) { - set('workflow_definition_id', '') - } - }} - > - {WORKFLOW_FAMILIES.map((family) => )} - -
-
- - -
-
- - -
-
- - {/* Row 2: Renderer | Format | Animation | Pricing Tier */} -
-
- - -
-
- - -
-
- - -
-
- - -
-
- - {/* Row 3: Turntable | Background | Device | Engine */} -
-
- - {currentIsAnimation ? ( -
-
- set('frame_count', e.target.value)} - title="Frame count" - /> - f - set('fps', e.target.value)} - title="FPS" - /> - fps -
- -
- ) : ( - N/A (not animation) - )} -
-
- - {showBg ? ( -
- - - {bgEnabled && ( -
- set('bg_color', e.target.value)} - /> - set('bg_color', e.target.value)} - /> -
- )} -
- ) : ( - N/A (not Blender) - )} -
-
- - {isBlender ? ( + > + {workflowFamilyOptions.map((family) => )} + +
+
+ - ) : ( - N/A (not Blender) - )} -
-
- - {isBlender ? ( +
+
+ - ) : ( - N/A (not Blender) - )} -
-
- - {/* Row 4: Samples | Resolution | Categories | Material Override */} -
-
- - {isBlender ? ( - + {selectedWorkflowMismatch + ? `Aktuelle Verknüpfung passt nicht zu ${formatArtifactKindLabel(currentArtifactKind)} und wird beim Speichern entfernt.` + : selectableWorkflows.length > 0 + ? `Es werden nur Workflows angezeigt, die ${formatArtifactKindLabel(currentArtifactKind)} produzieren.` + : `Kein aktiver ${currentFamily === 'cad_file' ? 'CAD-Intake' : 'Order-Rendering'}-Workflow produziert aktuell ${formatArtifactKindLabel(currentArtifactKind)}.`} +

+
+
+ + set('width', e.target.value)} - /> - x - set('height', e.target.value)} - /> + value={currentRolloutMode} + disabled={!selectedWorkflowId} + onChange={(e) => set('workflow_rollout_mode', e.target.value as OutputTypeWorkflowRolloutMode)} + > + {workflowRolloutOptions.map((option) => ( + + ))} + +

+ {selectedWorkflowId + ? workflowRolloutOptions.find((option) => option.value === currentRolloutMode)?.hint + : 'Ohne Workflow-Link bleibt der Output Type immer im Legacy-Pfad.'} +

-
- - set('compatible_categories', cats)} - /> +
+
+ + {rolloutPresentation.badgeLabel} + + + {rolloutPresentation.statusLabel} + +
+

{rolloutPresentation.operatorHint}

-
- - +
+ {contractIssues.length > 0 ? ( + contractIssues.map(issue => ( +
+ {issue.severity === 'error' ? 'Blocker' : 'Hinweis'}: {issue.message} +
+ )) + ) : selectedWorkflowId ? ( +
+ Workflow contract valid: Der verknüpfte Workflow, das Artefakt und der Rollout-Modus sind konsistent. +
+ ) : ( +
+ Legacy path active: Ohne Workflow-Link bleibt dieser Output Type auf dem Legacy-Pfad und kann später sauber an einen Graph-Workflow gebunden werden. +
+ )}
- {/* Row 5: Sort Order | Active */} -
+
- - set('sort_order', Number(e.target.value))} - /> +
Invocation Profile
+

+ Diese Einstellungen definieren, welches Artefakt der Workflow liefern soll und mit welchen Laufzeit-Parametern der Auftrag gestartet wird. +

-
- - {isEdit ? ( +
+

+ Owner split: Output Type steuert {profileOwnershipKeys.join(', ')} sowie die freigegebenen Invocation-Overrides. + Template-Auflösung steuert {resolveTemplateKeys.join(', ')}. + Der {currentArtifactKind === 'turntable_video' ? 'Turntable-' : 'Still-'}Graph-Node steuert {renderNodeOwnershipKeys.join(', ')}. +

+

+ Zur Laufzeit materialisiert der Template-Kontext daraus {templateRuntimeKeys.join(', ')}. Diese Trennung bleibt bewusst stabil, damit Legacy, Shadow und Graph denselben Auftragsvertrag teilen. +

+
+
+
+ + + {!currentFormatSupported ? ( +

+ Dieses Format passt nicht mehr zur gewählten Workflow-Familie und bleibt nur sichtbar, bis du es umstellst. +

+ ) : null} +
+
+ - ) : ( - — (active by default) - )} +
+
+ + {supportsResolution ? ( +
+ set('width', e.target.value)} + /> + x + set('height', e.target.value)} + /> +
+ ) : ( + N/A for this artifact + )} +
+
+ + {supportsTurntable && currentIsAnimation ? ( +
+
+ set('frame_count', e.target.value)} + title="Frame count" + /> + f + set('fps', e.target.value)} + title="FPS" + /> + fps +
+ +
+ ) : ( + + {supportsTurntable ? 'N/A (not animation)' : 'N/A for this artifact'} + + )} +
- {/* Row 6: Denoising settings (only for Blender) */} - {isBlender && ( -
+
+
+
Renderer Compatibility / Legacy Details
+

+ Diese Felder bleiben für Legacy- und Blender-Kompatibilität erhalten, sollen aber den Workflow-Vertrag nicht mehr dominieren. +

+
+
+
+ + +
+
+ + {showBg && supportsBgColor ? ( +
+ + + {bgEnabled && ( +
+ set('bg_color', e.target.value)} + /> + set('bg_color', e.target.value)} + /> +
+ )} +
+ ) : ( + + {showBg ? 'N/A for this artifact' : 'N/A (not Blender)'} + + )} +
+
+ + {isBlender ? ( + + ) : ( + N/A (not Blender) + )} +
+
+ + {isBlender && supportsEngine ? ( + + ) : ( + + {isBlender ? 'N/A for this artifact' : 'N/A (not Blender)'} + + )} +
+
+ + {isBlender && supportsSamples ? ( + set('samples', e.target.value)} + /> + ) : ( + + {isBlender ? 'N/A for this artifact' : 'N/A (not Blender)'} + + )} +
+
+
+ +
+
+
Catalog / Business
+

+ Sichtbarkeit, Sortierung und kaufmännische Zuordnung bleiben separat von Workflow- und Render-Definition. +

+
+
+
+ + set('name', e.target.value)} + /> +
+
+ + +
+
+ + set('compatible_categories', cats)} + /> +
+
+ + +
+
+ + set('sort_order', Number(e.target.value))} + /> +
+
+ + {isEdit ? ( + + ) : ( + — (active by default) + )} +
+
+
+ + {isBlender && supportsDenoising && ( +
@@ -784,7 +1176,7 @@ export default function OutputTypeTable() { return (
- +
@@ -801,6 +1193,7 @@ export default function OutputTypeTable() { + @@ -810,11 +1203,17 @@ export default function OutputTypeTable() { {isLoading && ( - + )} {types?.map((ot) => { const invocationProfile = getOutputTypeInvocationOverrides(ot) + const rowContractIssues = getContractIssues(buildContractValuesFromOutputType(ot)) + const rowRolloutPresentation = getOutputTypeRolloutPresentation({ + hasWorkflowLink: Boolean(ot.workflow_definition_id), + workflowRolloutMode: ot.workflow_rollout_mode, + hasBlockingIssues: hasContractErrors(rowContractIssues), + }) return ( @@ -828,7 +1227,7 @@ export default function OutputTypeTable() { {ot.workflow_family === 'cad_file' ? 'CAD Intake' : 'Order Rendering'} - {ARTIFACT_KINDS.find(kind => kind.value === ot.artifact_kind)?.label ?? ot.artifact_kind} + {artifactKindOptions.find(kind => kind.value === ot.artifact_kind)?.label ?? ot.artifact_kind} @@ -982,14 +1381,42 @@ export default function OutputTypeTable() { {getWorkflowFamily(wf) === 'cad_file' ? 'CAD Intake' : getWorkflowFamily(wf) === 'order_line' ? 'Order Rendering' : 'Mixed'} + {getWorkflowSupportedArtifactKinds(wf).length > 0 ? ( + + {getWorkflowSupportedArtifactKinds(wf).map(formatArtifactKindLabel).join(', ')} + + ) : null} + + {rowRolloutPresentation.statusLabel} + ) : ( - - Legacy - +
+ + Legacy + + + {rowRolloutPresentation.statusLabel} + +
) })()} +
- -
NameResolution Pricing WorkflowRollout Mat Override Sort Active
Loading...Loading...
+
+ + {ot.workflow_definition_id ? formatWorkflowRolloutLabel(ot.workflow_rollout_mode) : rowRolloutPresentation.badgeLabel} + + + {rowRolloutPresentation.rowSummary} + + {rowContractIssues.length > 0 ? ( + + {rowContractIssues[0].severity === 'error' ? 'Blocker:' : 'Hinweis:'} {rowContractIssues[0].message} + + ) : null} +
+
{ot.material_override ? ( @@ -1044,7 +1471,7 @@ export default function OutputTypeTable() { {/* Expandable edit form row */} {editingId === ot.id && (
+ {renderEditFormGrid('edit', ot)}
+
New Output Type
{renderEditFormGrid('add', null)}
@@ -1082,7 +1514,7 @@ export default function OutputTypeTable() {
- {/* Row 4: Active + Save/Cancel */} +
+ +