chore: snapshot workflow migration progress

This commit is contained in:
2026-04-12 11:49:04 +02:00
parent 0cd02513d5
commit 3e810c74a3
163 changed files with 31774 additions and 2753 deletions
@@ -13,8 +13,25 @@ from app.core.pipeline_logger import PipelineLogger
logger = logging.getLogger(__name__)
def _usd_cache_hit_refresh_reason(cad_file, usd_asset, usd_render_path) -> str | None:
    """Delegate to the runtime freshness check before trusting a USD cache hit.

    Returns a staleness reason string when the cached USD asset should be
    rebuilt, or ``None`` when the cache hit may be accepted as-is.
    """
    # Imported lazily to keep this helper free of an import-time dependency
    # on the rendering runtime module.
    from app.domains.rendering.workflow_runtime_services import (
        _usd_master_refresh_reason as _refresh_reason,
    )

    freshness_kwargs = {
        "usd_asset": usd_asset,
        "usd_render_path": usd_render_path,
    }
    return _refresh_reason(cad_file, **freshness_kwargs)
@celery_app.task(bind=True, name="app.tasks.step_tasks.generate_gltf_geometry_task", queue="asset_pipeline", max_retries=1)
def generate_gltf_geometry_task(self, cad_file_id: str):
def generate_gltf_geometry_task(
self,
cad_file_id: str,
workflow_run_id: str | None = None,
workflow_node_id: str | None = None,
**_: object,
):
"""Export a geometry GLB directly from STEP via OCC (no STL intermediary).
Pipeline:
@@ -94,10 +111,10 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
_current_hash = _compute_step_hash(str(step_path_str))
_cache_hit_asset_id = None
# Composite cache key includes deflection settings so changing them invalidates cache
# v3: removed BRepBuilderAPI_Transform, writer handles mm→m from STEP unit metadata
# Composite cache key includes deflection settings so changing them invalidates cache.
# v5: occurrence-aware part-key stamping for repeated leaf meshes changed.
effective_cache_key = (
f"v3:{_current_hash}:{linear_deflection}:{angular_deflection}:{tessellation_engine}"
f"v5:{_current_hash}:{linear_deflection}:{angular_deflection}:{tessellation_engine}"
if _current_hash else None
)
@@ -112,6 +129,9 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
if stored_key == effective_cache_key:
_asset_disk_path = _Path(app_settings.upload_dir) / existing_geo.storage_key
if _asset_disk_path.exists():
if cad_file.gltf_path != str(_asset_disk_path):
cad_file.gltf_path = str(_asset_disk_path)
session.commit()
logger.info("[CACHE] cache key match — skipping geometry GLB tessellation for %s", cad_file_id)
pl.step_done("export_glb_geometry", result={"cached": True, "asset_id": str(existing_geo.id)})
_cache_hit_asset_id = str(existing_geo.id)
@@ -133,6 +153,20 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
generate_usd_master_task.delay(cad_file_id)
except Exception:
logger.debug("Could not queue generate_usd_master_task from cache-hit path (non-fatal)")
try:
from app.domains.rendering.tasks import _update_workflow_run_status
_update_workflow_run_status(
cad_file_id,
"completed",
workflow_run_id=workflow_run_id,
workflow_node_id=workflow_node_id,
)
except Exception:
logger.exception(
"Failed to update workflow state for cached GLB export %s",
cad_file_id,
)
return {"cached": True, "asset_id": _cache_hit_asset_id}
step = _Path(step_path_str)
@@ -219,6 +253,9 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
existing.render_config = {"cache_key": effective_cache_key}
if product_id:
existing.product_id = _uuid.UUID(product_id)
cad_file = _sess.get(CadFile, _uuid.UUID(cad_file_id))
if cad_file is not None:
cad_file.gltf_path = str(output_path)
_sess.commit()
asset_id = str(existing.id)
else:
@@ -232,12 +269,26 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
render_config={"cache_key": effective_cache_key},
)
_sess.add(asset)
cad_file = _sess.get(CadFile, _uuid.UUID(cad_file_id))
if cad_file is not None:
cad_file.gltf_path = str(output_path)
_sess.commit()
asset_id = str(asset.id)
_eng2.dispose()
pl.step_done("export_glb_geometry", result={"glb_path": str(output_path), "asset_id": asset_id})
logger.info("generate_gltf_geometry_task: MediaAsset %s created for cad %s", asset_id, cad_file_id)
try:
from app.domains.rendering.tasks import _update_workflow_run_status
_update_workflow_run_status(
cad_file_id,
"completed",
workflow_run_id=workflow_run_id,
workflow_node_id=workflow_node_id,
)
except Exception:
logger.exception("Failed to update workflow state for GLB export %s", cad_file_id)
# Auto-chain USD master export so the canonical scene is always up to date
try:
@@ -346,6 +397,33 @@ def generate_usd_master_task(self, cad_file_id: str) -> dict:
angular_deflection = float(sys_settings.get("render_angular_deflection", "0.05"))
sharp_threshold = float(sys_settings.get("sharp_edge_threshold", "20.0"))
scripts_dir = _Path(_os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts"))
script_path = scripts_dir / "export_step_to_usd.py"
materials_helper_path = scripts_dir / "_blender_materials.py"
if not script_path.exists():
err = f"export_step_to_usd.py not found at {script_path}"
pl.step_error("usd_master", err, None)
raise RuntimeError(err)
# Cache must include the active render-script revision. Otherwise
# material resolution fixes never invalidate previously generated USD masters.
script_fingerprint = "unknown"
try:
import hashlib as _hashlib_script
_script_hash = _hashlib_script.sha256()
for candidate in (script_path, materials_helper_path):
if not candidate.exists():
continue
_script_hash.update(candidate.read_bytes())
script_fingerprint = _script_hash.hexdigest()[:12]
except Exception as exc:
logger.warning(
"[USD_MASTER] failed to fingerprint render scripts, falling back to legacy cache key: %s",
exc,
)
# Hash-based cache check: skip tessellation if file and settings haven't changed
from app.domains.products.cache_service import compute_step_hash as _compute_step_hash_usd
_current_hash_usd = _compute_step_hash_usd(str(step_path))
@@ -357,7 +435,7 @@ def generate_usd_master_task(self, cad_file_id: str) -> dict:
_json.dumps(material_map, sort_keys=True).encode()
).hexdigest()[:12] if material_map else "none"
effective_cache_key = (
f"{_current_hash_usd}:{linear_deflection}:{angular_deflection}:{sharp_threshold}:{_mat_hash}"
f"{_current_hash_usd}:{linear_deflection}:{angular_deflection}:{sharp_threshold}:{_mat_hash}:{script_fingerprint}"
if _current_hash_usd else None
)
@@ -372,9 +450,21 @@ def generate_usd_master_task(self, cad_file_id: str) -> dict:
if stored_key == effective_cache_key:
_usd_disk_path = _Path(app_settings.upload_dir) / existing_usd.storage_key
if _usd_disk_path.exists():
logger.info("[CACHE] cache key match — skipping USD master tessellation for %s", cad_file_id)
pl.step_done("usd_master", result={"cached": True, "asset_id": str(existing_usd.id)})
_cache_hit_asset_id = str(existing_usd.id)
refresh_reason = _usd_cache_hit_refresh_reason(
cad_file,
existing_usd,
_usd_disk_path,
)
if refresh_reason is None:
logger.info("[CACHE] cache key match — skipping USD master tessellation for %s", cad_file_id)
pl.step_done("usd_master", result={"cached": True, "asset_id": str(existing_usd.id)})
_cache_hit_asset_id = str(existing_usd.id)
else:
logger.info(
"[CACHE] USD cache key matched for %s but asset is stale (%s) — rebuilding",
cad_file_id,
refresh_reason,
)
else:
logger.info("[CACHE] cache key match but USD asset missing on disk — re-running tessellation for %s", cad_file_id)
else:
@@ -396,13 +486,6 @@ def generate_usd_master_task(self, cad_file_id: str) -> dict:
raise RuntimeError(err)
output_path = step_path.parent / f"{step_path.stem}_master.usd"
scripts_dir = _Path(_os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts"))
script_path = scripts_dir / "export_step_to_usd.py"
if not script_path.exists():
err = f"export_step_to_usd.py not found at {script_path}"
pl.step_error("usd_master", err, None)
raise RuntimeError(err)
cmd = [
_sys.executable, str(script_path),