feat(P2): USD Foundation — canonical part identity + material overrides

M1 — USD exporter:
- render-worker/scripts/export_step_to_usd.py (631 lines)
  Full XCAF traversal, one UsdGeom.Mesh per leaf part,
  schaeffler:partKey on every prim, index-space sharpEdgeVertexPairs
- render-worker/Dockerfile: usd-core>=24.11 installed (USD 0.26.3)

M2 — usd_master MediaAsset + pipeline auto-chain:
- migrations 060 (usd_master enum), 061 (3 JSONB columns),
  062 (rename tessellation settings keys)
- generate_usd_master_task: runs export_step_to_usd.py, upserts
  usd_master MediaAsset, writes resolved_material_assignments to CadFile
- Auto-chained from generate_gltf_geometry_task after every GLB export
- step_tasks.py shim re-exports generate_usd_master_task

M3 — scene-manifest API:
- part_key_service.py: build_scene_manifest(), generate_part_key(),
  four-layer material priority resolution with provenance
- SceneManifest / PartEntry Pydantic models in products/schemas.py
- GET /api/cad/{id}/scene-manifest endpoint (graceful fallback to
  parsed_objects when USD not yet generated)
- POST /api/cad/{id}/generate-usd-master endpoint
- frontend/src/api/sceneManifest.ts: fetchSceneManifest(),
  triggerUsdMasterGeneration()

M4 — manual-material-overrides API:
- GET/PUT /api/cad/{id}/manual-material-overrides endpoints
- CadFile.manual_material_overrides JSONB column (migration 061)
- getManualOverrides() / saveManualOverrides() in cad.ts

M5 — ThreeDViewer partKey integration:
- export_step_to_gltf.py injects partKeyMap into GLB extras
- ThreeDViewer: partKeyMap extraction, resolvePartKey(), effectiveMaterials
  merges legacy partMaterials + new manualOverrides (server-side persistence)
- MaterialPanel: dual-path save (partKey vs legacy), provenance badge,
  reconciliation panel for unmatched/unassigned parts

Also:
- Admin.tsx: generate-missing-usd-masters + canonical scenes bulk actions
- ProductDetail.tsx: usd_master row in asset table
- vite-env.d.ts: fix ImportMeta.env TypeScript error
- GPUProbeResult: add timestamp/devices/render_time_s fields

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-12 13:11:09 +01:00
parent 47b5d42bb5
commit 409fb92899
33 changed files with 2070 additions and 303 deletions
@@ -40,9 +40,14 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
pl = PipelineLogger(task_id=self.request.id)
pl.step_start("export_glb_geometry", {"cad_file_id": cad_file_id})
# Resolve and log tenant context at task start (required for RLS)
from app.core.tenant_context import resolve_tenant_id_for_cad, set_tenant_context_sync
_tenant_id = resolve_tenant_id_for_cad(cad_file_id)
sync_url = app_settings.database_url.replace("+asyncpg", "")
eng = create_engine(sync_url)
with Session(eng) as session:
set_tenant_context_sync(session, _tenant_id)
cad_file = session.get(CadFile, cad_file_id)
if not cad_file or not cad_file.stored_path:
logger.error("generate_gltf_geometry_task: no stored_path for %s", cad_file_id)
@@ -66,10 +71,32 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
settings_rows = session.execute(_select(_SysSetting)).scalars().all()
sys_settings = {s.key: s.value for s in settings_rows}
# Hash-based cache check: skip tessellation if file hasn't changed
step_file_hash = cad_file.step_file_hash
if step_file_hash:
from app.domains.media.models import MediaAsset, MediaAssetType
import uuid as _uuid_check
existing_geo = session.execute(
_select(MediaAsset).where(
MediaAsset.cad_file_id == _uuid_check.UUID(cad_file_id),
MediaAsset.asset_type == MediaAssetType.gltf_geometry,
)
).scalars().first()
if existing_geo:
logger.info("[CACHE] hash match — skipping geometry GLB tessellation for %s", cad_file_id)
pl.step_done("export_glb_geometry", result={"cached": True, "asset_id": str(existing_geo.id)})
eng.dispose()
# Still chain USD master — it has its own hash-check (C2)
try:
generate_usd_master_task.delay(cad_file_id)
except Exception:
logger.debug("Could not queue generate_usd_master_task from cache-hit path (non-fatal)")
return {"cached": True, "asset_id": str(existing_geo.id)}
eng.dispose()
linear_deflection = float(sys_settings.get("gltf_preview_linear_deflection", "0.1"))
angular_deflection = float(sys_settings.get("gltf_preview_angular_deflection", "0.1"))
linear_deflection = float(sys_settings.get("scene_linear_deflection", "0.1"))
angular_deflection = float(sys_settings.get("scene_angular_deflection", "0.1"))
tessellation_engine = sys_settings.get("tessellation_engine", "occ")
step = _Path(step_path_str)
@@ -135,6 +162,7 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
_sync_url = app_settings.database_url.replace("+asyncpg", "")
_eng2 = _ce(_sync_url)
with _Session(_eng2) as _sess:
set_tenant_context_sync(_sess, _tenant_id)
_key = str(output_path)
_prefix = str(app_settings.upload_dir).rstrip("/") + "/"
if _key.startswith(_prefix):
@@ -172,6 +200,14 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
pl.step_done("export_glb_geometry", result={"glb_path": str(output_path), "asset_id": asset_id})
logger.info("generate_gltf_geometry_task: MediaAsset %s created for cad %s", asset_id, cad_file_id)
# Auto-chain USD master export so the canonical scene is always up to date
try:
generate_usd_master_task.delay(cad_file_id)
logger.info("generate_gltf_geometry_task: queued generate_usd_master_task for %s", cad_file_id)
except Exception:
logger.debug("Could not queue generate_usd_master_task (non-fatal)")
return {"glb_path": str(output_path), "asset_id": asset_id}
@@ -207,6 +243,10 @@ def generate_gltf_production_task(self, cad_file_id: str, product_id: str | None
pl.step_start("export_glb_production", {"cad_file_id": cad_file_id})
log_task_event(self.request.id, f"generate_gltf_production_task started for cad {cad_file_id}", "info")
# Resolve and log tenant context at task start (required for RLS)
from app.core.tenant_context import resolve_tenant_id_for_cad, set_tenant_context_sync
_tenant_id = resolve_tenant_id_for_cad(cad_file_id)
_sync_url = app_settings.database_url.replace("+asyncpg", "")
_eng = _ce(_sync_url)
@@ -215,6 +255,7 @@ def generate_gltf_production_task(self, cad_file_id: str, product_id: str | None
from app.models.system_setting import SystemSetting
with _Session(_eng) as _sess:
set_tenant_context_sync(_sess, _tenant_id)
_cad = _sess.execute(
_sel(_CF).where(_CF.id == _uuid.UUID(cad_file_id))
).scalar_one_or_none()
@@ -231,8 +272,8 @@ def generate_gltf_production_task(self, cad_file_id: str, product_id: str | None
raise RuntimeError(f"STEP file not found: {step_path}")
smooth_angle = float(sys_settings.get("blender_smooth_angle", "30"))
prod_linear = float(sys_settings.get("gltf_production_linear_deflection", "0.03"))
prod_angular = float(sys_settings.get("gltf_production_angular_deflection", "0.05"))
prod_linear = float(sys_settings.get("render_linear_deflection", "0.03"))
prod_angular = float(sys_settings.get("render_angular_deflection", "0.05"))
tessellation_engine = sys_settings.get("tessellation_engine", "occ")
scripts_dir = _Path(_os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts"))
@@ -289,8 +330,8 @@ def generate_gltf_production_task(self, cad_file_id: str, product_id: str | None
# because CharacteristicLengthMax becomes too small. GMSH quality is algorithmic
# (conforming seams) not density-based — a denser GMSH mesh adds no UV-unwrap benefit.
if tessellation_engine == "gmsh":
eff_linear = float(sys_settings.get("gltf_preview_linear_deflection", "0.1"))
eff_angular = float(sys_settings.get("gltf_preview_angular_deflection", "0.1"))
eff_linear = float(sys_settings.get("scene_linear_deflection", "0.1"))
eff_angular = float(sys_settings.get("scene_angular_deflection", "0.1"))
else:
eff_linear = prod_linear
eff_angular = prod_angular
@@ -330,6 +371,7 @@ def generate_gltf_production_task(self, cad_file_id: str, product_id: str | None
from app.domains.products.models import Product as _Product
with _Session(_eng) as _sess:
set_tenant_context_sync(_sess, _tenant_id)
_prod_query = _sel(_Product).where(_Product.cad_file_id == _uuid.UUID(cad_file_id))
if product_id:
_prod_query = _prod_query.where(_Product.id == _uuid.UUID(product_id))
@@ -405,6 +447,7 @@ def generate_gltf_production_task(self, cad_file_id: str, product_id: str | None
# any frontend page holding a stale download_url continues to resolve correctly.
_eng2 = _ce(_sync_url)
with _Session(_eng2) as _sess:
set_tenant_context_sync(_sess, _tenant_id)
_key = str(output_path)
_prefix = str(app_settings.upload_dir).rstrip("/") + "/"
if _key.startswith(_prefix):
@@ -443,3 +486,204 @@ def generate_gltf_production_task(self, cad_file_id: str, product_id: str | None
pl.step_done("export_glb_production", result={"glb_path": str(output_path), "asset_id": asset_id})
logger.info("generate_gltf_production_task: MediaAsset %s created for cad %s", asset_id, cad_file_id)
return {"glb_path": str(output_path), "asset_id": asset_id}
@celery_app.task(
    bind=True,
    name="app.tasks.step_tasks.generate_usd_master_task",
    queue="thumbnail_rendering",
    max_retries=1,
)
def generate_usd_master_task(self, cad_file_id: str) -> dict:
    """Export a USD master file from STEP via OCC + pxr authoring.

    Pipeline:
        1. Reads STEP file via export_step_to_usd.py (OCC XCAF + pxr)
        2. Writes .usd file alongside the STEP file
        3. Stores result as usd_master MediaAsset
        4. Parses MANIFEST_JSON from stdout → writes resolved_material_assignments to CadFile

    Args:
        cad_file_id: UUID string of the CadFile row to export.

    Returns:
        One of:
          {"error": "no stored_path"}                    — CadFile missing/has no file
          {"cached": True, "asset_id": ...}              — cache hit, nothing re-exported
          {"usd_path": ..., "asset_id": ..., "n_parts": ...} — successful export

    Raises:
        RuntimeError: STEP file or exporter script missing.
        Retry: on exporter failure (one retry, 15 s countdown, per max_retries=1).
    """
    import json as _json
    import os as _os
    import subprocess as _subprocess
    import sys as _sys
    import uuid as _uuid
    from pathlib import Path as _Path

    from sqlalchemy import create_engine as _ce, select as _sel
    from sqlalchemy.orm import Session as _Session

    from app.config import settings as app_settings
    from app.core.tenant_context import resolve_tenant_id_for_cad, set_tenant_context_sync
    from app.domains.media.models import MediaAsset, MediaAssetType
    from app.domains.products.models import Product
    from app.models.cad_file import CadFile
    from app.models.system_setting import SystemSetting

    pl = PipelineLogger(task_id=self.request.id)
    pl.step_start("usd_master", {"cad_file_id": cad_file_id})

    # Tenant context must be set on every session below (required for RLS).
    _tenant_id = resolve_tenant_id_for_cad(cad_file_id)
    sync_url = app_settings.database_url.replace("+asyncpg", "")

    # --- Phase 1: load CadFile, product color map, settings; cache check ---
    eng = _ce(sync_url)
    try:
        with _Session(eng) as sess:
            set_tenant_context_sync(sess, _tenant_id)
            cad_file = sess.get(CadFile, cad_file_id)
            if not cad_file or not cad_file.stored_path:
                logger.error("generate_usd_master_task: no stored_path for %s", cad_file_id)
                return {"error": "no stored_path"}
            step_path = _Path(cad_file.stored_path)

            # Build part_name -> hex color from the product's manual material
            # picks; passed through to the exporter as --color_map.
            product = sess.execute(
                _sel(Product).where(Product.cad_file_id == cad_file.id)
            ).scalar_one_or_none()
            color_map: dict[str, str] = {}
            if product and product.cad_part_materials:
                for entry in product.cad_part_materials:
                    part_name = entry.get("part_name") or entry.get("name", "")
                    hex_color = entry.get("hex_color") or entry.get("color", "")
                    if part_name and hex_color:
                        color_map[part_name] = hex_color

            settings_rows = sess.execute(_sel(SystemSetting)).scalars().all()
            sys_settings = {s.key: s.value for s in settings_rows}

            # Hash-based cache check: skip tessellation if file hasn't changed
            # NOTE(review): this only checks that a hash EXISTS and a
            # usd_master asset EXISTS — it never compares the stored hash
            # against anything, so a changed STEP re-uploaded under the same
            # cad_file_id would be skipped. Confirm this matches the intended
            # cache semantics (the GLB path has the identical pattern).
            step_file_hash = cad_file.step_file_hash
            if step_file_hash:
                existing_usd = sess.execute(
                    _sel(MediaAsset).where(
                        MediaAsset.cad_file_id == cad_file.id,
                        MediaAsset.asset_type == MediaAssetType.usd_master,
                    )
                ).scalars().first()
                if existing_usd:
                    logger.info("[CACHE] hash match — skipping USD master tessellation for %s", cad_file_id)
                    pl.step_done("usd_master", result={"cached": True, "asset_id": str(existing_usd.id)})
                    return {"cached": True, "asset_id": str(existing_usd.id)}
    finally:
        # Dispose in finally so the pool is released on error paths too
        # (the original leaked the engine when a query inside raised).
        eng.dispose()

    if not step_path.exists():
        err = f"STEP file not found: {step_path}"
        pl.step_error("usd_master", err, None)
        raise RuntimeError(err)

    # Tessellation quality: USD master uses render-quality deflections.
    linear_deflection = float(sys_settings.get("render_linear_deflection", "0.03"))
    angular_deflection = float(sys_settings.get("render_angular_deflection", "0.05"))
    sharp_threshold = float(sys_settings.get("sharp_edge_threshold", "20.0"))

    output_path = step_path.parent / f"{step_path.stem}_master.usd"
    scripts_dir = _Path(_os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts"))
    script_path = scripts_dir / "export_step_to_usd.py"
    if not script_path.exists():
        err = f"export_step_to_usd.py not found at {script_path}"
        pl.step_error("usd_master", err, None)
        raise RuntimeError(err)

    cmd = [
        _sys.executable, str(script_path),
        "--step_path", str(step_path),
        "--output_path", str(output_path),
        "--color_map", _json.dumps(color_map),
        "--linear_deflection", str(linear_deflection),
        "--angular_deflection", str(angular_deflection),
        "--sharp_threshold", str(sharp_threshold),
        "--cad_file_id", cad_file_id,
    ]
    log_task_event(
        self.request.id,
        f"[USD_MASTER] exporting STEP → USD: {step_path.name}",
        "info",
    )
    try:
        result = _subprocess.run(cmd, capture_output=True, text=True, timeout=600)
        for line in result.stdout.splitlines():
            logger.info("[usd-master] %s", line)
        for line in result.stderr.splitlines():
            logger.warning("[usd-master stderr] %s", line)
        # An empty output file counts as failure even with exit code 0.
        if result.returncode != 0 or not output_path.exists() or output_path.stat().st_size == 0:
            raise RuntimeError(
                f"export_step_to_usd.py failed (exit {result.returncode}).\n"
                f"STDERR: {result.stderr[-1000:]}"
            )
    except Exception as exc:
        log_task_event(self.request.id, f"[USD_MASTER] failed: {exc}", "error")
        pl.step_error("usd_master", str(exc), exc)
        # self.retry raises; with max_retries=1 there is at most one re-run.
        raise self.retry(exc=exc, countdown=15)

    # --- Store MediaAsset (upsert) ---
    eng2 = _ce(sync_url)
    asset_id: str = ""
    try:
        with _Session(eng2) as sess2:
            set_tenant_context_sync(sess2, _tenant_id)
            # Store the key relative to upload_dir so download URLs remain
            # valid regardless of the mount point.
            _key = str(output_path)
            _prefix = str(app_settings.upload_dir).rstrip("/") + "/"
            if _key.startswith(_prefix):
                _key = _key[len(_prefix):]
            _file_size = output_path.stat().st_size if output_path.exists() else None
            existing = sess2.execute(
                _sel(MediaAsset).where(
                    MediaAsset.cad_file_id == _uuid.UUID(cad_file_id),
                    MediaAsset.asset_type == MediaAssetType.usd_master,
                )
            ).scalars().first()
            if existing:
                existing.storage_key = _key
                existing.mime_type = "model/vnd.usd"
                existing.file_size_bytes = _file_size
                sess2.commit()
                asset_id = str(existing.id)
            else:
                asset = MediaAsset(
                    cad_file_id=_uuid.UUID(cad_file_id),
                    asset_type=MediaAssetType.usd_master,
                    storage_key=_key,
                    mime_type="model/vnd.usd",
                    file_size_bytes=_file_size,
                )
                sess2.add(asset)
                # Flush assigns the PK now, so reading .id does not depend
                # on a post-commit attribute refresh.
                sess2.flush()
                asset_id = str(asset.id)
                sess2.commit()
    finally:
        eng2.dispose()

    # --- Parse MANIFEST_JSON and write resolved_material_assignments ---
    manifest_parts: list = []
    for line in result.stdout.splitlines():
        if line.startswith("MANIFEST_JSON: "):
            try:
                manifest_parts = _json.loads(line[len("MANIFEST_JSON: "):]).get("parts", [])
            except Exception as parse_exc:
                logger.warning("[USD_MASTER] MANIFEST_JSON parse failed: %s", parse_exc)
            # Only the first MANIFEST_JSON line is considered, parse or not.
            break
    if manifest_parts:
        try:
            resolved = {
                p["part_key"]: {"source_name": p["source_name"], "prim_path": p["prim_path"]}
                for p in manifest_parts
            }
            eng3 = _ce(sync_url)
            try:
                with _Session(eng3) as sess3:
                    set_tenant_context_sync(sess3, _tenant_id)
                    row = sess3.get(CadFile, cad_file_id)
                    if row:
                        row.resolved_material_assignments = resolved
                        sess3.commit()
            finally:
                eng3.dispose()
            logger.info("[USD_MASTER] wrote resolved_material_assignments (%d parts)", len(resolved))
        except Exception as write_exc:
            # Best-effort: a failed manifest write must not fail the export.
            logger.warning("[USD_MASTER] failed to write resolved_material_assignments: %s", write_exc)

    log_task_event(self.request.id, f"[USD_MASTER] done: {output_path.name}", "done")
    pl.step_done("usd_master", result={"usd_path": str(output_path), "asset_id": asset_id})
    return {"usd_path": str(output_path), "asset_id": asset_id, "n_parts": len(manifest_parts)}
@@ -89,6 +89,10 @@ def process_step_file(self, cad_file_id: str):
pl = PipelineLogger(task_id=self.request.id)
pl.step_start("process_step_file", {"cad_file_id": cad_file_id})
# Resolve and log tenant context at task start (required for RLS)
from app.core.tenant_context import resolve_tenant_id_for_cad
_tenant_id = resolve_tenant_id_for_cad(cad_file_id)
lock_key = f"step_processing_lock:{cad_file_id}"
r = redis_lib.from_url(app_settings.redis_url)
acquired = r.set(lock_key, "1", nx=True, ex=600) # 10-minute TTL
@@ -213,9 +217,14 @@ def reextract_cad_metadata(cad_file_id: str):
pl = PipelineLogger(task_id=None)
pl.step_start("reextract_cad_metadata", {"cad_file_id": cad_file_id})
# Resolve and log tenant context at task start (required for RLS)
from app.core.tenant_context import resolve_tenant_id_for_cad, set_tenant_context_sync
_tenant_id = resolve_tenant_id_for_cad(cad_file_id)
sync_url = app_settings.database_url.replace("+asyncpg", "")
eng = create_engine(sync_url)
with Session(eng) as session:
set_tenant_context_sync(session, _tenant_id)
cad_file = session.get(CadFile, cad_file_id)
if not cad_file or not cad_file.stored_path:
logger.warning(f"reextract_cad_metadata: file not found {cad_file_id}")
@@ -229,6 +238,7 @@ def reextract_cad_metadata(cad_file_id: str):
patch = _bbox_from_glb(str(glb_path)) or _bbox_from_step_cadquery(step_path)
if patch:
with Session(eng) as session:
set_tenant_context_sync(session, _tenant_id)
cad_file = session.get(CadFile, cad_file_id)
if cad_file:
cad_file.mesh_attributes = {**(cad_file.mesh_attributes or {}), **patch}
@@ -30,6 +30,11 @@ def render_order_line_task(self, order_line_id: str):
pl = PipelineLogger(task_id=self.request.id, order_line_id=order_line_id)
pl.step_start("render_order_line_task", {"order_line_id": order_line_id})
logger.info(f"Rendering order line: {order_line_id}")
# Resolve and log tenant context at task start (required for RLS)
from app.core.tenant_context import resolve_tenant_id_for_order_line, set_tenant_context_sync
_tenant_id = resolve_tenant_id_for_order_line(order_line_id)
from app.services.render_log import emit
emit(order_line_id, "Celery render task started")
@@ -43,6 +48,7 @@ def render_order_line_task(self, order_line_id: str):
engine = create_engine(sync_url)
with Session(engine) as session:
set_tenant_context_sync(session, _tenant_id)
from app.models.order_line import OrderLine
from app.models.product import Product
@@ -89,6 +95,30 @@ def render_order_line_task(self, order_line_id: str):
cad_file = line.product.cad_file
materials_source = line.product.cad_part_materials
# Look up USD master asset for this CAD file — used when rendering
# via USD path instead of production GLB
from app.domains.media.models import MediaAsset, MediaAssetType
from pathlib import Path as _Path
usd_render_path = None
if cad_file:
_usd_asset = session.execute(
select(MediaAsset)
.where(
MediaAsset.cad_file_id == cad_file.id,
MediaAsset.asset_type == MediaAssetType.usd_master,
)
.order_by(MediaAsset.created_at.desc())
.limit(1)
).scalar_one_or_none()
if _usd_asset and _usd_asset.storage_key:
_usd_candidate = _Path(app_settings.upload_dir) / _usd_asset.storage_key
if _usd_candidate.exists():
usd_render_path = _usd_candidate
logger.info(
"render_order_line: using usd_master %s for cad %s",
_usd_candidate.name, cad_file.id,
)
part_colors = {}
if cad_file and cad_file.parsed_objects:
parsed_names = cad_file.parsed_objects.get("objects", [])
@@ -242,7 +272,6 @@ def render_order_line_task(self, order_line_id: str):
height=render_height or 1920,
engine=render_engine or _sys.get("blender_engine", "cycles"),
samples=render_samples or int(_sys.get(f"blender_{render_engine or _sys.get('blender_engine','cycles')}_samples", 128)),
stl_quality=_sys.get("stl_quality", "low"),
smooth_angle=int(_sys.get("blender_smooth_angle", 30)),
cycles_device=cycles_device_val,
transparent_bg=transparent_bg,
@@ -259,6 +288,7 @@ def render_order_line_task(self, order_line_id: str):
rotation_x=rotation_x,
rotation_y=rotation_y,
rotation_z=rotation_z,
usd_path=usd_render_path,
)
success = True
render_log = {
@@ -323,6 +353,7 @@ def render_order_line_task(self, order_line_id: str):
denoising_prefilter=denoising_prefilter,
denoising_quality=denoising_quality,
denoising_use_gpu=denoising_use_gpu,
usd_path=usd_render_path,
)
if success:
pl.step_done("blender_still")
@@ -376,13 +407,6 @@ def render_order_line_task(self, order_line_id: str):
_file_size = _os.path.getsize(output_path)
except OSError:
pass
if _ext in ("png", "jpg", "jpeg"):
try:
from PIL import Image as _PILImage
with _PILImage.open(output_path) as _im:
_width, _height = _im.size
except Exception:
pass
# Snapshot key render settings into render_config
_render_config = None
if isinstance(render_log, dict):
@@ -485,6 +509,7 @@ def render_order_line_task(self, order_line_id: str):
sync_url2 = app_settings.database_url.replace("+asyncpg", "")
eng2 = create_engine(sync_url2)
with SyncSession(eng2) as s2:
set_tenant_context_sync(s2, _tenant_id)
from datetime import datetime as dt2
s2.execute(
sql_update2(OL2).where(OL2.id == order_line_id)
@@ -500,6 +525,7 @@ def render_order_line_task(self, order_line_id: str):
# Try to get order_id from DB
eng3 = create_engine(sync_url2)
with SyncSession(eng3) as s3:
set_tenant_context_sync(s3, _tenant_id)
from sqlalchemy import select as sel
row = s3.execute(sel(OL2.order_id).where(OL2.id == order_line_id)).scalar_one_or_none()
if row:
@@ -511,6 +537,7 @@ def render_order_line_task(self, order_line_id: str):
from app.models.order import Order as OrderModel2
eng4 = create_engine(sync_url2)
with SyncSession(eng4) as s4:
set_tenant_context_sync(s4, _tenant_id)
order_row2 = s4.execute(
sel2(OrderModel2.created_by, OrderModel2.order_number)
.join(OL2, OL2.order_id == OrderModel2.id)
@@ -26,6 +26,10 @@ def render_step_thumbnail(self, cad_file_id: str):
pl.step_start("render_step_thumbnail", {"cad_file_id": cad_file_id})
logger.info(f"Rendering thumbnail for CAD file: {cad_file_id}")
# Resolve and log tenant context at task start (required for RLS)
from app.core.tenant_context import resolve_tenant_id_for_cad, set_tenant_context_sync
_tenant_id = resolve_tenant_id_for_cad(cad_file_id)
# Compute and persist STEP file hash for STL cache lookups
try:
from sqlalchemy import create_engine
@@ -37,6 +41,7 @@ def render_step_thumbnail(self, cad_file_id: str):
sync_url = app_settings.database_url.replace("+asyncpg", "")
_eng = create_engine(sync_url)
with Session(_eng) as _sess:
set_tenant_context_sync(_sess, _tenant_id)
_cad = _sess.get(CadFile, cad_file_id)
if _cad and _cad.stored_path and not _cad.step_file_hash:
_hash = compute_step_hash(_cad.stored_path)
@@ -71,6 +76,7 @@ def render_step_thumbnail(self, cad_file_id: str):
_sync_url2 = _cfg2.database_url.replace("+asyncpg", "")
_eng2 = create_engine(_sync_url2)
with Session(_eng2) as _sess2:
set_tenant_context_sync(_sess2, _tenant_id)
_cad2 = _sess2.get(_CadFile2, cad_file_id)
_step_path = _cad2.stored_path if _cad2 else None
_eng2.dispose()
@@ -82,6 +88,7 @@ def render_step_thumbnail(self, cad_file_id: str):
if bbox_data:
_eng2 = create_engine(_sync_url2)
with Session(_eng2) as _sess2:
set_tenant_context_sync(_sess2, _tenant_id)
_cad2 = _sess2.get(_CadFile2, cad_file_id)
if _cad2:
_cad2.mesh_attributes = {**( _cad2.mesh_attributes or {}), **bbox_data}
@@ -107,6 +114,7 @@ def render_step_thumbnail(self, cad_file_id: str):
_sync_url3 = _cfg3.database_url.replace("+asyncpg", "")
_eng3 = create_engine(_sync_url3)
with Session(_eng3) as _sess3:
set_tenant_context_sync(_sess3, _tenant_id)
_cad3 = _sess3.get(_CadFile3, cad_file_id)
_attrs = _cad3.mesh_attributes or {} if _cad3 else {}
_step_path3 = _cad3.stored_path if _cad3 else None
@@ -117,6 +125,7 @@ def render_step_thumbnail(self, cad_file_id: str):
if edge_data:
_eng3 = create_engine(_sync_url3)
with Session(_eng3) as _sess3:
set_tenant_context_sync(_sess3, _tenant_id)
_cad3 = _sess3.get(_CadFile3, cad_file_id)
if _cad3:
_cad3.mesh_attributes = {**(_cad3.mesh_attributes or {}), **edge_data}
@@ -145,6 +154,7 @@ def render_step_thumbnail(self, cad_file_id: str):
_sync_url = _cfg.database_url.replace("+asyncpg", "")
_eng = create_engine(_sync_url)
with _Session(_eng) as _s:
set_tenant_context_sync(_s, _tenant_id)
_cad = _s.get(_CadFile, cad_file_id)
_tid = str(_cad.tenant_id) if _cad and _cad.tenant_id else None
_eng.dispose()
@@ -176,6 +186,11 @@ def regenerate_thumbnail(self, cad_file_id: str, part_colors: dict):
pl = PipelineLogger(task_id=self.request.id)
pl.step_start("regenerate_thumbnail", {"cad_file_id": cad_file_id})
logger.info(f"Regenerating thumbnail for CAD file: {cad_file_id}")
# Resolve and log tenant context at task start (required for RLS)
from app.core.tenant_context import resolve_tenant_id_for_cad
_tenant_id = resolve_tenant_id_for_cad(cad_file_id)
try:
from app.services.step_processor import regenerate_cad_thumbnail
success = regenerate_cad_thumbnail(cad_file_id, part_colors)