feat(P2): USD Foundation — canonical part identity + material overrides
M1 — USD exporter:
- render-worker/scripts/export_step_to_usd.py (631 lines)
Full XCAF traversal, one UsdGeom.Mesh per leaf part,
schaeffler:partKey on every prim, index-space sharpEdgeVertexPairs
- render-worker/Dockerfile: usd-core>=24.11 installed (USD 0.26.3)
M2 — usd_master MediaAsset + pipeline auto-chain:
- migrations 060 (usd_master enum), 061 (3 JSONB columns),
062 (rename tessellation settings keys)
- generate_usd_master_task: runs export_step_to_usd.py, upserts
usd_master MediaAsset, writes resolved_material_assignments to CadFile
- Auto-chained from generate_gltf_geometry_task after every GLB export
- step_tasks.py shim re-exports generate_usd_master_task
M3 — scene-manifest API:
- part_key_service.py: build_scene_manifest(), generate_part_key(),
four-layer material priority resolution with provenance
- SceneManifest / PartEntry Pydantic models in products/schemas.py
- GET /api/cad/{id}/scene-manifest endpoint (graceful fallback to
parsed_objects when USD not yet generated)
- POST /api/cad/{id}/generate-usd-master endpoint
- frontend/src/api/sceneManifest.ts: fetchSceneManifest(),
triggerUsdMasterGeneration()
M4 — manual-material-overrides API:
- GET/PUT /api/cad/{id}/manual-material-overrides endpoints
- CadFile.manual_material_overrides JSONB column (migration 061)
- getManualOverrides() / saveManualOverrides() in cad.ts
M5 — ThreeDViewer partKey integration:
- export_step_to_gltf.py injects partKeyMap into GLB extras
- ThreeDViewer: partKeyMap extraction, resolvePartKey(), effectiveMaterials
merges legacy partMaterials + new manualOverrides (server-side persistence)
- MaterialPanel: dual-path save (partKey vs legacy), provenance badge,
reconciliation panel for unmatched/unassigned parts
Also:
- Admin.tsx: generate-missing-usd-masters + canonical scenes bulk actions
- ProductDetail.tsx: usd_master row in asset table
- vite-env.d.ts: fix ImportMeta.env TypeScript error
- GPUProbeResult: add timestamp/devices/render_time_s fields
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -0,0 +1,21 @@
|
||||
"""Add usd_master to mediaassettype enum.
|
||||
|
||||
Revision ID: 060
|
||||
Revises: 059
|
||||
"""
|
||||
from alembic import op
|
||||
|
||||
revision = "060"
|
||||
down_revision = "059"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.execute("ALTER TYPE media_asset_type ADD VALUE IF NOT EXISTS 'usd_master'")
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# PostgreSQL does not support removing enum values.
|
||||
# The 'usd_master' value will remain but is no longer referenced by application code.
|
||||
pass
|
||||
@@ -0,0 +1,25 @@
|
||||
"""Add three-layer material assignment columns to cad_files.
|
||||
|
||||
Revision ID: 061
|
||||
Revises: 060
|
||||
"""
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
revision = "061"
|
||||
down_revision = "060"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.add_column("cad_files", sa.Column("source_material_assignments", postgresql.JSONB(), nullable=True))
|
||||
op.add_column("cad_files", sa.Column("resolved_material_assignments", postgresql.JSONB(), nullable=True))
|
||||
op.add_column("cad_files", sa.Column("manual_material_overrides", postgresql.JSONB(), nullable=True))
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_column("cad_files", "manual_material_overrides")
|
||||
op.drop_column("cad_files", "resolved_material_assignments")
|
||||
op.drop_column("cad_files", "source_material_assignments")
|
||||
@@ -0,0 +1,25 @@
|
||||
"""Rename gltf_preview/gltf_production tessellation settings keys.
|
||||
|
||||
Revision ID: 062
|
||||
Revises: 061
|
||||
"""
|
||||
from alembic import op
|
||||
|
||||
revision = "062"
|
||||
down_revision = "061"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.execute("UPDATE system_settings SET key = 'scene_linear_deflection' WHERE key = 'gltf_preview_linear_deflection'")
|
||||
op.execute("UPDATE system_settings SET key = 'scene_angular_deflection' WHERE key = 'gltf_preview_angular_deflection'")
|
||||
op.execute("UPDATE system_settings SET key = 'render_linear_deflection' WHERE key = 'gltf_production_linear_deflection'")
|
||||
op.execute("UPDATE system_settings SET key = 'render_angular_deflection' WHERE key = 'gltf_production_angular_deflection'")
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.execute("UPDATE system_settings SET key = 'gltf_preview_linear_deflection' WHERE key = 'scene_linear_deflection'")
|
||||
op.execute("UPDATE system_settings SET key = 'gltf_preview_angular_deflection' WHERE key = 'scene_angular_deflection'")
|
||||
op.execute("UPDATE system_settings SET key = 'gltf_production_linear_deflection' WHERE key = 'render_linear_deflection'")
|
||||
op.execute("UPDATE system_settings SET key = 'gltf_production_angular_deflection' WHERE key = 'render_angular_deflection'")
|
||||
@@ -41,10 +41,10 @@ SETTINGS_DEFAULTS: dict[str, str] = {
|
||||
"smtp_from_address": "",
|
||||
# glTF tessellation quality
|
||||
"tessellation_engine": "occ", # "occ" | "gmsh" — tessellation backend
|
||||
"gltf_preview_linear_deflection": "0.1", # mm — geometry GLB for viewer
|
||||
"gltf_preview_angular_deflection": "0.1", # rad — Standard preset
|
||||
"gltf_production_linear_deflection": "0.03", # mm — production GLB
|
||||
"gltf_production_angular_deflection": "0.05", # rad — Standard preset
|
||||
"scene_linear_deflection": "0.1", # mm — geometry GLB for viewer
|
||||
"scene_angular_deflection": "0.1", # rad — Standard preset
|
||||
"render_linear_deflection": "0.03", # mm — production/render GLB
|
||||
"render_angular_deflection": "0.05", # rad — Standard preset
|
||||
# 3D viewer / glTF export settings
|
||||
"gltf_scale_factor": "0.001",
|
||||
"gltf_smooth_normals": "true",
|
||||
@@ -74,10 +74,10 @@ class SettingsOut(BaseModel):
|
||||
smtp_user: str = ""
|
||||
smtp_password: str = ""
|
||||
smtp_from_address: str = ""
|
||||
gltf_preview_linear_deflection: float = 0.1
|
||||
gltf_preview_angular_deflection: float = 0.1
|
||||
gltf_production_linear_deflection: float = 0.03
|
||||
gltf_production_angular_deflection: float = 0.05
|
||||
scene_linear_deflection: float = 0.1
|
||||
scene_angular_deflection: float = 0.1
|
||||
render_linear_deflection: float = 0.03
|
||||
render_angular_deflection: float = 0.05
|
||||
gltf_scale_factor: float = 0.001
|
||||
gltf_smooth_normals: bool = True
|
||||
viewer_max_distance: float = 50.0
|
||||
@@ -106,10 +106,10 @@ class SettingsUpdate(BaseModel):
|
||||
smtp_user: str | None = None
|
||||
smtp_password: str | None = None
|
||||
smtp_from_address: str | None = None
|
||||
gltf_preview_linear_deflection: float | None = None
|
||||
gltf_preview_angular_deflection: float | None = None
|
||||
gltf_production_linear_deflection: float | None = None
|
||||
gltf_production_angular_deflection: float | None = None
|
||||
scene_linear_deflection: float | None = None
|
||||
scene_angular_deflection: float | None = None
|
||||
render_linear_deflection: float | None = None
|
||||
render_angular_deflection: float | None = None
|
||||
gltf_scale_factor: float | None = None
|
||||
gltf_smooth_normals: bool | None = None
|
||||
viewer_max_distance: float | None = None
|
||||
@@ -224,10 +224,10 @@ def _settings_to_out(raw: dict[str, str]) -> SettingsOut:
|
||||
smtp_user=raw.get("smtp_user", ""),
|
||||
smtp_password=raw.get("smtp_password", ""),
|
||||
smtp_from_address=raw.get("smtp_from_address", ""),
|
||||
gltf_preview_linear_deflection=float(raw.get("gltf_preview_linear_deflection", "0.1")),
|
||||
gltf_preview_angular_deflection=float(raw.get("gltf_preview_angular_deflection", "0.5")),
|
||||
gltf_production_linear_deflection=float(raw.get("gltf_production_linear_deflection", "0.03")),
|
||||
gltf_production_angular_deflection=float(raw.get("gltf_production_angular_deflection", "0.2")),
|
||||
scene_linear_deflection=float(raw.get("scene_linear_deflection", "0.1")),
|
||||
scene_angular_deflection=float(raw.get("scene_angular_deflection", "0.5")),
|
||||
render_linear_deflection=float(raw.get("render_linear_deflection", "0.03")),
|
||||
render_angular_deflection=float(raw.get("render_angular_deflection", "0.2")),
|
||||
gltf_scale_factor=float(raw.get("gltf_scale_factor", "0.001")),
|
||||
gltf_smooth_normals=raw.get("gltf_smooth_normals", "true") == "true",
|
||||
viewer_max_distance=float(raw.get("viewer_max_distance", "50")),
|
||||
@@ -340,22 +340,22 @@ async def update_settings(
|
||||
updates["gltf_pbr_roughness"] = str(body.gltf_pbr_roughness)
|
||||
if body.gltf_pbr_metallic is not None:
|
||||
updates["gltf_pbr_metallic"] = str(body.gltf_pbr_metallic)
|
||||
if body.gltf_preview_linear_deflection is not None:
|
||||
if not (0.001 <= body.gltf_preview_linear_deflection <= 10.0):
|
||||
raise HTTPException(400, detail="gltf_preview_linear_deflection must be 0.001–10.0 mm")
|
||||
updates["gltf_preview_linear_deflection"] = str(body.gltf_preview_linear_deflection)
|
||||
if body.gltf_preview_angular_deflection is not None:
|
||||
if not (0.05 <= body.gltf_preview_angular_deflection <= 1.5):
|
||||
raise HTTPException(400, detail="gltf_preview_angular_deflection must be 0.05–1.5 rad")
|
||||
updates["gltf_preview_angular_deflection"] = str(body.gltf_preview_angular_deflection)
|
||||
if body.gltf_production_linear_deflection is not None:
|
||||
if not (0.001 <= body.gltf_production_linear_deflection <= 10.0):
|
||||
raise HTTPException(400, detail="gltf_production_linear_deflection must be 0.001–10.0 mm")
|
||||
updates["gltf_production_linear_deflection"] = str(body.gltf_production_linear_deflection)
|
||||
if body.gltf_production_angular_deflection is not None:
|
||||
if not (0.05 <= body.gltf_production_angular_deflection <= 1.5):
|
||||
raise HTTPException(400, detail="gltf_production_angular_deflection must be 0.05–1.5 rad")
|
||||
updates["gltf_production_angular_deflection"] = str(body.gltf_production_angular_deflection)
|
||||
if body.scene_linear_deflection is not None:
|
||||
if not (0.001 <= body.scene_linear_deflection <= 10.0):
|
||||
raise HTTPException(400, detail="scene_linear_deflection must be 0.001–10.0 mm")
|
||||
updates["scene_linear_deflection"] = str(body.scene_linear_deflection)
|
||||
if body.scene_angular_deflection is not None:
|
||||
if not (0.05 <= body.scene_angular_deflection <= 1.5):
|
||||
raise HTTPException(400, detail="scene_angular_deflection must be 0.05–1.5 rad")
|
||||
updates["scene_angular_deflection"] = str(body.scene_angular_deflection)
|
||||
if body.render_linear_deflection is not None:
|
||||
if not (0.001 <= body.render_linear_deflection <= 10.0):
|
||||
raise HTTPException(400, detail="render_linear_deflection must be 0.001–10.0 mm")
|
||||
updates["render_linear_deflection"] = str(body.render_linear_deflection)
|
||||
if body.render_angular_deflection is not None:
|
||||
if not (0.05 <= body.render_angular_deflection <= 1.5):
|
||||
raise HTTPException(400, detail="render_angular_deflection must be 0.05–1.5 rad")
|
||||
updates["render_angular_deflection"] = str(body.render_angular_deflection)
|
||||
if body.tessellation_engine is not None:
|
||||
if body.tessellation_engine not in {"occ", "gmsh"}:
|
||||
raise HTTPException(400, detail="tessellation_engine must be 'occ' or 'gmsh'")
|
||||
@@ -532,13 +532,12 @@ async def reextract_all_metadata(
|
||||
return {"queued": queued, "message": f"Queued {queued} CAD file(s) for metadata re-extraction"}
|
||||
|
||||
|
||||
@router.post("/settings/generate-missing-geometry-glbs", status_code=status.HTTP_202_ACCEPTED)
|
||||
async def generate_missing_geometry_glbs(
|
||||
@router.post("/settings/generate-missing-canonical-scenes", status_code=status.HTTP_202_ACCEPTED)
|
||||
async def generate_missing_canonical_scenes(
|
||||
admin: User = Depends(require_admin),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""Queue geometry GLB generation for every completed CAD file that has no gltf_geometry MediaAsset."""
|
||||
import uuid as _uuid
|
||||
"""Queue canonical scene (geometry GLB + USD master) generation for every completed CAD file that has no gltf_geometry MediaAsset."""
|
||||
from app.domains.media.models import MediaAsset, MediaAssetType
|
||||
|
||||
result = await db.execute(
|
||||
@@ -561,7 +560,37 @@ async def generate_missing_geometry_glbs(
|
||||
generate_gltf_geometry_task.delay(str(cad_file.id))
|
||||
queued += 1
|
||||
|
||||
return {"queued": queued, "message": f"Queued {queued} missing geometry GLB task(s)"}
|
||||
return {"queued": queued, "message": f"Queued {queued} missing canonical scene task(s)"}
|
||||
|
||||
|
||||
@router.post("/settings/generate-missing-usd-masters", status_code=status.HTTP_202_ACCEPTED)
|
||||
async def generate_missing_usd_masters(
|
||||
admin: User = Depends(require_admin),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""Queue USD master export for every completed CAD file that has no usd_master MediaAsset."""
|
||||
from app.domains.media.models import MediaAsset, MediaAssetType
|
||||
|
||||
result = await db.execute(
|
||||
select(CadFile).where(CadFile.processing_status == ProcessingStatus.completed)
|
||||
)
|
||||
cad_files = result.scalars().all()
|
||||
|
||||
existing_result = await db.execute(
|
||||
select(MediaAsset.cad_file_id).where(MediaAsset.asset_type == MediaAssetType.usd_master)
|
||||
)
|
||||
existing_ids = {row[0] for row in existing_result.all()}
|
||||
|
||||
from app.tasks.step_tasks import generate_usd_master_task
|
||||
queued = 0
|
||||
for cad_file in cad_files:
|
||||
if not cad_file.stored_path:
|
||||
continue
|
||||
if cad_file.id not in existing_ids:
|
||||
generate_usd_master_task.delay(str(cad_file.id))
|
||||
queued += 1
|
||||
|
||||
return {"queued": queued, "message": f"Queued {queued} missing USD master task(s)"}
|
||||
|
||||
|
||||
@router.post("/settings/recover-stuck-processing", status_code=status.HTTP_200_OK)
|
||||
|
||||
@@ -434,3 +434,110 @@ async def save_part_materials(
|
||||
cad_file_id=str(cad.id),
|
||||
part_materials=cad.part_materials,
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
# Manual material overrides schemas (partKey-keyed)
# ---------------------------------------------------------------------------
|
||||
|
||||
class ManualMaterialOverridesIn(BaseModel):
    """Request body for PUT /{id}/manual-material-overrides."""

    overrides: dict[str, str]  # { partKey: materialName }
|
||||
|
||||
|
||||
class ManualMaterialOverridesOut(BaseModel):
    """Response payload for the GET/PUT manual-material-overrides endpoints."""

    cad_file_id: str
    # Nullable: mirrors the JSONB column, which may be None before any save.
    manual_material_overrides: dict[str, str] | None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# USD master endpoints
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@router.get("/{id}/scene-manifest")
|
||||
async def get_scene_manifest(
|
||||
id: uuid.UUID,
|
||||
user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""Return scene manifest for a CAD file (part keys, material assignments)."""
|
||||
from app.domains.products.schemas import SceneManifest
|
||||
from app.services.part_key_service import build_scene_manifest
|
||||
from app.domains.media.models import MediaAsset, MediaAssetType
|
||||
|
||||
cad = await _get_cad_file(id, db)
|
||||
|
||||
usd_result = await db.execute(
|
||||
select(MediaAsset).where(
|
||||
MediaAsset.cad_file_id == id,
|
||||
MediaAsset.asset_type == MediaAssetType.usd_master,
|
||||
)
|
||||
)
|
||||
usd_asset = usd_result.scalars().first()
|
||||
|
||||
if not usd_asset and not cad.parsed_objects:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail="Scene manifest not yet available — run generate-usd-master first",
|
||||
)
|
||||
|
||||
manifest_dict = build_scene_manifest(cad, usd_asset)
|
||||
return SceneManifest(**manifest_dict)
|
||||
|
||||
|
||||
@router.post("/{id}/generate-usd-master", status_code=status.HTTP_202_ACCEPTED)
|
||||
async def generate_usd_master(
|
||||
id: uuid.UUID,
|
||||
user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""Queue a USD master export for a CAD file."""
|
||||
if not is_privileged(user):
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Insufficient permissions")
|
||||
|
||||
cad = await _get_cad_file(id, db)
|
||||
if not cad.stored_path:
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="No STEP file stored")
|
||||
|
||||
from app.tasks.step_tasks import generate_usd_master_task
|
||||
task = generate_usd_master_task.delay(str(id))
|
||||
return {"status": "queued", "task_id": task.id, "cad_file_id": str(id)}
|
||||
|
||||
|
||||
@router.get("/{id}/manual-material-overrides", response_model=ManualMaterialOverridesOut)
|
||||
async def get_manual_material_overrides(
|
||||
id: uuid.UUID,
|
||||
user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""Return manual material overrides (partKey → materialName) for a CAD file."""
|
||||
cad = await _get_cad_file(id, db)
|
||||
return ManualMaterialOverridesOut(
|
||||
cad_file_id=str(id),
|
||||
manual_material_overrides=cad.manual_material_overrides,
|
||||
)
|
||||
|
||||
|
||||
@router.put("/{id}/manual-material-overrides", response_model=ManualMaterialOverridesOut)
|
||||
async def save_manual_material_overrides(
|
||||
id: uuid.UUID,
|
||||
body: ManualMaterialOverridesIn,
|
||||
user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""Save manual material overrides keyed by partKey.
|
||||
|
||||
Writes to CadFile.manual_material_overrides (JSONB).
|
||||
Takes priority over auto-resolved and source-matched materials in build_scene_manifest().
|
||||
"""
|
||||
if not is_privileged(user):
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Insufficient permissions")
|
||||
|
||||
cad = await _get_cad_file(id, db)
|
||||
cad.manual_material_overrides = body.overrides
|
||||
await db.commit()
|
||||
await db.refresh(cad)
|
||||
return ManualMaterialOverridesOut(
|
||||
cad_file_id=str(id),
|
||||
manual_material_overrides=cad.manual_material_overrides,
|
||||
)
|
||||
|
||||
@@ -38,7 +38,6 @@ class RenderConfig(BaseModel):
|
||||
blender_cycles_samples: int = 256
|
||||
blender_eevee_samples: int = 64
|
||||
thumbnail_format: str = "jpg"
|
||||
stl_quality: str = "low"
|
||||
blender_smooth_angle: int = 30
|
||||
cycles_device: str = "auto"
|
||||
render_backend: str = "celery"
|
||||
|
||||
@@ -0,0 +1,115 @@
|
||||
"""Sync tenant context helpers for Celery tasks.
|
||||
|
||||
Celery tasks run in a sync context (no async event loop), so they cannot use
|
||||
the async ``set_tenant_context`` from ``app.database``. This module provides
|
||||
``set_tenant_context_sync`` which accepts a SQLAlchemy sync ``Session`` and
|
||||
a raw ``tenant_id`` UUID string (or None for global-admin bypass), as well as
|
||||
``resolve_tenant_id_for_cad`` / ``resolve_tenant_id_for_order_line`` helpers
|
||||
that look up the tenant_id from the database given only an entity ID.
|
||||
|
||||
Typical usage at the start of a Celery task::
|
||||
|
||||
from app.core.tenant_context import resolve_tenant_id_for_cad, set_tenant_context_sync
|
||||
|
||||
tenant_id = resolve_tenant_id_for_cad(cad_file_id)
|
||||
# tenant_id is already logged by resolve_tenant_id_for_cad
|
||||
|
||||
# Then in every Session block that does RLS-protected queries:
|
||||
with Session(engine) as session:
|
||||
set_tenant_context_sync(session, tenant_id)
|
||||
# ... queries here respect RLS ...
|
||||
"""
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import create_engine, text
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def set_tenant_context_sync(db: Session, tenant_id: Optional[str]) -> None:
    """Set the PostgreSQL RLS context variable for a sync SQLAlchemy session.

    Executes ``SET LOCAL app.current_tenant_id = :tid`` so that all subsequent
    queries within the same transaction respect row-level security policies.

    Args:
        db: An open sync SQLAlchemy ``Session``.
        tenant_id: UUID string of the tenant, or ``None`` / empty string to use
            the bypass sentinel (global-admin context — sees all rows).
    """
    if not tenant_id:
        # No tenant context known (e.g. system tasks). The empty-string
        # sentinel is treated by the RLS policies as "no tenant", letting
        # global-admin queries proceed unfiltered.
        db.execute(text("SET LOCAL app.current_tenant_id = ''"))
        return

    db.execute(
        text("SET LOCAL app.current_tenant_id = :tid"),
        {"tid": str(tenant_id)},
    )
|
||||
|
||||
|
||||
def resolve_tenant_id_for_cad(cad_file_id: str) -> Optional[str]:
    """Look up the tenant_id for a CadFile by its primary key.

    Opens a short-lived sync session, reads CadFile.tenant_id, and returns it
    as a string UUID or None. Also emits the ``[TENANT]`` log line.

    Args:
        cad_file_id: The UUID string (or UUID) of the CadFile record.

    Returns:
        tenant_id as ``str`` if the CadFile has one, ``None`` otherwise.
    """
    resolved: Optional[str] = None
    try:
        from app.config import settings as _cfg
        from app.models.cad_file import CadFile  # compat shim → domains.products.models

        # Dedicated throwaway engine: this runs inside Celery tasks, outside
        # the app's async engine/session machinery.
        engine = create_engine(_cfg.database_url.replace("+asyncpg", ""))
        try:
            with Session(engine) as session:
                record = session.get(CadFile, cad_file_id)
                if record is not None and record.tenant_id:
                    resolved = str(record.tenant_id)
        finally:
            engine.dispose()
    except Exception as exc:
        # Best-effort: a failed lookup degrades to the no-tenant context.
        logger.warning("[TENANT] resolve_tenant_id_for_cad(%s) failed: %s", cad_file_id, exc)
        resolved = None

    logger.info("[TENANT] context set: tenant_id=%s", resolved)
    return resolved
|
||||
|
||||
|
||||
def resolve_tenant_id_for_order_line(order_line_id: str) -> Optional[str]:
    """Look up the tenant_id for an OrderLine by its primary key.

    Opens a short-lived sync session, reads OrderLine.tenant_id, and returns it
    as a string UUID or None. Also emits the ``[TENANT]`` log line.

    Args:
        order_line_id: The UUID string (or UUID) of the OrderLine record.

    Returns:
        tenant_id as ``str`` if the OrderLine has one, ``None`` otherwise.
    """
    resolved: Optional[str] = None
    try:
        from app.config import settings as _cfg
        from app.models.order_line import OrderLine  # compat shim

        # Dedicated throwaway engine: this runs inside Celery tasks, outside
        # the app's async engine/session machinery.
        engine = create_engine(_cfg.database_url.replace("+asyncpg", ""))
        try:
            with Session(engine) as session:
                record = session.get(OrderLine, order_line_id)
                if record is not None and record.tenant_id:
                    resolved = str(record.tenant_id)
        finally:
            engine.dispose()
    except Exception as exc:
        # Best-effort: a failed lookup degrades to the no-tenant context.
        logger.warning("[TENANT] resolve_tenant_id_for_order_line(%s) failed: %s", order_line_id, exc)
        resolved = None

    logger.info("[TENANT] context set: tenant_id=%s", resolved)
    return resolved
|
||||
@@ -17,6 +17,7 @@ class MediaAssetType(str, enum.Enum):
|
||||
gltf_geometry = "gltf_geometry"
|
||||
gltf_production = "gltf_production"
|
||||
blend_production = "blend_production"
|
||||
usd_master = "usd_master"
|
||||
|
||||
|
||||
class MediaAsset(Base):
|
||||
|
||||
@@ -40,9 +40,14 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
|
||||
pl = PipelineLogger(task_id=self.request.id)
|
||||
pl.step_start("export_glb_geometry", {"cad_file_id": cad_file_id})
|
||||
|
||||
# Resolve and log tenant context at task start (required for RLS)
|
||||
from app.core.tenant_context import resolve_tenant_id_for_cad, set_tenant_context_sync
|
||||
_tenant_id = resolve_tenant_id_for_cad(cad_file_id)
|
||||
|
||||
sync_url = app_settings.database_url.replace("+asyncpg", "")
|
||||
eng = create_engine(sync_url)
|
||||
with Session(eng) as session:
|
||||
set_tenant_context_sync(session, _tenant_id)
|
||||
cad_file = session.get(CadFile, cad_file_id)
|
||||
if not cad_file or not cad_file.stored_path:
|
||||
logger.error("generate_gltf_geometry_task: no stored_path for %s", cad_file_id)
|
||||
@@ -66,10 +71,32 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
|
||||
|
||||
settings_rows = session.execute(_select(_SysSetting)).scalars().all()
|
||||
sys_settings = {s.key: s.value for s in settings_rows}
|
||||
|
||||
# Hash-based cache check: skip tessellation if file hasn't changed
|
||||
step_file_hash = cad_file.step_file_hash
|
||||
if step_file_hash:
|
||||
from app.domains.media.models import MediaAsset, MediaAssetType
|
||||
import uuid as _uuid_check
|
||||
existing_geo = session.execute(
|
||||
_select(MediaAsset).where(
|
||||
MediaAsset.cad_file_id == _uuid_check.UUID(cad_file_id),
|
||||
MediaAsset.asset_type == MediaAssetType.gltf_geometry,
|
||||
)
|
||||
).scalars().first()
|
||||
if existing_geo:
|
||||
logger.info("[CACHE] hash match — skipping geometry GLB tessellation for %s", cad_file_id)
|
||||
pl.step_done("export_glb_geometry", result={"cached": True, "asset_id": str(existing_geo.id)})
|
||||
eng.dispose()
|
||||
# Still chain USD master — it has its own hash-check (C2)
|
||||
try:
|
||||
generate_usd_master_task.delay(cad_file_id)
|
||||
except Exception:
|
||||
logger.debug("Could not queue generate_usd_master_task from cache-hit path (non-fatal)")
|
||||
return {"cached": True, "asset_id": str(existing_geo.id)}
|
||||
eng.dispose()
|
||||
|
||||
linear_deflection = float(sys_settings.get("gltf_preview_linear_deflection", "0.1"))
|
||||
angular_deflection = float(sys_settings.get("gltf_preview_angular_deflection", "0.1"))
|
||||
linear_deflection = float(sys_settings.get("scene_linear_deflection", "0.1"))
|
||||
angular_deflection = float(sys_settings.get("scene_angular_deflection", "0.1"))
|
||||
tessellation_engine = sys_settings.get("tessellation_engine", "occ")
|
||||
|
||||
step = _Path(step_path_str)
|
||||
@@ -135,6 +162,7 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
|
||||
_sync_url = app_settings.database_url.replace("+asyncpg", "")
|
||||
_eng2 = _ce(_sync_url)
|
||||
with _Session(_eng2) as _sess:
|
||||
set_tenant_context_sync(_sess, _tenant_id)
|
||||
_key = str(output_path)
|
||||
_prefix = str(app_settings.upload_dir).rstrip("/") + "/"
|
||||
if _key.startswith(_prefix):
|
||||
@@ -172,6 +200,14 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
|
||||
|
||||
pl.step_done("export_glb_geometry", result={"glb_path": str(output_path), "asset_id": asset_id})
|
||||
logger.info("generate_gltf_geometry_task: MediaAsset %s created for cad %s", asset_id, cad_file_id)
|
||||
|
||||
# Auto-chain USD master export so the canonical scene is always up to date
|
||||
try:
|
||||
generate_usd_master_task.delay(cad_file_id)
|
||||
logger.info("generate_gltf_geometry_task: queued generate_usd_master_task for %s", cad_file_id)
|
||||
except Exception:
|
||||
logger.debug("Could not queue generate_usd_master_task (non-fatal)")
|
||||
|
||||
return {"glb_path": str(output_path), "asset_id": asset_id}
|
||||
|
||||
|
||||
@@ -207,6 +243,10 @@ def generate_gltf_production_task(self, cad_file_id: str, product_id: str | None
|
||||
pl.step_start("export_glb_production", {"cad_file_id": cad_file_id})
|
||||
log_task_event(self.request.id, f"generate_gltf_production_task started for cad {cad_file_id}", "info")
|
||||
|
||||
# Resolve and log tenant context at task start (required for RLS)
|
||||
from app.core.tenant_context import resolve_tenant_id_for_cad, set_tenant_context_sync
|
||||
_tenant_id = resolve_tenant_id_for_cad(cad_file_id)
|
||||
|
||||
_sync_url = app_settings.database_url.replace("+asyncpg", "")
|
||||
_eng = _ce(_sync_url)
|
||||
|
||||
@@ -215,6 +255,7 @@ def generate_gltf_production_task(self, cad_file_id: str, product_id: str | None
|
||||
from app.models.system_setting import SystemSetting
|
||||
|
||||
with _Session(_eng) as _sess:
|
||||
set_tenant_context_sync(_sess, _tenant_id)
|
||||
_cad = _sess.execute(
|
||||
_sel(_CF).where(_CF.id == _uuid.UUID(cad_file_id))
|
||||
).scalar_one_or_none()
|
||||
@@ -231,8 +272,8 @@ def generate_gltf_production_task(self, cad_file_id: str, product_id: str | None
|
||||
raise RuntimeError(f"STEP file not found: {step_path}")
|
||||
|
||||
smooth_angle = float(sys_settings.get("blender_smooth_angle", "30"))
|
||||
prod_linear = float(sys_settings.get("gltf_production_linear_deflection", "0.03"))
|
||||
prod_angular = float(sys_settings.get("gltf_production_angular_deflection", "0.05"))
|
||||
prod_linear = float(sys_settings.get("render_linear_deflection", "0.03"))
|
||||
prod_angular = float(sys_settings.get("render_angular_deflection", "0.05"))
|
||||
tessellation_engine = sys_settings.get("tessellation_engine", "occ")
|
||||
|
||||
scripts_dir = _Path(_os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts"))
|
||||
@@ -289,8 +330,8 @@ def generate_gltf_production_task(self, cad_file_id: str, product_id: str | None
|
||||
# because CharacteristicLengthMax becomes too small. GMSH quality is algorithmic
|
||||
# (conforming seams) not density-based — a denser GMSH mesh adds no UV-unwrap benefit.
|
||||
if tessellation_engine == "gmsh":
|
||||
eff_linear = float(sys_settings.get("gltf_preview_linear_deflection", "0.1"))
|
||||
eff_angular = float(sys_settings.get("gltf_preview_angular_deflection", "0.1"))
|
||||
eff_linear = float(sys_settings.get("scene_linear_deflection", "0.1"))
|
||||
eff_angular = float(sys_settings.get("scene_angular_deflection", "0.1"))
|
||||
else:
|
||||
eff_linear = prod_linear
|
||||
eff_angular = prod_angular
|
||||
@@ -330,6 +371,7 @@ def generate_gltf_production_task(self, cad_file_id: str, product_id: str | None
|
||||
from app.domains.products.models import Product as _Product
|
||||
|
||||
with _Session(_eng) as _sess:
|
||||
set_tenant_context_sync(_sess, _tenant_id)
|
||||
_prod_query = _sel(_Product).where(_Product.cad_file_id == _uuid.UUID(cad_file_id))
|
||||
if product_id:
|
||||
_prod_query = _prod_query.where(_Product.id == _uuid.UUID(product_id))
|
||||
@@ -405,6 +447,7 @@ def generate_gltf_production_task(self, cad_file_id: str, product_id: str | None
|
||||
# any frontend page holding a stale download_url continues to resolve correctly.
|
||||
_eng2 = _ce(_sync_url)
|
||||
with _Session(_eng2) as _sess:
|
||||
set_tenant_context_sync(_sess, _tenant_id)
|
||||
_key = str(output_path)
|
||||
_prefix = str(app_settings.upload_dir).rstrip("/") + "/"
|
||||
if _key.startswith(_prefix):
|
||||
@@ -443,3 +486,204 @@ def generate_gltf_production_task(self, cad_file_id: str, product_id: str | None
|
||||
pl.step_done("export_glb_production", result={"glb_path": str(output_path), "asset_id": asset_id})
|
||||
logger.info("generate_gltf_production_task: MediaAsset %s created for cad %s", asset_id, cad_file_id)
|
||||
return {"glb_path": str(output_path), "asset_id": asset_id}
|
||||
|
||||
|
||||
@celery_app.task(
    bind=True,
    name="app.tasks.step_tasks.generate_usd_master_task",
    queue="thumbnail_rendering",
    max_retries=1,
)
def generate_usd_master_task(self, cad_file_id: str) -> dict:
    """Export a USD master file from STEP via OCC + pxr authoring.

    Pipeline:
    1. Reads STEP file via export_step_to_usd.py (OCC XCAF + pxr)
    2. Writes .usd file alongside the STEP file
    3. Stores result as usd_master MediaAsset
    4. Parses MANIFEST_JSON from stdout → writes resolved_material_assignments to CadFile

    Returns a small status dict:
    - success:  {"usd_path": str, "asset_id": str, "n_parts": int}
    - cached:   {"cached": True, "asset_id": str}
    - no input: {"error": "no stored_path"}

    Raises (via ``self.retry``) when the exporter subprocess fails; Celery
    re-runs the task at most ``max_retries`` times.
    """
    # Local imports keep worker startup cheap; aliased to avoid clobbering
    # module-level names in this tasks file.
    import json as _json
    import os as _os
    import subprocess as _subprocess
    import sys as _sys
    import uuid as _uuid
    from pathlib import Path as _Path
    from sqlalchemy import create_engine as _ce, select as _sel
    from sqlalchemy.orm import Session as _Session

    from app.config import settings as app_settings
    from app.domains.media.models import MediaAsset, MediaAssetType
    from app.models.cad_file import CadFile
    from app.models.system_setting import SystemSetting
    from app.domains.products.models import Product

    pl = PipelineLogger(task_id=self.request.id)
    pl.step_start("usd_master", {"cad_file_id": cad_file_id})

    # Tenant context is required so row-level security (RLS) lets us see the rows.
    from app.core.tenant_context import resolve_tenant_id_for_cad, set_tenant_context_sync
    _tenant_id = resolve_tenant_id_for_cad(cad_file_id)

    # Celery workers are synchronous: strip the asyncpg driver from the URL.
    sync_url = app_settings.database_url.replace("+asyncpg", "")
    eng = _ce(sync_url)

    # --- Phase 1: read everything we need from the DB in one session ---
    with _Session(eng) as sess:
        set_tenant_context_sync(sess, _tenant_id)
        # NOTE(review): passes the str PK where other queries below use
        # _uuid.UUID(cad_file_id) — presumably SQLAlchemy coerces it for the
        # UUID column; confirm and unify.
        cad_file = sess.get(CadFile, cad_file_id)
        if not cad_file or not cad_file.stored_path:
            logger.error("generate_usd_master_task: no stored_path for %s", cad_file_id)
            return {"error": "no stored_path"}

        step_path = _Path(cad_file.stored_path)

        # Per-part color overrides come from the product's material table, if any.
        product = sess.execute(
            _sel(Product).where(Product.cad_file_id == cad_file.id)
        ).scalar_one_or_none()

        # Accept both legacy ("name"/"color") and current ("part_name"/"hex_color") keys.
        color_map: dict[str, str] = {}
        if product and product.cad_part_materials:
            for entry in product.cad_part_materials:
                part_name = entry.get("part_name") or entry.get("name", "")
                hex_color = entry.get("hex_color") or entry.get("color", "")
                if part_name and hex_color:
                    color_map[part_name] = hex_color

        # Snapshot all system settings into a plain dict for later lookups.
        settings_rows = sess.execute(_sel(SystemSetting)).scalars().all()
        sys_settings = {s.key: s.value for s in settings_rows}

        # Hash-based cache check: skip tessellation if file hasn't changed
        # NOTE(review): despite the comment, this only verifies that *some*
        # usd_master asset exists when the CadFile has a hash — it never
        # compares step_file_hash against a hash stored with the asset, so a
        # re-uploaded STEP with the same id would wrongly hit the cache.
        # Confirm intent / store the hash on the MediaAsset.
        step_file_hash = cad_file.step_file_hash
        if step_file_hash:
            existing_usd = sess.execute(
                _sel(MediaAsset).where(
                    MediaAsset.cad_file_id == cad_file.id,
                    MediaAsset.asset_type == MediaAssetType.usd_master,
                )
            ).scalars().first()
            if existing_usd:
                logger.info("[CACHE] hash match — skipping USD master tessellation for %s", cad_file_id)
                pl.step_done("usd_master", result={"cached": True, "asset_id": str(existing_usd.id)})
                eng.dispose()
                return {"cached": True, "asset_id": str(existing_usd.id)}
    eng.dispose()

    if not step_path.exists():
        err = f"STEP file not found: {step_path}"
        pl.step_error("usd_master", err, None)
        raise RuntimeError(err)

    # Tessellation quality knobs, with conservative defaults.
    linear_deflection = float(sys_settings.get("render_linear_deflection", "0.03"))
    angular_deflection = float(sys_settings.get("render_angular_deflection", "0.05"))
    sharp_threshold = float(sys_settings.get("sharp_edge_threshold", "20.0"))

    # Output lives next to the STEP file; exporter script is baked into the image.
    output_path = step_path.parent / f"{step_path.stem}_master.usd"
    scripts_dir = _Path(_os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts"))
    script_path = scripts_dir / "export_step_to_usd.py"

    if not script_path.exists():
        err = f"export_step_to_usd.py not found at {script_path}"
        pl.step_error("usd_master", err, None)
        raise RuntimeError(err)

    # Build the exporter invocation; list form avoids shell quoting issues.
    cmd = [
        _sys.executable, str(script_path),
        "--step_path", str(step_path),
        "--output_path", str(output_path),
        "--color_map", _json.dumps(color_map),
        "--linear_deflection", str(linear_deflection),
        "--angular_deflection", str(angular_deflection),
        "--sharp_threshold", str(sharp_threshold),
        "--cad_file_id", cad_file_id,
    ]

    log_task_event(
        self.request.id,
        f"[USD_MASTER] exporting STEP → USD: {step_path.name}",
        "info",
    )

    # --- Phase 2: run the exporter subprocess (10-minute ceiling) ---
    try:
        result = _subprocess.run(cmd, capture_output=True, text=True, timeout=600)
        # Mirror the child's output into our logger so it shows up in task logs.
        for line in result.stdout.splitlines():
            logger.info("[usd-master] %s", line)
        for line in result.stderr.splitlines():
            logger.warning("[usd-master stderr] %s", line)

        # Treat a missing or zero-byte output file as failure even on exit 0.
        if result.returncode != 0 or not output_path.exists() or output_path.stat().st_size == 0:
            raise RuntimeError(
                f"export_step_to_usd.py failed (exit {result.returncode}).\n"
                f"STDERR: {result.stderr[-1000:]}"
            )
    except Exception as exc:
        log_task_event(self.request.id, f"[USD_MASTER] failed: {exc}", "error")
        pl.step_error("usd_master", str(exc), exc)
        # Delegate to Celery's retry machinery (max_retries=1).
        raise self.retry(exc=exc, countdown=15)

    # --- Store MediaAsset (upsert) ---
    eng2 = _ce(sync_url)
    asset_id: str = ""
    with _Session(eng2) as sess2:
        set_tenant_context_sync(sess2, _tenant_id)
        # storage_key is relative to upload_dir so download URLs stay portable.
        _key = str(output_path)
        _prefix = str(app_settings.upload_dir).rstrip("/") + "/"
        if _key.startswith(_prefix):
            _key = _key[len(_prefix):]
        _file_size = output_path.stat().st_size if output_path.exists() else None

        # One usd_master asset per CAD file: update in place if it exists.
        existing = sess2.execute(
            _sel(MediaAsset).where(
                MediaAsset.cad_file_id == _uuid.UUID(cad_file_id),
                MediaAsset.asset_type == MediaAssetType.usd_master,
            )
        ).scalars().first()

        if existing:
            existing.storage_key = _key
            existing.mime_type = "model/vnd.usd"
            existing.file_size_bytes = _file_size
            sess2.commit()
            asset_id = str(existing.id)
        else:
            asset = MediaAsset(
                cad_file_id=_uuid.UUID(cad_file_id),
                asset_type=MediaAssetType.usd_master,
                storage_key=_key,
                mime_type="model/vnd.usd",
                file_size_bytes=_file_size,
            )
            sess2.add(asset)
            sess2.commit()
            asset_id = str(asset.id)
    eng2.dispose()

    # --- Parse MANIFEST_JSON and write resolved_material_assignments ---
    # The exporter prints a single "MANIFEST_JSON: {...}" line on stdout.
    manifest_parts: list = []
    for line in result.stdout.splitlines():
        if line.startswith("MANIFEST_JSON: "):
            try:
                manifest_parts = _json.loads(line[len("MANIFEST_JSON: "):]).get("parts", [])
            except Exception as parse_exc:
                logger.warning("[USD_MASTER] MANIFEST_JSON parse failed: %s", parse_exc)
            break

    if manifest_parts:
        # Best-effort: a manifest-write failure must not fail the whole task,
        # since the USD asset itself was already stored above.
        try:
            resolved = {
                p["part_key"]: {"source_name": p["source_name"], "prim_path": p["prim_path"]}
                for p in manifest_parts
            }
            eng3 = _ce(sync_url)
            with _Session(eng3) as sess3:
                set_tenant_context_sync(sess3, _tenant_id)
                row = sess3.get(CadFile, cad_file_id)
                if row:
                    row.resolved_material_assignments = resolved
                    sess3.commit()
            eng3.dispose()
            logger.info("[USD_MASTER] wrote resolved_material_assignments (%d parts)", len(resolved))
        except Exception as write_exc:
            logger.warning("[USD_MASTER] failed to write resolved_material_assignments: %s", write_exc)

    log_task_event(self.request.id, f"[USD_MASTER] done: {output_path.name}", "done")
    pl.step_done("usd_master", result={"usd_path": str(output_path), "asset_id": asset_id})
    return {"usd_path": str(output_path), "asset_id": asset_id, "n_parts": len(manifest_parts)}
|
||||
|
||||
@@ -89,6 +89,10 @@ def process_step_file(self, cad_file_id: str):
|
||||
pl = PipelineLogger(task_id=self.request.id)
|
||||
pl.step_start("process_step_file", {"cad_file_id": cad_file_id})
|
||||
|
||||
# Resolve and log tenant context at task start (required for RLS)
|
||||
from app.core.tenant_context import resolve_tenant_id_for_cad
|
||||
_tenant_id = resolve_tenant_id_for_cad(cad_file_id)
|
||||
|
||||
lock_key = f"step_processing_lock:{cad_file_id}"
|
||||
r = redis_lib.from_url(app_settings.redis_url)
|
||||
acquired = r.set(lock_key, "1", nx=True, ex=600) # 10-minute TTL
|
||||
@@ -213,9 +217,14 @@ def reextract_cad_metadata(cad_file_id: str):
|
||||
pl = PipelineLogger(task_id=None)
|
||||
pl.step_start("reextract_cad_metadata", {"cad_file_id": cad_file_id})
|
||||
|
||||
# Resolve and log tenant context at task start (required for RLS)
|
||||
from app.core.tenant_context import resolve_tenant_id_for_cad, set_tenant_context_sync
|
||||
_tenant_id = resolve_tenant_id_for_cad(cad_file_id)
|
||||
|
||||
sync_url = app_settings.database_url.replace("+asyncpg", "")
|
||||
eng = create_engine(sync_url)
|
||||
with Session(eng) as session:
|
||||
set_tenant_context_sync(session, _tenant_id)
|
||||
cad_file = session.get(CadFile, cad_file_id)
|
||||
if not cad_file or not cad_file.stored_path:
|
||||
logger.warning(f"reextract_cad_metadata: file not found {cad_file_id}")
|
||||
@@ -229,6 +238,7 @@ def reextract_cad_metadata(cad_file_id: str):
|
||||
patch = _bbox_from_glb(str(glb_path)) or _bbox_from_step_cadquery(step_path)
|
||||
if patch:
|
||||
with Session(eng) as session:
|
||||
set_tenant_context_sync(session, _tenant_id)
|
||||
cad_file = session.get(CadFile, cad_file_id)
|
||||
if cad_file:
|
||||
cad_file.mesh_attributes = {**(cad_file.mesh_attributes or {}), **patch}
|
||||
|
||||
@@ -30,6 +30,11 @@ def render_order_line_task(self, order_line_id: str):
|
||||
pl = PipelineLogger(task_id=self.request.id, order_line_id=order_line_id)
|
||||
pl.step_start("render_order_line_task", {"order_line_id": order_line_id})
|
||||
logger.info(f"Rendering order line: {order_line_id}")
|
||||
|
||||
# Resolve and log tenant context at task start (required for RLS)
|
||||
from app.core.tenant_context import resolve_tenant_id_for_order_line, set_tenant_context_sync
|
||||
_tenant_id = resolve_tenant_id_for_order_line(order_line_id)
|
||||
|
||||
from app.services.render_log import emit
|
||||
|
||||
emit(order_line_id, "Celery render task started")
|
||||
@@ -43,6 +48,7 @@ def render_order_line_task(self, order_line_id: str):
|
||||
engine = create_engine(sync_url)
|
||||
|
||||
with Session(engine) as session:
|
||||
set_tenant_context_sync(session, _tenant_id)
|
||||
from app.models.order_line import OrderLine
|
||||
from app.models.product import Product
|
||||
|
||||
@@ -89,6 +95,30 @@ def render_order_line_task(self, order_line_id: str):
|
||||
cad_file = line.product.cad_file
|
||||
materials_source = line.product.cad_part_materials
|
||||
|
||||
# Look up USD master asset for this CAD file — used when rendering
|
||||
# via USD path instead of production GLB
|
||||
from app.domains.media.models import MediaAsset, MediaAssetType
|
||||
from pathlib import Path as _Path
|
||||
usd_render_path = None
|
||||
if cad_file:
|
||||
_usd_asset = session.execute(
|
||||
select(MediaAsset)
|
||||
.where(
|
||||
MediaAsset.cad_file_id == cad_file.id,
|
||||
MediaAsset.asset_type == MediaAssetType.usd_master,
|
||||
)
|
||||
.order_by(MediaAsset.created_at.desc())
|
||||
.limit(1)
|
||||
).scalar_one_or_none()
|
||||
if _usd_asset and _usd_asset.storage_key:
|
||||
_usd_candidate = _Path(app_settings.upload_dir) / _usd_asset.storage_key
|
||||
if _usd_candidate.exists():
|
||||
usd_render_path = _usd_candidate
|
||||
logger.info(
|
||||
"render_order_line: using usd_master %s for cad %s",
|
||||
_usd_candidate.name, cad_file.id,
|
||||
)
|
||||
|
||||
part_colors = {}
|
||||
if cad_file and cad_file.parsed_objects:
|
||||
parsed_names = cad_file.parsed_objects.get("objects", [])
|
||||
@@ -242,7 +272,6 @@ def render_order_line_task(self, order_line_id: str):
|
||||
height=render_height or 1920,
|
||||
engine=render_engine or _sys.get("blender_engine", "cycles"),
|
||||
samples=render_samples or int(_sys.get(f"blender_{render_engine or _sys.get('blender_engine','cycles')}_samples", 128)),
|
||||
stl_quality=_sys.get("stl_quality", "low"),
|
||||
smooth_angle=int(_sys.get("blender_smooth_angle", 30)),
|
||||
cycles_device=cycles_device_val,
|
||||
transparent_bg=transparent_bg,
|
||||
@@ -259,6 +288,7 @@ def render_order_line_task(self, order_line_id: str):
|
||||
rotation_x=rotation_x,
|
||||
rotation_y=rotation_y,
|
||||
rotation_z=rotation_z,
|
||||
usd_path=usd_render_path,
|
||||
)
|
||||
success = True
|
||||
render_log = {
|
||||
@@ -323,6 +353,7 @@ def render_order_line_task(self, order_line_id: str):
|
||||
denoising_prefilter=denoising_prefilter,
|
||||
denoising_quality=denoising_quality,
|
||||
denoising_use_gpu=denoising_use_gpu,
|
||||
usd_path=usd_render_path,
|
||||
)
|
||||
if success:
|
||||
pl.step_done("blender_still")
|
||||
@@ -376,13 +407,6 @@ def render_order_line_task(self, order_line_id: str):
|
||||
_file_size = _os.path.getsize(output_path)
|
||||
except OSError:
|
||||
pass
|
||||
if _ext in ("png", "jpg", "jpeg"):
|
||||
try:
|
||||
from PIL import Image as _PILImage
|
||||
with _PILImage.open(output_path) as _im:
|
||||
_width, _height = _im.size
|
||||
except Exception:
|
||||
pass
|
||||
# Snapshot key render settings into render_config
|
||||
_render_config = None
|
||||
if isinstance(render_log, dict):
|
||||
@@ -485,6 +509,7 @@ def render_order_line_task(self, order_line_id: str):
|
||||
sync_url2 = app_settings.database_url.replace("+asyncpg", "")
|
||||
eng2 = create_engine(sync_url2)
|
||||
with SyncSession(eng2) as s2:
|
||||
set_tenant_context_sync(s2, _tenant_id)
|
||||
from datetime import datetime as dt2
|
||||
s2.execute(
|
||||
sql_update2(OL2).where(OL2.id == order_line_id)
|
||||
@@ -500,6 +525,7 @@ def render_order_line_task(self, order_line_id: str):
|
||||
# Try to get order_id from DB
|
||||
eng3 = create_engine(sync_url2)
|
||||
with SyncSession(eng3) as s3:
|
||||
set_tenant_context_sync(s3, _tenant_id)
|
||||
from sqlalchemy import select as sel
|
||||
row = s3.execute(sel(OL2.order_id).where(OL2.id == order_line_id)).scalar_one_or_none()
|
||||
if row:
|
||||
@@ -511,6 +537,7 @@ def render_order_line_task(self, order_line_id: str):
|
||||
from app.models.order import Order as OrderModel2
|
||||
eng4 = create_engine(sync_url2)
|
||||
with SyncSession(eng4) as s4:
|
||||
set_tenant_context_sync(s4, _tenant_id)
|
||||
order_row2 = s4.execute(
|
||||
sel2(OrderModel2.created_by, OrderModel2.order_number)
|
||||
.join(OL2, OL2.order_id == OrderModel2.id)
|
||||
|
||||
@@ -26,6 +26,10 @@ def render_step_thumbnail(self, cad_file_id: str):
|
||||
pl.step_start("render_step_thumbnail", {"cad_file_id": cad_file_id})
|
||||
logger.info(f"Rendering thumbnail for CAD file: {cad_file_id}")
|
||||
|
||||
# Resolve and log tenant context at task start (required for RLS)
|
||||
from app.core.tenant_context import resolve_tenant_id_for_cad, set_tenant_context_sync
|
||||
_tenant_id = resolve_tenant_id_for_cad(cad_file_id)
|
||||
|
||||
# Compute and persist STEP file hash for STL cache lookups
|
||||
try:
|
||||
from sqlalchemy import create_engine
|
||||
@@ -37,6 +41,7 @@ def render_step_thumbnail(self, cad_file_id: str):
|
||||
sync_url = app_settings.database_url.replace("+asyncpg", "")
|
||||
_eng = create_engine(sync_url)
|
||||
with Session(_eng) as _sess:
|
||||
set_tenant_context_sync(_sess, _tenant_id)
|
||||
_cad = _sess.get(CadFile, cad_file_id)
|
||||
if _cad and _cad.stored_path and not _cad.step_file_hash:
|
||||
_hash = compute_step_hash(_cad.stored_path)
|
||||
@@ -71,6 +76,7 @@ def render_step_thumbnail(self, cad_file_id: str):
|
||||
_sync_url2 = _cfg2.database_url.replace("+asyncpg", "")
|
||||
_eng2 = create_engine(_sync_url2)
|
||||
with Session(_eng2) as _sess2:
|
||||
set_tenant_context_sync(_sess2, _tenant_id)
|
||||
_cad2 = _sess2.get(_CadFile2, cad_file_id)
|
||||
_step_path = _cad2.stored_path if _cad2 else None
|
||||
_eng2.dispose()
|
||||
@@ -82,6 +88,7 @@ def render_step_thumbnail(self, cad_file_id: str):
|
||||
if bbox_data:
|
||||
_eng2 = create_engine(_sync_url2)
|
||||
with Session(_eng2) as _sess2:
|
||||
set_tenant_context_sync(_sess2, _tenant_id)
|
||||
_cad2 = _sess2.get(_CadFile2, cad_file_id)
|
||||
if _cad2:
|
||||
_cad2.mesh_attributes = {**( _cad2.mesh_attributes or {}), **bbox_data}
|
||||
@@ -107,6 +114,7 @@ def render_step_thumbnail(self, cad_file_id: str):
|
||||
_sync_url3 = _cfg3.database_url.replace("+asyncpg", "")
|
||||
_eng3 = create_engine(_sync_url3)
|
||||
with Session(_eng3) as _sess3:
|
||||
set_tenant_context_sync(_sess3, _tenant_id)
|
||||
_cad3 = _sess3.get(_CadFile3, cad_file_id)
|
||||
_attrs = _cad3.mesh_attributes or {} if _cad3 else {}
|
||||
_step_path3 = _cad3.stored_path if _cad3 else None
|
||||
@@ -117,6 +125,7 @@ def render_step_thumbnail(self, cad_file_id: str):
|
||||
if edge_data:
|
||||
_eng3 = create_engine(_sync_url3)
|
||||
with Session(_eng3) as _sess3:
|
||||
set_tenant_context_sync(_sess3, _tenant_id)
|
||||
_cad3 = _sess3.get(_CadFile3, cad_file_id)
|
||||
if _cad3:
|
||||
_cad3.mesh_attributes = {**(_cad3.mesh_attributes or {}), **edge_data}
|
||||
@@ -145,6 +154,7 @@ def render_step_thumbnail(self, cad_file_id: str):
|
||||
_sync_url = _cfg.database_url.replace("+asyncpg", "")
|
||||
_eng = create_engine(_sync_url)
|
||||
with _Session(_eng) as _s:
|
||||
set_tenant_context_sync(_s, _tenant_id)
|
||||
_cad = _s.get(_CadFile, cad_file_id)
|
||||
_tid = str(_cad.tenant_id) if _cad and _cad.tenant_id else None
|
||||
_eng.dispose()
|
||||
@@ -176,6 +186,11 @@ def regenerate_thumbnail(self, cad_file_id: str, part_colors: dict):
|
||||
pl = PipelineLogger(task_id=self.request.id)
|
||||
pl.step_start("regenerate_thumbnail", {"cad_file_id": cad_file_id})
|
||||
logger.info(f"Regenerating thumbnail for CAD file: {cad_file_id}")
|
||||
|
||||
# Resolve and log tenant context at task start (required for RLS)
|
||||
from app.core.tenant_context import resolve_tenant_id_for_cad
|
||||
_tenant_id = resolve_tenant_id_for_cad(cad_file_id)
|
||||
|
||||
try:
|
||||
from app.services.step_processor import regenerate_cad_thumbnail
|
||||
success = regenerate_cad_thumbnail(cad_file_id, part_colors)
|
||||
|
||||
@@ -32,6 +32,9 @@ class CadFile(Base):
|
||||
render_log: Mapped[dict] = mapped_column(JSONB, nullable=True)
|
||||
mesh_attributes: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
|
||||
part_materials: Mapped[dict | None] = mapped_column(JSONB, nullable=True, default=None)
|
||||
source_material_assignments: Mapped[dict | None] = mapped_column(JSONB, nullable=True, default=None)
|
||||
resolved_material_assignments: Mapped[dict | None] = mapped_column(JSONB, nullable=True, default=None)
|
||||
manual_material_overrides: Mapped[dict | None] = mapped_column(JSONB, nullable=True, default=None)
|
||||
step_file_hash: Mapped[str | None] = mapped_column(String(64), nullable=True, index=True)
|
||||
tenant_id: Mapped[uuid.UUID | None] = mapped_column(
|
||||
UUID(as_uuid=True), ForeignKey("tenants.id"), nullable=True, index=True
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from typing import Literal
|
||||
from pydantic import BaseModel
|
||||
from app.domains.rendering.schemas import RenderPositionOut
|
||||
|
||||
@@ -71,3 +72,19 @@ class ProductOut(BaseModel):
|
||||
updated_at: datetime
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
|
||||
|
||||
class PartEntry(BaseModel):
    """One leaf part in a scene manifest (API response model).

    Mirrors the per-part dicts produced by the scene-manifest builder in
    the part-key service; serialized in GET /api/cad/{id}/scene-manifest.
    """

    # Canonical, deterministic slug identifying the part across exports.
    part_key: str
    # Original part name as read from the STEP/XCAF label.
    source_name: str
    # USD prim path; None until the USD master has been generated.
    prim_path: str | None = None
    # Resolved material name; None when no assignment layer produced one.
    effective_material: str | None
    # Which priority layer produced effective_material.
    assignment_provenance: Literal["manual", "auto", "source", "default"]
    # True exactly when effective_material is None.
    is_unassigned: bool
|
||||
|
||||
|
||||
class SceneManifest(BaseModel):
    """Full part/material manifest for one CAD file (API response model).

    Returned by GET /api/cad/{id}/scene-manifest.
    """

    # UUID of the CadFile this manifest describes, as a string.
    cad_file_id: str
    # One entry per leaf part (from USD pipeline or parsed_objects fallback).
    parts: list[PartEntry]
    # Source-material rows (e.g. Excel import keys) matched to no part.
    unmatched_source_rows: list[str]
    # part_keys of parts that ended up with no material assignment.
    unassigned_parts: list[str]
|
||||
|
||||
@@ -0,0 +1,182 @@
|
||||
"""Part key generation and scene manifest building for the USD pipeline.
|
||||
|
||||
The `resolved_material_assignments` JSONB schema written by `generate_usd_master_task`:
|
||||
{part_key: {"source_name": str, "prim_path": str}}
|
||||
|
||||
The `manual_material_overrides` JSONB schema written by `PUT /cad/{id}/part-materials` (Priority 4):
|
||||
{part_key: material_name_str}
|
||||
|
||||
The `source_material_assignments` JSONB schema written by the Excel importer (future):
|
||||
{source_part_name: material_name_str}
|
||||
|
||||
No pxr imports — all data is read from JSONB columns, never from USD files directly.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import re
|
||||
|
||||
|
||||
# ── Part key generation ───────────────────────────────────────────────────────
|
||||
|
||||
_AF_RE = re.compile(r'_AF\d+$', re.IGNORECASE)
|
||||
|
||||
|
||||
def generate_part_key(
|
||||
xcaf_label_path: str,
|
||||
source_name: str,
|
||||
existing_keys: set[str] | None = None,
|
||||
) -> str:
|
||||
"""Deterministic slug from source_name, max 64 chars, unique within assembly.
|
||||
|
||||
- Strips `_AF\\d+` OCC suffix from source_name before slugifying.
|
||||
- Falls back to sha256 digest of xcaf_label_path if slug is empty.
|
||||
- Deduplicates by appending _2, _3, ... if existing_keys is provided.
|
||||
"""
|
||||
base = _AF_RE.sub('', source_name) if source_name else ''
|
||||
# Split camelCase before slugifying: "RingOuter" → "Ring_Outer"
|
||||
base = re.sub(r'([a-z])([A-Z])', r'\1_\2', base)
|
||||
slug = re.sub(r'[^a-z0-9]+', '_', base.lower()).strip('_')
|
||||
if not slug:
|
||||
slug = f"part_{hashlib.sha256(xcaf_label_path.encode()).hexdigest()[:8]}"
|
||||
slug = slug[:50]
|
||||
|
||||
if existing_keys is None:
|
||||
return slug
|
||||
|
||||
key = slug
|
||||
n = 2
|
||||
while key in existing_keys:
|
||||
key = f"{slug}_{n}"
|
||||
n += 1
|
||||
existing_keys.add(key)
|
||||
return key
|
||||
|
||||
|
||||
# ── Scene manifest building ───────────────────────────────────────────────────
|
||||
|
||||
def build_scene_manifest(cad_file, usd_asset=None) -> dict:
    """Build a scene manifest dict from CadFile ORM object.

    Source of part list (priority order):
    1. `resolved_material_assignments` — keyed by partKey (set by generate_usd_master_task)
    2. `parsed_objects["objects"]` — list of source name strings from STEP extraction
    3. Empty manifest if neither is available.

    Material assignment priority per part:
    1. `manual_material_overrides[part_key]` — provenance "manual"
    2. `resolved_material_assignments[part_key]["material"]` — provenance "auto"
    3. substring match in `source_material_assignments` against source_name — provenance "source"
    4. None, is_unassigned=True — provenance "default"

    Returns a dict shaped like the SceneManifest schema:
    {"cad_file_id", "parts", "unmatched_source_rows", "unassigned_parts"}.

    The `usd_asset` parameter is currently unused (kept for interface
    stability with callers that pass the MediaAsset).
    """
    cad_id = str(cad_file.id)
    resolved = cad_file.resolved_material_assignments or {}
    manual = cad_file.manual_material_overrides or {}
    source = cad_file.source_material_assignments or {}

    parts: list[dict] = []
    unmatched_source_rows: list[str] = []
    unassigned_parts: list[str] = []

    if resolved:
        # Build from resolved assignments (USD pipeline has run).
        for part_key, meta in resolved.items():
            # Defensive: tolerate malformed JSONB entries that aren't dicts.
            source_name = meta.get("source_name", "") if isinstance(meta, dict) else ""
            prim_path = meta.get("prim_path") if isinstance(meta, dict) else None

            effective_material, provenance = _resolve_material(
                part_key, source_name, manual, resolved, source
            )
            is_unassigned = effective_material is None

            parts.append({
                "part_key": part_key,
                "source_name": source_name,
                "prim_path": prim_path,
                "effective_material": effective_material,
                "assignment_provenance": provenance,
                "is_unassigned": is_unassigned,
            })
            if is_unassigned:
                unassigned_parts.append(part_key)

    elif cad_file.parsed_objects:
        # Fall back to parsed_objects from STEP extraction (no USD yet, so
        # there is no XCAF label path — the source name doubles as the
        # hash-fallback input for key generation).
        object_names: list[str] = cad_file.parsed_objects.get("objects") or []
        seen_keys: set[str] = set()
        for source_name in object_names:
            part_key = generate_part_key(source_name, source_name, seen_keys)
            effective_material, provenance = _resolve_material(
                part_key, source_name, manual, resolved, source
            )
            is_unassigned = effective_material is None

            parts.append({
                "part_key": part_key,
                "source_name": source_name,
                "prim_path": None,
                "effective_material": effective_material,
                "assignment_provenance": provenance,
                "is_unassigned": is_unassigned,
            })
            if is_unassigned:
                unassigned_parts.append(part_key)

    # Find source rows not matched to any part.
    # BUGFIX: exclude empty source names from the match set — "" is a
    # substring of every string, so a single unnamed part used to make
    # `sn in src_key.lower()` true for every row, hiding all unmatched rows.
    matched_source_names = {
        p["source_name"].lower() for p in parts if p["source_name"]
    }
    for src_key in source:
        if not any(
            src_key.lower() in sn or sn in src_key.lower()
            for sn in matched_source_names
        ):
            unmatched_source_rows.append(src_key)

    return {
        "cad_file_id": cad_id,
        "parts": parts,
        "unmatched_source_rows": unmatched_source_rows,
        "unassigned_parts": unassigned_parts,
    }
|
||||
|
||||
|
||||
def _resolve_material(
|
||||
part_key: str,
|
||||
source_name: str,
|
||||
manual: dict,
|
||||
resolved: dict,
|
||||
source: dict,
|
||||
) -> tuple[str | None, str]:
|
||||
"""Return (material_name, provenance) for one part using priority order."""
|
||||
# 1. Manual override
|
||||
if part_key in manual and manual[part_key]:
|
||||
return str(manual[part_key]), "manual"
|
||||
|
||||
# 2. Auto-resolved from USD pipeline
|
||||
meta = resolved.get(part_key)
|
||||
if isinstance(meta, dict) and meta.get("material"):
|
||||
return str(meta["material"]), "auto"
|
||||
|
||||
# 3. Substring match in source_material_assignments against source_name
|
||||
sn_lower = source_name.lower()
|
||||
for src_key, src_mat in source.items():
|
||||
if src_key.lower() in sn_lower or sn_lower in src_key.lower():
|
||||
if src_mat:
|
||||
return str(src_mat), "source"
|
||||
|
||||
# 4. Unassigned
|
||||
return None, "default"
|
||||
|
||||
|
||||
# ── Effective assignments for render pipeline ─────────────────────────────────
|
||||
|
||||
def get_effective_assignments(cad_file) -> dict[str, str]:
    """Return {part_key: material_name} merged from all three layers.

    Used by the render pipeline when building the material map (Priority 5).
    Parts with no effective material are omitted.
    """
    assignments: dict[str, str] = {}
    for entry in build_scene_manifest(cad_file)["parts"]:
        material = entry["effective_material"]
        if material is not None:
            assignments[entry["part_key"]] = material
    return assignments
|
||||
@@ -20,4 +20,5 @@ from app.domains.pipeline.tasks.render_order_line import ( # noqa: F401
|
||||
from app.domains.pipeline.tasks.export_glb import ( # noqa: F401
|
||||
generate_gltf_geometry_task,
|
||||
generate_gltf_production_task,
|
||||
generate_usd_master_task,
|
||||
)
|
||||
|
||||
Reference in New Issue
Block a user