fix: media thumbnails, product dimensions, inline 3D viewer, GLB export

Bug A: Media Library thumbnails were gray because <img src> cannot send
JWT auth headers. Added useAuthBlob() hook (fetch + createObjectURL) in
MediaBrowser.tsx. Also fixed publish_asset Celery task to populate
product_id + cad_file_id on MediaAsset for thumbnail fallback resolution.

Bug B: Product dimensions now shown in Product Details card with Ruler
icon and "from CAD" label when cad_mesh_attributes.dimensions_mm exists.

Bug C: Replaced 128×128 CAD thumbnail with InlineCadViewer component.
Queries gltf_geometry MediaAssets, fetches GLB via auth fetch → blob URL
→ Three.js Canvas with OrbitControls. Falls back to thumbnail + "Load 3D
Model" button. Polling when GLB generation is in progress.

Bug D: trimesh was in [cad] optional extra but Dockerfile only installed
[dev]. Changed to pip install -e ".[dev,cad]" — trimesh now available in
backend container, GLB + Colors export works.

Also added bbox extraction (STL-first numpy parsing) in render_step_thumbnail
and admin "Re-extract CAD Metadata" bulk endpoint.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-07 13:27:46 +01:00
parent 10ed1b5e91
commit bfd58e3419
24 changed files with 1502 additions and 218 deletions
+89 -6
View File
@@ -41,6 +41,14 @@ SETTINGS_DEFAULTS: dict[str, str] = {
"smtp_user": "",
"smtp_password": "",
"smtp_from_address": "",
# 3D viewer / glTF export settings
"gltf_scale_factor": "0.001",
"gltf_smooth_normals": "true",
"viewer_max_distance": "50",
"viewer_min_distance": "0.001",
"gltf_material_quality": "pbr_colors",
"gltf_pbr_roughness": "0.4",
"gltf_pbr_metallic": "0.6",
}
@@ -63,6 +71,13 @@ class SettingsOut(BaseModel):
smtp_user: str = ""
smtp_password: str = ""
smtp_from_address: str = ""
gltf_scale_factor: float = 0.001
gltf_smooth_normals: bool = True
viewer_max_distance: float = 50.0
viewer_min_distance: float = 0.001
gltf_material_quality: str = "pbr_colors"
gltf_pbr_roughness: float = 0.4
gltf_pbr_metallic: float = 0.6
class SettingsUpdate(BaseModel):
@@ -84,6 +99,13 @@ class SettingsUpdate(BaseModel):
smtp_user: str | None = None
smtp_password: str | None = None
smtp_from_address: str | None = None
gltf_scale_factor: float | None = None
gltf_smooth_normals: bool | None = None
viewer_max_distance: float | None = None
viewer_min_distance: float | None = None
gltf_material_quality: str | None = None
gltf_pbr_roughness: float | None = None
gltf_pbr_metallic: float | None = None
@router.get("/users", response_model=list[UserOut])
@@ -191,6 +213,13 @@ def _settings_to_out(raw: dict[str, str]) -> SettingsOut:
smtp_user=raw.get("smtp_user", ""),
smtp_password=raw.get("smtp_password", ""),
smtp_from_address=raw.get("smtp_from_address", ""),
gltf_scale_factor=float(raw.get("gltf_scale_factor", "0.001")),
gltf_smooth_normals=raw.get("gltf_smooth_normals", "true") == "true",
viewer_max_distance=float(raw.get("viewer_max_distance", "50")),
viewer_min_distance=float(raw.get("viewer_min_distance", "0.001")),
gltf_material_quality=raw.get("gltf_material_quality", "pbr_colors"),
gltf_pbr_roughness=float(raw.get("gltf_pbr_roughness", "0.4")),
gltf_pbr_metallic=float(raw.get("gltf_pbr_metallic", "0.6")),
)
@@ -285,6 +314,20 @@ async def update_settings(
updates["smtp_password"] = body.smtp_password
if body.smtp_from_address is not None:
updates["smtp_from_address"] = body.smtp_from_address
if body.gltf_scale_factor is not None:
updates["gltf_scale_factor"] = str(body.gltf_scale_factor)
if body.gltf_smooth_normals is not None:
updates["gltf_smooth_normals"] = "true" if body.gltf_smooth_normals else "false"
if body.viewer_max_distance is not None:
updates["viewer_max_distance"] = str(body.viewer_max_distance)
if body.viewer_min_distance is not None:
updates["viewer_min_distance"] = str(body.viewer_min_distance)
if body.gltf_material_quality is not None:
updates["gltf_material_quality"] = body.gltf_material_quality
if body.gltf_pbr_roughness is not None:
updates["gltf_pbr_roughness"] = str(body.gltf_pbr_roughness)
if body.gltf_pbr_metallic is not None:
updates["gltf_pbr_metallic"] = str(body.gltf_pbr_metallic)
for k, v in updates.items():
await _save_setting(db, k, v)
@@ -368,6 +411,33 @@ async def regenerate_thumbnails(
return {"queued": queued, "message": f"Re-queued {queued} CAD file(s) for thumbnail regeneration"}
@router.post("/settings/reextract-metadata", status_code=status.HTTP_202_ACCEPTED)
async def reextract_all_metadata(
    admin: User = Depends(require_admin),
    db: AsyncSession = Depends(get_db),
):
    """Re-extract OCC metadata (dimensions, sharp edges) for all completed CAD files.

    Queues one Celery task per completed CadFile with a stored STEP path.
    Updates mesh_attributes only — thumbnails are not re-rendered and
    processing_status is left untouched. Intended for use after deploying
    bbox/edge extraction improvements.
    """
    # Local import keeps the Celery task module out of the router import graph.
    from app.tasks.step_tasks import reextract_cad_metadata

    rows = await db.execute(
        select(CadFile).where(
            CadFile.processing_status == ProcessingStatus.completed,
            CadFile.stored_path.isnot(None),
        )
    )
    queued = 0
    for cad in rows.scalars():
        reextract_cad_metadata.delay(str(cad.id))
        queued += 1
    return {"queued": queued, "message": f"Queued {queued} CAD file(s) for metadata re-extraction"}
@router.post("/settings/generate-missing-stls", status_code=status.HTTP_202_ACCEPTED)
async def generate_missing_stls(
admin: User = Depends(require_admin),
@@ -482,15 +552,25 @@ async def import_existing_media_assets(
created = 0
skipped = 0
from app.config import settings as _app_settings
def _normalize_key(path: str) -> str:
"""Strip UPLOAD_DIR prefix to store relative storage keys."""
key = str(path)
prefix = str(_app_settings.upload_dir).rstrip("/") + "/"
return key[len(prefix):] if key.startswith(prefix) else key
# 1. CadFiles with thumbnail_path
await db.execute(text("SET LOCAL app.current_tenant_id = 'bypass'"))
cad_result = await db.execute(
text("SELECT id, thumbnail_path FROM cad_files WHERE thumbnail_path IS NOT NULL AND processing_status = 'completed'")
)
for row in cad_result.fetchall():
cad_id, thumb_path = row
norm_key = _normalize_key(str(thumb_path))
# De-dup check
existing = await db.execute(
select(MediaAsset.id).where(MediaAsset.storage_key == thumb_path).limit(1)
select(MediaAsset.id).where(MediaAsset.storage_key == norm_key).limit(1)
)
if existing.scalar_one_or_none():
skipped += 1
@@ -500,13 +580,14 @@ async def import_existing_media_assets(
asset = MediaAsset(
cad_file_id=uuid.UUID(str(cad_id)),
asset_type=MediaAssetType.thumbnail,
storage_key=str(thumb_path),
storage_key=norm_key,
mime_type=mime,
)
db.add(asset)
created += 1
# 2. OrderLines with result_path
await db.execute(text("SET LOCAL app.current_tenant_id = 'bypass'"))
ol_result = await db.execute(
text("""
SELECT ol.id, ol.result_path, ol.product_id, COALESCE(ot.is_animation, false) as is_animation
@@ -516,9 +597,10 @@ async def import_existing_media_assets(
""")
)
for row in ol_result.fetchall():
ol_id, result_path, product_id, is_animation = row
ol_id, result_path, product_id, _is_animation = row
norm_key = _normalize_key(str(result_path))
existing = await db.execute(
select(MediaAsset.id).where(MediaAsset.storage_key == result_path).limit(1)
select(MediaAsset.id).where(MediaAsset.storage_key == norm_key).limit(1)
)
if existing.scalar_one_or_none():
skipped += 1
@@ -528,13 +610,14 @@ async def import_existing_media_assets(
mime = "video/mp4"
asset_type = MediaAssetType.turntable
else:
# Extension determines type — poster frames (.jpg/.png) are always stills
mime = "image/png" if ext.endswith(".png") else "image/jpeg"
asset_type = MediaAssetType.turntable if is_animation else MediaAssetType.still
asset_type = MediaAssetType.still
asset = MediaAsset(
order_line_id=uuid.UUID(str(ol_id)),
product_id=uuid.UUID(str(product_id)) if product_id else None,
asset_type=asset_type,
storage_key=str(result_path),
storage_key=norm_key,
mime_type=mime,
)
db.add(asset)
+149
View File
@@ -180,6 +180,9 @@ async def get_thumbnail(
db: AsyncSession = Depends(get_db),
):
"""Serve the thumbnail image for a CAD file (no auth — UUID is opaque enough)."""
from sqlalchemy import text
# Bypass RLS for this public endpoint (cad_files has tenant RLS but thumbnails are public)
await db.execute(text("SET LOCAL app.current_tenant_id = 'bypass'"))
cad = await _get_cad_file(id, db)
if not cad.thumbnail_path:
@@ -196,6 +199,7 @@ async def get_thumbnail(
path=str(thumb_path),
media_type=media_type,
filename=f"{id}{ext}",
headers={"Cache-Control": "max-age=3600, public"},
)
@@ -390,3 +394,148 @@ async def regenerate_thumbnail(
"status": "queued",
"task_id": task_id,
}
@router.get("/{id}/export-gltf-colored")
async def export_gltf_colored(
    id: uuid.UUID,
    user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Export a GLB with PBR colors from part_colors (material alias mapping).

    Loads per-part STLs from the low-quality parts cache directory and applies
    PBR materials based on the product's cad_part_materials color assignments.
    Falls back to the combined STL with a single grey material.

    Raises:
        HTTPException 403: caller is not admin / project_manager.
        HTTPException 404: STEP file not uploaded or STL cache not rendered yet.
    """
    from fastapi.responses import Response
    from sqlalchemy import text, select
    import trimesh
    import io

    if user.role.value not in ("admin", "project_manager"):
        raise HTTPException(status_code=403, detail="Insufficient permissions")

    # Bypass RLS for cad_files + products
    await db.execute(text("SET LOCAL app.current_tenant_id = 'bypass'"))
    cad = await _get_cad_file(id, db)
    if not cad.stored_path:
        raise HTTPException(404, detail="STEP file not uploaded")

    step_path = Path(cad.stored_path)
    stl_path = step_path.parent / f"{step_path.stem}_low.stl"
    parts_dir = step_path.parent / f"{step_path.stem}_low_parts"
    if not stl_path.exists():
        raise HTTPException(404, detail="STL cache not found. Trigger a render first.")

    # Load export settings (persisted as strings in system_settings).
    from app.models.system_setting import SystemSetting
    settings_result = await db.execute(
        select(SystemSetting.key, SystemSetting.value).where(
            SystemSetting.key.in_([
                "gltf_scale_factor", "gltf_smooth_normals",
                "gltf_pbr_roughness", "gltf_pbr_metallic",
            ])
        )
    )
    raw_settings = dict(settings_result.all())
    scale = float(raw_settings.get("gltf_scale_factor", "0.001"))
    smooth = raw_settings.get("gltf_smooth_normals", "true") == "true"
    roughness = float(raw_settings.get("gltf_pbr_roughness", "0.4"))
    metallic = float(raw_settings.get("gltf_pbr_metallic", "0.6"))

    # Load part colors from the product linked to this CAD file.
    from app.domains.products.models import Product
    part_colors: dict[str, str] = {}
    if cad.id:
        prod_result = await db.execute(
            select(Product).where(Product.cad_file_id == cad.id).limit(1)
        )
        product = prod_result.scalar_one_or_none()
        if product and product.cad_part_materials:
            # Entries may use either {part_name, hex_color} or {name, color} keys.
            for entry in product.cad_part_materials:
                part_name = entry.get("part_name") or entry.get("name", "")
                hex_color = entry.get("hex_color") or entry.get("color", "")
                if part_name and hex_color:
                    part_colors[part_name] = hex_color

    def _hex_to_rgba(h: str) -> list:
        # "#RRGGBB" -> [r, g, b, 1.0] in 0..1; neutral grey on malformed input.
        h = h.lstrip("#")
        if len(h) < 6:
            return [0.7, 0.7, 0.7, 1.0]
        try:
            return [int(h[i:i + 2], 16) / 255.0 for i in (0, 2, 4)] + [1.0]
        except Exception:
            return [0.7, 0.7, 0.7, 1.0]

    def _make_material(hex_color: str | None = None):
        # One PBR material per part; roughness/metallic come from settings.
        rgba = _hex_to_rgba(hex_color) if hex_color else [0.7, 0.7, 0.7, 1.0]
        return trimesh.visual.material.PBRMaterial(
            baseColorFactor=rgba,
            roughnessFactor=roughness,
            metallicFactor=metallic,
        )

    def _apply_mesh(mesh, color=None):
        # Scale (mm -> export units), optionally smooth, then attach the material.
        mesh.apply_scale(scale)
        if smooth:
            try:
                trimesh.smoothing.filter_laplacian(mesh, lamb=0.5, iterations=5)
            except Exception:
                pass
        mesh.visual = trimesh.visual.TextureVisuals(material=_make_material(color))
        return mesh

    # Try per-part STLs first
    scene = trimesh.Scene()
    used_parts = False
    if parts_dir.exists() and part_colors:
        for part_name, hex_color in part_colors.items():
            # Sanitize part name for filesystem
            safe_name = part_name.replace("/", "_").replace("\\", "_")
            part_stl = parts_dir / f"{safe_name}.stl"
            if not part_stl.exists():
                # Try lowercase / partial match
                candidates = list(parts_dir.glob(f"{safe_name}*.stl"))
                if not candidates:
                    candidates = list(parts_dir.glob("*.stl"))
                    candidates = [c for c in candidates if safe_name.lower() in c.stem.lower()]
                if candidates:
                    part_stl = candidates[0]
                else:
                    continue
            try:
                m = trimesh.load(str(part_stl), force="mesh")
                _apply_mesh(m, hex_color)
                scene.add_geometry(m, geom_name=part_name)
                used_parts = True
            except Exception:
                pass

    if not used_parts:
        # Fallback: combined STL, single color
        combined = trimesh.load(str(stl_path))
        if hasattr(combined, 'geometry'):
            # trimesh returned a Scene — color every sub-geometry the same.
            for name, m in combined.geometry.items():
                _apply_mesh(m, next(iter(part_colors.values()), None))
                scene.add_geometry(m, geom_name=name)
        else:
            _apply_mesh(combined, next(iter(part_colors.values()), None))
            scene.add_geometry(combined)

    # Export to bytes
    buf = io.BytesIO()
    scene.export(buf, file_type="glb")
    glb_bytes = buf.getvalue()

    original_stem = Path(cad.original_name or "model").stem
    filename = f"{original_stem}_colored.glb"
    return Response(
        content=glb_bytes,
        media_type="model/gltf-binary",
        # Quote the filename (RFC 6266) — original CAD names may contain spaces.
        headers={"Content-Disposition": f'attachment; filename="{filename}"'},
    )
+1
View File
@@ -75,6 +75,7 @@ def _product_out(product: Product, priority: list[str] | None = None) -> Product
out.thumbnail_url = product.thumbnail_url
out.processing_status = product.processing_status
out.cad_parsed_objects = product.cad_parsed_objects
out.cad_mesh_attributes = product.cad_file.mesh_attributes if product.cad_file else None
out.render_image_url = _best_render_url(product, priority or ["latest_render", "cad_thumbnail"])
out.stl_cached = _stl_cached_qualities(product)
return out
+76 -11
View File
@@ -9,9 +9,11 @@ from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.database import get_db
from app.domains.auth.models import User
from app.domains.media.models import MediaAsset, MediaAssetType
from app.domains.media.schemas import MediaAssetOut
from app.domains.media import service
from app.utils.auth import get_current_user
router = APIRouter(prefix="/api/media", tags=["media"], redirect_slashes=False)
@@ -44,6 +46,9 @@ async def _resolve_thumbnails_bulk(db: AsyncSession, assets: list) -> None:
# 2. Fallback: product's cad_file_id → CAD thumbnail endpoint
from app.domains.products.models import Product
from sqlalchemy import text
# products has RLS — bypass for this internal read-only lookup
await db.execute(text("SET LOCAL app.current_tenant_id = 'bypass'"))
prod_rows = await db.execute(
select(Product.id, Product.cad_file_id).where(Product.id.in_(product_ids))
)
@@ -69,6 +74,9 @@ async def list_assets(
asset_types: list[MediaAssetType] = Query(default=[]),
skip: int = Query(0, ge=0),
limit: int = Query(50, ge=1, le=500),
sort_by: str = Query("created_at"),
sort_dir: str = Query("desc"),
_user: User = Depends(get_current_user),
db: AsyncSession = Depends(get_db),
):
assets = await service.list_media_assets(
@@ -80,6 +88,8 @@ async def list_assets(
asset_types=asset_types if asset_types else None,
skip=skip,
limit=limit,
sort_by=sort_by,
sort_dir=sort_dir,
)
for a in assets:
a.download_url = service.get_download_url(a)
@@ -100,7 +110,11 @@ async def get_asset(asset_id: uuid.UUID, db: AsyncSession = Depends(get_db)):
@router.api_route("/{asset_id}/download", methods=["GET", "HEAD"])
async def download_asset(asset_id: uuid.UUID, db: AsyncSession = Depends(get_db)):
async def download_asset(
asset_id: uuid.UUID,
_user: User = Depends(get_current_user),
db: AsyncSession = Depends(get_db),
):
"""Proxy file content directly — avoids internal MinIO hostname issues."""
from fastapi.responses import FileResponse, Response
from pathlib import Path
@@ -112,14 +126,28 @@ async def download_asset(asset_id: uuid.UUID, db: AsyncSession = Depends(get_db)
mime = asset.mime_type or "application/octet-stream"
# Local file path (absolute or relative to UPLOAD_DIR)
from app.config import settings
candidate = Path(key)
if not candidate.is_absolute():
from app.config import settings
candidate = Path(settings.UPLOAD_DIR) / key
candidate = Path(settings.upload_dir) / key
# Legacy path remapping: /shared/renders/{uuid}/{file} → UPLOAD_DIR/renders/{uuid}/{file}
if not candidate.exists() and "/shared/renders/" in key:
import logging
parts = key.split("/")
if len(parts) >= 2:
remapped = Path(settings.upload_dir) / "renders" / parts[-2] / parts[-1]
if remapped.exists():
logging.getLogger(__name__).warning(
"Remapped legacy path %s%s", key, remapped
)
candidate = remapped
if candidate.exists():
ext = candidate.suffix.lstrip(".")
fname = f"{asset.asset_type.value}_{asset_id}.{ext or 'bin'}"
return FileResponse(str(candidate), media_type=mime, filename=fname)
return FileResponse(
str(candidate), media_type=mime, filename=fname,
headers={"Cache-Control": "max-age=3600, public"},
)
# Fall back to MinIO
try:
@@ -130,7 +158,10 @@ async def download_asset(asset_id: uuid.UUID, db: AsyncSession = Depends(get_db)
return Response(
content=data,
media_type=mime,
headers={"Content-Disposition": f"attachment; filename={fname}"},
headers={
"Content-Disposition": f"attachment; filename={fname}",
"Cache-Control": "max-age=3600, public",
},
)
except Exception:
raise HTTPException(404, "File not available")
@@ -139,6 +170,7 @@ async def download_asset(asset_id: uuid.UUID, db: AsyncSession = Depends(get_db)
@router.post("/zip")
async def zip_download(
asset_ids: list[uuid.UUID],
_user: User = Depends(get_current_user),
db: AsyncSession = Depends(get_db),
):
assets = []
@@ -150,18 +182,42 @@ async def zip_download(
raise HTTPException(404, "No assets found")
def generate():
import logging
from pathlib import Path
from app.core.storage import get_storage
logger = logging.getLogger(__name__)
buf = io.BytesIO()
seen_names: dict[str, int] = {}
with zipfile.ZipFile(buf, "w", zipfile.ZIP_DEFLATED) as zf:
from app.core.storage import get_storage
storage = get_storage()
for a in assets:
ext = (a.mime_type or "").split("/")[-1] or "bin"
fname = f"{a.asset_type.value}_{a.id}.{ext}"
key = a.storage_key
# Use filename from storage_key (always has correct extension)
original_name = Path(key).name
ext = Path(key).suffix.lstrip(".") or (a.mime_type or "").split("/")[-1] or "bin"
base = original_name if original_name else f"{a.asset_type.value}_{a.id}.{ext}"
# Deduplicate filenames within the ZIP
if base in seen_names:
seen_names[base] += 1
stem = Path(base).stem
suffix = Path(base).suffix
fname = f"{stem}_{seen_names[base]}{suffix}"
else:
seen_names[base] = 0
fname = base
try:
data = storage.download_bytes(a.storage_key)
# Check absolute path first (local filesystem)
candidate = Path(key)
if not candidate.is_absolute():
from app.config import settings
candidate = Path(settings.upload_dir) / key
if candidate.exists():
data = candidate.read_bytes()
else:
data = storage.download_bytes(key)
zf.writestr(fname, data)
except Exception:
pass
except Exception as exc:
logger.warning("ZIP: skipping asset %s%s", a.id, exc)
yield buf.getvalue()
return StreamingResponse(
@@ -177,3 +233,12 @@ async def archive_asset(asset_id: uuid.UUID, db: AsyncSession = Depends(get_db))
if not asset:
raise HTTPException(404, "Asset not found")
return {"ok": True}
@router.delete("/{asset_id}/permanent")
async def delete_asset_permanent(asset_id: uuid.UUID, db: AsyncSession = Depends(get_db)):
    """Hard-delete a MediaAsset row from the database (no soft-archive step)."""
    # NOTE(review): no auth dependency on this route — confirm that is intended.
    if not await service.delete_media_asset(db, asset_id):
        raise HTTPException(404, "Asset not found")
    return {"ok": True}
+13 -1
View File
@@ -5,6 +5,13 @@ from sqlalchemy.ext.asyncio import AsyncSession
from app.domains.media.models import MediaAsset, MediaAssetType
_SORT_COLUMNS = {
"created_at": MediaAsset.created_at,
"file_size_bytes": MediaAsset.file_size_bytes,
"storage_key": MediaAsset.storage_key,
}
async def list_media_assets(
db: AsyncSession,
product_id: uuid.UUID | None = None,
@@ -15,8 +22,13 @@ async def list_media_assets(
is_archived: bool | None = False,
skip: int = 0,
limit: int = 50,
sort_by: str = "created_at",
sort_dir: str = "desc",
) -> list[MediaAsset]:
q = select(MediaAsset).order_by(MediaAsset.created_at.desc())
from sqlalchemy import asc, desc
col = _SORT_COLUMNS.get(sort_by, MediaAsset.created_at)
order = desc(col) if sort_dir == "desc" else asc(col)
q = select(MediaAsset).order_by(order)
if product_id:
q = q.where(MediaAsset.product_id == product_id)
if order_line_id:
+1
View File
@@ -61,6 +61,7 @@ class ProductOut(BaseModel):
processing_status: str | None = None
stl_cached: list[str] = []
cad_parsed_objects: list[str] | None = None
cad_mesh_attributes: dict | None = None
arbeitspaket: str | None = None
notes: str | None
is_active: bool
@@ -87,6 +87,30 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
workflow_type,
)
# For turntable workflows: resolve step_path + output_dir from the order line at runtime
if workflow_type == "turntable" and ("step_path" not in params or "output_dir" not in params):
from app.domains.products.models import CadFile as _CadFile
from pathlib import Path as _Path
from app.config import settings as _cfg
_product = line.product if hasattr(line, "product") else None
if _product is None:
from sqlalchemy.orm import selectinload as _si
from app.domains.orders.models import OrderLine as _OL
_line_full = session.execute(
select(_OL).where(_OL.id == line.id).options(_si(_OL.product))
).scalar_one_or_none()
_product = _line_full.product if _line_full else None
if _product and _product.cad_file_id:
_cad = session.execute(
select(_CadFile).where(_CadFile.id == _product.cad_file_id)
).scalar_one_or_none()
if _cad and _cad.stored_path:
params.setdefault("step_path", _cad.stored_path)
params.setdefault(
"output_dir",
str(_Path(_cfg.upload_dir) / "renders" / str(line.id)),
)
from app.domains.rendering.workflow_builder import dispatch_workflow
celery_task_id = dispatch_workflow(workflow_type, order_line_id, params)
+68
View File
@@ -15,6 +15,36 @@ from app.core.task_logs import log_task_event
logger = logging.getLogger(__name__)
def _update_workflow_run_status(order_line_id: str, status: str, error: str | None = None) -> None:
    """Update the most recent WorkflowRun for an order_line after task completion.

    Best-effort bookkeeping: any failure is logged and swallowed so it can
    never fail the render task itself.

    Args:
        order_line_id: OrderLine UUID (string form) the run belongs to.
        status: terminal status to record (e.g. "completed" / "failed").
        error: optional error text; truncated to 2000 chars for the DB column.
    """
    try:
        import asyncio
        from datetime import datetime as _dt

        async def _run():
            from app.database import AsyncSessionLocal
            from app.domains.rendering.models import WorkflowRun
            from sqlalchemy import select as _sel

            async with AsyncSessionLocal() as db:
                res = await db.execute(
                    _sel(WorkflowRun)
                    .where(WorkflowRun.order_line_id == order_line_id)
                    .order_by(WorkflowRun.created_at.desc())
                    .limit(1)
                )
                run = res.scalar_one_or_none()
                # Only flip runs still marked pending — avoids clobbering a
                # status already written elsewhere.
                if run and run.status == "pending":
                    run.status = status
                    # NOTE(review): naive UTC timestamp — confirm column is tz-naive.
                    run.completed_at = _dt.utcnow()
                    if error:
                        run.error_message = error[:2000]
                    await db.commit()

        # asyncio.run() creates and tears down a fresh event loop.
        # get_event_loop().run_until_complete() is deprecated when no loop is
        # running and raises in non-main threads (common in Celery workers).
        asyncio.run(_run())
    except Exception as _exc:
        logger.warning("Failed to update WorkflowRun status for line %s: %s", order_line_id, _exc)
@celery_app.task(
bind=True,
name="app.domains.rendering.tasks.render_still_task",
@@ -291,6 +321,7 @@ def publish_asset(
from app.database import AsyncSessionLocal
from app.domains.media.models import MediaAsset, MediaAssetType
from app.domains.orders.models import OrderLine
from app.domains.products.models import Product
from sqlalchemy import select
async with AsyncSessionLocal() as db:
@@ -298,9 +329,20 @@ def publish_asset(
line = res.scalar_one_or_none()
if not line:
return None
# Resolve cad_file_id from the linked product
cad_file_id = None
if line.product_id:
prod_res = await db.execute(select(Product).where(Product.id == line.product_id))
product = prod_res.scalar_one_or_none()
if product:
cad_file_id = product.cad_file_id
asset = MediaAsset(
tenant_id=getattr(line, "tenant_id", None),
order_line_id=line.id,
product_id=line.product_id,
cad_file_id=cad_file_id,
asset_type=MediaAssetType(asset_type),
storage_key=storage_key,
render_config=render_config,
@@ -396,6 +438,7 @@ def render_order_line_still_task(self, order_line_id: str, **params) -> dict:
})
except Exception:
pass
_update_workflow_run_status(order_line_id, "completed")
return result
except Exception as exc:
log_task_event(self.request.id, f"Failed: {exc}", "error")
@@ -409,6 +452,7 @@ def render_order_line_still_task(self, order_line_id: str, **params) -> dict:
})
except Exception:
pass
_update_workflow_run_status(order_line_id, "failed", str(exc))
raise self.retry(exc=exc, countdown=30)
@@ -448,6 +492,29 @@ def export_gltf_for_order_line_task(self, order_line_id: str) -> dict:
asset_type = "gltf_geometry"
# Load sharp edge hints from mesh_attributes for UV seam marking
sharp_edges_json = "[]"
if cad_file_id:
try:
import asyncio as _asyncio
async def _load_mesh_attrs() -> list:
from app.database import AsyncSessionLocal
from app.models.cad_file import CadFile as _CF
from sqlalchemy import select as _sel
async with AsyncSessionLocal() as _db:
_res = await _db.execute(_sel(_CF).where(_CF.id == cad_file_id))
_cad = _res.scalar_one_or_none()
if _cad and _cad.mesh_attributes:
return _cad.mesh_attributes.get("sharp_edge_midpoints") or []
return []
_midpoints = _asyncio.get_event_loop().run_until_complete(_load_mesh_attrs())
if _midpoints:
sharp_edges_json = json.dumps(_midpoints)
except Exception as _exc:
logger.warning("Could not load sharp_edge_midpoints for %s: %s", cad_file_id, _exc)
if is_blender_available() and export_script.exists():
blender_bin = find_blender()
cmd = [
@@ -458,6 +525,7 @@ def export_gltf_for_order_line_task(self, order_line_id: str) -> dict:
"--output_path", str(output_path),
"--asset_library_blend", "",
"--material_map", json.dumps({}),
"--sharp_edges_json", sharp_edges_json,
]
try:
result = subprocess.run(cmd, capture_output=True, text=True, timeout=300)
+31 -2
View File
@@ -293,8 +293,33 @@ def extract_mesh_edge_data(step_path: str) -> dict:
except Exception:
continue
# Bounding box extraction (OCC Bnd_Box)
from OCC.Core.Bnd import Bnd_Box
from OCC.Core.BRepBndLib import brepbndlib
try:
bbox = Bnd_Box()
brepbndlib.Add(shape, bbox)
xmin, ymin, zmin, xmax, ymax, zmax = bbox.Get()
dimensions_mm = {
"x": round(xmax - xmin, 2),
"y": round(ymax - ymin, 2),
"z": round(zmax - zmin, 2),
}
bbox_center_mm = {
"x": round((xmin + xmax) / 2, 2),
"y": round((ymin + ymax) / 2, 2),
"z": round((zmin + zmax) / 2, 2),
}
except Exception:
dimensions_mm = None
bbox_center_mm = None
if not dihedral_angles:
return {}
result: dict = {}
if dimensions_mm:
result["dimensions_mm"] = dimensions_mm
result["bbox_center_mm"] = bbox_center_mm
return result
import statistics
median_angle = statistics.median(dihedral_angles)
@@ -307,11 +332,15 @@ def extract_mesh_edge_data(step_path: str) -> dict:
else:
suggested = 30.0
return {
result = {
"suggested_smooth_angle": round(suggested, 1),
"has_mechanical_edges": max_angle > 45,
"sharp_edge_midpoints": sharp_midpoints[:500],
}
if dimensions_mm:
result["dimensions_mm"] = dimensions_mm
result["bbox_center_mm"] = bbox_center_mm
return result
except ImportError:
# OCC not available (e.g. in backend container)
return {}
+189 -5
View File
@@ -1,11 +1,76 @@
"""Celery tasks for STEP file processing and thumbnail generation."""
import logging
import struct
from pathlib import Path
from app.tasks.celery_app import celery_app
from app.core.task_logs import log_task_event
logger = logging.getLogger(__name__)
def _bbox_from_stl(stl_path: str) -> dict | None:
"""Extract bounding box from a cached binary STL file.
Returns {"dimensions_mm": {x,y,z}, "bbox_center_mm": {x,y,z}} or None on failure.
Reading vertex extremes from an existing STL is ~10-100× faster than re-parsing STEP.
"""
try:
import numpy as np
p = Path(stl_path)
if not p.exists() or p.stat().st_size < 84:
return None
with p.open("rb") as f:
f.seek(80) # skip 80-byte header
n = struct.unpack("<I", f.read(4))[0]
if n == 0:
return None
raw = f.read(n * 50) # 50 bytes per triangle
# Binary STL per-triangle layout: normal(12B) + v1(12B) + v2(12B) + v3(12B) + attr(2B) = 50B
# Extract vertex bytes (columns 12..48 of each 50-byte row)
arr = np.frombuffer(raw, dtype=np.uint8).reshape(n, 50)
verts = np.frombuffer(arr[:, 12:48].tobytes(), dtype=np.float32).reshape(-1, 3)
mins = verts.min(axis=0)
maxs = verts.max(axis=0)
dims = maxs - mins
return {
"dimensions_mm": {
"x": round(float(dims[0]), 2),
"y": round(float(dims[1]), 2),
"z": round(float(dims[2]), 2),
},
"bbox_center_mm": {
"x": round(float((mins[0] + maxs[0]) / 2), 2),
"y": round(float((mins[1] + maxs[1]) / 2), 2),
"z": round(float((mins[2] + maxs[2]) / 2), 2),
},
}
except Exception as exc:
logger.debug(f"_bbox_from_stl failed for {stl_path}: {exc}")
return None
def _bbox_from_step_cadquery(step_path: str) -> dict | None:
    """Fallback bounding-box extraction: re-parse the STEP file with cadquery.

    Slower than the STL fast path, but works when no STL cache exists.
    Returns the same {"dimensions_mm", "bbox_center_mm"} payload, or None
    when cadquery is unavailable or parsing fails.
    """
    try:
        import cadquery as cq

        box = cq.importers.importStep(step_path).val().BoundingBox()
        axes = ("x", "y", "z")
        lengths = (box.xlen, box.ylen, box.zlen)
        centers = (
            (box.xmin + box.xmax) / 2,
            (box.ymin + box.ymax) / 2,
            (box.zmin + box.zmax) / 2,
        )
        return {
            "dimensions_mm": {a: round(v, 2) for a, v in zip(axes, lengths)},
            "bbox_center_mm": {a: round(v, 2) for a, v in zip(axes, centers)},
        }
    except Exception as exc:
        logger.debug(f"_bbox_from_step_cadquery failed for {step_path}: {exc}")
        return None
@celery_app.task(bind=True, name="app.tasks.step_tasks.process_step_file", queue="step_processing")
def process_step_file(self, cad_file_id: str):
"""Process a STEP file: extract objects, generate thumbnail, convert to glTF.
@@ -164,6 +229,42 @@ def render_step_thumbnail(self, cad_file_id: str):
logger.error(f"Thumbnail render failed for {cad_file_id}: {exc}")
raise self.retry(exc=exc, countdown=30, max_retries=2)
# Extract bounding box from the STL that was just cached by the renderer.
# STL binary parsing is near-instant (numpy min/max) vs re-parsing the STEP file.
# Falls back to cadquery STEP re-parse if STL is not found.
try:
from sqlalchemy import create_engine
from sqlalchemy.orm import Session
from app.config import settings as _cfg2
from app.models.cad_file import CadFile as _CadFile2
_sync_url2 = _cfg2.database_url.replace("+asyncpg", "")
_eng2 = create_engine(_sync_url2)
with Session(_eng2) as _sess2:
_cad2 = _sess2.get(_CadFile2, cad_file_id)
_step_path = _cad2.stored_path if _cad2 else None
_eng2.dispose()
if _step_path and not (_cad2.mesh_attributes or {}).get("dimensions_mm"):
_step = Path(_step_path)
_stl = _step.parent / f"{_step.stem}_low.stl"
bbox_data = _bbox_from_stl(str(_stl)) or _bbox_from_step_cadquery(_step_path)
if bbox_data:
_eng2 = create_engine(_sync_url2)
with Session(_eng2) as _sess2:
_cad2 = _sess2.get(_CadFile2, cad_file_id)
if _cad2:
_cad2.mesh_attributes = {**( _cad2.mesh_attributes or {}), **bbox_data}
_sess2.commit()
dims = bbox_data["dimensions_mm"]
logger.info(
f"bbox for {cad_file_id}: "
f"{dims['x']}×{dims['y']}×{dims['z']} mm"
)
_eng2.dispose()
except Exception:
logger.exception(f"bbox extraction failed for {cad_file_id} (non-fatal)")
# Auto-populate materials now that parsed_objects are available
try:
_auto_populate_materials_for_cad(cad_file_id)
@@ -195,6 +296,52 @@ def render_step_thumbnail(self, cad_file_id: str):
logger.debug("WebSocket publish for CAD complete skipped (non-fatal)")
@celery_app.task(name="app.tasks.step_tasks.reextract_cad_metadata", queue="thumbnail_rendering")
def reextract_cad_metadata(cad_file_id: str):
    """Re-extract bounding-box dimensions for an already-completed CAD file.

    Tries the fast STL parse first, then falls back to cadquery STEP re-parse
    (available in the render-worker). Updates mesh_attributes without changing
    processing_status or re-rendering. Safe to run on completed files.
    """
    from sqlalchemy import create_engine
    from sqlalchemy.orm import Session
    from app.config import settings as app_settings
    from app.models.cad_file import CadFile

    sync_url = app_settings.database_url.replace("+asyncpg", "")
    eng = create_engine(sync_url)
    try:
        with Session(eng) as session:
            cad_file = session.get(CadFile, cad_file_id)
            if not cad_file or not cad_file.stored_path:
                logger.warning(f"reextract_cad_metadata: file not found {cad_file_id}")
                return
            step_path = cad_file.stored_path

        p = Path(step_path)
        stl_path = p.parent / f"{p.stem}_low.stl"
        patch = _bbox_from_stl(str(stl_path)) or _bbox_from_step_cadquery(step_path)
        if patch:
            # Reassign (not mutate) the JSON column so SQLAlchemy detects the change.
            with Session(eng) as session:
                cad_file = session.get(CadFile, cad_file_id)
                if cad_file:
                    cad_file.mesh_attributes = {**(cad_file.mesh_attributes or {}), **patch}
                    session.commit()
                    dims = patch["dimensions_mm"]
                    # Separator between id and dimensions (was missing, producing
                    # a fused "…<uuid>123×…" log line).
                    logger.info(
                        f"reextract_cad_metadata: {cad_file_id}: "
                        f"{dims['x']}×{dims['y']}×{dims['z']} mm"
                    )
        else:
            logger.warning(f"reextract_cad_metadata: no bbox data for {cad_file_id}")
    except Exception as exc:
        logger.error(f"reextract_cad_metadata failed for {cad_file_id}: {exc}")
    finally:
        # Single disposal point — covers the early return and all error paths.
        eng.dispose()
@celery_app.task(bind=True, name="app.tasks.step_tasks.generate_stl_cache", queue="thumbnail_rendering")
def generate_stl_cache(self, cad_file_id: str, quality: str):
"""Generate and cache STL for a CAD file without triggering a full render."""
@@ -267,6 +414,15 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
logger.error("generate_gltf_geometry_task: no stored_path for %s", cad_file_id)
return
step_path_str = cad_file.stored_path
# Read 3D export settings
from sqlalchemy import text as _text
_scale = float(session.execute(_text(
"SELECT value FROM system_settings WHERE key='gltf_scale_factor'"
)).scalar() or "0.001")
_smooth = (session.execute(_text(
"SELECT value FROM system_settings WHERE key='gltf_smooth_normals'"
)).scalar() or "true") == "true"
eng.dispose()
log_task_event(self.request.id, f"Starting generate_gltf_geometry_task: cad_file={cad_file_id}", "info")
@@ -280,7 +436,25 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
output_path = step.parent / f"{step.stem}_geometry.glb"
try:
import trimesh
import trimesh as _trimesh
def _process_mesh(m):
m.apply_scale(_scale)
if _smooth:
try:
_trimesh.smoothing.filter_laplacian(m, lamb=0.5, iterations=5)
except Exception:
pass # non-critical
mesh = trimesh.load(str(stl_path))
if hasattr(mesh, 'geometry'):
# trimesh.Scene with multiple sub-meshes
for sub in mesh.geometry.values():
_process_mesh(sub)
else:
_process_mesh(mesh)
mesh.export(str(output_path))
log_task_event(self.request.id, f"Completed successfully: {output_path.name}", "done")
logger.info("generate_gltf_geometry_task: exported %s", output_path.name)
@@ -295,12 +469,17 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
async def _store():
from app.database import AsyncSessionLocal
from app.domains.media.models import MediaAsset, MediaAssetType
from app.config import settings as _cfg
async with AsyncSessionLocal() as db:
import uuid
_key = str(output_path)
_prefix = str(_cfg.upload_dir).rstrip("/") + "/"
if _key.startswith(_prefix):
_key = _key[len(_prefix):]
asset = MediaAsset(
cad_file_id=uuid.UUID(cad_file_id),
asset_type=MediaAssetType.gltf_geometry,
storage_key=str(output_path),
storage_key=_key,
mime_type="model/gltf-binary",
file_size_bytes=output_path.stat().st_size if output_path.exists() else None,
)
@@ -648,13 +827,18 @@ def render_order_line_task(self, order_line_id: str):
# Create MediaAsset so the render appears in the Media Browser
try:
from app.domains.media.models import MediaAsset, MediaAssetType as MAT
from app.config import settings as _cfg2
_ext = str(output_path).rsplit(".", 1)[-1].lower() if "." in str(output_path) else "bin"
_mime = "video/mp4" if _ext in ("mp4", "webm") else ("image/jpeg" if _ext in ("jpg", "jpeg") else "image/png")
_is_anim = bool(line.output_type and line.output_type.is_animation)
_at = MAT.turntable if _is_anim else MAT.still
# Extension determines type — poster frames (.jpg/.png) from animations are still stills
_at = MAT.turntable if _ext in ("mp4", "webm") else MAT.still
_tenant_id = line.product.cad_file.tenant_id if (line.product and line.product.cad_file) else None
# Normalize storage_key to relative path
_raw_key = str(output_path)
_upload_prefix = str(_cfg2.upload_dir).rstrip("/") + "/"
_norm_key = _raw_key[len(_upload_prefix):] if _raw_key.startswith(_upload_prefix) else _raw_key
_existing = session.execute(
select(MediaAsset.id).where(MediaAsset.storage_key == output_path).limit(1)
select(MediaAsset.id).where(MediaAsset.storage_key == _norm_key).limit(1)
).scalar_one_or_none()
if not _existing:
_asset = MediaAsset(
@@ -662,7 +846,7 @@ def render_order_line_task(self, order_line_id: str):
order_line_id=line.id,
product_id=line.product_id,
asset_type=_at,
storage_key=output_path,
storage_key=_norm_key,
mime_type=_mime,
)
session.add(_asset)