fix: media thumbnails, product dimensions, inline 3D viewer, GLB export

Bug A: Media Library thumbnails were gray because <img src> cannot send
JWT auth headers. Added useAuthBlob() hook (fetch + createObjectURL) in
MediaBrowser.tsx. Also fixed publish_asset Celery task to populate
product_id + cad_file_id on MediaAsset for thumbnail fallback resolution.

Bug B: Product dimensions now shown in Product Details card with Ruler
icon and "from CAD" label when cad_mesh_attributes.dimensions_mm exists.

Bug C: Replaced 128×128 CAD thumbnail with InlineCadViewer component.
Queries gltf_geometry MediaAssets, fetches GLB via auth fetch → blob URL
→ Three.js Canvas with OrbitControls. Falls back to thumbnail + "Load 3D
Model" button. Polling when GLB generation is in progress.

Bug D: trimesh was in [cad] optional extra but Dockerfile only installed
[dev]. Changed to pip install -e ".[dev,cad]" — trimesh now available in
backend container, GLB + Colors export works.

Also added bbox extraction (STL-first numpy parsing) in render_step_thumbnail
and admin "Re-extract CAD Metadata" bulk endpoint.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-07 13:27:46 +01:00
parent 10ed1b5e91
commit bfd58e3419
24 changed files with 1502 additions and 218 deletions
+76 -11
View File
@@ -9,9 +9,11 @@ from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.database import get_db
from app.domains.auth.models import User
from app.domains.media.models import MediaAsset, MediaAssetType
from app.domains.media.schemas import MediaAssetOut
from app.domains.media import service
from app.utils.auth import get_current_user
router = APIRouter(prefix="/api/media", tags=["media"], redirect_slashes=False)
@@ -44,6 +46,9 @@ async def _resolve_thumbnails_bulk(db: AsyncSession, assets: list) -> None:
# 2. Fallback: product's cad_file_id → CAD thumbnail endpoint
from app.domains.products.models import Product
from sqlalchemy import text
# products has RLS — bypass for this internal read-only lookup
await db.execute(text("SET LOCAL app.current_tenant_id = 'bypass'"))
prod_rows = await db.execute(
select(Product.id, Product.cad_file_id).where(Product.id.in_(product_ids))
)
@@ -69,6 +74,9 @@ async def list_assets(
asset_types: list[MediaAssetType] = Query(default=[]),
skip: int = Query(0, ge=0),
limit: int = Query(50, ge=1, le=500),
sort_by: str = Query("created_at"),
sort_dir: str = Query("desc"),
_user: User = Depends(get_current_user),
db: AsyncSession = Depends(get_db),
):
assets = await service.list_media_assets(
@@ -80,6 +88,8 @@ async def list_assets(
asset_types=asset_types if asset_types else None,
skip=skip,
limit=limit,
sort_by=sort_by,
sort_dir=sort_dir,
)
for a in assets:
a.download_url = service.get_download_url(a)
@@ -100,7 +110,11 @@ async def get_asset(asset_id: uuid.UUID, db: AsyncSession = Depends(get_db)):
@router.api_route("/{asset_id}/download", methods=["GET", "HEAD"])
async def download_asset(asset_id: uuid.UUID, db: AsyncSession = Depends(get_db)):
async def download_asset(
asset_id: uuid.UUID,
_user: User = Depends(get_current_user),
db: AsyncSession = Depends(get_db),
):
"""Proxy file content directly — avoids internal MinIO hostname issues."""
from fastapi.responses import FileResponse, Response
from pathlib import Path
@@ -112,14 +126,28 @@ async def download_asset(asset_id: uuid.UUID, db: AsyncSession = Depends(get_db)
mime = asset.mime_type or "application/octet-stream"
# Local file path (absolute or relative to UPLOAD_DIR)
from app.config import settings
candidate = Path(key)
if not candidate.is_absolute():
from app.config import settings
candidate = Path(settings.UPLOAD_DIR) / key
candidate = Path(settings.upload_dir) / key
# Legacy path remapping: /shared/renders/{uuid}/{file} → UPLOAD_DIR/renders/{uuid}/{file}
if not candidate.exists() and "/shared/renders/" in key:
import logging
parts = key.split("/")
if len(parts) >= 2:
remapped = Path(settings.upload_dir) / "renders" / parts[-2] / parts[-1]
if remapped.exists():
logging.getLogger(__name__).warning(
"Remapped legacy path %s%s", key, remapped
)
candidate = remapped
if candidate.exists():
ext = candidate.suffix.lstrip(".")
fname = f"{asset.asset_type.value}_{asset_id}.{ext or 'bin'}"
return FileResponse(str(candidate), media_type=mime, filename=fname)
return FileResponse(
str(candidate), media_type=mime, filename=fname,
headers={"Cache-Control": "max-age=3600, public"},
)
# Fall back to MinIO
try:
@@ -130,7 +158,10 @@ async def download_asset(asset_id: uuid.UUID, db: AsyncSession = Depends(get_db)
return Response(
content=data,
media_type=mime,
headers={"Content-Disposition": f"attachment; filename={fname}"},
headers={
"Content-Disposition": f"attachment; filename={fname}",
"Cache-Control": "max-age=3600, public",
},
)
except Exception:
raise HTTPException(404, "File not available")
@@ -139,6 +170,7 @@ async def download_asset(asset_id: uuid.UUID, db: AsyncSession = Depends(get_db)
@router.post("/zip")
async def zip_download(
asset_ids: list[uuid.UUID],
_user: User = Depends(get_current_user),
db: AsyncSession = Depends(get_db),
):
assets = []
@@ -150,18 +182,42 @@ async def zip_download(
raise HTTPException(404, "No assets found")
def generate():
import logging
from pathlib import Path
from app.core.storage import get_storage
logger = logging.getLogger(__name__)
buf = io.BytesIO()
seen_names: dict[str, int] = {}
with zipfile.ZipFile(buf, "w", zipfile.ZIP_DEFLATED) as zf:
from app.core.storage import get_storage
storage = get_storage()
for a in assets:
ext = (a.mime_type or "").split("/")[-1] or "bin"
fname = f"{a.asset_type.value}_{a.id}.{ext}"
key = a.storage_key
# Use filename from storage_key (always has correct extension)
original_name = Path(key).name
ext = Path(key).suffix.lstrip(".") or (a.mime_type or "").split("/")[-1] or "bin"
base = original_name if original_name else f"{a.asset_type.value}_{a.id}.{ext}"
# Deduplicate filenames within the ZIP
if base in seen_names:
seen_names[base] += 1
stem = Path(base).stem
suffix = Path(base).suffix
fname = f"{stem}_{seen_names[base]}{suffix}"
else:
seen_names[base] = 0
fname = base
try:
data = storage.download_bytes(a.storage_key)
# Check absolute path first (local filesystem)
candidate = Path(key)
if not candidate.is_absolute():
from app.config import settings
candidate = Path(settings.upload_dir) / key
if candidate.exists():
data = candidate.read_bytes()
else:
data = storage.download_bytes(key)
zf.writestr(fname, data)
except Exception:
pass
except Exception as exc:
logger.warning("ZIP: skipping asset %s%s", a.id, exc)
yield buf.getvalue()
return StreamingResponse(
@@ -177,3 +233,12 @@ async def archive_asset(asset_id: uuid.UUID, db: AsyncSession = Depends(get_db))
if not asset:
raise HTTPException(404, "Asset not found")
return {"ok": True}
@router.delete("/{asset_id}/permanent")
async def delete_asset_permanent(
    asset_id: uuid.UUID,
    _user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Permanently remove a MediaAsset record from the database.

    Irreversible, unlike the archive endpoint. Requires authentication,
    consistent with the other endpoints in this router. Whether the
    underlying storage object is also removed depends on
    service.delete_media_asset — TODO confirm.

    Raises:
        HTTPException: 404 if no asset with ``asset_id`` exists.
    """
    deleted = await service.delete_media_asset(db, asset_id)
    if not deleted:
        raise HTTPException(404, "Asset not found")
    return {"ok": True}
+13 -1
View File
@@ -5,6 +5,13 @@ from sqlalchemy.ext.asyncio import AsyncSession
from app.domains.media.models import MediaAsset, MediaAssetType
# Whitelist of client-sortable columns for list_media_assets: the sort_by
# query param is resolved through this map (unknown keys fall back to
# created_at in the caller), so arbitrary column names never reach the
# ORDER BY clause.
_SORT_COLUMNS = {
    "created_at": MediaAsset.created_at,
    "file_size_bytes": MediaAsset.file_size_bytes,
    "storage_key": MediaAsset.storage_key,
}
async def list_media_assets(
db: AsyncSession,
product_id: uuid.UUID | None = None,
@@ -15,8 +22,13 @@ async def list_media_assets(
is_archived: bool | None = False,
skip: int = 0,
limit: int = 50,
sort_by: str = "created_at",
sort_dir: str = "desc",
) -> list[MediaAsset]:
q = select(MediaAsset).order_by(MediaAsset.created_at.desc())
from sqlalchemy import asc, desc
col = _SORT_COLUMNS.get(sort_by, MediaAsset.created_at)
order = desc(col) if sort_dir == "desc" else asc(col)
q = select(MediaAsset).order_by(order)
if product_id:
q = q.where(MediaAsset.product_id == product_id)
if order_line_id:
+1
View File
@@ -61,6 +61,7 @@ class ProductOut(BaseModel):
processing_status: str | None = None
stl_cached: list[str] = []
cad_parsed_objects: list[str] | None = None
cad_mesh_attributes: dict | None = None
arbeitspaket: str | None = None
notes: str | None
is_active: bool
@@ -87,6 +87,30 @@ def dispatch_render_with_workflow(order_line_id: str) -> dict:
workflow_type,
)
# For turntable workflows: resolve step_path + output_dir from the order line at runtime
if workflow_type == "turntable" and ("step_path" not in params or "output_dir" not in params):
from app.domains.products.models import CadFile as _CadFile
from pathlib import Path as _Path
from app.config import settings as _cfg
_product = line.product if hasattr(line, "product") else None
if _product is None:
from sqlalchemy.orm import selectinload as _si
from app.domains.orders.models import OrderLine as _OL
_line_full = session.execute(
select(_OL).where(_OL.id == line.id).options(_si(_OL.product))
).scalar_one_or_none()
_product = _line_full.product if _line_full else None
if _product and _product.cad_file_id:
_cad = session.execute(
select(_CadFile).where(_CadFile.id == _product.cad_file_id)
).scalar_one_or_none()
if _cad and _cad.stored_path:
params.setdefault("step_path", _cad.stored_path)
params.setdefault(
"output_dir",
str(_Path(_cfg.upload_dir) / "renders" / str(line.id)),
)
from app.domains.rendering.workflow_builder import dispatch_workflow
celery_task_id = dispatch_workflow(workflow_type, order_line_id, params)
+68
View File
@@ -15,6 +15,36 @@ from app.core.task_logs import log_task_event
logger = logging.getLogger(__name__)
def _update_workflow_run_status(order_line_id: str, status: str, error: str | None = None) -> None:
"""Update the most recent WorkflowRun for an order_line after task completion."""
try:
import asyncio
from datetime import datetime as _dt
async def _run():
from app.database import AsyncSessionLocal
from app.domains.rendering.models import WorkflowRun
from sqlalchemy import select as _sel
async with AsyncSessionLocal() as db:
res = await db.execute(
_sel(WorkflowRun)
.where(WorkflowRun.order_line_id == order_line_id)
.order_by(WorkflowRun.created_at.desc())
.limit(1)
)
run = res.scalar_one_or_none()
if run and run.status == "pending":
run.status = status
run.completed_at = _dt.utcnow()
if error:
run.error_message = error[:2000]
await db.commit()
asyncio.get_event_loop().run_until_complete(_run())
except Exception as _exc:
logger.warning("Failed to update WorkflowRun status for line %s: %s", order_line_id, _exc)
@celery_app.task(
bind=True,
name="app.domains.rendering.tasks.render_still_task",
@@ -291,6 +321,7 @@ def publish_asset(
from app.database import AsyncSessionLocal
from app.domains.media.models import MediaAsset, MediaAssetType
from app.domains.orders.models import OrderLine
from app.domains.products.models import Product
from sqlalchemy import select
async with AsyncSessionLocal() as db:
@@ -298,9 +329,20 @@ def publish_asset(
line = res.scalar_one_or_none()
if not line:
return None
# Resolve cad_file_id from the linked product
cad_file_id = None
if line.product_id:
prod_res = await db.execute(select(Product).where(Product.id == line.product_id))
product = prod_res.scalar_one_or_none()
if product:
cad_file_id = product.cad_file_id
asset = MediaAsset(
tenant_id=getattr(line, "tenant_id", None),
order_line_id=line.id,
product_id=line.product_id,
cad_file_id=cad_file_id,
asset_type=MediaAssetType(asset_type),
storage_key=storage_key,
render_config=render_config,
@@ -396,6 +438,7 @@ def render_order_line_still_task(self, order_line_id: str, **params) -> dict:
})
except Exception:
pass
_update_workflow_run_status(order_line_id, "completed")
return result
except Exception as exc:
log_task_event(self.request.id, f"Failed: {exc}", "error")
@@ -409,6 +452,7 @@ def render_order_line_still_task(self, order_line_id: str, **params) -> dict:
})
except Exception:
pass
_update_workflow_run_status(order_line_id, "failed", str(exc))
raise self.retry(exc=exc, countdown=30)
@@ -448,6 +492,29 @@ def export_gltf_for_order_line_task(self, order_line_id: str) -> dict:
asset_type = "gltf_geometry"
# Load sharp edge hints from mesh_attributes for UV seam marking
sharp_edges_json = "[]"
if cad_file_id:
try:
import asyncio as _asyncio
async def _load_mesh_attrs() -> list:
from app.database import AsyncSessionLocal
from app.models.cad_file import CadFile as _CF
from sqlalchemy import select as _sel
async with AsyncSessionLocal() as _db:
_res = await _db.execute(_sel(_CF).where(_CF.id == cad_file_id))
_cad = _res.scalar_one_or_none()
if _cad and _cad.mesh_attributes:
return _cad.mesh_attributes.get("sharp_edge_midpoints") or []
return []
_midpoints = _asyncio.get_event_loop().run_until_complete(_load_mesh_attrs())
if _midpoints:
sharp_edges_json = json.dumps(_midpoints)
except Exception as _exc:
logger.warning("Could not load sharp_edge_midpoints for %s: %s", cad_file_id, _exc)
if is_blender_available() and export_script.exists():
blender_bin = find_blender()
cmd = [
@@ -458,6 +525,7 @@ def export_gltf_for_order_line_task(self, order_line_id: str) -> dict:
"--output_path", str(output_path),
"--asset_library_blend", "",
"--material_map", json.dumps({}),
"--sharp_edges_json", sharp_edges_json,
]
try:
result = subprocess.run(cmd, capture_output=True, text=True, timeout=300)