Files
HartOMat/backend/app/domains/media/router.py
T
Hartmut ca62319688 feat: sharp edge pipeline V02, tessellation presets, media cache-bust, GMSH plan
Sharp Edge Pipeline V02:
- export_step_to_gltf.py: replace BRep_Tool.Polygon3D_s (returns None in XCAF) with
  GCPnts_UniformAbscissa curve sampling at 0.3mm step — extracts 17,129 segment pairs
- Inject sharp_edge_pairs + sharp_threshold_deg into GLB extras (scenes[0].extras)
  via binary GLB JSON-chunk patching (no extra dependency)
- export_gltf.py: read schaeffler_sharp_edge_pairs from Blender scene custom props,
  apply via KD-tree to mark edges sharp=True + seam=True (OCC mm Z-up → Blender transform)
- tools/restore_sharp_marks.py: dual-pass (dihedral angle + OCC pairs), updated coordinate
  transform (X, -Z, Y) * 0.001

Tessellation:
- Admin UI: Draft / Standard / Fine preset buttons with active-state highlighting
- Default angular deflection: preview 0.5→0.1 rad, production 0.2→0.05 rad
- export_glb.py: read updated defaults from system_settings

Media / Cache:
- media/service.py: get_download_url appends ?v={file_size_bytes} cache-buster
- media/router.py: Cache-Control: no-cache for all download/thumbnail endpoints

Render pipeline:
- still_render.py / turntable_render.py: shared GPU activation + camera improvements
- render_order_line.py: global render position support
- render_thumbnail.py: updated defaults

Frontend:
- InlineCadViewer: file_size_bytes-aware URL update triggers re-fetch on regeneration
- ThreeDViewer: material panel, part selection, PBR mode improvements
- Admin.tsx: tessellation preset cards, GMSH setting dropdown
- MediaBrowser, ProductDetail, OrderDetail, Orders: various UI improvements
- New: MaterialPanel, GlobalRenderPositionsPanel, StepIndicator components
- New: renderPositions.ts API client

Plans / Docs:
- plan.md: GMSH Frontal-Delaunay tessellation plan (6 tasks)
- LEARNINGS.md: OCC Polygon3D_s None issue + GCPnts fix
- .gitignore: add backend/core (core dump from root process)

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-11 14:40:36 +01:00

489 lines
18 KiB
Python

"""MediaAsset router — /api/media."""
import io
import math
import uuid
import zipfile
from fastapi import APIRouter, Depends, HTTPException, Query
from fastapi.responses import StreamingResponse
from sqlalchemy import select, func
from sqlalchemy.ext.asyncio import AsyncSession
from app.database import get_db
from app.domains.auth.models import User
from app.domains.media.models import MediaAsset, MediaAssetType
from app.domains.media.schemas import MediaAssetOut, MediaAssetBrowseItem, MediaAssetBrowseResponse
from app.domains.media import service
from app.utils.auth import get_current_user
router = APIRouter(prefix="/api/media", tags=["media"], redirect_slashes=False)
async def _resolve_thumbnails_bulk(db: AsyncSession, assets: list) -> None:
    """Fill in thumbnail_url for assets that lack one, mirroring product-page priority.

    For each asset that has a product_id but no thumbnail_url yet:
      1. Latest non-archived 'still' MediaAsset of the same product (rendered preview)
      2. The product's linked CadFile thumbnail endpoint (/api/cad/{id}/thumbnail)
    """
    pending = [asset for asset in assets if not asset.thumbnail_url and asset.product_id]
    if not pending:
        return
    pids = list({asset.product_id for asset in pending})
    # Newest 'still' per product: DISTINCT ON (product_id) ORDER BY created_at DESC.
    still_result = await db.execute(
        select(MediaAsset.product_id, MediaAsset.id)
        .where(
            MediaAsset.product_id.in_(pids),
            MediaAsset.asset_type == MediaAssetType.still,
            MediaAsset.is_archived == False,  # noqa: E712
        )
        .order_by(MediaAsset.product_id, MediaAsset.created_at.desc())
        .distinct(MediaAsset.product_id)
    )
    latest_still = {str(product_id): str(still_id) for product_id, still_id in still_result.all()}
    # Fallback source: the product's cad_file_id → CAD thumbnail endpoint.
    from app.domains.products.models import Product
    from sqlalchemy import text
    # products table is RLS-protected — bypass for this internal read-only lookup.
    await db.execute(text("SET LOCAL app.current_tenant_id = 'bypass'"))
    cad_result = await db.execute(
        select(Product.id, Product.cad_file_id).where(Product.id.in_(pids))
    )
    cad_by_product = {
        str(product_id): str(cad_id) for product_id, cad_id in cad_result.all() if cad_id
    }
    for asset in pending:
        lookup = str(asset.product_id)
        if lookup in latest_still:
            asset.thumbnail_url = f"/api/media/{latest_still[lookup]}/thumbnail"
        elif lookup in cad_by_product:
            asset.thumbnail_url = f"/api/cad/{cad_by_product[lookup]}/thumbnail"
@router.get("", response_model=list[MediaAssetOut])
@router.get("/", response_model=list[MediaAssetOut], include_in_schema=False)
async def list_assets(
    product_id: uuid.UUID | None = None,
    order_line_id: uuid.UUID | None = None,
    cad_file_id: uuid.UUID | None = None,
    asset_type: MediaAssetType | None = None,
    asset_types: list[MediaAssetType] = Query(default=[]),
    skip: int = Query(0, ge=0),
    limit: int = Query(50, ge=1, le=500),
    sort_by: str = Query("created_at"),
    sort_dir: str = Query("desc"),
    _user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """List media assets with optional filters, sorting, and pagination.

    Resolves download/thumbnail URLs on each returned asset, then fills in
    missing thumbnails via the shared bulk fallback helper.
    """
    found = await service.list_media_assets(
        db,
        product_id=product_id,
        order_line_id=order_line_id,
        cad_file_id=cad_file_id,
        asset_type=asset_type,
        asset_types=asset_types or None,
        skip=skip,
        limit=limit,
        sort_by=sort_by,
        sort_dir=sort_dir,
    )
    for asset in found:
        asset.download_url = service.get_download_url(asset)
        asset.thumbnail_url = service.get_thumbnail_url(asset)
    await _resolve_thumbnails_bulk(db, found)
    return found
@router.get("/assets", response_model=MediaAssetBrowseResponse)
async def browse_media_assets(
    asset_type: str | None = None,
    category_key: str | None = None,
    render_status: str | None = None,
    q: str | None = None,
    exclude_technical: bool = Query(True, description="Exclude GLB/STL/Blend technical assets"),
    page: int = Query(1, ge=1),
    page_size: int = Query(50, ge=1, le=200),
    _user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
) -> MediaAssetBrowseResponse:
    """Media browser: server-side filtered + paginated asset list with product context."""
    from app.domains.products.models import Product
    from app.domains.orders.models import OrderLine
    from sqlalchemy import desc
    # Build query with LEFT JOINs to get product and order_line context.
    # MediaAsset has direct product_id FK and order_line_id FK.
    # OrderLine has render_status which we also want to surface.
    # NOTE: the column order here fixes the positional row indices used in the
    # unpacking loop below — keep both in sync.
    stmt = (
        select(
            MediaAsset,
            Product.name.label("product_name"),
            Product.pim_id.label("product_pim_id"),
            Product.category_key.label("category_key"),
            OrderLine.render_status.label("render_status"),
            Product.ebene1.label("product_ebene1"),
            Product.ebene2.label("product_ebene2"),
            Product.baureihe.label("product_baureihe"),
            Product.produkt_baureihe.label("product_produkt_baureihe"),
            Product.lagertyp.label("product_lagertyp"),
            Product.name_cad_modell.label("product_name_cad_modell"),
        )
        .outerjoin(Product, MediaAsset.product_id == Product.id)
        .outerjoin(OrderLine, MediaAsset.order_line_id == OrderLine.id)
        .where(MediaAsset.is_archived == False)  # noqa: E712
        .order_by(desc(MediaAsset.created_at))
    )
    # Apply filters
    # Technical formats hidden from the browser by default (see exclude_technical).
    _TECHNICAL_TYPES = (
        MediaAssetType.gltf_geometry,
        MediaAssetType.gltf_production,
        MediaAssetType.blend_production,
        MediaAssetType.stl_low,
        MediaAssetType.stl_high,
    )
    if asset_type:
        try:
            at_enum = MediaAssetType(asset_type)
            stmt = stmt.where(MediaAsset.asset_type == at_enum)
        except ValueError:
            pass  # invalid type → ignore filter
    elif exclude_technical:
        # Only applied when no explicit asset_type filter was requested.
        stmt = stmt.where(MediaAsset.asset_type.notin_(_TECHNICAL_TYPES))
    if category_key:
        stmt = stmt.where(Product.category_key == category_key)
    if render_status:
        stmt = stmt.where(OrderLine.render_status == render_status)
    if q:
        # Case-insensitive free-text search across product identity/taxonomy columns.
        pattern = f"%{q}%"
        from sqlalchemy import or_
        stmt = stmt.where(
            or_(
                Product.name.ilike(pattern),
                Product.pim_id.ilike(pattern),
                Product.ebene1.ilike(pattern),
                Product.ebene2.ilike(pattern),
                Product.baureihe.ilike(pattern),
                Product.produkt_baureihe.ilike(pattern),
                Product.lagertyp.ilike(pattern),
                Product.name_cad_modell.ilike(pattern),
            )
        )
    # Count total matching rows (count over the filtered query as a subquery,
    # so the same WHERE/JOIN conditions apply).
    count_stmt = select(func.count()).select_from(stmt.subquery())
    total_result = await db.execute(count_stmt)
    total = total_result.scalar_one()
    # Paginate
    offset = (page - 1) * page_size
    stmt = stmt.offset(offset).limit(page_size)
    all_rows = (await db.execute(stmt)).all()
    # Pre-assign thumbnail_url so _resolve_thumbnails_bulk can check it
    raw_assets = [row[0] for row in all_rows]
    for a in raw_assets:
        a.thumbnail_url = service.get_thumbnail_url(a)
    # Resolve fallback thumbnails for non-image assets via product→cad lookup
    await _resolve_thumbnails_bulk(db, raw_assets)
    items: list[MediaAssetBrowseItem] = []
    for row in all_rows:
        # Positional unpack — indices must match the select(...) column order above.
        asset: MediaAsset = row[0]
        product_name: str | None = row[1]
        product_pim_id: str | None = row[2]
        cat_key: str | None = row[3]
        r_status: str | None = row[4]
        ebene1: str | None = row[5]
        ebene2: str | None = row[6]
        baureihe: str | None = row[7]
        produkt_baureihe: str | None = row[8]
        lagertyp: str | None = row[9]
        name_cad_modell: str | None = row[10]
        # thumbnail_url was resolved in-place on the asset above.
        thumb = asset.thumbnail_url
        item = MediaAssetBrowseItem(
            id=asset.id,
            asset_type=asset.asset_type,
            file_path=asset.storage_key,
            file_size_bytes=asset.file_size_bytes,
            mime_type=asset.mime_type,
            created_at=asset.created_at,
            order_line_id=asset.order_line_id,
            product_id=asset.product_id,
            product_name=product_name,
            product_pim_id=product_pim_id,
            category_key=cat_key,
            render_status=r_status,
            product_ebene1=ebene1,
            product_ebene2=ebene2,
            product_baureihe=baureihe,
            product_produkt_baureihe=produkt_baureihe,
            product_lagertyp=lagertyp,
            product_name_cad_modell=name_cad_modell,
            download_url=f"/api/media/{asset.id}/download",
            thumbnail_url=thumb,
        )
        items.append(item)
    # At least one page even when there are zero results.
    pages = max(1, math.ceil(total / page_size))
    return MediaAssetBrowseResponse(
        items=items,
        total=total,
        page=page,
        page_size=page_size,
        pages=pages,
    )
@router.get("/{asset_id}", response_model=MediaAssetOut)
async def get_asset(asset_id: uuid.UUID, db: AsyncSession = Depends(get_db)):
    """Fetch a single MediaAsset and resolve its download/thumbnail URLs."""
    found = await service.get_media_asset(db, asset_id)
    if not found:
        raise HTTPException(404, "Asset not found")
    found.download_url = service.get_download_url(found)
    found.thumbnail_url = service.get_thumbnail_url(found)
    await _resolve_thumbnails_bulk(db, [found])
    return found
@router.get("/{asset_id}/thumbnail")
async def thumbnail_asset(
    asset_id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
):
    """Serve asset as an inline image — no auth required (UUID is opaque enough).

    Only serves image/video MIME types; returns 404 for binary files.
    Tries the local filesystem first (with legacy-path remapping), then
    falls back to object storage.
    """
    from fastapi.responses import FileResponse, Response
    from pathlib import Path
    asset = await service.get_media_asset(db, asset_id)
    if not asset:
        raise HTTPException(404, "Asset not found")
    mime = asset.mime_type or ""
    if not mime.startswith(("image/", "video/")):
        raise HTTPException(404, "Not a previewable asset")
    key = asset.storage_key
    from app.config import settings
    path = Path(key)
    if not path.is_absolute():
        path = Path(settings.upload_dir) / key
    # Legacy layout: /shared/renders/{uuid}/{file} now lives under UPLOAD_DIR/renders.
    if not path.exists() and "/shared/renders/" in key:
        segments = key.split("/")
        if len(segments) >= 2:
            alt = Path(settings.upload_dir) / "renders" / segments[-2] / segments[-1]
            if alt.exists():
                path = alt
    if path.exists():
        return FileResponse(
            str(path), media_type=mime,
            headers={"Cache-Control": "max-age=86400, public"},
        )
    # Not on local disk — try object storage (MinIO).
    try:
        from app.core.storage import get_storage
        blob = get_storage().download_bytes(key)
        return Response(content=blob, media_type=mime,
                        headers={"Cache-Control": "max-age=86400, public"})
    except Exception:
        raise HTTPException(404, "File not available")
@router.api_route("/{asset_id}/download", methods=["GET", "HEAD"])
async def download_asset(
    asset_id: uuid.UUID,
    _user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Proxy file content directly — avoids internal MinIO hostname issues.

    Resolution order:
      1. Local file (absolute storage_key, or relative to UPLOAD_DIR).
      2. Legacy /shared/renders/{uuid}/{file} layout remapped under UPLOAD_DIR/renders.
      3. Object storage (MinIO) byte download.
    Raises 404 when the file is found in none of these locations.
    Cache-Control: no-cache so regenerated files aren't served stale.
    """
    from fastapi.responses import FileResponse, Response
    from pathlib import Path
    asset = await service.get_media_asset(db, asset_id)
    if not asset:
        raise HTTPException(404, "Asset not found")
    key = asset.storage_key
    mime = asset.mime_type or "application/octet-stream"
    # Local file path (absolute or relative to UPLOAD_DIR)
    from app.config import settings
    candidate = Path(key)
    if not candidate.is_absolute():
        candidate = Path(settings.upload_dir) / key
    # Legacy path remapping: /shared/renders/{uuid}/{file} → UPLOAD_DIR/renders/{uuid}/{file}
    if not candidate.exists() and "/shared/renders/" in key:
        import logging
        parts = key.split("/")
        if len(parts) >= 2:
            remapped = Path(settings.upload_dir) / "renders" / parts[-2] / parts[-1]
            if remapped.exists():
                # BUG FIX: format string was "%s%s", which concatenated the two
                # paths with no separator, making the log line unreadable.
                logging.getLogger(__name__).warning(
                    "Remapped legacy path %s -> %s", key, remapped
                )
                candidate = remapped
    if candidate.exists():
        # Download filename: {asset_type}_{id}.{ext}, defaulting to .bin.
        ext = candidate.suffix.lstrip(".")
        fname = f"{asset.asset_type.value}_{asset_id}.{ext or 'bin'}"
        return FileResponse(
            str(candidate), media_type=mime, filename=fname,
            headers={"Cache-Control": "no-cache"},
        )
    # Fall back to MinIO
    try:
        from app.core.storage import get_storage
        data = get_storage().download_bytes(key)
        ext = key.rsplit(".", 1)[-1] if "." in key else "bin"
        fname = f"{asset.asset_type.value}_{asset_id}.{ext}"
        return Response(
            content=data,
            media_type=mime,
            headers={
                "Content-Disposition": f"attachment; filename={fname}",
                "Cache-Control": "no-cache",
            },
        )
    except Exception:
        raise HTTPException(404, "File not available")
@router.post("/zip")
async def zip_download(
    asset_ids: list[uuid.UUID],
    _user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Bundle the requested assets into a single ZIP download.

    Unknown asset IDs are silently skipped; 404 only when none resolve.
    Files that cannot be read (disk or MinIO) are skipped with a warning so
    one broken asset doesn't fail the whole export.
    """
    assets = []
    for aid in asset_ids:
        a = await service.get_media_asset(db, aid)
        if a:
            assets.append(a)
    if not assets:
        raise HTTPException(404, "No assets found")

    def generate():
        import logging
        from pathlib import Path
        from app.config import settings  # hoisted: loop-invariant import
        from app.core.storage import get_storage
        logger = logging.getLogger(__name__)
        buf = io.BytesIO()
        seen_names: dict[str, int] = {}
        with zipfile.ZipFile(buf, "w", zipfile.ZIP_DEFLATED) as zf:
            storage = get_storage()
            for a in assets:
                key = a.storage_key
                # Use filename from storage_key (always has correct extension)
                original_name = Path(key).name
                ext = Path(key).suffix.lstrip(".") or (a.mime_type or "").split("/")[-1] or "bin"
                base = original_name if original_name else f"{a.asset_type.value}_{a.id}.{ext}"
                # Deduplicate filenames within the ZIP: second occurrence of
                # "x.png" becomes "x_1.png", third "x_2.png", etc.
                if base in seen_names:
                    seen_names[base] += 1
                    stem = Path(base).stem
                    suffix = Path(base).suffix
                    fname = f"{stem}_{seen_names[base]}{suffix}"
                else:
                    seen_names[base] = 0
                    fname = base
                try:
                    # Check absolute path first (local filesystem)
                    candidate = Path(key)
                    if not candidate.is_absolute():
                        candidate = Path(settings.upload_dir) / key
                    if candidate.exists():
                        data = candidate.read_bytes()
                    else:
                        data = storage.download_bytes(key)
                    zf.writestr(fname, data)
                except Exception as exc:
                    # BUG FIX: format string was "%s%s", which mashed the asset
                    # id and the exception together with no separator.
                    logger.warning("ZIP: skipping asset %s: %s", a.id, exc)
        # Single yield: the ZIP is built fully in memory, then streamed out.
        yield buf.getvalue()

    return StreamingResponse(
        generate(),
        media_type="application/zip",
        headers={"Content-Disposition": "attachment; filename=media-export.zip"},
    )
@router.delete("/{asset_id}")
async def archive_asset(asset_id: uuid.UUID, db: AsyncSession = Depends(get_db)):
    """Soft-delete: flag the asset as archived via the service layer."""
    archived = await service.archive_media_asset(db, asset_id)
    if not archived:
        raise HTTPException(404, "Asset not found")
    return {"ok": True}
@router.delete("/{asset_id}/permanent")
async def delete_asset_permanent(asset_id: uuid.UUID, db: AsyncSession = Depends(get_db)):
    """Permanently remove a MediaAsset record from the database."""
    was_removed = await service.delete_media_asset(db, asset_id)
    if not was_removed:
        raise HTTPException(404, "Asset not found")
    return {"ok": True}
@router.post("/cleanup-orphaned")
async def cleanup_orphaned_assets(
    _user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Delete all MediaAsset DB records whose backing file doesn't exist on disk or in MinIO.

    Returns counts of checked/deleted records. Admin only.
    """
    import logging
    from pathlib import Path
    from app.config import settings
    from app.core.storage import get_storage
    logger = logging.getLogger(__name__)
    storage = get_storage()

    def _file_exists(key: str) -> bool:
        # 1) Local filesystem: absolute key, or relative to UPLOAD_DIR.
        local = Path(key)
        if not local.is_absolute():
            local = Path(settings.upload_dir) / key
        if local.exists():
            return True
        # 2) Legacy /shared/renders/{uuid}/{file} layout under UPLOAD_DIR/renders.
        if "/shared/renders/" in key:
            segments = key.split("/")
            if len(segments) >= 2:
                legacy = Path(settings.upload_dir) / "renders" / segments[-2] / segments[-1]
                if legacy.exists():
                    return True
        # 3) Object storage — NOTE(review): this downloads the whole object just
        # to probe existence; acceptable for a manual admin action.
        try:
            storage.download_bytes(key)
        except Exception:
            return False
        return True

    result = await db.execute(select(MediaAsset).where(MediaAsset.is_archived == False))  # noqa: E712
    all_assets = result.scalars().all()
    removed: list[str] = []
    for asset in all_assets:
        if _file_exists(asset.storage_key):
            continue
        logger.info("Cleanup: deleting orphaned asset %s (%s)", asset.id, asset.storage_key)
        await db.delete(asset)
        removed.append(str(asset.id))
    # Only commit when something was actually deleted.
    if removed:
        await db.commit()
    return {
        "checked": len(all_assets),
        "deleted": len(removed),
        "deleted_ids": removed,
    }