feat: sharp edge pipeline V02, tessellation presets, media cache-bust, GMSH plan
Sharp Edge Pipeline V02:
- export_step_to_gltf.py: replace BRep_Tool.Polygon3D_s (returns None in XCAF) with
GCPnts_UniformAbscissa curve sampling at 0.3mm step — extracts 17,129 segment pairs
- Inject sharp_edge_pairs + sharp_threshold_deg into GLB extras (scenes[0].extras)
via binary GLB JSON-chunk patching (no extra dependency)
- export_gltf.py: read schaeffler_sharp_edge_pairs from Blender scene custom props,
apply via KD-tree to mark edges sharp=True + seam=True (OCC mm Z-up → Blender transform)
- tools/restore_sharp_marks.py: dual-pass (dihedral angle + OCC pairs), updated coordinate
transform (X, -Z, Y) * 0.001
Tessellation:
- Admin UI: Draft / Standard / Fine preset buttons with active-state highlighting
- Default angular deflection: preview 0.5→0.1 rad, production 0.2→0.05 rad
- export_glb.py: read updated defaults from system_settings
Media / Cache:
- media/service.py: get_download_url appends ?v={file_size_bytes} cache-buster
- media/router.py: Cache-Control: no-cache for all download/thumbnail endpoints
Render pipeline:
- still_render.py / turntable_render.py: shared GPU activation + camera improvements
- render_order_line.py: global render position support
- render_thumbnail.py: updated defaults
Frontend:
- InlineCadViewer: file_size_bytes-aware URL update triggers re-fetch on regeneration
- ThreeDViewer: material panel, part selection, PBR mode improvements
- Admin.tsx: tessellation preset cards, GMSH setting dropdown
- MediaBrowser, ProductDetail, OrderDetail, Orders: various UI improvements
- New: MaterialPanel, GlobalRenderPositionsPanel, StepIndicator components
- New: renderPositions.ts API client
Plans / Docs:
- plan.md: GMSH Frontal-Delaunay tessellation plan (6 tasks)
- LEARNINGS.md: OCC Polygon3D_s None issue + GCPnts fix
- .gitignore: add backend/core (core dump from root process)
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -60,7 +60,7 @@ async def _resolve_thumbnails_bulk(db: AsyncSession, assets: list) -> None:
|
||||
for a in needs:
|
||||
pid = str(a.product_id)
|
||||
if pid in best_still:
|
||||
a.thumbnail_url = f"/api/media/{best_still[pid]}/download"
|
||||
a.thumbnail_url = f"/api/media/{best_still[pid]}/thumbnail"
|
||||
elif pid in product_cad:
|
||||
a.thumbnail_url = f"/api/cad/{product_cad[pid]}/thumbnail"
|
||||
|
||||
@@ -105,6 +105,7 @@ async def browse_media_assets(
|
||||
category_key: str | None = None,
|
||||
render_status: str | None = None,
|
||||
q: str | None = None,
|
||||
exclude_technical: bool = Query(True, description="Exclude GLB/STL/Blend technical assets"),
|
||||
page: int = Query(1, ge=1),
|
||||
page_size: int = Query(50, ge=1, le=200),
|
||||
_user: User = Depends(get_current_user),
|
||||
@@ -125,6 +126,12 @@ async def browse_media_assets(
|
||||
Product.pim_id.label("product_pim_id"),
|
||||
Product.category_key.label("category_key"),
|
||||
OrderLine.render_status.label("render_status"),
|
||||
Product.ebene1.label("product_ebene1"),
|
||||
Product.ebene2.label("product_ebene2"),
|
||||
Product.baureihe.label("product_baureihe"),
|
||||
Product.produkt_baureihe.label("product_produkt_baureihe"),
|
||||
Product.lagertyp.label("product_lagertyp"),
|
||||
Product.name_cad_modell.label("product_name_cad_modell"),
|
||||
)
|
||||
.outerjoin(Product, MediaAsset.product_id == Product.id)
|
||||
.outerjoin(OrderLine, MediaAsset.order_line_id == OrderLine.id)
|
||||
@@ -133,12 +140,21 @@ async def browse_media_assets(
|
||||
)
|
||||
|
||||
# Apply filters
|
||||
_TECHNICAL_TYPES = (
|
||||
MediaAssetType.gltf_geometry,
|
||||
MediaAssetType.gltf_production,
|
||||
MediaAssetType.blend_production,
|
||||
MediaAssetType.stl_low,
|
||||
MediaAssetType.stl_high,
|
||||
)
|
||||
if asset_type:
|
||||
try:
|
||||
at_enum = MediaAssetType(asset_type)
|
||||
stmt = stmt.where(MediaAsset.asset_type == at_enum)
|
||||
except ValueError:
|
||||
pass # invalid type → ignore filter
|
||||
elif exclude_technical:
|
||||
stmt = stmt.where(MediaAsset.asset_type.notin_(_TECHNICAL_TYPES))
|
||||
|
||||
if category_key:
|
||||
stmt = stmt.where(Product.category_key == category_key)
|
||||
@@ -153,6 +169,12 @@ async def browse_media_assets(
|
||||
or_(
|
||||
Product.name.ilike(pattern),
|
||||
Product.pim_id.ilike(pattern),
|
||||
Product.ebene1.ilike(pattern),
|
||||
Product.ebene2.ilike(pattern),
|
||||
Product.baureihe.ilike(pattern),
|
||||
Product.produkt_baureihe.ilike(pattern),
|
||||
Product.lagertyp.ilike(pattern),
|
||||
Product.name_cad_modell.ilike(pattern),
|
||||
)
|
||||
)
|
||||
|
||||
@@ -165,15 +187,30 @@ async def browse_media_assets(
|
||||
offset = (page - 1) * page_size
|
||||
stmt = stmt.offset(offset).limit(page_size)
|
||||
|
||||
rows = await db.execute(stmt)
|
||||
all_rows = (await db.execute(stmt)).all()
|
||||
|
||||
# Pre-assign thumbnail_url so _resolve_thumbnails_bulk can check it
|
||||
raw_assets = [row[0] for row in all_rows]
|
||||
for a in raw_assets:
|
||||
a.thumbnail_url = service.get_thumbnail_url(a)
|
||||
# Resolve fallback thumbnails for non-image assets via product→cad lookup
|
||||
await _resolve_thumbnails_bulk(db, raw_assets)
|
||||
|
||||
items: list[MediaAssetBrowseItem] = []
|
||||
for row in rows.all():
|
||||
for row in all_rows:
|
||||
asset: MediaAsset = row[0]
|
||||
product_name: str | None = row[1]
|
||||
product_pim_id: str | None = row[2]
|
||||
cat_key: str | None = row[3]
|
||||
r_status: str | None = row[4]
|
||||
ebene1: str | None = row[5]
|
||||
ebene2: str | None = row[6]
|
||||
baureihe: str | None = row[7]
|
||||
produkt_baureihe: str | None = row[8]
|
||||
lagertyp: str | None = row[9]
|
||||
name_cad_modell: str | None = row[10]
|
||||
|
||||
thumb = asset.thumbnail_url
|
||||
item = MediaAssetBrowseItem(
|
||||
id=asset.id,
|
||||
asset_type=asset.asset_type,
|
||||
@@ -187,8 +224,14 @@ async def browse_media_assets(
|
||||
product_pim_id=product_pim_id,
|
||||
category_key=cat_key,
|
||||
render_status=r_status,
|
||||
product_ebene1=ebene1,
|
||||
product_ebene2=ebene2,
|
||||
product_baureihe=baureihe,
|
||||
product_produkt_baureihe=produkt_baureihe,
|
||||
product_lagertyp=lagertyp,
|
||||
product_name_cad_modell=name_cad_modell,
|
||||
download_url=f"/api/media/{asset.id}/download",
|
||||
thumbnail_url=service.get_thumbnail_url(asset),
|
||||
thumbnail_url=thumb,
|
||||
)
|
||||
items.append(item)
|
||||
|
||||
@@ -213,6 +256,48 @@ async def get_asset(asset_id: uuid.UUID, db: AsyncSession = Depends(get_db)):
|
||||
return asset
|
||||
|
||||
|
||||
@router.get("/{asset_id}/thumbnail")
async def thumbnail_asset(
    asset_id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
):
    """Serve asset as an inline image — no auth required (UUID is opaque enough).

    Only serves image/video MIME types; returns 404 for binary files.
    """
    from pathlib import Path

    from fastapi.responses import FileResponse, Response

    asset = await service.get_media_asset(db, asset_id)
    if asset is None:
        raise HTTPException(404, "Asset not found")

    content_type = asset.mime_type or ""
    # Only image/video assets have an inline preview; everything else is 404.
    if not content_type.startswith(("image/", "video/")):
        raise HTTPException(404, "Not a previewable asset")

    from app.config import settings

    key = asset.storage_key
    key_path = Path(key)
    local_path = key_path if key_path.is_absolute() else Path(settings.upload_dir) / key

    # Legacy records carry absolute "/shared/renders/..." keys; when that path
    # is gone, remap the last two segments into upload_dir/renders/.
    if "/shared/renders/" in key and not local_path.exists():
        segments = key.split("/")
        if len(segments) >= 2:
            fallback = Path(settings.upload_dir) / "renders" / segments[-2] / segments[-1]
            if fallback.exists():
                local_path = fallback

    cache_headers = {"Cache-Control": "max-age=86400, public"}
    if local_path.exists():
        return FileResponse(str(local_path), media_type=content_type, headers=cache_headers)

    # Not on local disk — best-effort fetch from object storage (MinIO).
    try:
        from app.core.storage import get_storage

        blob = get_storage().download_bytes(key)
        return Response(content=blob, media_type=content_type, headers=cache_headers)
    except Exception:
        raise HTTPException(404, "File not available")
|
||||
|
||||
|
||||
@router.api_route("/{asset_id}/download", methods=["GET", "HEAD"])
|
||||
async def download_asset(
|
||||
asset_id: uuid.UUID,
|
||||
@@ -250,7 +335,7 @@ async def download_asset(
|
||||
fname = f"{asset.asset_type.value}_{asset_id}.{ext or 'bin'}"
|
||||
return FileResponse(
|
||||
str(candidate), media_type=mime, filename=fname,
|
||||
headers={"Cache-Control": "max-age=3600, public"},
|
||||
headers={"Cache-Control": "no-cache"},
|
||||
)
|
||||
|
||||
# Fall back to MinIO
|
||||
@@ -264,7 +349,7 @@ async def download_asset(
|
||||
media_type=mime,
|
||||
headers={
|
||||
"Content-Disposition": f"attachment; filename={fname}",
|
||||
"Cache-Control": "max-age=3600, public",
|
||||
"Cache-Control": "no-cache",
|
||||
},
|
||||
)
|
||||
except Exception:
|
||||
@@ -346,3 +431,58 @@ async def delete_asset_permanent(asset_id: uuid.UUID, db: AsyncSession = Depends
|
||||
if not deleted:
|
||||
raise HTTPException(404, "Asset not found")
|
||||
return {"ok": True}
|
||||
|
||||
|
||||
@router.post("/cleanup-orphaned")
async def cleanup_orphaned_assets(
    _user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Delete all MediaAsset DB records whose backing file doesn't exist on disk or in MinIO.

    Returns counts of checked/deleted records. Admin only.
    """
    import logging
    from pathlib import Path

    from app.config import settings
    from app.core.storage import get_storage

    logger = logging.getLogger(__name__)
    storage = get_storage()

    def _backing_file_present(key: str) -> bool:
        # 1) Direct local-disk hit (absolute key, or relative to upload_dir).
        key_path = Path(key)
        primary = key_path if key_path.is_absolute() else Path(settings.upload_dir) / key
        if primary.exists():
            return True
        # 2) Legacy "/shared/renders/..." keys remapped into upload_dir/renders/.
        if "/shared/renders/" in key:
            segments = key.split("/")
            if len(segments) >= 2:
                legacy = Path(settings.upload_dir) / "renders" / segments[-2] / segments[-1]
                if legacy.exists():
                    return True
        # 3) Last resort: object storage — a failed fetch means the asset is orphaned.
        try:
            storage.download_bytes(key)
        except Exception:
            return False
        return True

    rows = await db.execute(select(MediaAsset).where(MediaAsset.is_archived == False))  # noqa: E712
    candidates = rows.scalars().all()

    removed: list[str] = []
    for record in candidates:
        if _backing_file_present(record.storage_key):
            continue
        logger.info("Cleanup: deleting orphaned asset %s (%s)", record.id, record.storage_key)
        await db.delete(record)
        removed.append(str(record.id))

    # Commit only when something was actually deleted.
    if removed:
        await db.commit()

    return {
        "checked": len(candidates),
        "deleted": len(removed),
        "deleted_ids": removed,
    }
|
||||
|
||||
Reference in New Issue
Block a user