feat: layout hamburger, media browser filters+previews, billing fixes

- Layout: mobile hamburger menu + overlay backdrop + close button; content area always full-width
- Media browser: filter chips (default still+turntable); advanced toggle for GLB/STL; thumbnail_url previews for non-image types; video hover-play for turntable
- Backend: asset_types multi-filter, thumbnail_url in MediaAssetOut, download proxy endpoint for MinIO/local files
- Admin: "Import Existing Media" button → POST /api/admin/import-media-assets
- Billing: fix invoice create 500 (MissingGreenlet — use selectinload after commit); PDF download uses axios blob instead of bare <a href> (auth header missing); fix storage.upload() accepting str|Path
- SSE task logs: task_logs.py core + router, LiveRenderLog component
- CadPreview: fix infinite loop when no gltf_geometry assets; loading screen before ThreeDViewer render
- render-worker: add trimesh layer to Dockerfile

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-07 00:09:27 +01:00
parent 9bf6e72718
commit f5ca91ee02
25 changed files with 792 additions and 299 deletions
+73
View File
@@ -470,3 +470,76 @@ async def renderer_status(
}
@router.post("/import-media-assets")
async def import_existing_media_assets(
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(require_admin),
):
    """Import existing cad thumbnails and order line renders as MediaAsset records.

    Backfills MediaAsset rows from two legacy sources:
      1. ``cad_files.thumbnail_path`` (completed files) -> ``thumbnail`` assets.
      2. ``order_lines.result_path`` (completed renders) -> ``still``/``turntable``
         assets, using the joined output type's ``is_animation`` flag.

    De-duplication is keyed on ``storage_key``: paths that already have a
    MediaAsset row are counted as skipped. Admin-only (``require_admin``).

    Returns:
        dict with ``created`` and ``skipped`` counts.
    """
    from app.domains.media.models import MediaAsset, MediaAssetType
    from sqlalchemy import text

    created = 0
    skipped = 0

    # 1. CadFiles with thumbnail_path
    cad_result = await db.execute(
        text("SELECT id, thumbnail_path FROM cad_files WHERE thumbnail_path IS NOT NULL AND status = 'completed'")
    )
    for cad_id, thumb_path in cad_result.fetchall():
        # De-dup check: storage_key uniquely identifies an imported file.
        existing = await db.execute(
            select(MediaAsset.id).where(MediaAsset.storage_key == thumb_path).limit(1)
        )
        if existing.scalar_one_or_none():
            skipped += 1
            continue
        ext = str(thumb_path).lower()
        mime = "image/jpeg" if ext.endswith((".jpg", ".jpeg")) else "image/png"
        db.add(MediaAsset(
            cad_file_id=uuid.UUID(str(cad_id)),
            asset_type=MediaAssetType.thumbnail,
            storage_key=str(thumb_path),
            mime_type=mime,
        ))
        created += 1

    # 2. OrderLines with result_path
    ol_result = await db.execute(
        text("""
            SELECT ol.id, ol.result_path, ol.product_id, COALESCE(ot.is_animation, false) as is_animation
            FROM order_lines ol
            LEFT JOIN output_types ot ON ot.id = ol.output_type_id
            WHERE ol.result_path IS NOT NULL AND ol.render_status = 'completed'
        """)
    )
    for ol_id, result_path, product_id, is_animation in ol_result.fetchall():
        existing = await db.execute(
            select(MediaAsset.id).where(MediaAsset.storage_key == result_path).limit(1)
        )
        if existing.scalar_one_or_none():
            skipped += 1
            continue
        ext = str(result_path).lower()
        if ext.endswith((".mp4", ".webm")):
            # BUG FIX: .webm was previously mislabeled as video/mp4.
            mime = "video/webm" if ext.endswith(".webm") else "video/mp4"
            asset_type = MediaAssetType.turntable
        else:
            mime = "image/png" if ext.endswith(".png") else "image/jpeg"
            asset_type = MediaAssetType.turntable if is_animation else MediaAssetType.still
        db.add(MediaAsset(
            order_line_id=uuid.UUID(str(ol_id)),
            product_id=uuid.UUID(str(product_id)) if product_id else None,
            asset_type=asset_type,
            storage_key=str(result_path),
            mime_type=mime,
        ))
        created += 1

    await db.commit()
    return {"created": created, "skipped": skipped}
+78
View File
@@ -0,0 +1,78 @@
"""SSE endpoint for live task log streaming.

Exposes GET /tasks/{task_id}/logs as a Server-Sent Events stream backed by
Redis (a list for the backlog plus a pub/sub channel for live lines).
"""
from __future__ import annotations

import asyncio
import json
import logging

from fastapi import APIRouter, Depends
from fastapi.responses import StreamingResponse

from app.utils.auth import get_current_user
from app.config import settings

# All routes here are mounted under /tasks.
router = APIRouter(prefix="/tasks", tags=["task-logs"])
logger = logging.getLogger(__name__)
@router.get("/{task_id}/logs")
async def stream_task_logs(
    task_id: str,
    current_user=Depends(get_current_user),
):
    """SSE stream of task log lines. Use fetch() with Authorization header on the frontend.

    Protocol: first a ``{"type":"connected"}`` event, then the Redis backlog
    (``task_logs:{task_id}`` list), then live lines from the
    ``task_logs_ch:{task_id}`` pub/sub channel. The stream ends when a line
    with ``level == "done"`` arrives or after a 10-minute hard cap; comment
    heartbeats (``: heartbeat``) keep proxies from closing the connection.
    """
    import redis.asyncio as aioredis

    async def event_stream():
        r = aioredis.from_url(settings.redis_url)
        pubsub = None  # created only after the backlog replay; closed in finally
        try:
            # Tell the client the stream is live before any real data arrives.
            yield "data: {\"type\":\"connected\"}\n\n"

            # Replay already-collected log lines from the Redis list.
            existing = await r.lrange(f"task_logs:{task_id}", 0, -1)
            for line in existing:
                data = line.decode() if isinstance(line, bytes) else line
                yield f"data: {data}\n\n"

            # Subscribe and stream new entries.
            pubsub = r.pubsub()
            await pubsub.subscribe(f"task_logs_ch:{task_id}")

            loop = asyncio.get_running_loop()
            timeout_seconds = 600  # 10 minutes max
            deadline = loop.time() + timeout_seconds
            while loop.time() < deadline:
                try:
                    msg = await asyncio.wait_for(
                        pubsub.get_message(ignore_subscribe_messages=True),
                        timeout=2.0,
                    )
                except asyncio.TimeoutError:
                    yield ": heartbeat\n\n"
                    continue
                if msg and msg["type"] == "message":
                    data = msg["data"].decode() if isinstance(msg["data"], bytes) else msg["data"]
                    yield f"data: {data}\n\n"
                    # Stop streaming once the worker signals completion.
                    try:
                        parsed = json.loads(data)
                        if parsed.get("level") == "done":
                            break
                    except Exception:
                        pass  # non-JSON line: just forward it, keep streaming
                else:
                    # get_message returned None within the 2s window.
                    yield ": heartbeat\n\n"
        except Exception as exc:
            logger.error("SSE stream error for task %s: %s", task_id, exc)
        finally:
            # BUG FIX: the PubSub object holds its own Redis connection and was
            # previously leaked — close it before closing the client.
            if pubsub is not None:
                try:
                    await pubsub.aclose()
                except Exception:
                    pass
            try:
                await r.aclose()
            except Exception:
                pass

    return StreamingResponse(
        event_stream(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            # Disable nginx response buffering so events flush immediately.
            "X-Accel-Buffering": "no",
        },
    )