feat: layout hamburger, media browser filters+previews, billing fixes
- Layout: mobile hamburger menu + overlay backdrop + close button; content area always full-width - Media browser: filter chips (default still+turntable); advanced toggle for GLB/STL; thumbnail_url previews for non-image types; video hover-play for turntable - Backend: asset_types multi-filter, thumbnail_url in MediaAssetOut, download proxy endpoint for MinIO/local files - Admin: "Import Existing Media" button → POST /api/admin/import-media-assets - Billing: fix invoice create 500 (MissingGreenlet — use selectinload after commit); PDF download uses axios blob instead of bare <a href> (auth header missing); fix storage.upload() accepting str|Path - SSE task logs: task_logs.py core + router, LiveRenderLog component - CadPreview: fix infinite loop when no gltf_geometry assets; loading screen before ThreeDViewer render - render-worker: add trimesh layer to Dockerfile Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -470,3 +470,76 @@ async def renderer_status(
|
||||
}
|
||||
|
||||
|
||||
@router.post("/import-media-assets")
async def import_existing_media_assets(
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(require_admin),
):
    """Import existing CAD thumbnails and order-line renders as MediaAsset records.

    Scans ``cad_files.thumbnail_path`` and ``order_lines.result_path`` for
    completed entries and creates one MediaAsset per file that is not already
    registered (de-duplicated on ``storage_key``).

    Returns:
        dict with ``created`` and ``skipped`` row counts.
    """
    from app.domains.media.models import MediaAsset, MediaAssetType
    from sqlalchemy import text

    created = 0
    skipped = 0

    # Fetch every known storage key once, instead of issuing one SELECT per
    # candidate row (the previous N+1 pattern).  Also lets us de-duplicate
    # within this run, which the per-row check could not do before commit.
    known_keys_result = await db.execute(select(MediaAsset.storage_key))
    known_keys = {row[0] for row in known_keys_result.fetchall()}

    # 1. CadFiles with thumbnail_path
    cad_result = await db.execute(
        text("SELECT id, thumbnail_path FROM cad_files WHERE thumbnail_path IS NOT NULL AND status = 'completed'")
    )
    for cad_id, thumb_path in cad_result.fetchall():
        key = str(thumb_path)
        if key in known_keys:
            skipped += 1
            continue
        ext = key.lower()
        mime = "image/jpeg" if ext.endswith((".jpg", ".jpeg")) else "image/png"
        db.add(MediaAsset(
            cad_file_id=uuid.UUID(str(cad_id)),
            asset_type=MediaAssetType.thumbnail,
            storage_key=key,
            mime_type=mime,
        ))
        known_keys.add(key)
        created += 1

    # 2. OrderLines with result_path
    ol_result = await db.execute(
        text("""
            SELECT ol.id, ol.result_path, ol.product_id, COALESCE(ot.is_animation, false) as is_animation
            FROM order_lines ol
            LEFT JOIN output_types ot ON ot.id = ol.output_type_id
            WHERE ol.result_path IS NOT NULL AND ol.render_status = 'completed'
        """)
    )
    for ol_id, result_path, product_id, is_animation in ol_result.fetchall():
        key = str(result_path)
        if key in known_keys:
            skipped += 1
            continue
        ext = key.lower()
        if ext.endswith((".mp4", ".webm")):
            # BUG FIX: .webm files were previously labelled video/mp4.
            mime = "video/webm" if ext.endswith(".webm") else "video/mp4"
            asset_type = MediaAssetType.turntable
        else:
            mime = "image/png" if ext.endswith(".png") else "image/jpeg"
            asset_type = MediaAssetType.turntable if is_animation else MediaAssetType.still
        db.add(MediaAsset(
            order_line_id=uuid.UUID(str(ol_id)),
            product_id=uuid.UUID(str(product_id)) if product_id else None,
            asset_type=asset_type,
            storage_key=key,
            mime_type=mime,
        ))
        known_keys.add(key)
        created += 1

    await db.commit()
    return {"created": created, "skipped": skipped}
|
||||
|
||||
|
||||
@@ -0,0 +1,78 @@
|
||||
"""SSE endpoint for live task log streaming."""
|
||||
from __future__ import annotations
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
from fastapi import APIRouter, Depends
|
||||
from fastapi.responses import StreamingResponse
|
||||
from app.utils.auth import get_current_user
|
||||
from app.config import settings
|
||||
|
||||
router = APIRouter(prefix="/tasks", tags=["task-logs"])
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@router.get("/{task_id}/logs")
async def stream_task_logs(
    task_id: str,
    current_user=Depends(get_current_user),
):
    """SSE stream of task log lines. Use fetch() with Authorization header on the frontend.

    Replays any lines already buffered in the Redis list ``task_logs:{task_id}``,
    then subscribes to ``task_logs_ch:{task_id}`` and forwards new entries until
    either a ``level == "done"`` message arrives or a 10-minute deadline passes.
    Heartbeat comments keep proxies from closing the idle connection.
    """
    import redis.asyncio as aioredis

    async def event_stream():
        r = aioredis.from_url(settings.redis_url)
        pubsub = None  # so the finally block is safe if setup fails early
        try:
            # Tell the client the stream is live before any Redis round-trips.
            yield "data: {\"type\":\"connected\"}\n\n"

            # Replay log lines buffered so far.
            existing = await r.lrange(f"task_logs:{task_id}", 0, -1)
            for line in existing:
                data = line.decode() if isinstance(line, bytes) else line
                yield f"data: {data}\n\n"

            # Subscribe and stream new entries.
            pubsub = r.pubsub()
            await pubsub.subscribe(f"task_logs_ch:{task_id}")

            timeout_seconds = 600  # 10 minutes max
            # get_event_loop() is deprecated inside a coroutine; use the running loop.
            loop = asyncio.get_running_loop()
            deadline = loop.time() + timeout_seconds

            while loop.time() < deadline:
                try:
                    msg = await asyncio.wait_for(
                        pubsub.get_message(ignore_subscribe_messages=True),
                        timeout=2.0,
                    )
                except asyncio.TimeoutError:
                    yield ": heartbeat\n\n"
                    continue
                if msg and msg["type"] == "message":
                    data = msg["data"].decode() if isinstance(msg["data"], bytes) else msg["data"]
                    yield f"data: {data}\n\n"
                    # Stop streaming once the producer signals completion.
                    try:
                        parsed = json.loads(data)
                        if parsed.get("level") == "done":
                            break
                    except Exception:
                        pass
                else:
                    # get_message yielded nothing within 2s: keep the pipe warm.
                    yield ": heartbeat\n\n"
        except Exception as exc:
            logger.error("SSE stream error for task %s: %s", task_id, exc)
        finally:
            # BUG FIX: the pubsub connection was previously leaked — close it
            # before the client so Redis does not accumulate dead subscribers.
            if pubsub is not None:
                try:
                    await pubsub.unsubscribe(f"task_logs_ch:{task_id}")
                    await pubsub.aclose()
                except Exception:
                    pass
            try:
                await r.aclose()
            except Exception:
                pass

    return StreamingResponse(
        event_stream(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "X-Accel-Buffering": "no",
        },
    )
|
||||
@@ -56,8 +56,9 @@ class MinIOStorage:
|
||||
except Exception as exc:
|
||||
logger.warning("Could not create MinIO bucket %s: %s", self._bucket, exc)
|
||||
|
||||
def upload(self, local_path: Path | str, object_key: str) -> str:
    """Upload a local file to MinIO. Returns the object_key.

    Args:
        local_path: Path (or plain string path) of the file to upload; string
            callers are normalized to ``Path`` so ``.name`` works below.
        object_key: Destination key inside the configured bucket.

    Returns:
        The object_key, unchanged, for convenient chaining into DB records.
    """
    local_path = Path(local_path)
    self._client.upload_file(str(local_path), self._bucket, object_key)
    logger.debug("Uploaded %s → minio://%s/%s", local_path.name, self._bucket, object_key)
    return object_key
|
||||
|
||||
@@ -0,0 +1,24 @@
|
||||
"""Redis-backed task log store for SSE streaming."""
|
||||
import json
|
||||
import time
|
||||
import logging
|
||||
from app.config import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
TASK_LOG_TTL = 3600 # 1 hour
|
||||
|
||||
|
||||
def log_task_event(task_id: str, message: str, level: str = "info") -> None:
    """Append a log line to Redis list and publish to channel. Safe to call from Celery tasks."""
    try:
        import redis

        client = redis.from_url(settings.redis_url)
        payload = json.dumps({"ts": time.time(), "level": level, "msg": message, "task_id": task_id})
        list_key = f"task_logs:{task_id}"
        channel = f"task_logs_ch:{task_id}"
        # One round-trip: append, refresh TTL, and notify live subscribers.
        pipe = client.pipeline()
        pipe.rpush(list_key, payload)
        pipe.expire(list_key, TASK_LOG_TTL)
        pipe.publish(channel, payload)
        pipe.execute()
        client.close()
    except Exception as exc:
        # Best-effort by design: logging must never break the calling task.
        logger.debug("log_task_event failed: %s", exc)
|
||||
@@ -2,7 +2,7 @@
|
||||
from __future__ import annotations
|
||||
import uuid
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from fastapi.responses import RedirectResponse
|
||||
from fastapi.responses import Response
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.database import get_db
|
||||
@@ -83,8 +83,12 @@ async def download_invoice_pdf(
|
||||
if not key:
|
||||
raise HTTPException(status_code=503, detail="PDF generation unavailable (WeasyPrint not installed)")
|
||||
from app.core.storage import get_storage
|
||||
url = get_storage().get_url(key)
|
||||
return RedirectResponse(url=url)
|
||||
pdf_bytes = get_storage().download_bytes(key)
|
||||
return Response(
|
||||
content=pdf_bytes,
|
||||
media_type="application/pdf",
|
||||
headers={"Content-Disposition": f"attachment; filename=invoice-{invoice_id}.pdf"},
|
||||
)
|
||||
|
||||
|
||||
@invoice_router.delete("/invoices/{invoice_id}", status_code=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@@ -3,7 +3,7 @@ from __future__ import annotations
|
||||
import uuid
|
||||
from datetime import date, datetime
|
||||
from decimal import Decimal
|
||||
from pydantic import BaseModel
|
||||
from pydantic import BaseModel, computed_field
|
||||
|
||||
|
||||
class InvoiceLineCreate(BaseModel):
|
||||
@@ -54,4 +54,11 @@ class InvoiceOut(BaseModel):
|
||||
created_at: datetime
|
||||
lines: list[InvoiceLineOut] = []
|
||||
|
||||
@computed_field # type: ignore[misc]
|
||||
@property
|
||||
def pdf_url(self) -> str | None:
|
||||
if self.pdf_key:
|
||||
return f"/api/billing/invoices/{self.id}/pdf"
|
||||
return None
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
|
||||
@@ -267,8 +267,7 @@ async def create_invoice(
|
||||
invoice.total_net = total_net
|
||||
invoice.total_vat = (total_net * vat_rate).quantize(Decimal("0.01"))
|
||||
await db.commit()
|
||||
await db.refresh(invoice)
|
||||
return invoice
|
||||
return await get_invoice(db, invoice.id)
|
||||
|
||||
|
||||
async def get_invoices(
|
||||
|
||||
@@ -12,15 +12,17 @@ from app.domains.media.models import MediaAssetType
|
||||
from app.domains.media.schemas import MediaAssetOut
|
||||
from app.domains.media import service
|
||||
|
||||
router = APIRouter(prefix="/api/media", tags=["media"])
|
||||
router = APIRouter(prefix="/api/media", tags=["media"], redirect_slashes=False)
|
||||
|
||||
|
||||
@router.get("/", response_model=list[MediaAssetOut])
|
||||
@router.get("", response_model=list[MediaAssetOut])
|
||||
@router.get("/", response_model=list[MediaAssetOut], include_in_schema=False)
|
||||
async def list_assets(
|
||||
product_id: uuid.UUID | None = None,
|
||||
order_line_id: uuid.UUID | None = None,
|
||||
cad_file_id: uuid.UUID | None = None,
|
||||
asset_type: MediaAssetType | None = None,
|
||||
asset_types: list[MediaAssetType] = Query(default=[]),
|
||||
skip: int = Query(0, ge=0),
|
||||
limit: int = Query(50, ge=1, le=500),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
@@ -31,11 +33,13 @@ async def list_assets(
|
||||
order_line_id=order_line_id,
|
||||
cad_file_id=cad_file_id,
|
||||
asset_type=asset_type,
|
||||
asset_types=asset_types if asset_types else None,
|
||||
skip=skip,
|
||||
limit=limit,
|
||||
)
|
||||
for a in assets:
|
||||
a.download_url = service.get_download_url(a)
|
||||
a.thumbnail_url = service.get_thumbnail_url(a)
|
||||
return assets
|
||||
|
||||
|
||||
@@ -45,19 +49,45 @@ async def get_asset(asset_id: uuid.UUID, db: AsyncSession = Depends(get_db)):
|
||||
if not asset:
|
||||
raise HTTPException(404, "Asset not found")
|
||||
asset.download_url = service.get_download_url(asset)
|
||||
asset.thumbnail_url = service.get_thumbnail_url(asset)
|
||||
return asset
|
||||
|
||||
|
||||
@router.api_route("/{asset_id}/download", methods=["GET", "HEAD"])
async def download_asset(asset_id: uuid.UUID, db: AsyncSession = Depends(get_db)):
    """Proxy file content directly — avoids internal MinIO hostname issues.

    Resolution order:
      1. Local file (storage_key as absolute path, or relative to UPLOAD_DIR).
      2. MinIO object fetched via the storage backend.

    Raises:
        HTTPException 404: asset record missing, or file unavailable everywhere.
    """
    from fastapi.responses import FileResponse, Response
    from pathlib import Path

    asset = await service.get_media_asset(db, asset_id)
    if not asset:
        raise HTTPException(404, "Asset not found")

    key = asset.storage_key
    mime = asset.mime_type or "application/octet-stream"

    # Local file path (absolute or relative to UPLOAD_DIR)
    candidate = Path(key)
    if not candidate.is_absolute():
        from app.config import settings
        candidate = Path(settings.UPLOAD_DIR) / key
    if candidate.exists():
        ext = candidate.suffix.lstrip(".")
        fname = f"{asset.asset_type.value}_{asset_id}.{ext or 'bin'}"
        return FileResponse(str(candidate), media_type=mime, filename=fname)

    # Fall back to MinIO.  Keep the try body minimal: only the fetch can
    # legitimately fail; building the Response should not be masked as a 404.
    try:
        from app.core.storage import get_storage
        data = get_storage().download_bytes(key)
    except Exception:
        # Deliberately present any storage failure as a missing file.
        raise HTTPException(404, "File not available") from None

    ext = key.rsplit(".", 1)[-1] if "." in key else "bin"
    fname = f"{asset.asset_type.value}_{asset_id}.{ext}"
    return Response(
        content=data,
        media_type=mime,
        headers={"Content-Disposition": f"attachment; filename={fname}"},
    )
|
||||
|
||||
|
||||
@router.post("/zip")
|
||||
|
||||
@@ -22,5 +22,6 @@ class MediaAssetOut(BaseModel):
|
||||
is_archived: bool
|
||||
created_at: datetime
|
||||
download_url: str | None = None
|
||||
thumbnail_url: str | None = None
|
||||
|
||||
model_config = {"from_attributes": True}
|
||||
|
||||
@@ -11,6 +11,7 @@ async def list_media_assets(
|
||||
order_line_id: uuid.UUID | None = None,
|
||||
cad_file_id: uuid.UUID | None = None,
|
||||
asset_type: MediaAssetType | None = None,
|
||||
asset_types: list[MediaAssetType] | None = None,
|
||||
is_archived: bool | None = False,
|
||||
skip: int = 0,
|
||||
limit: int = 50,
|
||||
@@ -22,7 +23,9 @@ async def list_media_assets(
|
||||
q = q.where(MediaAsset.order_line_id == order_line_id)
|
||||
if cad_file_id:
|
||||
q = q.where(MediaAsset.cad_file_id == cad_file_id)
|
||||
if asset_type:
|
||||
if asset_types:
|
||||
q = q.where(MediaAsset.asset_type.in_(asset_types))
|
||||
elif asset_type is not None:
|
||||
q = q.where(MediaAsset.asset_type == asset_type)
|
||||
if is_archived is not None:
|
||||
q = q.where(MediaAsset.is_archived == is_archived)
|
||||
@@ -62,10 +65,12 @@ async def delete_media_asset(db: AsyncSession, asset_id: uuid.UUID) -> bool:
|
||||
|
||||
|
||||
def get_download_url(asset: MediaAsset) -> str | None:
    """Return a backend proxy URL so the browser can always download the file.

    The URL points at the media download endpoint, which serves the bytes
    itself — this avoids handing the browser presigned URLs that reference
    the internal MinIO hostname.
    """
    return f"/api/media/{asset.id}/download"
|
||||
|
||||
|
||||
def get_thumbnail_url(asset: MediaAsset) -> str | None:
    """Return the CAD thumbnail endpoint for this asset, or None when it has no cad_file_id."""
    if not asset.cad_file_id:
        return None
    return f"/api/cad/{asset.cad_file_id}/thumbnail"
|
||||
|
||||
@@ -10,6 +10,7 @@ import logging
|
||||
from pathlib import Path
|
||||
|
||||
from app.tasks.celery_app import celery_app
|
||||
from app.core.task_logs import log_task_event
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -55,6 +56,7 @@ def render_still_task(
|
||||
Returns render metadata dict on success.
|
||||
Retries up to 2 times on failure (30s countdown).
|
||||
"""
|
||||
log_task_event(self.request.id, f"Starting render_still_task: {Path(step_path).name}", "info")
|
||||
try:
|
||||
from app.services.render_blender import render_still
|
||||
result = render_still(
|
||||
@@ -86,14 +88,34 @@ def render_still_task(
|
||||
denoising_use_gpu=denoising_use_gpu,
|
||||
mesh_attributes=mesh_attributes or {},
|
||||
)
|
||||
log_task_event(self.request.id, f"Completed successfully in {result.get('total_duration_s', 0):.1f}s", "done")
|
||||
logger.info(
|
||||
"render_still_task completed: %s → %s in %.1fs",
|
||||
Path(step_path).name, Path(output_path).name,
|
||||
result.get("total_duration_s", 0),
|
||||
)
|
||||
try:
|
||||
from app.core.websocket import publish_event_sync
|
||||
publish_event_sync(None, {
|
||||
"type": "render.still.completed",
|
||||
"step_path": Path(step_path).name,
|
||||
"output": Path(output_path).name,
|
||||
})
|
||||
except Exception:
|
||||
pass
|
||||
return result
|
||||
except Exception as exc:
|
||||
log_task_event(self.request.id, f"Failed: {exc}", "error")
|
||||
logger.error("render_still_task failed for %s: %s", step_path, exc)
|
||||
try:
|
||||
from app.core.websocket import publish_event_sync
|
||||
publish_event_sync(None, {
|
||||
"type": "render.still.failed",
|
||||
"step_path": Path(step_path).name,
|
||||
"error": str(exc),
|
||||
})
|
||||
except Exception:
|
||||
pass
|
||||
raise self.retry(exc=exc, countdown=30)
|
||||
|
||||
|
||||
@@ -136,6 +158,7 @@ def render_turntable_task(
|
||||
|
||||
Returns render metadata dict on success.
|
||||
"""
|
||||
log_task_event(self.request.id, f"Starting render_turntable_task: {Path(step_path).name}", "info")
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
@@ -211,7 +234,17 @@ def render_turntable_task(
|
||||
f"Blender turntable exited {result.returncode}:\n{result.stdout[-2000:]}"
|
||||
)
|
||||
except Exception as exc:
|
||||
log_task_event(self.request.id, f"Failed: {exc}", "error")
|
||||
logger.error("render_turntable_task failed: %s", exc)
|
||||
try:
|
||||
from app.core.websocket import publish_event_sync
|
||||
publish_event_sync(None, {
|
||||
"type": "render.turntable.failed",
|
||||
"step_path": Path(step_path).name,
|
||||
"error": str(exc),
|
||||
})
|
||||
except Exception:
|
||||
pass
|
||||
raise self.retry(exc=exc, countdown=60)
|
||||
|
||||
# FFmpeg composite: frames → MP4 with optional background
|
||||
@@ -224,6 +257,16 @@ def render_turntable_task(
|
||||
except subprocess.CalledProcessError as exc:
|
||||
raise RuntimeError(f"FFmpeg composite failed: {exc.stderr[-500:]}")
|
||||
|
||||
log_task_event(self.request.id, "Completed successfully", "done")
|
||||
try:
|
||||
from app.core.websocket import publish_event_sync
|
||||
publish_event_sync(None, {
|
||||
"type": "render.turntable.completed",
|
||||
"step_path": Path(step_path).name,
|
||||
"output": Path(output_mp4).name,
|
||||
})
|
||||
except Exception:
|
||||
pass
|
||||
return {
|
||||
"output_mp4": str(output_mp4),
|
||||
"frame_count": frame_count,
|
||||
@@ -313,8 +356,10 @@ def render_order_line_still_task(self, order_line_id: str, **params) -> dict:
|
||||
Wraps render_still_task logic but accepts order_line_id instead of step_path.
|
||||
On success, creates a MediaAsset record via publish_asset.
|
||||
"""
|
||||
log_task_event(self.request.id, f"Starting render_order_line_still_task: order_line={order_line_id}", "info")
|
||||
step_path_str, cad_file_id = _resolve_step_path_for_order_line(order_line_id)
|
||||
if not step_path_str:
|
||||
log_task_event(self.request.id, f"Failed: cannot resolve STEP path for order_line {order_line_id}", "error")
|
||||
raise RuntimeError(
|
||||
f"Cannot resolve STEP path for order_line {order_line_id}: "
|
||||
"product missing or has no linked CAD file"
|
||||
@@ -338,13 +383,32 @@ def render_order_line_still_task(self, order_line_id: str, **params) -> dict:
|
||||
str(output_path),
|
||||
render_config=result,
|
||||
)
|
||||
log_task_event(self.request.id, f"Completed successfully in {result.get('total_duration_s', 0):.1f}s", "done")
|
||||
logger.info(
|
||||
"render_order_line_still_task completed for line %s in %.1fs",
|
||||
order_line_id, result.get("total_duration_s", 0),
|
||||
)
|
||||
try:
|
||||
from app.core.websocket import publish_event_sync
|
||||
publish_event_sync(None, {
|
||||
"type": "render.order_line.completed",
|
||||
"order_line_id": order_line_id,
|
||||
})
|
||||
except Exception:
|
||||
pass
|
||||
return result
|
||||
except Exception as exc:
|
||||
log_task_event(self.request.id, f"Failed: {exc}", "error")
|
||||
logger.error("render_order_line_still_task failed for %s: %s", order_line_id, exc)
|
||||
try:
|
||||
from app.core.websocket import publish_event_sync
|
||||
publish_event_sync(None, {
|
||||
"type": "render.order_line.failed",
|
||||
"order_line_id": order_line_id,
|
||||
"error": str(exc),
|
||||
})
|
||||
except Exception:
|
||||
pass
|
||||
raise self.retry(exc=exc, countdown=30)
|
||||
|
||||
|
||||
|
||||
@@ -24,6 +24,7 @@ from app.domains.rendering.workflow_router import router as workflows_router
|
||||
from app.domains.media.router import router as media_router
|
||||
from app.api.routers.asset_libraries import router as asset_libraries_router
|
||||
from app.domains.admin.dashboard_router import router as dashboard_router
|
||||
from app.api.routers.task_logs import router as task_logs_router
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
@@ -90,6 +91,7 @@ app.include_router(workflows_router)
|
||||
app.include_router(media_router)
|
||||
app.include_router(asset_libraries_router, prefix="/api")
|
||||
app.include_router(dashboard_router, prefix="/api")
|
||||
app.include_router(task_logs_router, prefix="/api")
|
||||
|
||||
|
||||
@app.get("/health")
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""Celery tasks for STEP file processing and thumbnail generation."""
|
||||
import logging
|
||||
from app.tasks.celery_app import celery_app
|
||||
from app.core.task_logs import log_task_event
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -268,9 +269,11 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
|
||||
step_path_str = cad_file.stored_path
|
||||
eng.dispose()
|
||||
|
||||
log_task_event(self.request.id, f"Starting generate_gltf_geometry_task: cad_file={cad_file_id}", "info")
|
||||
step = _Path(step_path_str)
|
||||
stl_path = step.parent / f"{step.stem}_low.stl"
|
||||
if not stl_path.exists():
|
||||
log_task_event(self.request.id, f"Failed: STL cache not found: {stl_path}", "error")
|
||||
logger.error("generate_gltf_geometry_task: STL not found %s", stl_path)
|
||||
raise RuntimeError(f"STL cache not found: {stl_path}")
|
||||
|
||||
@@ -279,8 +282,10 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
|
||||
import trimesh
|
||||
mesh = trimesh.load(str(stl_path))
|
||||
mesh.export(str(output_path))
|
||||
log_task_event(self.request.id, f"Completed successfully: {output_path.name}", "done")
|
||||
logger.info("generate_gltf_geometry_task: exported %s", output_path.name)
|
||||
except Exception as exc:
|
||||
log_task_event(self.request.id, f"Failed: {exc}", "error")
|
||||
logger.error("generate_gltf_geometry_task failed for %s: %s", cad_file_id, exc)
|
||||
raise self.retry(exc=exc, countdown=15)
|
||||
|
||||
|
||||
Reference in New Issue
Block a user