e7b70a35ea
Previously the endpoint queued USD generation for ALL 295 completed CadFiles, including 250 orphan CadFiles not linked to any product. Now filters to only CadFiles referenced by at least one Product.cad_file_id, reducing the backfill from ~285 to ~41 tasks. Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
806 lines
33 KiB
Python
806 lines
33 KiB
Python
import json
|
||
import uuid
|
||
from datetime import datetime
|
||
from typing import Any
|
||
from fastapi import APIRouter, Depends, HTTPException, status
|
||
from sqlalchemy.ext.asyncio import AsyncSession
|
||
from sqlalchemy import select, update as sql_update
|
||
from pydantic import BaseModel
|
||
from app.database import get_db
|
||
from app.models.user import User
|
||
from app.models.system_setting import SystemSetting
|
||
from app.models.cad_file import CadFile, ProcessingStatus
|
||
from app.models.output_type import OutputType as OutputTypeModel
|
||
from app.schemas.user import UserOut, UserUpdate, UserCreate
|
||
from app.utils.auth import require_global_admin, hash_password
|
||
|
||
router = APIRouter(prefix="/admin", tags=["admin"])
|
||
|
||
# Allowed values for enumerated settings; enforced by update_settings().
VALID_RENDERERS = {"blender"}
VALID_ENGINES = {"cycles", "eevee"}
VALID_FORMATS = {"jpg", "png"}
VALID_CYCLES_DEVICES = {"auto", "gpu", "cpu"}

# Every system setting is persisted as a string in the system_settings table.
# These are the fallback values used by _load_settings() when a key is absent.
SETTINGS_DEFAULTS: dict[str, str] = {
    # Thumbnail rendering pipeline
    "thumbnail_renderer": "blender",
    "blender_engine": "cycles",
    "blender_cycles_samples": "256",
    "blender_eevee_samples": "64",
    "thumbnail_format": "jpg",
    "blender_smooth_angle": "30",
    "cycles_device": "auto",
    "render_backend": "celery",
    "blender_max_concurrent_renders": "3",
    # JSON array string: source priority for a product's thumbnail
    "product_thumbnail_priority": '["latest_render","cad_thumbnail"]',
    "render_stall_timeout_minutes": "120",
    # SMTP (email notifications — disabled by default)
    "smtp_enabled": "false",
    "smtp_host": "",
    "smtp_port": "587",
    "smtp_user": "",
    "smtp_password": "",
    "smtp_from_address": "",
    # glTF tessellation quality
    "tessellation_engine": "occ",  # "occ" | "gmsh" — tessellation backend
    "scene_linear_deflection": "0.1",  # mm — geometry GLB for viewer
    "scene_angular_deflection": "0.1",  # rad — Standard preset
    "render_linear_deflection": "0.03",  # mm — production/render GLB
    "render_angular_deflection": "0.05",  # rad — Standard preset
    # 3D viewer / glTF export settings
    "gltf_scale_factor": "0.001",
    "gltf_smooth_normals": "true",
    "viewer_max_distance": "50",
    "viewer_min_distance": "0.001",
    "gltf_material_quality": "pbr_colors",
    "gltf_pbr_roughness": "0.4",
    "gltf_pbr_metallic": "0.6",
}
|
||
|
||
|
||
class SettingsOut(BaseModel):
    """Effective (typed) system settings as returned by GET /admin/settings.

    Field defaults mirror SETTINGS_DEFAULTS; values are parsed from the
    string-valued settings store by _settings_to_out().
    """

    # Thumbnail rendering pipeline
    thumbnail_renderer: str = "blender"
    blender_engine: str = "cycles"
    blender_cycles_samples: int = 256
    blender_eevee_samples: int = 64
    thumbnail_format: str = "jpg"
    blender_smooth_angle: int = 30
    cycles_device: str = "auto"
    render_backend: str = "celery"
    blender_max_concurrent_renders: int = 3
    # JSON array string — kept raw, not parsed into a list
    product_thumbnail_priority: str = '["latest_render","cad_thumbnail"]'
    render_stall_timeout_minutes: int = 120
    # SMTP / email notifications
    smtp_enabled: bool = False
    smtp_host: str = ""
    smtp_port: int = 587
    smtp_user: str = ""
    smtp_password: str = ""
    smtp_from_address: str = ""
    # Tessellation quality (mm / rad)
    scene_linear_deflection: float = 0.1
    scene_angular_deflection: float = 0.1
    render_linear_deflection: float = 0.03
    render_angular_deflection: float = 0.05
    # 3D viewer / glTF export
    gltf_scale_factor: float = 0.001
    gltf_smooth_normals: bool = True
    viewer_max_distance: float = 50.0
    viewer_min_distance: float = 0.001
    gltf_material_quality: str = "pbr_colors"
    gltf_pbr_roughness: float = 0.4
    gltf_pbr_metallic: float = 0.6
    tessellation_engine: str = "occ"
|
||
|
||
|
||
class SettingsUpdate(BaseModel):
    """Partial-update payload for PUT /admin/settings.

    Every field is optional; fields left as None are not modified.
    Validation of values happens in update_settings(), not here.
    """

    thumbnail_renderer: str | None = None
    blender_engine: str | None = None
    blender_cycles_samples: int | None = None
    blender_eevee_samples: int | None = None
    thumbnail_format: str | None = None
    blender_smooth_angle: int | None = None
    cycles_device: str | None = None
    render_backend: str | None = None
    blender_max_concurrent_renders: int | None = None
    product_thumbnail_priority: str | None = None
    render_stall_timeout_minutes: int | None = None
    smtp_enabled: bool | None = None
    smtp_host: str | None = None
    smtp_port: int | None = None
    smtp_user: str | None = None
    smtp_password: str | None = None
    smtp_from_address: str | None = None
    scene_linear_deflection: float | None = None
    scene_angular_deflection: float | None = None
    render_linear_deflection: float | None = None
    render_angular_deflection: float | None = None
    gltf_scale_factor: float | None = None
    gltf_smooth_normals: bool | None = None
    viewer_max_distance: float | None = None
    viewer_min_distance: float | None = None
    gltf_material_quality: str | None = None
    gltf_pbr_roughness: float | None = None
    gltf_pbr_metallic: float | None = None
    tessellation_engine: str | None = None
|
||
|
||
|
||
@router.get("/users", response_model=list[UserOut])
async def list_users(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Return all user accounts, newest first (global admin only)."""
    query = select(User).order_by(User.created_at.desc())
    rows = await db.execute(query)
    return rows.scalars().all()
|
||
|
||
|
||
@router.post("/users", response_model=UserOut, status_code=status.HTTP_201_CREATED)
async def create_user(
    body: UserCreate,
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Create a user account; rejects an email that is already registered."""
    duplicate = await db.execute(select(User).where(User.email == body.email))
    if duplicate.scalar_one_or_none():
        raise HTTPException(400, detail="Email already registered")

    new_user = User(
        email=body.email,
        password_hash=hash_password(body.password),
        full_name=body.full_name,
        role=body.role,
    )
    db.add(new_user)
    await db.commit()
    await db.refresh(new_user)
    return new_user
|
||
|
||
|
||
@router.patch("/users/{user_id}", response_model=UserOut)
async def update_user(
    user_id: uuid.UUID,
    body: UserUpdate,
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Apply a partial update to a user; only fields present in the payload change."""
    row = await db.execute(select(User).where(User.id == user_id))
    user = row.scalar_one_or_none()
    if user is None:
        raise HTTPException(404, detail="User not found")

    changes = body.model_dump(exclude_unset=True)
    for attr, value in changes.items():
        setattr(user, attr, value)
    await db.commit()
    await db.refresh(user)
    return user
|
||
|
||
|
||
@router.delete("/users/{user_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_user(
    user_id: uuid.UUID,
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Delete a user account; admins cannot delete their own account."""
    row = await db.execute(select(User).where(User.id == user_id))
    target = row.scalar_one_or_none()
    if target is None:
        raise HTTPException(404, detail="User not found")
    if target.id == admin.id:
        raise HTTPException(400, detail="Cannot delete yourself")
    await db.delete(target)
    await db.commit()
|
||
|
||
|
||
# ── System Settings ──────────────────────────────────────────────────────────
|
||
|
||
async def _load_settings(db: AsyncSession) -> dict[str, str]:
    """Load all system settings, falling back to SETTINGS_DEFAULTS for absent keys."""
    rows = await db.execute(select(SystemSetting))
    stored = {setting.key: setting.value for setting in rows.scalars().all()}
    merged: dict[str, str] = {}
    for key, default in SETTINGS_DEFAULTS.items():
        merged[key] = stored.get(key, default)
    return merged
|
||
|
||
|
||
async def _save_setting(db: AsyncSession, key: str, value: str) -> None:
    """Upsert a single setting row: UPDATE first, INSERT if no row matched.

    Does not commit — the caller owns the transaction boundary.
    """
    result = await db.execute(
        sql_update(SystemSetting)
        .where(SystemSetting.key == key)
        .values(value=value, updated_at=datetime.utcnow())
    )
    # rowcount == 0 means no row exists for this key yet — insert a new one.
    # NOTE(review): not race-safe under concurrent writers (no ON CONFLICT);
    # acceptable here since only admins touch settings.
    if result.rowcount == 0:
        db.add(SystemSetting(key=key, value=value, updated_at=datetime.utcnow()))
|
||
|
||
|
||
def _settings_to_out(raw: dict[str, str]) -> SettingsOut:
    """Convert the raw string settings mapping into a typed SettingsOut.

    The ``.get`` fallbacks mirror SETTINGS_DEFAULTS so the conversion stays
    correct even for mappings not produced by ``_load_settings`` (which
    already fills missing keys). Fixes: the fallbacks for the two angular
    deflection keys had drifted from SETTINGS_DEFAULTS ("0.5"/"0.2" vs the
    canonical "0.1"/"0.05"), and gltf_smooth_normals now parses its boolean
    case-insensitively, consistent with smtp_enabled.
    """
    return SettingsOut(
        thumbnail_renderer=raw["thumbnail_renderer"],
        blender_engine=raw["blender_engine"],
        blender_cycles_samples=int(raw["blender_cycles_samples"]),
        blender_eevee_samples=int(raw["blender_eevee_samples"]),
        thumbnail_format=raw["thumbnail_format"],
        blender_smooth_angle=int(raw["blender_smooth_angle"]),
        cycles_device=raw["cycles_device"],
        render_backend=raw["render_backend"],
        blender_max_concurrent_renders=int(raw["blender_max_concurrent_renders"]),
        product_thumbnail_priority=raw.get("product_thumbnail_priority", '["latest_render","cad_thumbnail"]'),
        render_stall_timeout_minutes=int(raw.get("render_stall_timeout_minutes", "120")),
        smtp_enabled=raw.get("smtp_enabled", "false").lower() == "true",
        smtp_host=raw.get("smtp_host", ""),
        smtp_port=int(raw.get("smtp_port", "587")),
        smtp_user=raw.get("smtp_user", ""),
        smtp_password=raw.get("smtp_password", ""),
        smtp_from_address=raw.get("smtp_from_address", ""),
        scene_linear_deflection=float(raw.get("scene_linear_deflection", "0.1")),
        # was "0.5" — realigned with SETTINGS_DEFAULTS
        scene_angular_deflection=float(raw.get("scene_angular_deflection", "0.1")),
        render_linear_deflection=float(raw.get("render_linear_deflection", "0.03")),
        # was "0.2" — realigned with SETTINGS_DEFAULTS
        render_angular_deflection=float(raw.get("render_angular_deflection", "0.05")),
        gltf_scale_factor=float(raw.get("gltf_scale_factor", "0.001")),
        # case-insensitive, consistent with smtp_enabled above
        gltf_smooth_normals=raw.get("gltf_smooth_normals", "true").lower() == "true",
        viewer_max_distance=float(raw.get("viewer_max_distance", "50")),
        viewer_min_distance=float(raw.get("viewer_min_distance", "0.001")),
        gltf_material_quality=raw.get("gltf_material_quality", "pbr_colors"),
        gltf_pbr_roughness=float(raw.get("gltf_pbr_roughness", "0.4")),
        gltf_pbr_metallic=float(raw.get("gltf_pbr_metallic", "0.6")),
        tessellation_engine=raw.get("tessellation_engine", "occ"),
    )
|
||
|
||
|
||
@router.get("/settings", response_model=SettingsOut)
async def get_settings(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Return the effective system settings (stored values merged with defaults)."""
    raw = await _load_settings(db)
    return _settings_to_out(raw)
|
||
|
||
|
||
@router.put("/settings", response_model=SettingsOut)
async def update_settings(
    body: SettingsUpdate,
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Validate and persist a partial settings update; returns the new effective settings.

    Only fields explicitly set (non-None) in the payload are validated and
    written. Raises HTTPException(400) on any invalid value before anything
    is persisted.
    """
    # ── Validation phase: reject bad values before touching the store ──
    if body.thumbnail_renderer is not None and body.thumbnail_renderer not in VALID_RENDERERS:
        raise HTTPException(400, detail=f"Invalid renderer. Choose: {', '.join(sorted(VALID_RENDERERS))}")
    if body.blender_engine is not None and body.blender_engine not in VALID_ENGINES:
        raise HTTPException(400, detail=f"Invalid engine. Choose: {', '.join(sorted(VALID_ENGINES))}")
    if body.blender_cycles_samples is not None and not (1 <= body.blender_cycles_samples <= 4096):
        raise HTTPException(400, detail="blender_cycles_samples must be 1–4096")
    if body.blender_eevee_samples is not None and not (1 <= body.blender_eevee_samples <= 1024):
        raise HTTPException(400, detail="blender_eevee_samples must be 1–1024")
    if body.thumbnail_format is not None and body.thumbnail_format not in VALID_FORMATS:
        raise HTTPException(400, detail=f"Invalid thumbnail_format. Choose: {', '.join(sorted(VALID_FORMATS))}")
    if body.blender_smooth_angle is not None and not (0 <= body.blender_smooth_angle <= 180):
        raise HTTPException(400, detail="blender_smooth_angle must be 0–180 degrees")
    if body.cycles_device is not None and body.cycles_device not in VALID_CYCLES_DEVICES:
        raise HTTPException(400, detail=f"Invalid cycles_device. Choose: {', '.join(sorted(VALID_CYCLES_DEVICES))}")
    if body.blender_max_concurrent_renders is not None and not (1 <= body.blender_max_concurrent_renders <= 16):
        raise HTTPException(400, detail="blender_max_concurrent_renders must be 1–16")
    if body.render_stall_timeout_minutes is not None and not (10 <= body.render_stall_timeout_minutes <= 10080):
        raise HTTPException(400, detail="render_stall_timeout_minutes must be 10–10080 (10 min to 1 week)")
    # product_thumbnail_priority: JSON array of literals or output-type UUIDs
    if body.product_thumbnail_priority is not None:
        try:
            entries = json.loads(body.product_thumbnail_priority)
            if not isinstance(entries, list):
                raise ValueError
        except (json.JSONDecodeError, ValueError):
            raise HTTPException(400, detail="product_thumbnail_priority must be a valid JSON array")
        valid_literals = {"cad_thumbnail", "latest_render"}
        for entry in entries:
            if entry not in valid_literals:
                # Non-literal entries must be UUIDs of existing output types.
                try:
                    ot_id = uuid.UUID(entry)
                except ValueError:
                    raise HTTPException(400, detail=f"Invalid priority entry '{entry}': must be 'cad_thumbnail', 'latest_render', or a valid output type UUID")
                ot_row = await db.execute(select(OutputTypeModel).where(OutputTypeModel.id == ot_id))
                if not ot_row.scalar_one_or_none():
                    raise HTTPException(400, detail=f"Output type '{entry}' not found")

    # ── Collect phase: serialize each provided field into its string form ──
    updates: dict[str, str] = {}
    if body.thumbnail_renderer is not None:
        updates["thumbnail_renderer"] = body.thumbnail_renderer
    if body.blender_engine is not None:
        updates["blender_engine"] = body.blender_engine
    if body.blender_cycles_samples is not None:
        updates["blender_cycles_samples"] = str(body.blender_cycles_samples)
    if body.blender_eevee_samples is not None:
        updates["blender_eevee_samples"] = str(body.blender_eevee_samples)
    if body.thumbnail_format is not None:
        updates["thumbnail_format"] = body.thumbnail_format
    if body.blender_smooth_angle is not None:
        updates["blender_smooth_angle"] = str(body.blender_smooth_angle)
    if body.cycles_device is not None:
        updates["cycles_device"] = body.cycles_device
    if body.render_backend is not None:
        updates["render_backend"] = body.render_backend
    if body.blender_max_concurrent_renders is not None:
        updates["blender_max_concurrent_renders"] = str(body.blender_max_concurrent_renders)
    if body.render_stall_timeout_minutes is not None:
        updates["render_stall_timeout_minutes"] = str(body.render_stall_timeout_minutes)
    if body.product_thumbnail_priority is not None:
        updates["product_thumbnail_priority"] = body.product_thumbnail_priority
    if body.smtp_enabled is not None:
        updates["smtp_enabled"] = "true" if body.smtp_enabled else "false"
    if body.smtp_host is not None:
        updates["smtp_host"] = body.smtp_host
    if body.smtp_port is not None:
        if not (1 <= body.smtp_port <= 65535):
            raise HTTPException(400, detail="smtp_port must be 1–65535")
        updates["smtp_port"] = str(body.smtp_port)
    if body.smtp_user is not None:
        updates["smtp_user"] = body.smtp_user
    if body.smtp_password is not None:
        updates["smtp_password"] = body.smtp_password
    if body.smtp_from_address is not None:
        updates["smtp_from_address"] = body.smtp_from_address
    if body.gltf_scale_factor is not None:
        updates["gltf_scale_factor"] = str(body.gltf_scale_factor)
    if body.gltf_smooth_normals is not None:
        updates["gltf_smooth_normals"] = "true" if body.gltf_smooth_normals else "false"
    if body.viewer_max_distance is not None:
        updates["viewer_max_distance"] = str(body.viewer_max_distance)
    if body.viewer_min_distance is not None:
        updates["viewer_min_distance"] = str(body.viewer_min_distance)
    if body.gltf_material_quality is not None:
        updates["gltf_material_quality"] = body.gltf_material_quality
    if body.gltf_pbr_roughness is not None:
        updates["gltf_pbr_roughness"] = str(body.gltf_pbr_roughness)
    if body.gltf_pbr_metallic is not None:
        updates["gltf_pbr_metallic"] = str(body.gltf_pbr_metallic)
    if body.scene_linear_deflection is not None:
        if not (0.001 <= body.scene_linear_deflection <= 10.0):
            raise HTTPException(400, detail="scene_linear_deflection must be 0.001–10.0 mm")
        updates["scene_linear_deflection"] = str(body.scene_linear_deflection)
    if body.scene_angular_deflection is not None:
        if not (0.05 <= body.scene_angular_deflection <= 1.5):
            raise HTTPException(400, detail="scene_angular_deflection must be 0.05–1.5 rad")
        updates["scene_angular_deflection"] = str(body.scene_angular_deflection)
    if body.render_linear_deflection is not None:
        if not (0.001 <= body.render_linear_deflection <= 10.0):
            raise HTTPException(400, detail="render_linear_deflection must be 0.001–10.0 mm")
        updates["render_linear_deflection"] = str(body.render_linear_deflection)
    if body.render_angular_deflection is not None:
        if not (0.05 <= body.render_angular_deflection <= 1.5):
            raise HTTPException(400, detail="render_angular_deflection must be 0.05–1.5 rad")
        updates["render_angular_deflection"] = str(body.render_angular_deflection)
    if body.tessellation_engine is not None:
        if body.tessellation_engine not in {"occ", "gmsh"}:
            raise HTTPException(400, detail="tessellation_engine must be 'occ' or 'gmsh'")
        updates["tessellation_engine"] = body.tessellation_engine

    # ── Persist phase: upsert each key, then commit once ──
    for k, v in updates.items():
        await _save_setting(db, k, v)
    await db.commit()

    # Note: blender-renderer HTTP service removed; concurrency is now controlled
    # via render-worker Docker concurrency setting (thumbnail_rendering queue).

    return _settings_to_out(await _load_settings(db))
|
||
|
||
|
||
@router.post("/settings/process-unprocessed", status_code=status.HTTP_202_ACCEPTED)
async def process_unprocessed_steps(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Queue all STEP files that are not yet completed.

    Queues pending and failed files immediately. Files stuck in 'processing'
    for more than 15 minutes (i.e. their worker task was killed or lost) are
    also recovered. Actively-processing files (updated within the last 15 min)
    are left alone to avoid duplicate task execution on the same file.
    """
    from datetime import datetime, timedelta
    stuck_cutoff = datetime.utcnow() - timedelta(minutes=15)
    result = await db.execute(
        select(CadFile).where(
            # Only files that were actually stored on disk.
            CadFile.stored_path.isnot(None),
            # pending/failed always, plus processing-but-stale (stuck)
            (
                CadFile.processing_status.in_([
                    ProcessingStatus.pending,
                    ProcessingStatus.failed,
                ]) |
                (
                    (CadFile.processing_status == ProcessingStatus.processing) &
                    (CadFile.updated_at < stuck_cutoff)
                )
            ),
        )
    )
    cad_files = result.scalars().all()

    from app.tasks.step_tasks import process_step_file
    queued = 0
    for cad_file in cad_files:
        # Reset status before dispatching so the worker sees a clean 'pending'.
        cad_file.processing_status = ProcessingStatus.pending
        process_step_file.delay(str(cad_file.id))
        queued += 1
    await db.commit()

    return {"queued": queued, "message": f"Queued {queued} STEP file(s) for processing"}
|
||
|
||
|
||
@router.post("/settings/regenerate-thumbnails", status_code=status.HTTP_202_ACCEPTED)
async def regenerate_thumbnails(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Re-queue thumbnail rendering for every completed CAD file linked to a product."""
    from app.domains.products.models import Product
    from app.tasks.step_tasks import render_step_thumbnail

    query = (
        select(CadFile)
        .join(Product, Product.cad_file_id == CadFile.id)
        .where(CadFile.processing_status == ProcessingStatus.completed)
    )
    targets = (await db.execute(query)).scalars().all()

    for target in targets:
        render_step_thumbnail.delay(str(target.id))

    queued = len(targets)
    return {"queued": queued, "message": f"Re-queued {queued} CAD file(s) for thumbnail regeneration"}
|
||
|
||
|
||
@router.get("/settings/orphaned-cad-files")
async def get_orphaned_cad_files(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Report how many CadFiles have no linked product, and their total disk size."""
    from sqlalchemy import func
    from app.domains.products.models import Product

    stats_query = (
        select(func.count(CadFile.id), func.sum(CadFile.file_size))
        .outerjoin(Product, Product.cad_file_id == CadFile.id)
        .where(Product.id.is_(None))
    )
    count, total_bytes = (await db.execute(stats_query)).one()
    return {
        "count": count or 0,
        "total_mb": round((total_bytes or 0) / 1024 / 1024, 1),
    }
|
||
|
||
|
||
@router.post("/settings/cleanup-orphaned-cad-files")
async def cleanup_orphaned_cad_files(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Delete CadFile DB records and associated files on disk for all orphaned CadFiles.

    A CadFile is orphaned if no product currently references it via products.cad_file_id.
    Returns counts of deleted DB records, deleted disk files, and MB freed.
    """
    import os
    from app.domains.products.models import Product

    result = await db.execute(
        select(CadFile)
        .outerjoin(Product, Product.cad_file_id == CadFile.id)
        .where(Product.id.is_(None))
    )
    orphans = result.scalars().all()

    deleted_files = 0
    deleted_bytes = 0

    for cad_file in orphans:
        # Remove files from disk (non-fatal if missing)
        for path_attr in ("stored_path", "thumbnail_path", "gltf_path"):
            path = getattr(cad_file, path_attr, None)
            if path:
                try:
                    if os.path.isfile(path):
                        # Record size before removing so freed_mb is accurate.
                        size = os.path.getsize(path)
                        os.remove(path)
                        deleted_files += 1
                        deleted_bytes += size
                except OSError:
                    # Best-effort cleanup: permission errors or races with
                    # other deleters are deliberately ignored.
                    pass
        # Delete the DB record even if some of its files could not be removed.
        await db.delete(cad_file)

    await db.commit()
    return {
        "deleted_records": len(orphans),
        "deleted_files": deleted_files,
        "freed_mb": round(deleted_bytes / 1024 / 1024, 1),
    }
|
||
|
||
|
||
@router.post("/settings/reextract-metadata", status_code=status.HTTP_202_ACCEPTED)
async def reextract_all_metadata(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Re-extract OCC metadata (dimensions, sharp edges) for all completed CAD files.

    Updates mesh_attributes without re-rendering thumbnails or changing processing status.
    Use this after deploying bbox/edge extraction improvements.
    """
    from app.domains.products.models import Product
    from app.tasks.step_tasks import reextract_cad_metadata

    query = (
        select(CadFile)
        .join(Product, Product.cad_file_id == CadFile.id)
        .where(
            CadFile.processing_status == ProcessingStatus.completed,
            CadFile.stored_path.isnot(None),
        )
    )
    targets = (await db.execute(query)).scalars().all()

    for target in targets:
        reextract_cad_metadata.delay(str(target.id))

    queued = len(targets)
    return {"queued": queued, "message": f"Queued {queued} CAD file(s) for metadata re-extraction"}
|
||
|
||
|
||
@router.post("/settings/generate-missing-canonical-scenes", status_code=status.HTTP_202_ACCEPTED)
async def generate_missing_canonical_scenes(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Queue canonical scene (geometry GLB + USD master) generation for every completed CAD file that has no gltf_geometry MediaAsset."""
    from app.domains.media.models import MediaAsset, MediaAssetType
    from app.tasks.step_tasks import generate_gltf_geometry_task

    completed_rows = await db.execute(
        select(CadFile).where(CadFile.processing_status == ProcessingStatus.completed)
    )
    completed = completed_rows.scalars().all()

    # CadFile ids that already have a geometry GLB asset (bulk fetch).
    asset_rows = await db.execute(
        select(MediaAsset.cad_file_id).where(MediaAsset.asset_type == MediaAssetType.gltf_geometry)
    )
    have_geometry = {row[0] for row in asset_rows.all()}

    queued = 0
    for cf in completed:
        if not cf.stored_path:
            continue
        if cf.id in have_geometry:
            continue
        generate_gltf_geometry_task.delay(str(cf.id))
        queued += 1

    return {"queued": queued, "message": f"Queued {queued} missing canonical scene task(s)"}
|
||
|
||
|
||
@router.post("/settings/generate-missing-usd-masters", status_code=status.HTTP_202_ACCEPTED)
async def generate_missing_usd_masters(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Queue USD master export for completed CAD files linked to a product that have no usd_master MediaAsset.

    Only CadFiles referenced by at least one Product are included — orphan CadFiles
    (uploaded but never linked to a product) are skipped to avoid unnecessary work.
    """
    from app.domains.media.models import MediaAsset, MediaAssetType
    from app.domains.products.models import Product
    from app.tasks.step_tasks import generate_usd_master_task

    # Only CadFiles that are actually used by a product
    linked_rows = await db.execute(
        select(Product.cad_file_id).where(Product.cad_file_id.isnot(None)).distinct()
    )
    linked_ids = {row[0] for row in linked_rows.all()}

    candidate_rows = await db.execute(
        select(CadFile).where(
            CadFile.processing_status == ProcessingStatus.completed,
            CadFile.id.in_(linked_ids),
        )
    )
    candidates = candidate_rows.scalars().all()

    # CadFile ids that already have a USD master asset (bulk fetch).
    asset_rows = await db.execute(
        select(MediaAsset.cad_file_id).where(MediaAsset.asset_type == MediaAssetType.usd_master)
    )
    have_usd = {row[0] for row in asset_rows.all()}

    queued = 0
    for cf in candidates:
        if not cf.stored_path:
            continue
        if cf.id in have_usd:
            continue
        generate_usd_master_task.delay(str(cf.id))
        queued += 1

    return {"queued": queued, "message": f"Queued {queued} missing USD master task(s)"}
|
||
|
||
|
||
@router.post("/settings/recover-stuck-processing", status_code=status.HTTP_200_OK)
async def recover_stuck_processing(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Reset CAD files stuck in 'processing' for more than 10 minutes to 'failed'.

    Call this when a CAD file shows 'processing' indefinitely. The auto-recovery
    beat task also runs every 5 minutes, so this is just for immediate relief.
    Returns the number of files reset and their ids.
    """
    from datetime import datetime, timedelta
    from sqlalchemy import update as sql_update, and_

    cutoff = datetime.utcnow() - timedelta(minutes=10)
    # Single bulk UPDATE … RETURNING so we get the affected ids without a
    # separate SELECT.
    result = await db.execute(
        sql_update(CadFile)
        .where(
            and_(
                CadFile.processing_status == ProcessingStatus.processing,
                CadFile.updated_at < cutoff,
            )
        )
        .values(
            processing_status=ProcessingStatus.failed,
            error_message="Processing timed out — worker may have crashed. Use 'Regenerate Thumbnail' to retry.",
        )
        .returning(CadFile.id)
    )
    reset_ids = [str(r[0]) for r in result.fetchall()]
    await db.commit()
    return {"reset": len(reset_ids), "ids": reset_ids,
            "message": f"Reset {len(reset_ids)} stuck file(s) to 'failed'"}
|
||
|
||
|
||
@router.post("/settings/seed-workflows", status_code=status.HTTP_200_OK)
async def seed_workflows(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Insert the standard workflow definitions that do not exist yet (matched by name)."""
    from app.domains.rendering.models import WorkflowDefinition

    STANDARD_WORKFLOWS = [
        {
            "name": "Still Image — Cycles",
            "config": {
                "type": "still",
                "params": {"render_engine": "cycles", "samples": 256, "resolution": [1920, 1080]},
            },
        },
        {
            "name": "Still Image — EEVEE",
            "config": {
                "type": "still",
                "params": {"render_engine": "eevee", "samples": 64, "resolution": [1920, 1080]},
            },
        },
        {
            "name": "Turntable Animation",
            "config": {
                "type": "turntable",
                "params": {"render_engine": "cycles", "samples": 64, "fps": 24, "duration_s": 5},
            },
        },
        {
            "name": "Multi-Angle (0° / 45° / 90°)",
            "config": {
                "type": "multi_angle",
                "params": {"render_engine": "cycles", "samples": 128, "angles": [0, 45, 90]},
            },
        },
    ]

    current = await db.execute(select(WorkflowDefinition))
    known_names = {wf.name for wf in current.scalars().all()}

    created = 0
    for spec in STANDARD_WORKFLOWS:
        if spec["name"] in known_names:
            continue
        db.add(WorkflowDefinition(
            name=spec["name"],
            config=spec["config"],
            is_active=True,
        ))
        created += 1

    await db.commit()
    return {"created": created, "message": f"Created {created} workflow definition(s)"}
|
||
|
||
|
||
@router.get("/settings/renderer-status")
async def renderer_status(
    admin: User = Depends(require_global_admin),
):
    """Check health of renderer services."""
    from app.services.render_blender import find_blender, is_blender_available

    available = is_blender_available()
    binary = find_blender()
    if available:
        note = f"render-worker subprocess ({binary})"
    else:
        note = "Blender not found — check render-worker container and BLENDER_BIN"
    return {
        "blender": {
            "available": available,
            "note": note,
        },
    }
|
||
|
||
|
||
@router.post("/import-media-assets")
async def import_existing_media_assets(
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(require_global_admin),
):
    """Import existing cad thumbnails and order line renders as MediaAsset records.

    Backfill endpoint: scans cad_files.thumbnail_path and order_lines.result_path
    and creates a MediaAsset row for each file that does not already have one
    (deduplicated by storage_key). Returns {"created": n, "skipped": n}.

    Fix: ``.webm`` renders were previously mislabelled ``video/mp4``; they now
    get the correct ``video/webm`` MIME type.
    """
    from app.domains.media.models import MediaAsset, MediaAssetType
    from sqlalchemy import text

    created = 0
    skipped = 0

    from app.config import settings as _app_settings

    def _normalize_key(path: str) -> str:
        """Strip UPLOAD_DIR prefix to store relative storage keys."""
        key = str(path)
        prefix = str(_app_settings.upload_dir).rstrip("/") + "/"
        return key[len(prefix):] if key.startswith(prefix) else key

    # 1. CadFiles with thumbnail_path
    # Raw SQL with tenant bypass: this is a cross-tenant admin backfill.
    await db.execute(text("SET LOCAL app.current_tenant_id = 'bypass'"))
    cad_result = await db.execute(
        text("SELECT id, thumbnail_path FROM cad_files WHERE thumbnail_path IS NOT NULL AND processing_status = 'completed'")
    )
    for row in cad_result.fetchall():
        cad_id, thumb_path = row
        norm_key = _normalize_key(str(thumb_path))
        # De-dup check: skip paths already registered as assets.
        existing = await db.execute(
            select(MediaAsset.id).where(MediaAsset.storage_key == norm_key).limit(1)
        )
        if existing.scalar_one_or_none():
            skipped += 1
            continue
        ext = str(thumb_path).lower()
        mime = "image/jpeg" if ext.endswith(".jpg") or ext.endswith(".jpeg") else "image/png"
        asset = MediaAsset(
            cad_file_id=uuid.UUID(str(cad_id)),
            asset_type=MediaAssetType.thumbnail,
            storage_key=norm_key,
            mime_type=mime,
        )
        db.add(asset)
        created += 1

    # 2. OrderLines with result_path
    await db.execute(text("SET LOCAL app.current_tenant_id = 'bypass'"))
    ol_result = await db.execute(
        text("""
            SELECT ol.id, ol.result_path, ol.product_id, COALESCE(ot.is_animation, false) as is_animation
            FROM order_lines ol
            LEFT JOIN output_types ot ON ot.id = ol.output_type_id
            WHERE ol.result_path IS NOT NULL AND ol.render_status = 'completed'
        """)
    )
    for row in ol_result.fetchall():
        ol_id, result_path, product_id, _is_animation = row
        norm_key = _normalize_key(str(result_path))
        existing = await db.execute(
            select(MediaAsset.id).where(MediaAsset.storage_key == norm_key).limit(1)
        )
        if existing.scalar_one_or_none():
            skipped += 1
            continue
        ext = str(result_path).lower()
        if ext.endswith(".mp4") or ext.endswith(".webm"):
            # BUGFIX: .webm was previously mislabelled as video/mp4.
            mime = "video/webm" if ext.endswith(".webm") else "video/mp4"
            asset_type = MediaAssetType.turntable
        else:
            # Extension determines type — poster frames (.jpg/.png) are always stills
            mime = "image/png" if ext.endswith(".png") else "image/jpeg"
            asset_type = MediaAssetType.still
        asset = MediaAsset(
            order_line_id=uuid.UUID(str(ol_id)),
            product_id=uuid.UUID(str(product_id)) if product_id else None,
            asset_type=asset_type,
            storage_key=norm_key,
            mime_type=mime,
        )
        db.add(asset)
        created += 1

    await db.commit()
    return {"created": created, "skipped": skipped}
|
||
|