Files
HartOMat/backend/app/api/routers/admin.py
T

1097 lines
44 KiB
Python
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
import json
import uuid
from datetime import datetime, timedelta
from typing import Any, Optional
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, update as sql_update, func, case, distinct, and_, extract
from pydantic import BaseModel
from app.database import get_db
from app.models.user import User
from app.models.system_setting import SystemSetting
from app.models.cad_file import CadFile, ProcessingStatus
from app.models.output_type import OutputType as OutputTypeModel
from app.schemas.user import UserOut, UserUpdate, UserCreate
from app.utils.auth import require_global_admin, get_current_user, hash_password
router = APIRouter(prefix="/admin", tags=["admin"])

# Allowed values for the enum-like string settings; enforced in update_settings.
VALID_RENDERERS = {"blender"}
VALID_ENGINES = {"cycles", "eevee"}
VALID_FORMATS = {"jpg", "png"}
VALID_CYCLES_DEVICES = {"auto", "gpu", "cpu"}

# Canonical defaults for every system setting. All values are stored as
# strings in the system_settings table; _load_settings overlays stored rows
# onto this mapping so every key is always present.
SETTINGS_DEFAULTS: dict[str, str] = {
    "thumbnail_renderer": "blender",
    "blender_engine": "cycles",
    "blender_cycles_samples": "256",
    "blender_eevee_samples": "64",
    "thumbnail_format": "jpg",
    "blender_smooth_angle": "30",
    "cycles_device": "auto",
    "render_backend": "celery",
    "blender_max_concurrent_renders": "3",
    # JSON array of priority entries; see update_settings for validation.
    "product_thumbnail_priority": '["latest_render","cad_thumbnail"]',
    "render_stall_timeout_minutes": "120",
    # SMTP (email notifications — disabled by default)
    "smtp_enabled": "false",
    "smtp_host": "",
    "smtp_port": "587",
    "smtp_user": "",
    "smtp_password": "",
    "smtp_from_address": "",
    # glTF tessellation quality
    "tessellation_engine": "occ",  # "occ" | "gmsh" — tessellation backend
    "scene_linear_deflection": "0.1",  # mm — geometry GLB for viewer
    "scene_angular_deflection": "0.1",  # rad — Standard preset
    "render_linear_deflection": "0.03",  # mm — production/render GLB
    "render_angular_deflection": "0.05",  # rad — Standard preset
    # 3D viewer / glTF export settings
    "gltf_scale_factor": "0.001",
    "gltf_smooth_normals": "true",
    "viewer_max_distance": "50",
    "viewer_min_distance": "0.001",
    "gltf_material_quality": "pbr_colors",
    "gltf_pbr_roughness": "0.4",
    "gltf_pbr_metallic": "0.6",
}
class SettingsOut(BaseModel):
    """Fully-typed system settings payload returned by GET/PUT /admin/settings.

    Field defaults mirror SETTINGS_DEFAULTS (which stores everything as
    strings); _settings_to_out performs the string -> typed conversion.
    """

    # Rendering
    thumbnail_renderer: str = "blender"
    blender_engine: str = "cycles"
    blender_cycles_samples: int = 256
    blender_eevee_samples: int = 64
    thumbnail_format: str = "jpg"
    blender_smooth_angle: int = 30
    cycles_device: str = "auto"
    render_backend: str = "celery"
    blender_max_concurrent_renders: int = 3
    # JSON-encoded priority list (kept as a raw string, not parsed here)
    product_thumbnail_priority: str = '["latest_render","cad_thumbnail"]'
    render_stall_timeout_minutes: int = 120
    # SMTP
    smtp_enabled: bool = False
    smtp_host: str = ""
    smtp_port: int = 587
    smtp_user: str = ""
    smtp_password: str = ""
    smtp_from_address: str = ""
    # Tessellation deflections (linear in mm, angular in rad)
    scene_linear_deflection: float = 0.1
    scene_angular_deflection: float = 0.1
    render_linear_deflection: float = 0.03
    render_angular_deflection: float = 0.05
    # 3D viewer / glTF export
    gltf_scale_factor: float = 0.001
    gltf_smooth_normals: bool = True
    viewer_max_distance: float = 50.0
    viewer_min_distance: float = 0.001
    gltf_material_quality: str = "pbr_colors"
    gltf_pbr_roughness: float = 0.4
    gltf_pbr_metallic: float = 0.6
    tessellation_engine: str = "occ"
class SettingsUpdate(BaseModel):
    """Partial-update payload for PUT /admin/settings.

    Every field is optional; None means "leave this setting unchanged".
    Field names match SettingsOut / SETTINGS_DEFAULTS one-to-one.
    """

    thumbnail_renderer: str | None = None
    blender_engine: str | None = None
    blender_cycles_samples: int | None = None
    blender_eevee_samples: int | None = None
    thumbnail_format: str | None = None
    blender_smooth_angle: int | None = None
    cycles_device: str | None = None
    render_backend: str | None = None
    blender_max_concurrent_renders: int | None = None
    product_thumbnail_priority: str | None = None
    render_stall_timeout_minutes: int | None = None
    smtp_enabled: bool | None = None
    smtp_host: str | None = None
    smtp_port: int | None = None
    smtp_user: str | None = None
    smtp_password: str | None = None
    smtp_from_address: str | None = None
    scene_linear_deflection: float | None = None
    scene_angular_deflection: float | None = None
    render_linear_deflection: float | None = None
    render_angular_deflection: float | None = None
    gltf_scale_factor: float | None = None
    gltf_smooth_normals: bool | None = None
    viewer_max_distance: float | None = None
    viewer_min_distance: float | None = None
    gltf_material_quality: str | None = None
    gltf_pbr_roughness: float | None = None
    gltf_pbr_metallic: float | None = None
    tessellation_engine: str | None = None
@router.get("/users", response_model=list[UserOut])
async def list_users(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Return every user account, newest first (global-admin only)."""
    query = select(User).order_by(User.created_at.desc())
    rows = await db.execute(query)
    return rows.scalars().all()
@router.post("/users", response_model=UserOut, status_code=status.HTTP_201_CREATED)
async def create_user(
    body: UserCreate,
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Create a new user account (global-admin only).

    Rejects the request with HTTP 400 when the email is already registered.
    """
    dup_check = await db.execute(select(User).where(User.email == body.email))
    if dup_check.scalar_one_or_none() is not None:
        raise HTTPException(400, detail="Email already registered")
    new_user = User(
        email=body.email,
        password_hash=hash_password(body.password),
        full_name=body.full_name,
        role=body.role,
    )
    db.add(new_user)
    await db.commit()
    await db.refresh(new_user)
    return new_user
@router.patch("/users/{user_id}", response_model=UserOut)
async def update_user(
    user_id: uuid.UUID,
    body: UserUpdate,
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Apply a partial update to a user; 404 when the id is unknown."""
    lookup = await db.execute(select(User).where(User.id == user_id))
    user = lookup.scalar_one_or_none()
    if user is None:
        raise HTTPException(404, detail="User not found")
    # Only fields explicitly sent by the client are applied.
    changes = body.model_dump(exclude_unset=True)
    for field_name, new_value in changes.items():
        setattr(user, field_name, new_value)
    await db.commit()
    await db.refresh(user)
    return user
@router.delete("/users/{user_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_user(
    user_id: uuid.UUID,
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Delete a user account; admins cannot delete their own account."""
    lookup = await db.execute(select(User).where(User.id == user_id))
    target = lookup.scalar_one_or_none()
    if target is None:
        raise HTTPException(404, detail="User not found")
    if target.id == admin.id:
        raise HTTPException(400, detail="Cannot delete yourself")
    await db.delete(target)
    await db.commit()
# ── System Settings ──────────────────────────────────────────────────────────
async def _load_settings(db: AsyncSession) -> dict[str, str]:
    """Return the full settings map: stored rows overlaid on SETTINGS_DEFAULTS.

    Keys absent from the database fall back to their default value, so the
    result always contains exactly the keys of SETTINGS_DEFAULTS.
    """
    rows = await db.execute(select(SystemSetting))
    stored = {setting.key: setting.value for setting in rows.scalars().all()}
    merged = dict(SETTINGS_DEFAULTS)
    for key in merged:
        if key in stored:
            merged[key] = stored[key]
    return merged
async def _save_setting(db: AsyncSession, key: str, value: str) -> None:
    """Upsert a single setting row; the caller is responsible for committing."""
    result = await db.execute(
        sql_update(SystemSetting)
        .where(SystemSetting.key == key)
        .values(value=value, updated_at=datetime.utcnow())
    )
    # UPDATE touched no row -> the key does not exist yet, so insert it.
    if result.rowcount == 0:
        db.add(SystemSetting(key=key, value=value, updated_at=datetime.utcnow()))
def _settings_to_out(raw: dict[str, str]) -> SettingsOut:
    """Convert the raw string settings map into a typed SettingsOut.

    The `.get(...)` fallbacks now mirror SETTINGS_DEFAULTS exactly —
    previously the two angular-deflection fallbacks ("0.5"/"0.2")
    contradicted the canonical defaults ("0.1"/"0.05"). Boolean parsing is
    case-insensitive for both smtp_enabled and gltf_smooth_normals.
    """
    return SettingsOut(
        thumbnail_renderer=raw["thumbnail_renderer"],
        blender_engine=raw["blender_engine"],
        blender_cycles_samples=int(raw["blender_cycles_samples"]),
        blender_eevee_samples=int(raw["blender_eevee_samples"]),
        thumbnail_format=raw["thumbnail_format"],
        blender_smooth_angle=int(raw["blender_smooth_angle"]),
        cycles_device=raw["cycles_device"],
        render_backend=raw["render_backend"],
        blender_max_concurrent_renders=int(raw["blender_max_concurrent_renders"]),
        product_thumbnail_priority=raw.get("product_thumbnail_priority", '["latest_render","cad_thumbnail"]'),
        render_stall_timeout_minutes=int(raw.get("render_stall_timeout_minutes", "120")),
        smtp_enabled=raw.get("smtp_enabled", "false").lower() == "true",
        smtp_host=raw.get("smtp_host", ""),
        smtp_port=int(raw.get("smtp_port", "587")),
        smtp_user=raw.get("smtp_user", ""),
        smtp_password=raw.get("smtp_password", ""),
        smtp_from_address=raw.get("smtp_from_address", ""),
        scene_linear_deflection=float(raw.get("scene_linear_deflection", "0.1")),
        # Fixed: fallback was "0.5", disagreeing with SETTINGS_DEFAULTS ("0.1").
        scene_angular_deflection=float(raw.get("scene_angular_deflection", "0.1")),
        render_linear_deflection=float(raw.get("render_linear_deflection", "0.03")),
        # Fixed: fallback was "0.2", disagreeing with SETTINGS_DEFAULTS ("0.05").
        render_angular_deflection=float(raw.get("render_angular_deflection", "0.05")),
        gltf_scale_factor=float(raw.get("gltf_scale_factor", "0.001")),
        # Case-insensitive, consistent with smtp_enabled parsing above.
        gltf_smooth_normals=raw.get("gltf_smooth_normals", "true").lower() == "true",
        viewer_max_distance=float(raw.get("viewer_max_distance", "50")),
        viewer_min_distance=float(raw.get("viewer_min_distance", "0.001")),
        gltf_material_quality=raw.get("gltf_material_quality", "pbr_colors"),
        gltf_pbr_roughness=float(raw.get("gltf_pbr_roughness", "0.4")),
        gltf_pbr_metallic=float(raw.get("gltf_pbr_metallic", "0.6")),
        tessellation_engine=raw.get("tessellation_engine", "occ"),
    )
@router.get("/settings", response_model=SettingsOut)
async def get_settings(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Return the current system settings (global-admin only)."""
    raw = await _load_settings(db)
    return _settings_to_out(raw)
@router.put("/settings", response_model=SettingsOut)
async def update_settings(
    body: SettingsUpdate,
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Validate and persist any subset of system settings (global-admin only).

    Only fields explicitly provided (non-None) are validated and written;
    everything is stored as strings via _save_setting. Returns the full
    settings payload re-read from the database.

    Fixes: the numeric-range error messages had their range separator
    stripped by an encoding mishap ("must be 14096" etc.) — restored as
    ASCII ranges ("1-4096"). Non-string priority entries (e.g. JSON ints)
    previously escaped the `except ValueError` and caused a 500; TypeError
    and AttributeError are now caught as well.

    Raises:
        HTTPException(400): on any invalid or out-of-range value.
    """
    # ── Enum / range validation ──────────────────────────────────────────
    if body.thumbnail_renderer is not None and body.thumbnail_renderer not in VALID_RENDERERS:
        raise HTTPException(400, detail=f"Invalid renderer. Choose: {', '.join(sorted(VALID_RENDERERS))}")
    if body.blender_engine is not None and body.blender_engine not in VALID_ENGINES:
        raise HTTPException(400, detail=f"Invalid engine. Choose: {', '.join(sorted(VALID_ENGINES))}")
    if body.blender_cycles_samples is not None and not (1 <= body.blender_cycles_samples <= 4096):
        raise HTTPException(400, detail="blender_cycles_samples must be 1-4096")
    if body.blender_eevee_samples is not None and not (1 <= body.blender_eevee_samples <= 1024):
        raise HTTPException(400, detail="blender_eevee_samples must be 1-1024")
    if body.thumbnail_format is not None and body.thumbnail_format not in VALID_FORMATS:
        raise HTTPException(400, detail=f"Invalid thumbnail_format. Choose: {', '.join(sorted(VALID_FORMATS))}")
    if body.blender_smooth_angle is not None and not (0 <= body.blender_smooth_angle <= 180):
        raise HTTPException(400, detail="blender_smooth_angle must be 0-180 degrees")
    if body.cycles_device is not None and body.cycles_device not in VALID_CYCLES_DEVICES:
        raise HTTPException(400, detail=f"Invalid cycles_device. Choose: {', '.join(sorted(VALID_CYCLES_DEVICES))}")
    if body.blender_max_concurrent_renders is not None and not (1 <= body.blender_max_concurrent_renders <= 16):
        raise HTTPException(400, detail="blender_max_concurrent_renders must be 1-16")
    if body.render_stall_timeout_minutes is not None and not (10 <= body.render_stall_timeout_minutes <= 10080):
        raise HTTPException(400, detail="render_stall_timeout_minutes must be 10-10080 (10 min to 1 week)")
    if body.product_thumbnail_priority is not None:
        # Must be a JSON array whose entries are either a known literal or
        # the UUID of an existing output type.
        try:
            entries = json.loads(body.product_thumbnail_priority)
            if not isinstance(entries, list):
                raise ValueError
        except (json.JSONDecodeError, ValueError):
            raise HTTPException(400, detail="product_thumbnail_priority must be a valid JSON array")
        valid_literals = {"cad_thumbnail", "latest_render"}
        for entry in entries:
            if entry not in valid_literals:
                try:
                    ot_id = uuid.UUID(entry)
                except (ValueError, TypeError, AttributeError):
                    # Non-string entries raise TypeError/AttributeError from
                    # uuid.UUID; treat them like any other invalid entry.
                    raise HTTPException(400, detail=f"Invalid priority entry '{entry}': must be 'cad_thumbnail', 'latest_render', or a valid output type UUID")
                ot_row = await db.execute(select(OutputTypeModel).where(OutputTypeModel.id == ot_id))
                if not ot_row.scalar_one_or_none():
                    raise HTTPException(400, detail=f"Output type '{entry}' not found")
    # ── Serialize provided fields to their string storage form ───────────
    updates: dict[str, str] = {}
    if body.thumbnail_renderer is not None:
        updates["thumbnail_renderer"] = body.thumbnail_renderer
    if body.blender_engine is not None:
        updates["blender_engine"] = body.blender_engine
    if body.blender_cycles_samples is not None:
        updates["blender_cycles_samples"] = str(body.blender_cycles_samples)
    if body.blender_eevee_samples is not None:
        updates["blender_eevee_samples"] = str(body.blender_eevee_samples)
    if body.thumbnail_format is not None:
        updates["thumbnail_format"] = body.thumbnail_format
    if body.blender_smooth_angle is not None:
        updates["blender_smooth_angle"] = str(body.blender_smooth_angle)
    if body.cycles_device is not None:
        updates["cycles_device"] = body.cycles_device
    if body.render_backend is not None:
        updates["render_backend"] = body.render_backend
    if body.blender_max_concurrent_renders is not None:
        updates["blender_max_concurrent_renders"] = str(body.blender_max_concurrent_renders)
    if body.render_stall_timeout_minutes is not None:
        updates["render_stall_timeout_minutes"] = str(body.render_stall_timeout_minutes)
    if body.product_thumbnail_priority is not None:
        updates["product_thumbnail_priority"] = body.product_thumbnail_priority
    if body.smtp_enabled is not None:
        updates["smtp_enabled"] = "true" if body.smtp_enabled else "false"
    if body.smtp_host is not None:
        updates["smtp_host"] = body.smtp_host
    if body.smtp_port is not None:
        if not (1 <= body.smtp_port <= 65535):
            raise HTTPException(400, detail="smtp_port must be 1-65535")
        updates["smtp_port"] = str(body.smtp_port)
    if body.smtp_user is not None:
        updates["smtp_user"] = body.smtp_user
    if body.smtp_password is not None:
        updates["smtp_password"] = body.smtp_password
    if body.smtp_from_address is not None:
        updates["smtp_from_address"] = body.smtp_from_address
    if body.gltf_scale_factor is not None:
        updates["gltf_scale_factor"] = str(body.gltf_scale_factor)
    if body.gltf_smooth_normals is not None:
        updates["gltf_smooth_normals"] = "true" if body.gltf_smooth_normals else "false"
    if body.viewer_max_distance is not None:
        updates["viewer_max_distance"] = str(body.viewer_max_distance)
    if body.viewer_min_distance is not None:
        updates["viewer_min_distance"] = str(body.viewer_min_distance)
    if body.gltf_material_quality is not None:
        updates["gltf_material_quality"] = body.gltf_material_quality
    if body.gltf_pbr_roughness is not None:
        updates["gltf_pbr_roughness"] = str(body.gltf_pbr_roughness)
    if body.gltf_pbr_metallic is not None:
        updates["gltf_pbr_metallic"] = str(body.gltf_pbr_metallic)
    if body.scene_linear_deflection is not None:
        if not (0.001 <= body.scene_linear_deflection <= 10.0):
            raise HTTPException(400, detail="scene_linear_deflection must be 0.001-10.0 mm")
        updates["scene_linear_deflection"] = str(body.scene_linear_deflection)
    if body.scene_angular_deflection is not None:
        if not (0.05 <= body.scene_angular_deflection <= 1.5):
            raise HTTPException(400, detail="scene_angular_deflection must be 0.05-1.5 rad")
        updates["scene_angular_deflection"] = str(body.scene_angular_deflection)
    if body.render_linear_deflection is not None:
        if not (0.001 <= body.render_linear_deflection <= 10.0):
            raise HTTPException(400, detail="render_linear_deflection must be 0.001-10.0 mm")
        updates["render_linear_deflection"] = str(body.render_linear_deflection)
    if body.render_angular_deflection is not None:
        if not (0.05 <= body.render_angular_deflection <= 1.5):
            raise HTTPException(400, detail="render_angular_deflection must be 0.05-1.5 rad")
        updates["render_angular_deflection"] = str(body.render_angular_deflection)
    if body.tessellation_engine is not None:
        if body.tessellation_engine not in {"occ", "gmsh"}:
            raise HTTPException(400, detail="tessellation_engine must be 'occ' or 'gmsh'")
        updates["tessellation_engine"] = body.tessellation_engine
    # ── Persist and return the fresh state ───────────────────────────────
    for k, v in updates.items():
        await _save_setting(db, k, v)
    await db.commit()
    # Note: blender-renderer HTTP service removed; concurrency is now controlled
    # via render-worker Docker concurrency setting (asset_pipeline queue).
    return _settings_to_out(await _load_settings(db))
@router.post("/settings/process-unprocessed", status_code=status.HTTP_202_ACCEPTED)
async def process_unprocessed_steps(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Queue all STEP files that are not yet completed.

    Queues pending and failed files immediately. Files stuck in 'processing'
    for more than 15 minutes (i.e. their worker task was killed or lost) are
    also recovered. Actively-processing files (updated within the last 15 min)
    are left alone to avoid duplicate task execution on the same file.
    """
    # Fixed: removed the redundant local `from datetime import ...` — both
    # names are already imported at module level.
    stuck_cutoff = datetime.utcnow() - timedelta(minutes=15)
    result = await db.execute(
        select(CadFile).where(
            CadFile.stored_path.isnot(None),
            # pending/failed always, plus processing-but-stale (stuck)
            (
                CadFile.processing_status.in_([
                    ProcessingStatus.pending,
                    ProcessingStatus.failed,
                ]) |
                (
                    (CadFile.processing_status == ProcessingStatus.processing) &
                    (CadFile.updated_at < stuck_cutoff)
                )
            ),
        )
    )
    cad_files = result.scalars().all()
    # Task import kept local — presumably to avoid an import cycle between
    # routers and Celery tasks; confirm before hoisting.
    from app.tasks.step_tasks import process_step_file
    queued = 0
    for cad_file in cad_files:
        cad_file.processing_status = ProcessingStatus.pending
        process_step_file.delay(str(cad_file.id))
        queued += 1
    await db.commit()
    return {"queued": queued, "message": f"Queued {queued} STEP file(s) for processing"}
@router.post("/settings/regenerate-thumbnails", status_code=status.HTTP_202_ACCEPTED)
async def regenerate_thumbnails(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Re-queue completed CAD files that are linked to a product for thumbnail regeneration."""
    from app.domains.products.models import Product
    # Inner join: only CAD files actually referenced by a product are re-queued.
    result = await db.execute(
        select(CadFile)
        .join(Product, Product.cad_file_id == CadFile.id)
        .where(CadFile.processing_status == ProcessingStatus.completed)
    )
    cad_files = result.scalars().all()
    from app.tasks.step_tasks import render_step_thumbnail
    queued = 0
    for cad_file in cad_files:
        # Fire-and-forget Celery task per file; no DB state is modified here.
        render_step_thumbnail.delay(str(cad_file.id))
        queued += 1
    return {"queued": queued, "message": f"Re-queued {queued} CAD file(s) for thumbnail regeneration"}
@router.get("/settings/orphaned-cad-files")
async def get_orphaned_cad_files(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Return count and total disk size of CadFiles not linked to any product.

    A CadFile is orphaned when no product references it via
    products.cad_file_id (LEFT OUTER JOIN with a NULL product id).
    """
    # Fixed: removed the redundant local `from sqlalchemy import func` —
    # func is already imported at module level.
    from app.domains.products.models import Product
    result = await db.execute(
        select(func.count(CadFile.id), func.sum(CadFile.file_size))
        .outerjoin(Product, Product.cad_file_id == CadFile.id)
        .where(Product.id.is_(None))
    )
    count, total_bytes = result.one()
    # SUM over zero rows yields NULL/None — coalesce both values to 0.
    return {
        "count": count or 0,
        "total_mb": round((total_bytes or 0) / 1024 / 1024, 1),
    }
@router.post("/settings/cleanup-orphaned-cad-files")
async def cleanup_orphaned_cad_files(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Delete CadFile DB records and associated files on disk for all orphaned CadFiles.

    A CadFile is orphaned if no product currently references it via products.cad_file_id.
    Returns counts of deleted records/files and the disk space freed (MB).
    """
    import os
    from app.domains.products.models import Product
    result = await db.execute(
        select(CadFile)
        .outerjoin(Product, Product.cad_file_id == CadFile.id)
        .where(Product.id.is_(None))
    )
    orphans = result.scalars().all()
    deleted_files = 0
    deleted_bytes = 0
    for cad_file in orphans:
        # Remove files from disk (non-fatal if missing)
        for path_attr in ("stored_path", "thumbnail_path", "gltf_path"):
            path = getattr(cad_file, path_attr, None)
            if path:
                try:
                    if os.path.isfile(path):
                        # Size is read before unlinking so freed_mb is accurate.
                        size = os.path.getsize(path)
                        os.remove(path)
                        deleted_files += 1
                        deleted_bytes += size
                except OSError:
                    # Best-effort cleanup: a failed unlink must not abort the sweep.
                    pass
        await db.delete(cad_file)
    await db.commit()
    return {
        "deleted_records": len(orphans),
        "deleted_files": deleted_files,
        "freed_mb": round(deleted_bytes / 1024 / 1024, 1),
    }
@router.post("/settings/reextract-metadata", status_code=status.HTTP_202_ACCEPTED)
async def reextract_all_metadata(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Re-extract OCC metadata (dimensions, sharp edges) for all completed CAD files.

    Updates mesh_attributes without re-rendering thumbnails or changing processing status.
    Use this after deploying bbox/edge extraction improvements.
    """
    from app.domains.products.models import Product
    # Only completed files that still have their STEP on disk and are linked
    # to at least one product.
    result = await db.execute(
        select(CadFile)
        .join(Product, Product.cad_file_id == CadFile.id)
        .where(
            CadFile.processing_status == ProcessingStatus.completed,
            CadFile.stored_path.isnot(None),
        )
    )
    cad_files = result.scalars().all()
    from app.tasks.step_tasks import reextract_cad_metadata
    queued = 0
    for cad_file in cad_files:
        reextract_cad_metadata.delay(str(cad_file.id))
        queued += 1
    return {"queued": queued, "message": f"Queued {queued} CAD file(s) for metadata re-extraction"}
@router.post("/settings/reextract-rich-metadata", status_code=status.HTTP_202_ACCEPTED)
async def reextract_rich_metadata(
    admin: User = Depends(require_global_admin),
):
    """Queue a batch task to re-compute volume, surface area, complexity for all products with STEP files."""
    from app.tasks.step_tasks import reextract_rich_metadata_task
    # One Celery task handles the whole batch; no DB session needed here.
    reextract_rich_metadata_task.delay()
    return {"queued": True, "message": "Rich metadata re-extraction task queued"}
@router.post("/settings/generate-missing-canonical-scenes", status_code=status.HTTP_202_ACCEPTED)
async def generate_missing_canonical_scenes(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Queue canonical scene (geometry GLB + USD master) generation for every completed CAD file that has no gltf_geometry MediaAsset."""
    from app.domains.media.models import MediaAsset, MediaAssetType
    result = await db.execute(
        select(CadFile).where(CadFile.processing_status == ProcessingStatus.completed)
    )
    cad_files = result.scalars().all()
    # Bulk-fetch existing gltf_geometry assets
    existing_result = await db.execute(
        select(MediaAsset.cad_file_id).where(MediaAsset.asset_type == MediaAssetType.gltf_geometry)
    )
    existing_ids = {row[0] for row in existing_result.all()}
    from app.tasks.step_tasks import generate_gltf_geometry_task
    queued = 0
    for cad_file in cad_files:
        # Skip files whose STEP source is gone — nothing to tessellate.
        if not cad_file.stored_path:
            continue
        # Only queue files that do not already have a geometry asset.
        if cad_file.id not in existing_ids:
            generate_gltf_geometry_task.delay(str(cad_file.id))
            queued += 1
    return {"queued": queued, "message": f"Queued {queued} missing canonical scene task(s)"}
@router.post("/settings/generate-missing-usd-masters", status_code=status.HTTP_202_ACCEPTED)
async def generate_missing_usd_masters(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Queue USD master export for completed CAD files linked to a product that have no usd_master MediaAsset.

    Only CadFiles referenced by at least one Product are included — orphan CadFiles
    (uploaded but never linked to a product) are skipped to avoid unnecessary work.
    """
    from app.domains.media.models import MediaAsset, MediaAssetType
    from app.domains.products.models import Product
    # Only CadFiles that are actually used by a product
    product_cad_ids_result = await db.execute(
        select(Product.cad_file_id).where(Product.cad_file_id.isnot(None)).distinct()
    )
    product_cad_ids = {row[0] for row in product_cad_ids_result.all()}
    result = await db.execute(
        select(CadFile).where(
            CadFile.processing_status == ProcessingStatus.completed,
            CadFile.id.in_(product_cad_ids),
        )
    )
    cad_files = result.scalars().all()
    # Bulk-fetch files that already have a USD master so we skip them below.
    existing_result = await db.execute(
        select(MediaAsset.cad_file_id).where(MediaAsset.asset_type == MediaAssetType.usd_master)
    )
    existing_ids = {row[0] for row in existing_result.all()}
    from app.tasks.step_tasks import generate_usd_master_task
    queued = 0
    for cad_file in cad_files:
        # No STEP source on disk -> nothing to export.
        if not cad_file.stored_path:
            continue
        if cad_file.id not in existing_ids:
            generate_usd_master_task.delay(str(cad_file.id))
            queued += 1
    return {"queued": queued, "message": f"Queued {queued} missing USD master task(s)"}
@router.post("/settings/regenerate-all-canonical-scenes", status_code=status.HTTP_202_ACCEPTED)
async def regenerate_all_canonical_scenes(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Re-queue GLB + USD master export for ALL completed CAD files (overwrites existing assets)."""
    result = await db.execute(
        select(CadFile).where(CadFile.processing_status == ProcessingStatus.completed)
    )
    cad_files = result.scalars().all()
    from app.tasks.step_tasks import generate_gltf_geometry_task
    queued = 0
    for cad_file in cad_files:
        # Files without a STEP source on disk cannot be regenerated.
        if not cad_file.stored_path:
            continue
        generate_gltf_geometry_task.delay(str(cad_file.id))
        queued += 1
    return {"queued": queued, "message": f"Queued {queued} canonical scene regeneration task(s)"}
@router.post("/settings/recover-stuck-processing", status_code=status.HTTP_200_OK)
async def recover_stuck_processing(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Reset CAD files stuck in 'processing' for more than 10 minutes to 'failed'.

    Call this when a CAD file shows 'processing' indefinitely. The auto-recovery
    beat task also runs every 5 minutes, so this is just for immediate relief.
    """
    # Fixed: removed redundant local re-imports of datetime/timedelta and
    # sql_update/and_ — all four names are already imported at module level.
    cutoff = datetime.utcnow() - timedelta(minutes=10)
    result = await db.execute(
        sql_update(CadFile)
        .where(
            and_(
                CadFile.processing_status == ProcessingStatus.processing,
                CadFile.updated_at < cutoff,
            )
        )
        .values(
            processing_status=ProcessingStatus.failed,
            error_message="Processing timed out — worker may have crashed. Use 'Regenerate Thumbnail' to retry.",
        )
        .returning(CadFile.id)
    )
    # RETURNING gives back the ids of every row that was reset.
    reset_ids = [str(r[0]) for r in result.fetchall()]
    await db.commit()
    return {"reset": len(reset_ids), "ids": reset_ids,
            "message": f"Reset {len(reset_ids)} stuck file(s) to 'failed'"}
@router.post("/settings/seed-workflows", status_code=status.HTTP_200_OK)
async def seed_workflows(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Create the standard workflow definitions if they do not already exist.

    Idempotent: existing workflows are matched by name and left untouched.
    """
    from app.domains.rendering.models import WorkflowDefinition
    # Canonical seed data; `config` shape: {"type": ..., "params": {...}}.
    STANDARD_WORKFLOWS = [
        {
            "name": "Still Image — Cycles",
            "config": {
                "type": "still",
                "params": {"render_engine": "cycles", "samples": 256, "resolution": [1920, 1080]},
            },
        },
        {
            "name": "Still Image — EEVEE",
            "config": {
                "type": "still",
                "params": {"render_engine": "eevee", "samples": 64, "resolution": [1920, 1080]},
            },
        },
        {
            "name": "Turntable Animation",
            "config": {
                "type": "turntable",
                "params": {"render_engine": "cycles", "samples": 64, "fps": 24, "duration_s": 5},
            },
        },
        {
            "name": "Multi-Angle (0° / 45° / 90°)",
            "config": {
                "type": "multi_angle",
                "params": {"render_engine": "cycles", "samples": 128, "angles": [0, 45, 90]},
            },
        },
    ]
    existing_result = await db.execute(select(WorkflowDefinition))
    existing_names = {wf.name for wf in existing_result.scalars().all()}
    created = 0
    for wf_data in STANDARD_WORKFLOWS:
        # Name is the de-duplication key — only missing workflows are created.
        if wf_data["name"] not in existing_names:
            db.add(WorkflowDefinition(
                name=wf_data["name"],
                config=wf_data["config"],
                is_active=True,
            ))
            created += 1
    await db.commit()
    return {"created": created, "message": f"Created {created} workflow definition(s)"}
@router.get("/settings/renderer-status")
async def renderer_status(
    admin: User = Depends(require_global_admin),
):
    """Check Blender availability on the render-worker via Celery task."""
    from app.tasks.gpu_tasks import check_blender_status
    try:
        result = check_blender_status.apply_async()
        # Synchronous wait for the worker's reply; bounded to 10 s.
        data = result.get(timeout=10)
    except Exception as exc:
        # Broker/worker unreachable or timeout — report as unavailable
        # instead of failing the request.
        data = {"available": False, "blender_bin": "", "version": "", "error": str(exc)}
    return {"blender": data}
@router.post("/import-media-assets")
async def import_existing_media_assets(
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(require_global_admin),
):
    """Import existing cad thumbnails and order line renders as MediaAsset records.

    One-off backfill: scans cad_files.thumbnail_path and order_lines.result_path
    and creates MediaAsset rows for any path not already registered (de-duped by
    storage_key). Returns created/skipped counts.
    """
    from app.domains.media.models import MediaAsset, MediaAssetType
    from sqlalchemy import text
    created = 0
    skipped = 0
    from app.config import settings as _app_settings
    def _normalize_key(path: str) -> str:
        """Strip UPLOAD_DIR prefix to store relative storage keys."""
        key = str(path)
        prefix = str(_app_settings.upload_dir).rstrip("/") + "/"
        return key[len(prefix):] if key.startswith(prefix) else key
    # 1. CadFiles with thumbnail_path
    # NOTE(review): 'bypass' appears to disable row-level tenant filtering for
    # this transaction — confirm against the RLS policy definition.
    await db.execute(text("SET LOCAL app.current_tenant_id = 'bypass'"))
    cad_result = await db.execute(
        text("SELECT id, thumbnail_path FROM cad_files WHERE thumbnail_path IS NOT NULL AND processing_status = 'completed'")
    )
    for row in cad_result.fetchall():
        cad_id, thumb_path = row
        norm_key = _normalize_key(str(thumb_path))
        # De-dup check
        existing = await db.execute(
            select(MediaAsset.id).where(MediaAsset.storage_key == norm_key).limit(1)
        )
        if existing.scalar_one_or_none():
            skipped += 1
            continue
        # MIME type inferred from the file extension; anything not .jpg/.jpeg
        # is assumed to be PNG.
        ext = str(thumb_path).lower()
        mime = "image/jpeg" if ext.endswith(".jpg") or ext.endswith(".jpeg") else "image/png"
        asset = MediaAsset(
            cad_file_id=uuid.UUID(str(cad_id)),
            asset_type=MediaAssetType.thumbnail,
            storage_key=norm_key,
            mime_type=mime,
        )
        db.add(asset)
        created += 1
    # 2. OrderLines with result_path
    await db.execute(text("SET LOCAL app.current_tenant_id = 'bypass'"))
    ol_result = await db.execute(
        text("""
        SELECT ol.id, ol.result_path, ol.product_id, COALESCE(ot.is_animation, false) as is_animation
        FROM order_lines ol
        LEFT JOIN output_types ot ON ot.id = ol.output_type_id
        WHERE ol.result_path IS NOT NULL AND ol.render_status = 'completed'
        """)
    )
    for row in ol_result.fetchall():
        ol_id, result_path, product_id, _is_animation = row
        norm_key = _normalize_key(str(result_path))
        existing = await db.execute(
            select(MediaAsset.id).where(MediaAsset.storage_key == norm_key).limit(1)
        )
        if existing.scalar_one_or_none():
            skipped += 1
            continue
        ext = str(result_path).lower()
        if ext.endswith(".mp4") or ext.endswith(".webm"):
            # Videos are registered as turntables; note .webm still gets the
            # video/mp4 MIME type here.
            mime = "video/mp4"
            asset_type = MediaAssetType.turntable
        else:
            # Extension determines type — poster frames (.jpg/.png) are always stills
            mime = "image/png" if ext.endswith(".png") else "image/jpeg"
            asset_type = MediaAssetType.still
        asset = MediaAsset(
            order_line_id=uuid.UUID(str(ol_id)),
            product_id=uuid.UUID(str(product_id)) if product_id else None,
            asset_type=asset_type,
            storage_key=norm_key,
            mime_type=mime,
        )
        db.add(asset)
        created += 1
    await db.commit()
    return {"created": created, "skipped": skipped}
@router.delete("/settings/purge-render-media", status_code=status.HTTP_200_OK)
async def purge_render_media(
    admin: User = Depends(require_global_admin),
    db: AsyncSession = Depends(get_db),
):
    """Delete all still and turntable MediaAsset records and their backing files.

    This removes rendered images and animations but leaves thumbnails, GLBs,
    STLs, and USD masters intact.
    """
    import logging
    from pathlib import Path
    from app.config import settings
    from app.core.storage import get_storage
    from app.domains.media.models import MediaAsset, MediaAssetType
    logger = logging.getLogger(__name__)
    storage = get_storage()
    result = await db.execute(
        select(MediaAsset).where(
            MediaAsset.asset_type.in_([MediaAssetType.still, MediaAssetType.turntable])
        )
    )
    assets = result.scalars().all()
    deleted_db = 0
    deleted_files = 0
    freed_bytes = 0
    for asset in assets:
        # Delete backing file
        key = asset.storage_key
        try:
            # storage_key may be absolute or relative to the upload dir.
            candidate = Path(key) if Path(key).is_absolute() else Path(settings.upload_dir) / key
            if candidate.exists():
                # Record size before unlinking so freed_mb is accurate.
                freed_bytes += candidate.stat().st_size
                candidate.unlink()
                deleted_files += 1
            elif hasattr(storage, 'delete'):
                # Not on local disk — fall back to the storage backend, if it
                # supports deletion.
                storage.delete(key)
                deleted_files += 1
        except Exception as exc:
            # Best-effort: a missing/undeletable file must not block the purge;
            # the DB record is still removed below.
            logger.warning("Could not delete file for asset %s (%s): %s", asset.id, key, exc)
        await db.delete(asset)
        deleted_db += 1
    await db.commit()
    return {
        "deleted_records": deleted_db,
        "deleted_files": deleted_files,
        "freed_mb": round(freed_bytes / 1024 / 1024, 1),
        "message": f"Purged {deleted_db} still/turntable asset(s), freed {round(freed_bytes / 1024 / 1024, 1)} MB",
    }
# ── Dashboard Stats ──────────────────────────────────────────────────────────
class RenderThroughputStats(BaseModel):
completed_today: int
completed_this_week: int
completed_this_month: int
failed_today: int
failed_this_week: int
failed_this_month: int
avg_render_time_s: Optional[float]
median_render_time_s: Optional[float]
class MaterialCoverageStats(BaseModel):
    """How many product-referenced material names resolve to a known Material or alias."""
    total_unique_materials: int
    mapped_materials: int
    unmapped_materials: int
    coverage_pct: float
    # Count of Material rows whose name starts with "HARTOMAT_".
    library_material_count: int
    alias_count: int
class ProductStatsOverview(BaseModel):
    """Product totals and how many have an associated CAD (STEP) file."""
    total_products: int
    with_step_files: int
    without_step_files: int
    step_coverage_pct: float
class OrderStatusBreakdown(BaseModel):
    """Order counts grouped by status; `total` is the sum over all statuses."""
    draft: int
    submitted: int
    processing: int
    completed: int
    rejected: int
    total: int
class DashboardStatsResponse(BaseModel):
    """Aggregate payload returned by GET /admin/dashboard-stats."""
    render_throughput: RenderThroughputStats
    material_coverage: MaterialCoverageStats
    product_stats: ProductStatsOverview
    order_status: OrderStatusBreakdown
@router.get("/dashboard-stats", response_model=DashboardStatsResponse)
async def get_dashboard_stats(
    _user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
) -> DashboardStatsResponse:
    """Aggregate stats for the dashboard: render throughput, material coverage, product and order stats."""
    from app.domains.orders.models import Order, OrderLine
    from app.domains.products.models import Product
    from app.domains.materials.models import Material, MaterialAlias

    # NOTE(review): utcnow() is naive and deprecated in 3.12; assumes DB
    # timestamps are stored as naive UTC — confirm before switching to aware.
    now = datetime.utcnow()
    today_start = now.replace(hour=0, minute=0, second=0, microsecond=0)
    week_start = today_start - timedelta(days=today_start.weekday())  # Monday 00:00
    month_start = today_start.replace(day=1)

    # ── Render throughput ─────────────────────────────────────────────────
    def _count_renders(status_val: str, since: datetime):
        # Count order lines with the given render status completed since `since`.
        return select(func.count(OrderLine.id)).where(
            OrderLine.render_status == status_val,
            OrderLine.render_completed_at >= since,
        )

    completed_today = (await db.execute(_count_renders("completed", today_start))).scalar() or 0
    completed_week = (await db.execute(_count_renders("completed", week_start))).scalar() or 0
    completed_month = (await db.execute(_count_renders("completed", month_start))).scalar() or 0
    failed_today = (await db.execute(_count_renders("failed", today_start))).scalar() or 0
    failed_week = (await db.execute(_count_renders("failed", week_start))).scalar() or 0
    failed_month = (await db.execute(_count_renders("failed", month_start))).scalar() or 0

    # Average and median render time (for completed renders with both timestamps)
    render_duration = extract(
        "epoch",
        OrderLine.render_completed_at - OrderLine.render_started_at,
    )
    avg_result = await db.execute(
        select(func.avg(render_duration)).where(
            OrderLine.render_status == "completed",
            OrderLine.render_started_at.isnot(None),
            OrderLine.render_completed_at.isnot(None),
        )
    )
    avg_render_s = avg_result.scalar()
    avg_render_s = round(avg_render_s, 1) if avg_render_s is not None else None

    # Median via percentile_cont (PostgreSQL-specific ordered-set aggregate).
    median_result = await db.execute(
        select(
            func.percentile_cont(0.5).within_group(render_duration)
        ).where(
            OrderLine.render_status == "completed",
            OrderLine.render_started_at.isnot(None),
            OrderLine.render_completed_at.isnot(None),
        )
    )
    median_render_s = median_result.scalar()
    median_render_s = round(median_render_s, 1) if median_render_s is not None else None

    render_throughput = RenderThroughputStats(
        completed_today=completed_today,
        completed_this_week=completed_week,
        completed_this_month=completed_month,
        failed_today=failed_today,
        failed_this_week=failed_week,
        failed_this_month=failed_month,
        avg_render_time_s=avg_render_s,
        median_render_time_s=median_render_s,
    )

    # ── Material coverage ─────────────────────────────────────────────────
    # Each product.cad_part_materials is a JSON array of {part_name, material};
    # collect every distinct material name referenced across products.
    product_rows = await db.execute(
        select(Product.cad_part_materials).where(Product.cad_part_materials.isnot(None))
    )
    all_mat_names: set[str] = set()
    for (cpm,) in product_rows:
        if isinstance(cpm, list):
            for entry in cpm:
                if isinstance(entry, dict) and entry.get("material"):
                    all_mat_names.add(entry["material"])

    # Library materials (name starts with HARTOMAT_)
    lib_count_result = await db.execute(
        select(func.count(Material.id)).where(Material.name.like("HARTOMAT_%"))
    )
    library_material_count = lib_count_result.scalar() or 0

    # All known material names (from Material table)
    known_mat_result = await db.execute(select(Material.name))
    known_names = {row[0] for row in known_mat_result}
    # All aliases
    alias_result = await db.execute(select(MaterialAlias.alias))
    known_aliases = {row[0] for row in alias_result}
    alias_count_result = await db.execute(select(func.count(MaterialAlias.id)))
    alias_count = alias_count_result.scalar() or 0

    # A material is "mapped" if it exists in the Material table or has an alias.
    mapped = len(all_mat_names & (known_names | known_aliases))
    total_unique = len(all_mat_names)
    unmapped = total_unique - mapped
    # 100% coverage when no materials are referenced at all.
    coverage_pct = round((mapped / total_unique * 100) if total_unique > 0 else 100.0, 1)

    material_coverage = MaterialCoverageStats(
        total_unique_materials=total_unique,
        mapped_materials=mapped,
        unmapped_materials=unmapped,
        coverage_pct=coverage_pct,
        library_material_count=library_material_count,
        alias_count=alias_count,
    )

    # ── Product stats ─────────────────────────────────────────────────────
    total_products_result = await db.execute(select(func.count(Product.id)))
    total_products = total_products_result.scalar() or 0
    with_step_result = await db.execute(
        select(func.count(Product.id)).where(Product.cad_file_id.isnot(None))
    )
    with_step = with_step_result.scalar() or 0
    without_step = total_products - with_step
    step_pct = round((with_step / total_products * 100) if total_products > 0 else 0.0, 1)

    product_stats = ProductStatsOverview(
        total_products=total_products,
        with_step_files=with_step,
        without_step_files=without_step,
        step_coverage_pct=step_pct,
    )

    # ── Order status breakdown ────────────────────────────────────────────
    order_counts = await db.execute(
        select(Order.status, func.count(Order.id)).group_by(Order.status)
    )
    # Normalize enum statuses to their string value for lookup below.
    status_map: dict[str, int] = {
        (row_status.value if hasattr(row_status, "value") else str(row_status)): count
        for row_status, count in order_counts
    }
    order_total = sum(status_map.values())

    order_status = OrderStatusBreakdown(
        draft=status_map.get("draft", 0),
        submitted=status_map.get("submitted", 0),
        processing=status_map.get("processing", 0),
        completed=status_map.get("completed", 0),
        rejected=status_map.get("rejected", 0),
        total=order_total,
    )

    return DashboardStatsResponse(
        render_throughput=render_throughput,
        material_coverage=material_coverage,
        product_stats=product_stats,
        order_status=order_status,
    )