feat: material alias seeds expansion, bulk product delete, dashboard stats widgets
- Material alias seeds: 95 → 855 aliases covering German variants, DIN standards, Werkstoffnummern, industry terms, English equivalents, polymer abbreviations
- Batch product delete/deactivate endpoint (POST /products/batch-delete)
- Multi-select UI on Products page with floating action bar
- Dashboard: RenderThroughput + MaterialCoverage widgets
- Dashboard stats endpoint (GET /admin/dashboard-stats)

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -1,10 +1,10 @@
|
||||
import json
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Any, Optional
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, update as sql_update
|
||||
from sqlalchemy import select, update as sql_update, func, case, distinct, and_, extract
|
||||
from pydantic import BaseModel
|
||||
from app.database import get_db
|
||||
from app.models.user import User
|
||||
@@ -12,7 +12,7 @@ from app.models.system_setting import SystemSetting
|
||||
from app.models.cad_file import CadFile, ProcessingStatus
|
||||
from app.models.output_type import OutputType as OutputTypeModel
|
||||
from app.schemas.user import UserOut, UserUpdate, UserCreate
|
||||
from app.utils.auth import require_global_admin, hash_password
|
||||
from app.utils.auth import require_global_admin, get_current_user, hash_password
|
||||
|
||||
router = APIRouter(prefix="/admin", tags=["admin"])
|
||||
|
||||
@@ -882,3 +882,211 @@ async def purge_render_media(
|
||||
"message": f"Purged {deleted_db} still/turntable asset(s), freed {round(freed_bytes / 1024 / 1024, 1)} MB",
|
||||
}
|
||||
|
||||
|
||||
# ── Dashboard Stats ──────────────────────────────────────────────────────────
|
||||
|
||||
class RenderThroughputStats(BaseModel):
    """Render-job counts and timing for the dashboard throughput widget."""

    # Completed render counts over calendar windows (today / ISO week / month).
    completed_today: int
    completed_this_week: int
    completed_this_month: int
    # Failed render counts over the same three windows.
    failed_today: int
    failed_this_week: int
    failed_this_month: int
    # Seconds between render_started_at and render_completed_at, rounded to
    # one decimal; None when no completed render carries both timestamps.
    avg_render_time_s: Optional[float]
    median_render_time_s: Optional[float]
|
||||
|
||||
|
||||
class MaterialCoverageStats(BaseModel):
    """How well product material names map onto the material library."""

    # Distinct material names found in all products' cad_part_materials JSON.
    total_unique_materials: int
    # Of those, how many resolve via the Material table or a MaterialAlias.
    mapped_materials: int
    unmapped_materials: int
    # mapped / total as a percentage; reported as 100.0 when no product
    # references any material at all.
    coverage_pct: float
    # Materials whose name starts with "SCHAEFFLER_" (the curated library).
    library_material_count: int
    # Total number of rows in the MaterialAlias table.
    alias_count: int
|
||||
|
||||
|
||||
class ProductStatsOverview(BaseModel):
    """Product counts split by whether a CAD (STEP) file is attached."""

    total_products: int
    # Products with a non-null cad_file_id.
    with_step_files: int
    without_step_files: int
    # with_step_files / total_products as a percentage; 0.0 when there are
    # no products.
    step_coverage_pct: float
|
||||
|
||||
|
||||
class OrderStatusBreakdown(BaseModel):
    """Order counts per status plus the overall total."""

    draft: int
    submitted: int
    processing: int
    completed: int
    rejected: int
    # Sum over ALL statuses returned by the GROUP BY, so it can exceed the
    # sum of the five named fields if other status values exist in the DB.
    total: int
|
||||
|
||||
|
||||
class DashboardStatsResponse(BaseModel):
    """Envelope returned by GET /admin/dashboard-stats."""

    render_throughput: RenderThroughputStats
    material_coverage: MaterialCoverageStats
    product_stats: ProductStatsOverview
    order_status: OrderStatusBreakdown
|
||||
|
||||
|
||||
@router.get("/dashboard-stats", response_model=DashboardStatsResponse)
async def get_dashboard_stats(
    _user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
) -> DashboardStatsResponse:
    """Aggregate stats for the dashboard: render throughput, material coverage, product and order stats.

    Read-only aggregation; requires any authenticated user (not admin-only,
    unlike most routes in this router).
    """
    # Imported locally, presumably to avoid circular imports between the
    # admin router and the domain model modules — confirm before hoisting.
    from app.domains.orders.models import Order, OrderStatus, OrderLine
    from app.domains.products.models import Product
    from app.domains.materials.models import Material, MaterialAlias

    # NOTE(review): naive UTC timestamps; assumes render_completed_at is also
    # stored naive-UTC — confirm against the OrderLine column definition.
    now = datetime.utcnow()
    today_start = now.replace(hour=0, minute=0, second=0, microsecond=0)
    # Monday 00:00 of the current ISO week.
    week_start = today_start - timedelta(days=today_start.weekday())
    month_start = today_start.replace(day=1)

    # ── Render throughput ─────────────────────────────────────────────────
    def _count_renders(status_val: str, since: datetime):
        # Build (don't execute) a COUNT query for order lines whose render
        # reached `status_val` at or after `since`.
        # NOTE(review): failed renders are also filtered on
        # render_completed_at — verify that failures set this timestamp,
        # otherwise failed counts undercount.
        return select(func.count(OrderLine.id)).where(
            OrderLine.render_status == status_val,
            OrderLine.render_completed_at >= since,
        )

    completed_today = (await db.execute(_count_renders("completed", today_start))).scalar() or 0
    completed_week = (await db.execute(_count_renders("completed", week_start))).scalar() or 0
    completed_month = (await db.execute(_count_renders("completed", month_start))).scalar() or 0
    failed_today = (await db.execute(_count_renders("failed", today_start))).scalar() or 0
    failed_week = (await db.execute(_count_renders("failed", week_start))).scalar() or 0
    failed_month = (await db.execute(_count_renders("failed", month_start))).scalar() or 0

    # Average and median render time (for completed renders with both timestamps)
    # All-time window: no `since` filter here, unlike the counts above.
    render_duration = extract(
        "epoch",
        OrderLine.render_completed_at - OrderLine.render_started_at,
    )
    avg_result = await db.execute(
        select(func.avg(render_duration)).where(
            OrderLine.render_status == "completed",
            OrderLine.render_started_at.isnot(None),
            OrderLine.render_completed_at.isnot(None),
        )
    )
    avg_render_s = avg_result.scalar()
    avg_render_s = round(avg_render_s, 1) if avg_render_s is not None else None

    # Median via percentile_cont
    # NOTE(review): percentile_cont(...) WITHIN GROUP is an ordered-set
    # aggregate (PostgreSQL); this query will not run on SQLite.
    median_result = await db.execute(
        select(
            func.percentile_cont(0.5).within_group(render_duration)
        ).where(
            OrderLine.render_status == "completed",
            OrderLine.render_started_at.isnot(None),
            OrderLine.render_completed_at.isnot(None),
        )
    )
    median_render_s = median_result.scalar()
    median_render_s = round(median_render_s, 1) if median_render_s is not None else None

    render_throughput = RenderThroughputStats(
        completed_today=completed_today,
        completed_this_week=completed_week,
        completed_this_month=completed_month,
        failed_today=failed_today,
        failed_this_week=failed_week,
        failed_this_month=failed_month,
        avg_render_time_s=avg_render_s,
        median_render_time_s=median_render_s,
    )

    # ── Material coverage ─────────────────────────────────────────────────
    # Unique material names referenced in products' cad_part_materials
    # Each product.cad_part_materials is a JSONB array of {part_name, material}
    # We collect all distinct material names from products
    product_rows = await db.execute(
        select(Product.cad_part_materials).where(Product.cad_part_materials.isnot(None))
    )
    all_mat_names: set[str] = set()
    for (cpm,) in product_rows:
        # Defensive: tolerate malformed JSON (non-list values, non-dict
        # entries, missing/empty "material") by skipping them.
        if isinstance(cpm, list):
            for entry in cpm:
                if isinstance(entry, dict) and entry.get("material"):
                    all_mat_names.add(entry["material"])

    # Library materials (name starts with SCHAEFFLER_)
    lib_count_result = await db.execute(
        select(func.count(Material.id)).where(Material.name.like("SCHAEFFLER_%"))
    )
    library_material_count = lib_count_result.scalar() or 0

    # All known material names (from Material table)
    known_mat_result = await db.execute(select(Material.name))
    known_names = {row[0] for row in known_mat_result}

    # All aliases
    # NOTE(review): membership test below is case-sensitive exact match —
    # confirm that product material strings match alias casing.
    alias_result = await db.execute(select(MaterialAlias.alias))
    known_aliases = {row[0] for row in alias_result}

    alias_count_result = await db.execute(select(func.count(MaterialAlias.id)))
    alias_count = alias_count_result.scalar() or 0

    # A material from a product is "mapped" if it exists in Material table or has an alias
    mapped = 0
    for mat_name in all_mat_names:
        if mat_name in known_names or mat_name in known_aliases:
            mapped += 1

    total_unique = len(all_mat_names)
    unmapped = total_unique - mapped
    # Vacuous coverage (no materials referenced anywhere) reports as 100%.
    coverage_pct = round((mapped / total_unique * 100) if total_unique > 0 else 100.0, 1)

    material_coverage = MaterialCoverageStats(
        total_unique_materials=total_unique,
        mapped_materials=mapped,
        unmapped_materials=unmapped,
        coverage_pct=coverage_pct,
        library_material_count=library_material_count,
        alias_count=alias_count,
    )

    # ── Product stats ─────────────────────────────────────────────────────
    total_products_result = await db.execute(select(func.count(Product.id)))
    total_products = total_products_result.scalar() or 0

    # A product "has a STEP file" iff cad_file_id is set.
    with_step_result = await db.execute(
        select(func.count(Product.id)).where(Product.cad_file_id.isnot(None))
    )
    with_step = with_step_result.scalar() or 0
    without_step = total_products - with_step
    step_pct = round((with_step / total_products * 100) if total_products > 0 else 0.0, 1)

    product_stats = ProductStatsOverview(
        total_products=total_products,
        with_step_files=with_step,
        without_step_files=without_step,
        step_coverage_pct=step_pct,
    )

    # ── Order status breakdown ────────────────────────────────────────────
    order_counts = await db.execute(
        select(Order.status, func.count(Order.id)).group_by(Order.status)
    )
    status_map: dict[str, int] = {}
    for row_status, count in order_counts:
        # Normalize: status may come back as an Enum member (use .value) or
        # as a plain string depending on column typing/driver.
        status_map[row_status.value if hasattr(row_status, "value") else str(row_status)] = count

    # Total over ALL statuses, including any not in the five named fields.
    order_total = sum(status_map.values())

    order_status = OrderStatusBreakdown(
        draft=status_map.get("draft", 0),
        submitted=status_map.get("submitted", 0),
        processing=status_map.get("processing", 0),
        completed=status_map.get("completed", 0),
        rejected=status_map.get("rejected", 0),
        total=order_total,
    )

    return DashboardStatsResponse(
        render_throughput=render_throughput,
        material_coverage=material_coverage,
        product_stats=product_stats,
        order_status=order_status,
    )
|
||||
|
||||
|
||||
@@ -34,6 +34,118 @@ from app.models.user import User
|
||||
router = APIRouter(prefix="/products", tags=["products"])
|
||||
|
||||
|
||||
class BatchDeleteRequest(BaseModel):
    """Payload for POST /products/batch-delete."""

    # Products to act on; duplicates are de-duplicated server-side.
    product_ids: list[uuid.UUID]
    # False (default): soft-delete (is_active=False). True: permanent removal
    # of products, their order lines, media assets, and orphaned CAD files.
    hard: bool = False
|
||||
|
||||
|
||||
@router.post("/batch-delete", status_code=status.HTTP_200_OK)
async def batch_delete_products(
    body: BatchDeleteRequest,
    user: User = Depends(require_admin_or_pm),
    db: AsyncSession = Depends(get_db),
):
    """Delete or deactivate multiple products at once.

    When hard=False (default), products are soft-deleted (is_active=False).
    When hard=True, products and their related data are permanently removed
    using the same cleanup logic as the single-product delete endpoint.

    Returns {"deleted": <count acted on>, "not_found": <ids not resolved>}.
    IDs that do not resolve to a product are counted, not errored on.
    """
    from sqlalchemy import delete as sql_delete

    # Empty request: nothing to do, succeed trivially.
    if not body.product_ids:
        return {"deleted": 0, "not_found": 0}

    # Deduplicate
    product_ids = list(set(body.product_ids))

    # Load all products
    result = await db.execute(
        select(Product).where(Product.id.in_(product_ids))
    )
    products_found = {p.id: p for p in result.scalars().all()}
    # IDs requested but not present in the DB (already deleted or bogus).
    not_found = len(product_ids) - len(products_found)

    if not products_found:
        return {"deleted": 0, "not_found": not_found}

    if not body.hard:
        # Soft delete: deactivate all found products
        for product in products_found.values():
            product.is_active = False
        await db.commit()
        return {"deleted": len(products_found), "not_found": not_found}

    # Hard delete: reuse single-delete cleanup logic per product
    from app.domains.media.models import MediaAsset
    from app.core.storage import get_storage

    # Storage keys / filesystem paths are collected first and only removed
    # AFTER the DB commit succeeds, so a rollback never leaves dangling
    # DB rows pointing at already-deleted files.
    all_storage_keys: list[str] = []
    all_result_paths: list[str] = []

    for pid, product in products_found.items():
        # 1. Collect storage keys from MediaAssets
        media_result = await db.execute(
            select(MediaAsset.storage_key).where(MediaAsset.product_id == pid)
        )
        all_storage_keys.extend(row[0] for row in media_result.all() if row[0])

        # 2. Collect render result paths from order lines
        ol_result = await db.execute(
            select(OrderLine.result_path).where(
                OrderLine.product_id == pid,
                OrderLine.result_path.isnot(None),
            )
        )
        all_result_paths.extend(row[0] for row in ol_result.all() if row[0])

        # 3. Check if CadFile is orphaned
        # Orphaned = no product OUTSIDE this batch still references it
        # (iterating products_found yields its keys, i.e. the batch's ids).
        cad_file_id = product.cad_file_id
        orphan_cad = False
        if cad_file_id:
            other_count = await db.execute(
                select(func.count(Product.id)).where(
                    Product.cad_file_id == cad_file_id,
                    Product.id.notin_([p for p in products_found]),
                )
            )
            orphan_cad = (other_count.scalar() or 0) == 0

        # 4. Delete order_lines
        # NOTE(review): order lines are removed even for orders that still
        # exist — presumably intended (mirrors single delete); confirm.
        await db.execute(sql_delete(OrderLine).where(OrderLine.product_id == pid))

        # 5. Delete orphaned CadFile
        if orphan_cad and cad_file_id:
            cad_media_result = await db.execute(
                select(MediaAsset.storage_key).where(MediaAsset.cad_file_id == cad_file_id)
            )
            all_storage_keys.extend(row[0] for row in cad_media_result.all() if row[0])
            # Detach the FK and flush first so the CadFile row can be deleted
            # without violating the product -> cad_file constraint.
            product.cad_file_id = None
            await db.flush()
            await db.execute(sql_delete(CadFile).where(CadFile.id == cad_file_id))

        # 6. Delete product (cascades MediaAsset + ProductRenderPosition)
        await db.delete(product)

    # Single commit for the whole batch: all-or-nothing at the DB level.
    await db.commit()

    # 7. Clean up storage files (best-effort, after commit)
    # Failures are deliberately swallowed: the DB state is already final and
    # a leaked blob is preferable to a 500 after a successful delete.
    storage = get_storage()
    for key in all_storage_keys:
        try:
            storage.delete(key)
        except Exception:
            pass
    for path in all_result_paths:
        try:
            # NOTE(review): `os` is not imported in this diff hunk — assumed
            # imported at the top of the products module; verify.
            if os.path.isfile(path):
                os.unlink(path)
        except Exception:
            pass

    return {"deleted": len(products_found), "not_found": not_found}
|
||||
|
||||
|
||||
def _best_render_url(product: Product, priority: list[str]) -> str | None:
|
||||
"""Walk the priority list and return the first available render URL.
|
||||
|
||||
|
||||
Reference in New Issue
Block a user