feat: material alias seeds expansion, bulk product delete, dashboard stats widgets
- Material alias seeds: 95 → 855 aliases covering German variants, DIN standards, Werkstoffnummern, industry terms, English equivalents, polymer abbreviations
- Batch product delete/deactivate endpoint (POST /products/batch-delete)
- Multi-select UI on Products page with floating action bar
- Dashboard: RenderThroughput + MaterialCoverage widgets
- Dashboard stats endpoint (GET /admin/dashboard-stats)

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -1,10 +1,10 @@
|
||||
import json
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Any, Optional
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, update as sql_update
|
||||
from sqlalchemy import select, update as sql_update, func, case, distinct, and_, extract
|
||||
from pydantic import BaseModel
|
||||
from app.database import get_db
|
||||
from app.models.user import User
|
||||
@@ -12,7 +12,7 @@ from app.models.system_setting import SystemSetting
|
||||
from app.models.cad_file import CadFile, ProcessingStatus
|
||||
from app.models.output_type import OutputType as OutputTypeModel
|
||||
from app.schemas.user import UserOut, UserUpdate, UserCreate
|
||||
from app.utils.auth import require_global_admin, hash_password
|
||||
from app.utils.auth import require_global_admin, get_current_user, hash_password
|
||||
|
||||
router = APIRouter(prefix="/admin", tags=["admin"])
|
||||
|
||||
@@ -882,3 +882,211 @@ async def purge_render_media(
|
||||
"message": f"Purged {deleted_db} still/turntable asset(s), freed {round(freed_bytes / 1024 / 1024, 1)} MB",
|
||||
}
|
||||
|
||||
|
||||
# ── Dashboard Stats ──────────────────────────────────────────────────────────
|
||||
|
||||
class RenderThroughputStats(BaseModel):
    """Render pipeline throughput for the admin dashboard.

    Counts of completed/failed order-line renders per time window, plus
    average/median render duration. Windows are computed from naive UTC
    timestamps (``datetime.utcnow()``) in the dashboard-stats endpoint.
    """

    completed_today: int                   # completed since UTC midnight
    completed_this_week: int               # completed since Monday 00:00 UTC
    completed_this_month: int              # completed since the 1st, 00:00 UTC
    failed_today: int                      # failed since UTC midnight
    failed_this_week: int                  # failed since Monday 00:00 UTC
    failed_this_month: int                 # failed since the 1st, 00:00 UTC
    avg_render_time_s: Optional[float]     # mean seconds; None when no completed renders have both timestamps
    median_render_time_s: Optional[float]  # 50th-percentile seconds; None when no data
|
||||
|
||||
|
||||
class MaterialCoverageStats(BaseModel):
    """Coverage of product-referenced material names against the material library.

    A material name found in products' ``cad_part_materials`` counts as
    "mapped" when it matches a ``Material.name`` or a ``MaterialAlias.alias``.
    """

    total_unique_materials: int  # distinct material names referenced across all products
    mapped_materials: int        # names matching the Material table or an alias
    unmapped_materials: int      # total_unique_materials - mapped_materials
    coverage_pct: float          # mapped / total * 100; reported as 100.0 when no materials are referenced
    library_material_count: int  # Material rows whose name starts with "SCHAEFFLER_"
    alias_count: int             # total rows in MaterialAlias
|
||||
|
||||
|
||||
class ProductStatsOverview(BaseModel):
    """Product counts split by whether a CAD (STEP) file is attached."""

    total_products: int        # all Product rows
    with_step_files: int       # products with a non-null cad_file_id
    without_step_files: int    # total_products - with_step_files
    step_coverage_pct: float   # with_step_files / total * 100; 0.0 when there are no products
|
||||
|
||||
|
||||
class OrderStatusBreakdown(BaseModel):
    """Order counts grouped by status.

    Only the five statuses below are broken out individually; any other
    status value still contributes to ``total``.
    """

    draft: int
    submitted: int
    processing: int
    completed: int
    rejected: int
    total: int  # sum over ALL statuses, including ones not listed above
|
||||
|
||||
|
||||
class DashboardStatsResponse(BaseModel):
    """Aggregate payload returned by GET /admin/dashboard-stats."""

    render_throughput: RenderThroughputStats   # render counts and timings
    material_coverage: MaterialCoverageStats   # material mapping coverage
    product_stats: ProductStatsOverview        # STEP-file attachment coverage
    order_status: OrderStatusBreakdown         # orders grouped by status
|
||||
|
||||
|
||||
async def _render_throughput_stats(
    db: AsyncSession,
    today_start: datetime,
    week_start: datetime,
    month_start: datetime,
) -> RenderThroughputStats:
    """Count completed/failed renders per window and compute avg/median render time.

    Uses conditional aggregation so all six window counts come back in a
    single round trip instead of six separate COUNT queries, and fetches
    avg + median duration in one query instead of two with duplicated filters.
    """
    from app.domains.orders.models import OrderLine

    def _windowed_count(status_val: str, since: datetime):
        # COUNT ignores NULL, and case() without an else_ yields NULL for rows
        # outside the status/window — i.e. each column is a filtered count.
        return func.count(
            case(
                (
                    and_(
                        OrderLine.render_status == status_val,
                        OrderLine.render_completed_at >= since,
                    ),
                    1,
                )
            )
        )

    (
        completed_today,
        completed_week,
        completed_month,
        failed_today,
        failed_week,
        failed_month,
    ) = (
        await db.execute(
            select(
                _windowed_count("completed", today_start),
                _windowed_count("completed", week_start),
                _windowed_count("completed", month_start),
                _windowed_count("failed", today_start),
                _windowed_count("failed", week_start),
                _windowed_count("failed", month_start),
            )
        )
    ).one()

    # Render duration in seconds; only completed rows with both timestamps count.
    render_duration = extract(
        "epoch",
        OrderLine.render_completed_at - OrderLine.render_started_at,
    )
    # percentile_cont is a PostgreSQL-specific ordered-set aggregate.
    avg_render_s, median_render_s = (
        await db.execute(
            select(
                func.avg(render_duration),
                func.percentile_cont(0.5).within_group(render_duration),
            ).where(
                OrderLine.render_status == "completed",
                OrderLine.render_started_at.isnot(None),
                OrderLine.render_completed_at.isnot(None),
            )
        )
    ).one()

    return RenderThroughputStats(
        completed_today=completed_today or 0,
        completed_this_week=completed_week or 0,
        completed_this_month=completed_month or 0,
        failed_today=failed_today or 0,
        failed_this_week=failed_week or 0,
        failed_this_month=failed_month or 0,
        avg_render_time_s=round(avg_render_s, 1) if avg_render_s is not None else None,
        median_render_time_s=round(median_render_s, 1) if median_render_s is not None else None,
    )


async def _material_coverage_stats(db: AsyncSession) -> MaterialCoverageStats:
    """Compare material names referenced by products against the library and aliases."""
    from app.domains.materials.models import Material, MaterialAlias
    from app.domains.products.models import Product

    # cad_part_materials is a JSONB array of {part_name, material}; collect
    # every distinct material name referenced by any product.
    product_rows = await db.execute(
        select(Product.cad_part_materials).where(Product.cad_part_materials.isnot(None))
    )
    all_mat_names: set[str] = set()
    for (cpm,) in product_rows:
        if isinstance(cpm, list):
            for entry in cpm:
                if isinstance(entry, dict) and entry.get("material"):
                    all_mat_names.add(entry["material"])

    # Library materials are identified by the SCHAEFFLER_ name prefix.
    library_material_count = (
        await db.execute(
            select(func.count(Material.id)).where(Material.name.like("SCHAEFFLER_%"))
        )
    ).scalar() or 0

    known_names = {name for (name,) in await db.execute(select(Material.name))}
    known_aliases = {alias for (alias,) in await db.execute(select(MaterialAlias.alias))}
    alias_count = (await db.execute(select(func.count(MaterialAlias.id)))).scalar() or 0

    # A product material is "mapped" when it exactly matches a Material name or
    # an alias. NOTE(review): match is case-sensitive — confirm aliases are
    # seeded in the same casing products use.
    known = known_names | known_aliases
    mapped = sum(1 for name in all_mat_names if name in known)
    total_unique = len(all_mat_names)

    return MaterialCoverageStats(
        total_unique_materials=total_unique,
        mapped_materials=mapped,
        unmapped_materials=total_unique - mapped,
        # With no referenced materials at all, report full coverage.
        coverage_pct=round((mapped / total_unique * 100) if total_unique > 0 else 100.0, 1),
        library_material_count=library_material_count,
        alias_count=alias_count,
    )


async def _product_step_stats(db: AsyncSession) -> ProductStatsOverview:
    """Count products with/without an attached CAD (STEP) file."""
    from app.domains.products.models import Product

    total_products = (await db.execute(select(func.count(Product.id)))).scalar() or 0
    with_step = (
        await db.execute(
            select(func.count(Product.id)).where(Product.cad_file_id.isnot(None))
        )
    ).scalar() or 0

    return ProductStatsOverview(
        total_products=total_products,
        with_step_files=with_step,
        without_step_files=total_products - with_step,
        step_coverage_pct=round(
            (with_step / total_products * 100) if total_products > 0 else 0.0, 1
        ),
    )


async def _order_status_breakdown(db: AsyncSession) -> OrderStatusBreakdown:
    """Group orders by status; statuses outside the known five still count toward total."""
    from app.domains.orders.models import Order

    rows = await db.execute(
        select(Order.status, func.count(Order.id)).group_by(Order.status)
    )
    # Order.status may come back as an Enum (use .value) or a plain string.
    status_map: dict[str, int] = {
        (s.value if hasattr(s, "value") else str(s)): n for s, n in rows
    }

    return OrderStatusBreakdown(
        draft=status_map.get("draft", 0),
        submitted=status_map.get("submitted", 0),
        processing=status_map.get("processing", 0),
        completed=status_map.get("completed", 0),
        rejected=status_map.get("rejected", 0),
        total=sum(status_map.values()),
    )


@router.get("/dashboard-stats", response_model=DashboardStatsResponse)
async def get_dashboard_stats(
    _user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
) -> DashboardStatsResponse:
    """Aggregate stats for the dashboard: render throughput, material coverage, product and order stats."""
    # NOTE(review): naive UTC — assumes DB timestamps are stored as naive UTC; confirm.
    now = datetime.utcnow()
    today_start = now.replace(hour=0, minute=0, second=0, microsecond=0)
    week_start = today_start - timedelta(days=today_start.weekday())  # Monday 00:00
    month_start = today_start.replace(day=1)

    return DashboardStatsResponse(
        render_throughput=await _render_throughput_stats(
            db, today_start, week_start, month_start
        ),
        material_coverage=await _material_coverage_stats(db),
        product_stats=await _product_step_stats(db),
        order_status=await _order_status_breakdown(db),
    )
|
||||
|
||||
|
||||
Reference in New Issue
Block a user