feat(F-G-H-I): STL cache, invoices, import validation, notification settings

Phase F — STL Hash Cache:
- Migration 041: step_file_hash column on cad_files
- cache_service.py: SHA256 hash + MinIO-backed STL cache (check/store)
- render_step_thumbnail: compute+persist hash before render
- generate_stl_cache: check MinIO cache before cadquery conversion, store after

Phase G — Invoices:
- Migration 042: invoices + invoice_lines tables with RLS
- Invoice/InvoiceLine models + schemas
- billing service: generate_invoice_number (INV-YYYY-NNNN), create/list/get/delete/PDF
- WeasyPrint PDF generation; backend Dockerfile + pyproject.toml deps
- invoice_router with 6 endpoints; registered in main.py
- frontend: Billing.tsx page + api/billing.ts; route + nav link

Phase H — Import Sanity Check:
- Migration 043: import_validations table
- ImportValidation model + schemas
- run_sanity_check: material fuzzy-match (cutoff=0.8), STEP availability, duplicate detection
- validate_excel_import Celery task (queue: step_processing)
- uploads.py: create ImportValidation on /excel, fire task, expose GET /validations/{id}
- frontend: Upload.tsx polling ValidationDialog with Ampel status indicators

Phase I — Notification Settings:
- Migration 044: notification_configs table (user×event×channel toggles)
- NotificationConfig model + seeds (in_app=true, email=false)
- get/upsert/reset config endpoints on /notifications/config
- frontend: NotificationSettings.tsx page + api/notifications.ts extensions

Infrastructure:
- docker-compose.yml: add worker-thumbnail service (concurrency=1, Q=thumbnail_rendering)
- Fix Dockerfile: libgdk-pixbuf-2.0-0 (correct Debian bookworm package name)

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-06 18:05:01 +01:00
parent 7706c514c8
commit f19a6ccde8
34 changed files with 1940 additions and 14 deletions
+36
View File
@@ -155,3 +155,39 @@ async def mark_one_read(
)
await db.commit()
return {"ok": True}
# ── Notification Config Endpoints ────────────────────────────────────────────
from app.domains.notifications.schemas import NotificationConfigOut, NotificationConfigUpdate
from app.domains.notifications.service import (
get_notification_configs, upsert_notification_config, reset_notification_configs
)
@router.get("/config", response_model=list[NotificationConfigOut])
async def get_my_notification_config(
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """Return every saved notification channel toggle for the calling user."""
    configs = await get_notification_configs(db, current_user.id)
    return configs
@router.put("/config/{event_type}/{channel}", response_model=NotificationConfigOut)
async def update_my_notification_config(
    event_type: str,
    channel: str,
    body: NotificationConfigUpdate,
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """Enable or disable one (event_type, channel) toggle for the caller.

    NOTE(review): event_type is stored as-is without being checked against a
    known event list — confirm arbitrary event types are intended.
    """
    allowed_channels = {"in_app", "email"}
    if channel not in allowed_channels:
        raise HTTPException(status_code=400, detail="channel must be 'in_app' or 'email'")
    cfg = await upsert_notification_config(db, current_user.id, event_type, channel, body.enabled)
    return cfg
@router.post("/config/reset", response_model=list[NotificationConfigOut])
async def reset_my_notification_config(
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """Discard the caller's saved toggles and re-seed the defaults."""
    seeded = await reset_notification_configs(db, current_user.id)
    return seeded
+35
View File
@@ -56,6 +56,7 @@ class ExcelPreviewResponse(BaseModel):
rows: list[ExcelPreviewRow]
column_headers: list[str] = []
template_name: str | None = None
validation_id: str | None = None
# ── Finalize request models ────────────────────────────────────────────
@@ -166,6 +167,23 @@ async def upload_excel(
},
)
# Queue sanity-check validation task
validation_id: str | None = None
try:
from app.domains.imports.models import ImportValidation
val = ImportValidation(
excel_path=str(tmp_path),
tenant_id=getattr(user, "tenant_id", None),
)
db.add(val)
await db.commit()
await db.refresh(val)
validation_id = str(val.id)
from app.domains.imports.tasks import validate_excel_import
validate_excel_import.delay(validation_id, str(tmp_path), str(getattr(user, "tenant_id", "") or ""))
except Exception as exc:
pass # validation is non-critical
return ExcelPreviewResponse(
excel_path=str(tmp_path),
filename=file.filename or "",
@@ -181,6 +199,7 @@ async def upload_excel(
rows=annotated_rows,
column_headers=parsed_dict.get("column_headers", []),
template_name=parsed_dict.get("template_name"),
validation_id=validation_id,
)
@@ -409,3 +428,19 @@ async def upload_step(
file_hash=file_hash,
status="uploaded",
)
@router.get("/validations/{validation_id}")
async def get_import_validation(
    validation_id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Poll the result of an Excel sanity-check validation.

    Returns the stored ImportValidation row (status, summary, per-row issues).

    Fix: previously any authenticated user could read any validation id.
    When both the validation and the caller carry a tenant_id, they must
    match; a foreign-tenant record is reported as 404 so its existence is
    not leaked.
    """
    from app.domains.imports.models import ImportValidation
    from app.domains.imports.schemas import ImportValidationOut

    result = await db.execute(
        select(ImportValidation).where(ImportValidation.id == validation_id)
    )
    val = result.scalar_one_or_none()
    # Cross-tenant guard (conservative: only enforced when both sides have a
    # tenant, preserving behaviour for global records and global users).
    user_tenant = getattr(user, "tenant_id", None)
    if (
        val is not None
        and val.tenant_id is not None
        and user_tenant is not None
        and val.tenant_id != user_tenant
    ):
        val = None
    if not val:
        raise HTTPException(404, detail="Validation not found")
    return ImportValidationOut.model_validate(val)
+37 -2
View File
@@ -1,7 +1,7 @@
import uuid
from datetime import datetime
from datetime import date, datetime
from decimal import Decimal
from sqlalchemy import String, Boolean, DateTime, Text, Numeric, Integer, UniqueConstraint, Index, ForeignKey
from sqlalchemy import String, Boolean, Date, DateTime, Text, Numeric, Integer, UniqueConstraint, Index, ForeignKey
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.dialects.postgresql import UUID
from app.database import Base
@@ -31,3 +31,38 @@ class PricingTier(Base):
UniqueConstraint("category_key", "quality_level", name="uq_pricing_tier"),
Index("ix_pricing_tiers_category_key", "category_key"),
)
class Invoice(Base):
    """Customer invoice header; the monetary line items live in InvoiceLine."""

    __tablename__ = "invoices"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    # Nullable: invoices created outside a tenant context are global.
    tenant_id: Mapped[uuid.UUID | None] = mapped_column(UUID(as_uuid=True), ForeignKey("tenants.id"), nullable=True, index=True)
    # Human-readable sequential number, format INV-<year>-<seq> (unique).
    invoice_number: Mapped[str] = mapped_column(String(20), nullable=False, unique=True)
    # Lifecycle: draft -> sent -> paid / cancelled (VALID_STATUSES in billing service).
    status: Mapped[str] = mapped_column(String(20), nullable=False, default="draft")
    issued_at: Mapped[date | None] = mapped_column(Date, nullable=True)
    due_at: Mapped[date | None] = mapped_column(Date, nullable=True)
    # Totals are denormalised from the lines at creation time.
    total_net: Mapped[Decimal | None] = mapped_column(Numeric(12, 2), nullable=True)
    total_vat: Mapped[Decimal | None] = mapped_column(Numeric(12, 2), nullable=True)
    # Fractional rate, e.g. 0.19 == 19% VAT.
    vat_rate: Mapped[Decimal] = mapped_column(Numeric(5, 4), nullable=False, default=Decimal("0.19"))
    currency: Mapped[str] = mapped_column(String(3), nullable=False, default="EUR")
    notes: Mapped[str | None] = mapped_column(Text, nullable=True)
    # Storage key of the rendered PDF, set by the billing service's render_pdf().
    pdf_key: Mapped[str | None] = mapped_column(Text, nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False)
    updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)
    # Deleting the invoice deletes its lines (delete-orphan cascade).
    lines: Mapped[list["InvoiceLine"]] = relationship("InvoiceLine", back_populates="invoice", cascade="all, delete-orphan")
class InvoiceLine(Base):
    """One billable line on an invoice, optionally linked to an order line."""

    __tablename__ = "invoice_lines"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    # Parent invoice; DB-level cascade removes lines with their invoice.
    invoice_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("invoices.id", ondelete="CASCADE"), nullable=False)
    # Originating order line, kept as a soft link (nulled if the order line goes away).
    order_line_id: Mapped[uuid.UUID | None] = mapped_column(UUID(as_uuid=True), ForeignKey("order_lines.id", ondelete="SET NULL"), nullable=True)
    description: Mapped[str] = mapped_column(Text, nullable=False)
    quantity: Mapped[int] = mapped_column(Integer, nullable=False, default=1)
    unit_price: Mapped[Decimal | None] = mapped_column(Numeric(10, 2), nullable=True)
    # Line total; the billing service sets this equal to unit_price (quantity 1).
    total: Mapped[Decimal | None] = mapped_column(Numeric(10, 2), nullable=True)
    invoice: Mapped["Invoice"] = relationship("Invoice", back_populates="lines")
+100 -3
View File
@@ -1,4 +1,101 @@
# Re-export from original router.
from app.api.routers.pricing import router
"""Billing router — Invoice CRUD + PDF."""
from __future__ import annotations
import uuid
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.responses import RedirectResponse
from sqlalchemy.ext.asyncio import AsyncSession
__all__ = ["router"]
from app.database import get_db
from app.utils.auth import require_admin_or_pm
from app.domains.billing.schemas import InvoiceCreate, InvoiceOut, InvoiceStatusUpdate
from app.domains.billing.service import (
create_invoice, get_invoices, get_invoice,
update_invoice_status, delete_invoice, render_pdf,
)
# Keep the old pricing router re-export for backward compat
from app.api.routers.pricing import router as pricing_router
invoice_router = APIRouter(prefix="/billing", tags=["billing"])
@invoice_router.get("/invoices", response_model=list[InvoiceOut])
async def list_invoices(
    skip: int = 0,
    limit: int = 50,
    db: AsyncSession = Depends(get_db),
    current_user=Depends(require_admin_or_pm),
):
    """Paginated invoice listing (admin/PM only)."""
    page = await get_invoices(db, skip=skip, limit=limit)
    return page
@invoice_router.post("/invoices", response_model=InvoiceOut, status_code=status.HTTP_201_CREATED)
async def create_invoice_endpoint(
    body: InvoiceCreate,
    db: AsyncSession = Depends(get_db),
    current_user=Depends(require_admin_or_pm),
):
    """Create a draft invoice from the given order lines (admin/PM only)."""
    invoice = await create_invoice(
        db,
        tenant_id=getattr(current_user, "tenant_id", None),
        order_line_ids=body.order_line_ids,
        notes=body.notes,
        issued_at=body.issued_at,
        due_at=body.due_at,
        vat_rate=body.vat_rate,
        currency=body.currency,
    )
    return invoice
@invoice_router.get("/invoices/{invoice_id}", response_model=InvoiceOut)
async def get_invoice_endpoint(
    invoice_id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
    current_user=Depends(require_admin_or_pm),
):
    """Fetch a single invoice with its lines; 404 when unknown."""
    inv = await get_invoice(db, invoice_id)
    if inv is None:
        raise HTTPException(status_code=404, detail="Invoice not found")
    return inv
# Mirrors VALID_STATUSES in app.domains.billing.service.
_ALLOWED_INVOICE_STATUSES = {"draft", "sent", "paid", "cancelled"}


@invoice_router.patch("/invoices/{invoice_id}", response_model=InvoiceOut)
async def update_invoice_status_endpoint(
    invoice_id: uuid.UUID,
    body: InvoiceStatusUpdate,
    db: AsyncSession = Depends(get_db),
    current_user=Depends(require_admin_or_pm),
):
    """Change an invoice's lifecycle status.

    Fix: the status value was previously written to the database unvalidated
    (the service's VALID_STATUSES set was never consulted); reject anything
    outside the documented draft|sent|paid|cancelled set with a 400.
    """
    if body.status not in _ALLOWED_INVOICE_STATUSES:
        raise HTTPException(
            status_code=400,
            detail="status must be one of: draft, sent, paid, cancelled",
        )
    inv = await update_invoice_status(db, invoice_id, body.status)
    if not inv:
        raise HTTPException(status_code=404, detail="Invoice not found")
    return inv
@invoice_router.get("/invoices/{invoice_id}/pdf")
async def download_invoice_pdf(
    invoice_id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
    current_user=Depends(require_admin_or_pm),
):
    """Render the invoice PDF and redirect the client to its storage URL."""
    key = await render_pdf(db, invoice_id)
    if not key:
        # render_pdf returns None both when WeasyPrint is missing and when
        # the invoice does not exist.
        raise HTTPException(status_code=503, detail="PDF generation unavailable (WeasyPrint not installed)")
    from app.core.storage import get_storage

    storage = get_storage()
    return RedirectResponse(url=storage.get_url(key))
@invoice_router.delete("/invoices/{invoice_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_invoice_endpoint(
    invoice_id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
    current_user=Depends(require_admin_or_pm),
):
    """Delete a draft invoice.

    Fix: the service returns False both for "not found" and "not draft",
    so a missing invoice previously surfaced as a misleading 400. Check
    existence first and report a proper 404.
    """
    if not await get_invoice(db, invoice_id):
        raise HTTPException(status_code=404, detail="Invoice not found")
    ok = await delete_invoice(db, invoice_id)
    if not ok:
        raise HTTPException(status_code=400, detail="Only draft invoices can be deleted")
__all__ = ["invoice_router", "pricing_router"]
+57
View File
@@ -0,0 +1,57 @@
"""Billing schemas — Invoice + InvoiceLine Pydantic models."""
from __future__ import annotations

import uuid
from datetime import date, datetime
from decimal import Decimal
from typing import Literal

from pydantic import BaseModel
class InvoiceLineCreate(BaseModel):
    """Payload describing one invoice line."""

    # Optional link back to the originating order line.
    order_line_id: uuid.UUID | None = None
    description: str
    quantity: int = 1
    unit_price: Decimal | None = None
class InvoiceLineOut(BaseModel):
    """Serialized invoice line (read model)."""

    id: uuid.UUID
    invoice_id: uuid.UUID
    order_line_id: uuid.UUID | None
    description: str
    quantity: int
    unit_price: Decimal | None
    total: Decimal | None

    # Allow construction directly from SQLAlchemy ORM objects.
    model_config = {"from_attributes": True}
class InvoiceCreate(BaseModel):
    """Request body for POST /billing/invoices."""

    # Order lines to turn into invoice lines; unknown ids are skipped by the service.
    order_line_ids: list[uuid.UUID] = []
    notes: str | None = None
    # Defaults to today in the service when omitted.
    issued_at: date | None = None
    due_at: date | None = None
    # Fractional VAT rate, e.g. 0.19 for 19%.
    vat_rate: Decimal = Decimal("0.19")
    currency: str = "EUR"
class InvoiceStatusUpdate(BaseModel):
    """Body of PATCH /billing/invoices/{id} — the new lifecycle status.

    Fix: the field was a bare ``str``, letting any value through to the DB;
    Literal enforces the documented set at the schema layer, matching
    VALID_STATUSES in app.domains.billing.service.
    """

    status: Literal["draft", "sent", "paid", "cancelled"]
class InvoiceOut(BaseModel):
    """Serialized invoice including its lines (read model)."""

    id: uuid.UUID
    tenant_id: uuid.UUID | None
    invoice_number: str
    status: str
    issued_at: date | None
    due_at: date | None
    total_net: Decimal | None
    total_vat: Decimal | None
    vat_rate: Decimal
    currency: str
    notes: str | None
    # Storage key of the rendered PDF, if one has been generated.
    pdf_key: str | None
    created_at: datetime
    lines: list[InvoiceLineOut] = []

    # Allow construction directly from SQLAlchemy ORM objects.
    model_config = {"from_attributes": True}
+190 -3
View File
@@ -1,4 +1,4 @@
"""Pricing service — price lookup and order price computation.
"""Billing service — pricing (price lookup + order price computation) and invoice CRUD + PDF generation.
Price resolution cascade for order lines:
1. OutputType's linked pricing_tier (if active) → use its price_per_item
@@ -6,14 +6,23 @@ Price resolution cascade for order lines:
3. "default" category tier → global fallback
4. None if nothing configured
"""
from __future__ import annotations
import logging
import os
import tempfile
import uuid
from datetime import date, datetime
from decimal import Decimal
from typing import Any
from sqlalchemy import select, update as sql_update
from sqlalchemy import func, select, update as sql_update
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload
from app.domains.billing.models import PricingTier
from app.domains.billing.models import Invoice, InvoiceLine, PricingTier
logger = logging.getLogger(__name__)
async def get_price_for(
@@ -181,3 +190,181 @@ async def refresh_order_price(db: AsyncSession, order_id) -> Decimal | None:
)
await db.commit()
return new_price
# ---------------------------------------------------------------------------
# Invoice CRUD
# ---------------------------------------------------------------------------
VALID_STATUSES = {"draft", "sent", "paid", "cancelled"}
async def generate_invoice_number(db: AsyncSession, tenant_id: uuid.UUID | None) -> str:
    """Generate the next sequential invoice number: INV-YYYY-NNNN.

    Fix: the sequence was previously COUNT(*) of this year's invoices + 1,
    which produces a duplicate number (and a unique-constraint violation on
    invoice_number) as soon as any invoice is deleted. Derive the sequence
    from the highest existing number for the year instead.

    NOTE(review): still racy under concurrent creation — two requests can
    read the same max; the unique index on invoice_number rejects the loser.
    tenant_id is accepted for future per-tenant numbering but numbers are
    currently global.
    """
    year = datetime.utcnow().year
    prefix = f"INV-{year}-"
    result = await db.execute(
        select(func.max(Invoice.invoice_number)).where(
            Invoice.invoice_number.like(f"{prefix}%")
        )
    )
    latest = result.scalar()
    seq = 1
    if latest:
        try:
            seq = int(latest.rsplit("-", 1)[-1]) + 1
        except ValueError:
            # Malformed existing number: fall back to 1 rather than crash.
            seq = 1
    return f"INV-{year}-{seq:04d}"
async def create_invoice(
    db: AsyncSession,
    tenant_id: uuid.UUID | None,
    order_line_ids: list[uuid.UUID],
    notes: str | None = None,
    issued_at: date | None = None,
    due_at: date | None = None,
    vat_rate: Decimal = Decimal("0.19"),
    currency: str = "EUR",
) -> Invoice:
    """Create a draft invoice whose lines are derived from order lines.

    Unknown order-line ids are silently skipped (unchanged behaviour).
    total_net is the sum of the lines' unit prices (quantity is fixed at 1
    per line); total_vat = total_net * vat_rate rounded to cents.

    Fix: order lines are now fetched with one IN() query instead of one
    SELECT per id; the input order of order_line_ids is preserved.
    """
    from app.domains.orders.models import OrderLine

    invoice_number = await generate_invoice_number(db, tenant_id)
    invoice = Invoice(
        tenant_id=tenant_id,
        invoice_number=invoice_number,
        status="draft",
        issued_at=issued_at or date.today(),
        due_at=due_at,
        notes=notes,
        vat_rate=vat_rate,
        currency=currency,
    )
    db.add(invoice)
    await db.flush()  # populate invoice.id for the FK on the lines

    # Single round-trip for all referenced order lines (was one query per id).
    lines_by_id: dict[uuid.UUID, OrderLine] = {}
    if order_line_ids:
        result = await db.execute(
            select(OrderLine).where(OrderLine.id.in_(order_line_ids))
        )
        lines_by_id = {ol.id: ol for ol in result.scalars()}

    total_net = Decimal("0")
    for ol_id in order_line_ids:
        ol = lines_by_id.get(ol_id)
        if not ol:
            continue  # unknown id: skip, as before
        unit_price = ol.unit_price or Decimal("0")
        db.add(
            InvoiceLine(
                invoice_id=invoice.id,
                order_line_id=ol.id,
                description=f"Render: {ol.id}",
                quantity=1,
                unit_price=unit_price,
                total=unit_price,
            )
        )
        total_net += unit_price

    invoice.total_net = total_net
    invoice.total_vat = (total_net * vat_rate).quantize(Decimal("0.01"))
    await db.commit()
    await db.refresh(invoice)
    return invoice
async def get_invoices(
    db: AsyncSession,
    tenant_id: uuid.UUID | None = None,
    skip: int = 0,
    limit: int = 50,
) -> list[Invoice]:
    """Return invoices (newest first) with their lines eagerly loaded.

    Fix: the tenant_id parameter was accepted but never applied, so every
    caller saw all tenants' invoices. When tenant_id is given, results are
    now scoped to that tenant; None keeps the old unscoped behaviour.
    """
    q = select(Invoice).options(selectinload(Invoice.lines))
    if tenant_id is not None:
        q = q.where(Invoice.tenant_id == tenant_id)
    q = q.order_by(Invoice.created_at.desc()).offset(skip).limit(limit)
    result = await db.execute(q)
    return list(result.scalars().all())
async def get_invoice(db: AsyncSession, invoice_id: uuid.UUID) -> Invoice | None:
    """Load one invoice (with its lines) by primary key, or None if unknown."""
    stmt = (
        select(Invoice)
        .options(selectinload(Invoice.lines))
        .where(Invoice.id == invoice_id)
    )
    return (await db.execute(stmt)).scalar_one_or_none()
async def update_invoice_status(db: AsyncSession, invoice_id: uuid.UUID, status: str) -> Invoice | None:
    """Set an invoice's status and bump updated_at; None when the id is unknown.

    NOTE(review): status is not checked against VALID_STATUSES here — the
    caller is expected to validate; confirm all call sites do.
    """
    invoice = await get_invoice(db, invoice_id)
    if invoice is None:
        return None
    invoice.status = status
    invoice.updated_at = datetime.utcnow()
    await db.commit()
    await db.refresh(invoice)
    return invoice
async def delete_invoice(db: AsyncSession, invoice_id: uuid.UUID) -> bool:
    """Delete a draft invoice. Returns False when missing or not in draft."""
    invoice = await get_invoice(db, invoice_id)
    if invoice is None or invoice.status != "draft":
        return False
    await db.delete(invoice)
    await db.commit()
    return True
async def render_pdf(db: AsyncSession, invoice_id: uuid.UUID) -> str | None:
    """Generate PDF via WeasyPrint, upload to storage, return storage key.

    Returns None when WeasyPrint is not installed or the invoice is unknown
    (callers treat None as "unavailable"). On success, persists the key on
    the invoice (pdf_key) and bumps updated_at.
    """
    try:
        from weasyprint import HTML
    except ImportError:
        # Optional dependency: degrade gracefully instead of crashing.
        logger.warning("WeasyPrint not installed — PDF generation skipped")
        return None
    invoice = await get_invoice(db, invoice_id)
    if not invoice:
        return None
    html_content = _build_invoice_html(invoice)
    pdf_bytes = HTML(string=html_content).write_pdf()
    from app.core.storage import get_storage
    storage = get_storage()
    # Deterministic key: re-rendering overwrites the previous PDF.
    key = f"invoices/{invoice_id}.pdf"
    # storage.upload() takes a file path, so spool the bytes through a temp
    # file; delete=False because the uploader re-opens it, then it is removed
    # in the finally block.
    with tempfile.NamedTemporaryFile(suffix=".pdf", delete=False) as tmp:
        tmp.write(pdf_bytes)
        tmp_path = tmp.name
    try:
        storage.upload(tmp_path, key)
    finally:
        os.unlink(tmp_path)
    invoice.pdf_key = key
    invoice.updated_at = datetime.utcnow()
    await db.commit()
    return key
def _build_invoice_html(invoice: Invoice) -> str:
lines_html = "".join(
f"<tr><td>{l.description}</td><td>{l.quantity}</td>"
f"<td>{l.unit_price or 0:.2f} {invoice.currency}</td>"
f"<td>{l.total or 0:.2f} {invoice.currency}</td></tr>"
for l in invoice.lines
)
return f"""<!DOCTYPE html><html><head><meta charset="utf-8">
<style>
body {{ font-family: Arial, sans-serif; margin: 40px; color: #333; }}
h1 {{ color: #1a56db; }} table {{ width: 100%; border-collapse: collapse; margin-top: 20px; }}
th, td {{ padding: 8px 12px; border-bottom: 1px solid #e5e7eb; text-align: left; }}
th {{ background: #f9fafb; font-weight: 600; }}
.totals {{ text-align: right; margin-top: 16px; }}
</style></head><body>
<h1>Invoice {invoice.invoice_number}</h1>
<p>Status: <strong>{invoice.status}</strong> | Currency: {invoice.currency}</p>
<p>Issued: {invoice.issued_at} | Due: {invoice.due_at or ""}</p>
<table><thead><tr><th>Description</th><th>Qty</th><th>Unit Price</th><th>Total</th></tr></thead>
<tbody>{lines_html}</tbody></table>
<div class="totals">
<p>Net: <strong>{invoice.total_net or 0:.2f} {invoice.currency}</strong></p>
<p>VAT ({float(invoice.vat_rate) * 100:.0f}%): {invoice.total_vat or 0:.2f} {invoice.currency}</p>
<p>Gross: <strong>{(invoice.total_net or 0) + (invoice.total_vat or 0):.2f} {invoice.currency}</strong></p>
</div>
{f'<p><em>Notes: {invoice.notes}</em></p>' if invoice.notes else ''}
</body></html>"""
+15
View File
@@ -9,6 +9,21 @@ if TYPE_CHECKING:
from app.domains.tenants.models import Tenant
class ImportValidation(Base):
    """Result of an asynchronous Excel-import sanity check."""

    __tablename__ = "import_validations"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id: Mapped[uuid.UUID | None] = mapped_column(
        UUID(as_uuid=True), ForeignKey("tenants.id", ondelete="CASCADE"), nullable=True, index=True
    )
    # Path of the uploaded Excel file the check runs against.
    excel_path: Mapped[str] = mapped_column(Text, nullable=False)
    # Lifecycle: pending -> running -> completed | failed (see run_sanity_check).
    status: Mapped[str] = mapped_column(String(20), nullable=False, default="pending")
    # Aggregate counters written by the checker (total, ok, warnings, errors, ...).
    summary: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    # Per-row results including the issue list.
    rows: Mapped[list | None] = mapped_column(JSONB, nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False)
    completed_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
class Template(Base):
__tablename__ = "templates"
+26
View File
@@ -1,3 +1,6 @@
from __future__ import annotations
import uuid
from datetime import datetime
from pydantic import BaseModel
from typing import Any
@@ -41,3 +44,26 @@ class StepUploadResponse(BaseModel):
file_hash: str
status: str
matched_items: list[str] = []
# ── Import Validation ──────────────────────────────────────────────────────
class ValidationIssue(BaseModel):
    """One problem found for a row during the import sanity check."""

    # "missing_material" | "material_suggestion" | "no_step" | "duplicate"
    type: str
    # Name of the offending field, when applicable (e.g. "material").
    field: str | None = None
    # The offending input value.
    value: str | None = None
    # Closest known value for fuzzy material matches.
    suggestion: str | None = None
    # Human-readable explanation.
    message: str
class ImportValidationOut(BaseModel):
    """Serialized ImportValidation row returned by GET /validations/{id}."""

    id: uuid.UUID
    tenant_id: uuid.UUID | None
    excel_path: str
    # pending | running | completed | failed
    status: str
    summary: dict | None
    rows: list | None
    created_at: datetime
    completed_at: datetime | None

    # Allow construction directly from SQLAlchemy ORM objects.
    model_config = {"from_attributes": True}
+168
View File
@@ -1,12 +1,180 @@
"""Import services — Excel parsing and product import."""
from __future__ import annotations
import difflib
import logging
import uuid
from datetime import datetime
# Re-export from original service files for backward compatibility.
from app.services.excel_parser import parse_excel, parsed_excel_to_dict
from app.services.excel_import import import_excel_to_products, preview_excel_rows
logger = logging.getLogger(__name__)
__all__ = [
"parse_excel",
"parsed_excel_to_dict",
"import_excel_to_products",
"preview_excel_rows",
"run_sanity_check",
]
def run_sanity_check(validation_id: str, excel_path: str, tenant_id: str | None) -> dict:
    """Run sanity check on an imported Excel file.

    Per-row checks: duplicates (keyed by pim_id, falling back to
    produkt_baureihe), STEP-file availability on the matched product, and
    material names against the material library (exact match first, then
    fuzzy via difflib with cutoff 0.8).

    Returns a result dict with summary + rows; the same data is persisted on
    the ImportValidation row. Uses sync DB access (Celery context).

    Fix: removed a dead `has_cad` query whose result was never used, and the
    obfuscated conditional-expression `if` that guarded the "no_step" issue —
    the intent (flag matched products with an empty cad_file_id) is now an
    explicit condition.
    """
    from sqlalchemy import create_engine, select
    from sqlalchemy.orm import Session, selectinload

    from app.config import settings as app_settings
    from app.domains.imports.models import ImportValidation
    from app.domains.materials.models import Material, MaterialAlias
    from app.domains.products.models import Product, CadFile

    sync_url = app_settings.database_url.replace("+asyncpg", "")
    engine = create_engine(sync_url)
    with Session(engine) as db:
        # Mark as running so the frontend poller sees progress.
        val = db.get(ImportValidation, validation_id)
        if not val:
            logger.warning("ImportValidation %s not found", validation_id)
            return {}
        val.status = "running"
        db.commit()

        # Load all known material names + aliases for fuzzy matching.
        materials = db.execute(
            select(Material).options(selectinload(Material.aliases))
        ).scalars().all()
        known_names: list[str] = []
        for m in materials:
            known_names.append(m.name.lower())
            for a in m.aliases:
                known_names.append(a.alias.lower())

        # Parse Excel; a parse failure marks the whole validation as failed.
        try:
            parsed = parse_excel(excel_path)
        except Exception as exc:
            logger.error("Failed to parse excel %s: %s", excel_path, exc)
            val.status = "failed"
            val.completed_at = datetime.utcnow()
            db.commit()
            return {}

        rows_out = []
        seen_pim_ids: dict[str, int] = {}
        counts = {"ok": 0, "warnings": 0, "errors": 0, "missing_materials": 0, "no_step": 0, "duplicates": 0}
        for row in parsed:
            issues = []
            pim_id = getattr(row, "pim_id", None) or ""
            produkt_baureihe = getattr(row, "produkt_baureihe", None) or ""
            components = getattr(row, "components", []) or []

            # Duplicate check: first occurrence of a key wins, later ones flag.
            key = pim_id or produkt_baureihe
            if key:
                if key in seen_pim_ids:
                    issues.append({
                        "type": "duplicate",
                        "field": "pim_id",
                        "value": key,
                        "suggestion": None,
                        "message": f"Duplicate of row {seen_pim_ids[key]}",
                    })
                    counts["duplicates"] += 1
                else:
                    seen_pim_ids[key] = row.row_index

            # STEP availability check on the matched product.
            # NOTE(review): scalar_one_or_none raises if several products
            # share a pim_id / produkt_baureihe — confirm uniqueness upstream.
            product_id = None
            if pim_id or produkt_baureihe:
                q = select(Product)
                if pim_id:
                    q = q.where(Product.pim_id == pim_id)
                elif produkt_baureihe:
                    q = q.where(Product.produkt_baureihe == produkt_baureihe)
                product = db.execute(q).scalar_one_or_none()
                if product:
                    product_id = str(product.id)
                    if hasattr(product, "cad_file_id") and not product.cad_file_id:
                        issues.append({
                            "type": "no_step",
                            "field": "cad_file",
                            "value": None,
                            "suggestion": None,
                            "message": "No STEP file linked to this product",
                        })
                        counts["no_step"] += 1

            # Material check: exact (case-insensitive) then fuzzy match.
            for comp in components:
                mat_name = getattr(comp, "material", None) or ""
                if not mat_name:
                    continue
                mat_lower = mat_name.lower()
                if mat_lower in known_names:
                    continue  # exact match
                matches = difflib.get_close_matches(mat_lower, known_names, n=1, cutoff=0.8)
                if matches:
                    issues.append({
                        "type": "material_suggestion",
                        "field": "material",
                        "value": mat_name,
                        "suggestion": matches[0],
                        "message": f"Material '{mat_name}' not found; closest: '{matches[0]}'",
                    })
                else:
                    issues.append({
                        "type": "missing_material",
                        "field": "material",
                        "value": mat_name,
                        "suggestion": None,
                        "message": f"Material '{mat_name}' not found in library",
                    })
                    counts["missing_materials"] += 1

            # Row status: missing materials are errors; everything else warns.
            issue_types = {i["type"] for i in issues}
            if "missing_material" in issue_types:
                row_status = "error"
                counts["errors"] += 1
            elif issue_types & {"duplicate", "no_step", "material_suggestion"}:
                row_status = "warning"
                counts["warnings"] += 1
            else:
                row_status = "ok"
                counts["ok"] += 1

            rows_out.append({
                "row_index": row.row_index,
                "product_id": product_id,
                "pim_id": pim_id or None,
                "produkt_baureihe": produkt_baureihe or None,
                "issues": issues,
                "status": row_status,
            })

        summary = {
            "total": len(rows_out),
            **counts,
        }
        val.status = "completed"
        val.summary = summary
        val.rows = rows_out
        val.completed_at = datetime.utcnow()
        db.commit()
        return {"summary": summary, "rows": rows_out}
+38
View File
@@ -0,0 +1,38 @@
"""Celery tasks for import validation."""
from __future__ import annotations
import logging
from celery import shared_task
logger = logging.getLogger(__name__)
@shared_task(name="imports.validate_excel_import", queue="step_processing", bind=True)
def validate_excel_import(self, validation_id: str, excel_path: str, tenant_id: str | None = None):
    """Run sanity check on imported Excel file and store results.

    On failure the ImportValidation row is best-effort marked "failed" and
    the original exception is re-raised so Celery records the task error.

    Fixes: use logger.exception (logger.error dropped the traceback) and log
    instead of silently swallowing a failure of the failure-marking itself.
    """
    logger.info("Running import validation %s for %s", validation_id, excel_path)
    try:
        from app.domains.imports.service import run_sanity_check

        result = run_sanity_check(validation_id, excel_path, tenant_id)
        logger.info("Validation %s completed: %s", validation_id, result.get("summary", {}))
        return result
    except Exception as exc:
        logger.exception("Validation %s failed: %s", validation_id, exc)
        # Mark as failed in DB (best effort — never mask the task failure).
        try:
            from datetime import datetime

            from sqlalchemy import create_engine
            from sqlalchemy.orm import Session

            from app.config import settings as app_settings
            from app.domains.imports.models import ImportValidation

            sync_url = app_settings.database_url.replace("+asyncpg", "")
            engine = create_engine(sync_url)
            with Session(engine) as db:
                val = db.get(ImportValidation, validation_id)
                if val:
                    val.status = "failed"
                    val.completed_at = datetime.utcnow()
                    db.commit()
        except Exception:
            logger.warning("Could not mark validation %s as failed", validation_id)
        raise
@@ -32,3 +32,27 @@ class AuditLog(Base):
user: Mapped["User"] = relationship("User", back_populates="audit_logs", foreign_keys=[user_id])
target_user: Mapped["User"] = relationship("User", foreign_keys=[target_user_id])
# Event type constants
class NotificationEvent:
    """Canonical notification event type keys (stored as plain strings)."""

    ORDER_SUBMITTED = "order.submitted"
    ORDER_COMPLETED = "order.completed"
    RENDER_COMPLETED = "render.completed"
    RENDER_FAILED = "render.failed"
    EXCEL_IMPORTED = "excel.imported"
    # Used by the notifications service to seed/reset per-user configs.
    ALL = [ORDER_SUBMITTED, ORDER_COMPLETED, RENDER_COMPLETED, RENDER_FAILED, EXCEL_IMPORTED]
class NotificationConfig(Base):
    """Per-user toggle for one notification event type on one channel."""

    __tablename__ = "notification_configs"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    user_id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True), ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True
    )
    # One of NotificationEvent.ALL (not enforced at the DB level).
    event_type: Mapped[str] = mapped_column(String(100), nullable=False)
    channel: Mapped[str] = mapped_column(String(20), nullable=False)  # "in_app" | "email"
    enabled: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False)
    # NOTE(review): the service's upsert assumes (user_id, event_type, channel)
    # is unique, but no UniqueConstraint is declared here — confirm the
    # migration enforces it.
@@ -0,0 +1,20 @@
"""Notification schemas."""
from __future__ import annotations
import uuid
from datetime import datetime
from pydantic import BaseModel
class NotificationConfigOut(BaseModel):
    """One (event_type, channel) notification toggle for a user."""

    id: uuid.UUID
    user_id: uuid.UUID
    event_type: str
    # "in_app" | "email"
    channel: str
    enabled: bool
    created_at: datetime

    # Allow construction directly from SQLAlchemy ORM objects.
    model_config = {"from_attributes": True}
class NotificationConfigUpdate(BaseModel):
    """Body of PUT /notifications/config/{event_type}/{channel}."""

    enabled: bool
+89 -1
View File
@@ -7,7 +7,7 @@ import logging
import uuid
from datetime import datetime
from sqlalchemy import create_engine
from sqlalchemy import create_engine, select
from sqlalchemy.orm import Session
from sqlalchemy.ext.asyncio import AsyncSession
@@ -82,3 +82,91 @@ def emit_notification_sync(
session.commit()
except Exception:
logger.exception("Failed to emit notification (sync)")
# ── Notification config helpers ─────────────────────────────────────────────
def _is_channel_enabled_sync(user_id: str | None, event_type: str, channel: str) -> bool:
    """Check if a notification channel is enabled for a user (sync, for Celery).

    Missing user or missing config row falls back to the seed defaults:
    in_app enabled, email disabled.
    """
    default = channel == "in_app"
    if not user_id:
        return default
    from app.domains.notifications.models import NotificationConfig

    engine = _get_engine()
    with Session(engine) as session:
        row = session.execute(
            select(NotificationConfig).where(
                NotificationConfig.user_id == user_id,
                NotificationConfig.event_type == event_type,
                NotificationConfig.channel == channel,
            )
        ).scalar_one_or_none()
    return default if row is None else row.enabled
def send_email_notification_stub(
    *,
    to_user_id: str | None,
    event_type: str,
    subject: str,
    body: str,
) -> None:
    """Email notification stub — logs only, email sending not yet active."""
    logging.getLogger(__name__).info(
        "[EMAIL STUB] Would send email to user=%s event=%s subject=%s",
        to_user_id,
        event_type,
        subject,
    )
async def get_notification_configs(db: AsyncSession, user_id: uuid.UUID) -> list:
    """Return all saved channel toggles for a user, ordered by event then channel."""
    from sqlalchemy import select as sa_select

    from app.domains.notifications.models import NotificationConfig

    stmt = (
        sa_select(NotificationConfig)
        .where(NotificationConfig.user_id == user_id)
        .order_by(NotificationConfig.event_type, NotificationConfig.channel)
    )
    return list((await db.execute(stmt)).scalars().all())
async def upsert_notification_config(
    db: AsyncSession,
    user_id: uuid.UUID,
    event_type: str,
    channel: str,
    enabled: bool,
) -> object:
    """Create or update one (user, event, channel) toggle and return it."""
    from sqlalchemy import select as sa_select

    from app.domains.notifications.models import NotificationConfig

    stmt = sa_select(NotificationConfig).where(
        NotificationConfig.user_id == user_id,
        NotificationConfig.event_type == event_type,
        NotificationConfig.channel == channel,
    )
    existing = (await db.execute(stmt)).scalar_one_or_none()
    if existing is not None:
        existing.enabled = enabled
        cfg = existing
    else:
        cfg = NotificationConfig(user_id=user_id, event_type=event_type, channel=channel, enabled=enabled)
        db.add(cfg)
    await db.commit()
    await db.refresh(cfg)
    return cfg
async def reset_notification_configs(db: AsyncSession, user_id: uuid.UUID) -> list:
    """Wipe a user's toggles and re-seed the defaults (in_app on, email off)."""
    from sqlalchemy import delete as sa_delete

    from app.domains.notifications.models import NotificationConfig, NotificationEvent

    await db.execute(sa_delete(NotificationConfig).where(NotificationConfig.user_id == user_id))
    channel_defaults = {"in_app": True, "email": False}
    seeded = []
    for event in NotificationEvent.ALL:
        for channel, default_enabled in channel_defaults.items():
            cfg = NotificationConfig(
                user_id=user_id, event_type=event, channel=channel, enabled=default_enabled
            )
            db.add(cfg)
            seeded.append(cfg)
    await db.commit()
    return seeded
@@ -0,0 +1,48 @@
"""SHA256-based STL conversion cache using MinIO."""
from __future__ import annotations
import hashlib
import logging
from pathlib import Path
logger = logging.getLogger(__name__)
# Object-key namespace for cached conversions inside the bucket.
CACHE_PREFIX = "conversion-cache"


def compute_step_hash(file_path: str) -> str:
    """Compute the SHA256 hex digest of a STEP file, read in 64 KiB chunks."""
    digest = hashlib.sha256()
    with open(file_path, "rb") as fh:
        while chunk := fh.read(65536):
            digest.update(chunk)
    return digest.hexdigest()


def _cache_key(step_hash: str, quality: str) -> str:
    """Storage key for a cached STL: <prefix>/<hash>_<quality>.stl."""
    return f"{CACHE_PREFIX}/{step_hash}_{quality}.stl"
def check_stl_cache(step_hash: str, quality: str) -> bytes | None:
    """Return STL bytes from MinIO cache if present, else None.

    Storage errors during the lookup are logged and treated as a cache miss.
    """
    from app.core.storage import get_storage

    storage = get_storage()
    key = _cache_key(step_hash, quality)
    try:
        if not storage.exists(key):
            return None
        return storage.download_bytes(key)
    except Exception as exc:
        logger.warning("Cache check failed for %s: %s", key, exc)
        return None
def store_stl_cache(step_hash: str, quality: str, stl_path: str) -> None:
    """Publish a locally generated STL into the MinIO cache.

    Failures are logged and swallowed: the cache is purely an
    optimization, and the caller already has the STL on local disk.
    """
    from app.core.storage import get_storage

    key = _cache_key(step_hash, quality)
    try:
        get_storage().upload(stl_path, key)
        logger.info("Stored STL cache: %s", key)
    except Exception as err:
        logger.warning("Failed to store STL cache %s: %s", key, err)
+1
View File
@@ -31,6 +31,7 @@ class CadFile(Base):
error_message: Mapped[str] = mapped_column(String(2000), nullable=True)
render_log: Mapped[dict] = mapped_column(JSONB, nullable=True)
mesh_attributes: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
step_file_hash: Mapped[str | None] = mapped_column(String(64), nullable=True, index=True)
tenant_id: Mapped[uuid.UUID | None] = mapped_column(
UUID(as_uuid=True), ForeignKey("tenants.id"), nullable=True, index=True
)
+2 -1
View File
@@ -16,7 +16,7 @@ from app.domains.products.router import products_router, cad_router
from app.domains.materials.router import router as materials_router
from app.domains.rendering.router import render_templates_router, output_types_router
from app.domains.notifications.router import router as notifications_router
from app.domains.billing.router import router as pricing_router
from app.domains.billing.router import pricing_router, invoice_router
from app.domains.tenants.router import router as tenants_router
from app.domains.rendering.workflow_router import router as workflows_router
from app.domains.media.router import router as media_router
@@ -73,6 +73,7 @@ app.include_router(materials_router, prefix="/api")
app.include_router(worker_router, prefix="/api")
app.include_router(analytics_router, prefix="/api")
app.include_router(pricing_router, prefix="/api")
app.include_router(invoice_router, prefix="/api")
app.include_router(products_router, prefix="/api")
app.include_router(output_types_router, prefix="/api")
app.include_router(render_templates_router, prefix="/api")
+1
View File
@@ -10,6 +10,7 @@ celery_app = Celery(
"app.tasks.ai_tasks",
"app.domains.rendering.tasks",
"app.domains.products.tasks",
"app.domains.imports.tasks",
],
)
+34 -1
View File
@@ -132,6 +132,28 @@ def render_step_thumbnail(self, cad_file_id: str):
On success, also auto-populates materials and marks the CadFile as completed.
"""
logger.info(f"Rendering thumbnail for CAD file: {cad_file_id}")
# Compute and persist STEP file hash for STL cache lookups
try:
from sqlalchemy import create_engine
from sqlalchemy.orm import Session
from app.config import settings as app_settings
from app.models.cad_file import CadFile
from app.domains.products.cache_service import compute_step_hash
sync_url = app_settings.database_url.replace("+asyncpg", "")
_eng = create_engine(sync_url)
with Session(_eng) as _sess:
_cad = _sess.get(CadFile, cad_file_id)
if _cad and _cad.stored_path and not _cad.step_file_hash:
_hash = compute_step_hash(_cad.stored_path)
_cad.step_file_hash = _hash
_sess.commit()
logger.info(f"Saved step_file_hash for {cad_file_id}: {_hash[:12]}")
_eng.dispose()
except Exception:
logger.warning(f"step_file_hash computation failed for {cad_file_id} (non-fatal)")
try:
from app.services.step_processor import regenerate_cad_thumbnail
success = regenerate_cad_thumbnail(cad_file_id, part_colors={})
@@ -172,13 +194,24 @@ def generate_stl_cache(self, cad_file_id: str, quality: str):
try:
from app.services.render_blender import convert_step_to_stl, export_per_part_stls
from app.domains.products.cache_service import compute_step_hash, check_stl_cache, store_stl_cache
from pathlib import Path as _Path
step = _Path(step_path)
stl_out = step.parent / f"{step.stem}_{quality}.stl"
parts_dir = step.parent / f"{step.stem}_{quality}_parts"
if not stl_out.exists() or stl_out.stat().st_size == 0:
convert_step_to_stl(step, stl_out, quality)
# Check MinIO cache before running cadquery conversion
step_hash = compute_step_hash(step_path)
cached_bytes = check_stl_cache(step_hash, quality)
if cached_bytes:
stl_out.write_bytes(cached_bytes)
logger.info(f"STL cache hit for {cad_file_id} ({quality}), skipped conversion")
else:
convert_step_to_stl(step, stl_out, quality)
# Store result in MinIO for future workers
if stl_out.exists() and stl_out.stat().st_size > 0:
store_stl_cache(step_hash, quality, str(stl_out))
if not (parts_dir / "manifest.json").exists():
try:
export_per_part_stls(step, parts_dir, quality)