From f19a6ccde8b2aa0201beeb2fa731cb0528d51397 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hartmut=20N=C3=B6renberg?= Date: Fri, 6 Mar 2026 18:05:01 +0100 Subject: [PATCH] feat(F-G-H-I): STL cache, invoices, import validation, notification settings MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Phase F — STL Hash Cache: - Migration 041: step_file_hash column on cad_files - cache_service.py: SHA256 hash + MinIO-backed STL cache (check/store) - render_step_thumbnail: compute+persist hash before render - generate_stl_cache: check MinIO cache before cadquery conversion, store after Phase G — Invoices: - Migration 042: invoices + invoice_lines tables with RLS - Invoice/InvoiceLine models + schemas - billing service: generate_invoice_number (INV-YYYY-NNNN), create/list/get/delete/PDF - WeasyPrint PDF generation; backend Dockerfile + pyproject.toml deps - invoice_router with 6 endpoints; registered in main.py - frontend: Billing.tsx page + api/billing.ts; route + nav link Phase H — Import Sanity Check: - Migration 043: import_validations table - ImportValidation model + schemas - run_sanity_check: material fuzzy-match (cutoff=0.8), STEP availability, duplicate detection - validate_excel_import Celery task (queue: step_processing) - uploads.py: create ImportValidation on /excel, fire task, expose GET /validations/{id} - frontend: Upload.tsx polling ValidationDialog with Ampel status indicators Phase I — Notification Settings: - Migration 044: notification_configs table (user×event×channel toggles) - NotificationConfig model + seeds (in_app=true, email=false) - get/upsert/reset config endpoints on /notifications/config - frontend: NotificationSettings.tsx page + api/notifications.ts extensions Infrastructure: - docker-compose.yml: add worker-thumbnail service (concurrency=1, Q=thumbnail_rendering) - Fix Dockerfile: libgdk-pixbuf-2.0-0 (correct Debian bookworm package name) Co-Authored-By: Claude Sonnet 4.6 --- LEARNINGS.md | 10 + 
backend/Dockerfile | 5 + .../alembic/versions/041_step_file_hash.py | 22 ++ backend/alembic/versions/042_invoices.py | 67 +++++ .../versions/043_import_validations.py | 33 +++ .../versions/044_notification_configs.py | 56 +++++ backend/app/api/routers/notifications.py | 36 +++ backend/app/api/routers/uploads.py | 35 +++ backend/app/domains/billing/models.py | 39 ++- backend/app/domains/billing/router.py | 103 +++++++- backend/app/domains/billing/schemas.py | 57 +++++ backend/app/domains/billing/service.py | 193 ++++++++++++++- backend/app/domains/imports/models.py | 15 ++ backend/app/domains/imports/schemas.py | 26 ++ backend/app/domains/imports/service.py | 168 +++++++++++++ backend/app/domains/imports/tasks.py | 38 +++ backend/app/domains/notifications/models.py | 24 ++ backend/app/domains/notifications/schemas.py | 20 ++ backend/app/domains/notifications/service.py | 90 ++++++- backend/app/domains/products/cache_service.py | 48 ++++ backend/app/domains/products/models.py | 1 + backend/app/main.py | 3 +- backend/app/tasks/celery_app.py | 1 + backend/app/tasks/step_tasks.py | 35 ++- backend/pyproject.toml | 1 + docker-compose.yml | 31 +++ frontend/src/App.tsx | 11 + frontend/src/api/billing.ts | 65 +++++ frontend/src/api/notifications.ts | 33 +++ frontend/src/api/uploads.ts | 44 ++++ frontend/src/components/layout/Layout.tsx | 34 ++- frontend/src/pages/Billing.tsx | 234 ++++++++++++++++++ frontend/src/pages/NotificationSettings.tsx | 170 +++++++++++++ frontend/src/pages/Upload.tsx | 206 ++++++++++++++- 34 files changed, 1940 insertions(+), 14 deletions(-) create mode 100644 backend/alembic/versions/041_step_file_hash.py create mode 100644 backend/alembic/versions/042_invoices.py create mode 100644 backend/alembic/versions/043_import_validations.py create mode 100644 backend/alembic/versions/044_notification_configs.py create mode 100644 backend/app/domains/billing/schemas.py create mode 100644 backend/app/domains/imports/tasks.py create mode 100644 
backend/app/domains/notifications/schemas.py create mode 100644 backend/app/domains/products/cache_service.py create mode 100644 frontend/src/api/billing.ts create mode 100644 frontend/src/pages/Billing.tsx create mode 100644 frontend/src/pages/NotificationSettings.tsx diff --git a/LEARNINGS.md b/LEARNINGS.md index 734b5f1..c162eee 100644 --- a/LEARNINGS.md +++ b/LEARNINGS.md @@ -166,3 +166,13 @@ __all__ = ["User"] - [ ] @xyflow/react noch nicht installiert — npm install nötig nach nächstem `docker compose up --build frontend` - [ ] Material-Alias-Seeding deckt noch nicht alle deutschen Materialbezeichnungs-Varianten ab - [ ] Turntable-Animation: bg_color via FFmpeg-Overlay — Qualität bei Transparenz-Edges prüfen + +### 2026-03-06 | Docker | apt-Paketname libgdk-pixbuf2.0-0 vs libgdk-pixbuf-2.0-0 +WeasyPrint benötigt libgdk-pixbuf. Auf Debian bookworm (python:3.11-slim) heißt das Paket `libgdk-pixbuf-2.0-0` (mit Bindestrichen), nicht `libgdk-pixbuf2.0-0`. `apt-get install` schlägt mit exit code 100 fehl wenn der Name falsch ist. +→ Immer `apt-cache search libgdk` im Container prüfen bevor man Paketnamen in Dockerfiles schreibt. + +### 2026-03-06 | Celery | thumbnail_rendering Queue braucht eigenen worker-thumbnail Service +Blender-Renderer verarbeitet nur 1 Request gleichzeitig. Wenn worker (concurrency=8) Tasks auf thumbnail_rendering queued, laufen 7 davon in Timeout (300s). Lösung: separaten `worker-thumbnail` Service mit `--concurrency=1` und `-Q thumbnail_rendering` in docker-compose.yml. step_processing bleibt bei concurrency=8. + +### 2026-03-06 | Alembic | Migration exit code 100 bei enum-Konflikt +SQLAlchemy `Enum(create_type=False)` funktioniert nicht zuverlässig mit asyncpg. Bei bereits existierenden PostgreSQL-Enum-Typen: Raw SQL mit `DO $$ BEGIN CREATE TYPE ...; EXCEPTION WHEN duplicate_object THEN NULL; END $$;` verwenden. Für Tabellen: `CREATE TABLE IF NOT EXISTS`. 
diff --git a/backend/Dockerfile b/backend/Dockerfile index 304875b..1a1a3e3 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -6,6 +6,11 @@ WORKDIR /app RUN apt-get update && apt-get install -y --no-install-recommends \ libpq-dev \ gcc \ + libpango-1.0-0 \ + libpangoft2-1.0-0 \ + libcairo2 \ + libgdk-pixbuf-2.0-0 \ + libffi-dev \ && rm -rf /var/lib/apt/lists/* # Install Python dependencies diff --git a/backend/alembic/versions/041_step_file_hash.py b/backend/alembic/versions/041_step_file_hash.py new file mode 100644 index 0000000..2ac8e5e --- /dev/null +++ b/backend/alembic/versions/041_step_file_hash.py @@ -0,0 +1,22 @@ +"""Add step_file_hash to cad_files. + +Revision ID: 041 +Revises: 040 +""" +import sqlalchemy as sa +from alembic import op + +revision = '041' +down_revision = '040' +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column('cad_files', sa.Column('step_file_hash', sa.String(64), nullable=True)) + op.create_index('ix_cad_files_step_file_hash', 'cad_files', ['step_file_hash']) + + +def downgrade(): + op.drop_index('ix_cad_files_step_file_hash', table_name='cad_files') + op.drop_column('cad_files', 'step_file_hash') diff --git a/backend/alembic/versions/042_invoices.py b/backend/alembic/versions/042_invoices.py new file mode 100644 index 0000000..62eb63f --- /dev/null +++ b/backend/alembic/versions/042_invoices.py @@ -0,0 +1,67 @@ +"""Add invoices and invoice_lines tables. 
+ +Revision ID: 042 +Revises: 041 +""" +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects.postgresql import UUID + +revision = '042' +down_revision = '041' +branch_labels = None +depends_on = None + + +def upgrade(): + op.create_table( + 'invoices', + sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')), + sa.Column('tenant_id', UUID(as_uuid=True), sa.ForeignKey('tenants.id', ondelete='CASCADE'), nullable=True), + sa.Column('invoice_number', sa.String(20), nullable=False, unique=True), + sa.Column('status', sa.String(20), nullable=False, server_default='draft'), + sa.Column('issued_at', sa.Date, nullable=True), + sa.Column('due_at', sa.Date, nullable=True), + sa.Column('total_net', sa.Numeric(12, 2), nullable=True), + sa.Column('total_vat', sa.Numeric(12, 2), nullable=True), + sa.Column('vat_rate', sa.Numeric(5, 4), nullable=False, server_default='0.19'), + sa.Column('currency', sa.String(3), nullable=False, server_default='EUR'), + sa.Column('notes', sa.Text, nullable=True), + sa.Column('pdf_key', sa.Text, nullable=True), + sa.Column('created_at', sa.DateTime, nullable=False, server_default=sa.text('NOW()')), + sa.Column('updated_at', sa.DateTime, nullable=False, server_default=sa.text('NOW()')), + ) + op.create_index('ix_invoices_tenant', 'invoices', ['tenant_id']) + op.create_index('ix_invoices_status', 'invoices', ['status']) + + op.create_table( + 'invoice_lines', + sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')), + sa.Column('invoice_id', UUID(as_uuid=True), sa.ForeignKey('invoices.id', ondelete='CASCADE'), nullable=False), + sa.Column('order_line_id', UUID(as_uuid=True), sa.ForeignKey('order_lines.id', ondelete='SET NULL'), nullable=True), + sa.Column('description', sa.Text, nullable=False), + sa.Column('quantity', sa.Integer, nullable=False, server_default='1'), + sa.Column('unit_price', sa.Numeric(10, 2), nullable=True), + 
sa.Column('total', sa.Numeric(10, 2), nullable=True), + ) + op.create_index('ix_invoice_lines_invoice', 'invoice_lines', ['invoice_id']) + + # RLS + op.execute("ALTER TABLE invoices ENABLE ROW LEVEL SECURITY") + op.execute(""" + DO $$ BEGIN + CREATE POLICY tenant_isolation ON invoices + USING (tenant_id = current_setting('app.current_tenant_id', true)::uuid); + EXCEPTION WHEN duplicate_object THEN NULL; END $$; + """) + op.execute(""" + DO $$ BEGIN + CREATE POLICY admin_bypass ON invoices + USING (current_setting('app.current_tenant_id', true) = 'bypass'); + EXCEPTION WHEN duplicate_object THEN NULL; END $$; + """) + + +def downgrade(): + op.drop_table('invoice_lines') + op.drop_table('invoices') diff --git a/backend/alembic/versions/043_import_validations.py b/backend/alembic/versions/043_import_validations.py new file mode 100644 index 0000000..a69bafa --- /dev/null +++ b/backend/alembic/versions/043_import_validations.py @@ -0,0 +1,33 @@ +"""Add import_validations table. + +Revision ID: 043 +Revises: 042 +""" +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects.postgresql import UUID, JSONB + +revision = '043' +down_revision = '042' +branch_labels = None +depends_on = None + + +def upgrade(): + op.create_table( + 'import_validations', + sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')), + sa.Column('tenant_id', UUID(as_uuid=True), sa.ForeignKey('tenants.id', ondelete='CASCADE'), nullable=True), + sa.Column('excel_path', sa.Text, nullable=False), + sa.Column('status', sa.String(20), nullable=False, server_default='pending'), + sa.Column('summary', JSONB, nullable=True), + sa.Column('rows', JSONB, nullable=True), + sa.Column('created_at', sa.DateTime, nullable=False, server_default=sa.text('NOW()')), + sa.Column('completed_at', sa.DateTime, nullable=True), + ) + op.create_index('ix_import_validations_tenant', 'import_validations', ['tenant_id']) + op.create_index('ix_import_validations_status', 
'import_validations', ['status']) + + +def downgrade(): + op.drop_table('import_validations') diff --git a/backend/alembic/versions/044_notification_configs.py b/backend/alembic/versions/044_notification_configs.py new file mode 100644 index 0000000..3e41cb4 --- /dev/null +++ b/backend/alembic/versions/044_notification_configs.py @@ -0,0 +1,56 @@ +"""Add notification_configs table. + +Revision ID: 044 +Revises: 043 +""" +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects.postgresql import UUID + +revision = '044' +down_revision = '043' +branch_labels = None +depends_on = None + +# Standard events +EVENTS = [ + "order.submitted", + "order.completed", + "render.completed", + "render.failed", + "excel.imported", +] + + +def upgrade(): + op.create_table( + 'notification_configs', + sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')), + sa.Column('user_id', UUID(as_uuid=True), sa.ForeignKey('users.id', ondelete='CASCADE'), nullable=False), + sa.Column('event_type', sa.String(100), nullable=False), + sa.Column('channel', sa.String(20), nullable=False), + sa.Column('enabled', sa.Boolean, nullable=False, server_default='true'), + sa.Column('created_at', sa.DateTime, nullable=False, server_default=sa.text('NOW()')), + ) + op.create_index('ix_notification_configs_user', 'notification_configs', ['user_id']) + op.create_unique_constraint( + 'uq_notification_config_user_event_channel', + 'notification_configs', + ['user_id', 'event_type', 'channel'] + ) + # Seed defaults for admin user (in_app=true, email=false) + for event in EVENTS: + op.execute(f""" + INSERT INTO notification_configs (user_id, event_type, channel, enabled) + SELECT id, '{event}', 'in_app', true FROM users WHERE role = 'admin' + ON CONFLICT DO NOTHING + """) + op.execute(f""" + INSERT INTO notification_configs (user_id, event_type, channel, enabled) + SELECT id, '{event}', 'email', false FROM users WHERE role = 'admin' + ON CONFLICT DO 
NOTHING + """) + + +def downgrade(): + op.drop_table('notification_configs') diff --git a/backend/app/api/routers/notifications.py b/backend/app/api/routers/notifications.py index 29afabc..f964dc7 100644 --- a/backend/app/api/routers/notifications.py +++ b/backend/app/api/routers/notifications.py @@ -155,3 +155,39 @@ async def mark_one_read( ) await db.commit() return {"ok": True} + + +# ── Notification Config Endpoints ──────────────────────────────────────────── +from app.domains.notifications.schemas import NotificationConfigOut, NotificationConfigUpdate +from app.domains.notifications.service import ( + get_notification_configs, upsert_notification_config, reset_notification_configs +) + + +@router.get("/config", response_model=list[NotificationConfigOut]) +async def get_my_notification_config( + db: AsyncSession = Depends(get_db), + current_user: User = Depends(get_current_user), +): + return await get_notification_configs(db, current_user.id) + + +@router.put("/config/{event_type}/{channel}", response_model=NotificationConfigOut) +async def update_my_notification_config( + event_type: str, + channel: str, + body: NotificationConfigUpdate, + db: AsyncSession = Depends(get_db), + current_user: User = Depends(get_current_user), +): + if channel not in ("in_app", "email"): + raise HTTPException(status_code=400, detail="channel must be 'in_app' or 'email'") + return await upsert_notification_config(db, current_user.id, event_type, channel, body.enabled) + + +@router.post("/config/reset", response_model=list[NotificationConfigOut]) +async def reset_my_notification_config( + db: AsyncSession = Depends(get_db), + current_user: User = Depends(get_current_user), +): + return await reset_notification_configs(db, current_user.id) diff --git a/backend/app/api/routers/uploads.py b/backend/app/api/routers/uploads.py index 2b1776a..17f93cf 100644 --- a/backend/app/api/routers/uploads.py +++ b/backend/app/api/routers/uploads.py @@ -56,6 +56,7 @@ class 
ExcelPreviewResponse(BaseModel): rows: list[ExcelPreviewRow] column_headers: list[str] = [] template_name: str | None = None + validation_id: str | None = None # ── Finalize request models ──────────────────────────────────────────── @@ -166,6 +167,23 @@ async def upload_excel( }, ) + # Queue sanity-check validation task + validation_id: str | None = None + try: + from app.domains.imports.models import ImportValidation + val = ImportValidation( + excel_path=str(tmp_path), + tenant_id=getattr(user, "tenant_id", None), + ) + db.add(val) + await db.commit() + await db.refresh(val) + validation_id = str(val.id) + from app.domains.imports.tasks import validate_excel_import + validate_excel_import.delay(validation_id, str(tmp_path), str(getattr(user, "tenant_id", "") or "")) + except Exception as exc: + pass # validation is non-critical + return ExcelPreviewResponse( excel_path=str(tmp_path), filename=file.filename or "", @@ -181,6 +199,7 @@ async def upload_excel( rows=annotated_rows, column_headers=parsed_dict.get("column_headers", []), template_name=parsed_dict.get("template_name"), + validation_id=validation_id, ) @@ -409,3 +428,19 @@ async def upload_step( file_hash=file_hash, status="uploaded", ) + + +@router.get("/validations/{validation_id}") +async def get_import_validation( + validation_id: uuid.UUID, + db: AsyncSession = Depends(get_db), + user: User = Depends(get_current_user), +): + """Poll the result of an Excel sanity-check validation.""" + from app.domains.imports.models import ImportValidation + from app.domains.imports.schemas import ImportValidationOut + result = await db.execute(select(ImportValidation).where(ImportValidation.id == validation_id)) + val = result.scalar_one_or_none() + if not val: + raise HTTPException(404, detail="Validation not found") + return ImportValidationOut.model_validate(val) diff --git a/backend/app/domains/billing/models.py b/backend/app/domains/billing/models.py index 14ddb69..0a9ceba 100644 --- 
a/backend/app/domains/billing/models.py +++ b/backend/app/domains/billing/models.py @@ -1,7 +1,7 @@ import uuid -from datetime import datetime +from datetime import date, datetime from decimal import Decimal -from sqlalchemy import String, Boolean, DateTime, Text, Numeric, Integer, UniqueConstraint, Index, ForeignKey +from sqlalchemy import String, Boolean, Date, DateTime, Text, Numeric, Integer, UniqueConstraint, Index, ForeignKey from sqlalchemy.orm import Mapped, mapped_column, relationship from sqlalchemy.dialects.postgresql import UUID from app.database import Base @@ -31,3 +31,38 @@ class PricingTier(Base): UniqueConstraint("category_key", "quality_level", name="uq_pricing_tier"), Index("ix_pricing_tiers_category_key", "category_key"), ) + + +class Invoice(Base): + __tablename__ = "invoices" + + id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + tenant_id: Mapped[uuid.UUID | None] = mapped_column(UUID(as_uuid=True), ForeignKey("tenants.id"), nullable=True, index=True) + invoice_number: Mapped[str] = mapped_column(String(20), nullable=False, unique=True) + status: Mapped[str] = mapped_column(String(20), nullable=False, default="draft") + issued_at: Mapped[date | None] = mapped_column(Date, nullable=True) + due_at: Mapped[date | None] = mapped_column(Date, nullable=True) + total_net: Mapped[Decimal | None] = mapped_column(Numeric(12, 2), nullable=True) + total_vat: Mapped[Decimal | None] = mapped_column(Numeric(12, 2), nullable=True) + vat_rate: Mapped[Decimal] = mapped_column(Numeric(5, 4), nullable=False, default=Decimal("0.19")) + currency: Mapped[str] = mapped_column(String(3), nullable=False, default="EUR") + notes: Mapped[str | None] = mapped_column(Text, nullable=True) + pdf_key: Mapped[str | None] = mapped_column(Text, nullable=True) + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False) + updated_at: Mapped[datetime] = mapped_column(DateTime, 
default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + lines: Mapped[list["InvoiceLine"]] = relationship("InvoiceLine", back_populates="invoice", cascade="all, delete-orphan") + + +class InvoiceLine(Base): + __tablename__ = "invoice_lines" + + id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + invoice_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("invoices.id", ondelete="CASCADE"), nullable=False) + order_line_id: Mapped[uuid.UUID | None] = mapped_column(UUID(as_uuid=True), ForeignKey("order_lines.id", ondelete="SET NULL"), nullable=True) + description: Mapped[str] = mapped_column(Text, nullable=False) + quantity: Mapped[int] = mapped_column(Integer, nullable=False, default=1) + unit_price: Mapped[Decimal | None] = mapped_column(Numeric(10, 2), nullable=True) + total: Mapped[Decimal | None] = mapped_column(Numeric(10, 2), nullable=True) + + invoice: Mapped["Invoice"] = relationship("Invoice", back_populates="lines") diff --git a/backend/app/domains/billing/router.py b/backend/app/domains/billing/router.py index f745d05..90ea4d7 100644 --- a/backend/app/domains/billing/router.py +++ b/backend/app/domains/billing/router.py @@ -1,4 +1,101 @@ -# Re-export from original router. 
-from app.api.routers.pricing import router +"""Billing router — Invoice CRUD + PDF.""" +from __future__ import annotations +import uuid +from fastapi import APIRouter, Depends, HTTPException, status +from fastapi.responses import RedirectResponse +from sqlalchemy.ext.asyncio import AsyncSession -__all__ = ["router"] +from app.database import get_db +from app.utils.auth import require_admin_or_pm +from app.domains.billing.schemas import InvoiceCreate, InvoiceOut, InvoiceStatusUpdate +from app.domains.billing.service import ( + create_invoice, get_invoices, get_invoice, + update_invoice_status, delete_invoice, render_pdf, +) + +# Keep the old pricing router re-export for backward compat +from app.api.routers.pricing import router as pricing_router + +invoice_router = APIRouter(prefix="/billing", tags=["billing"]) + + +@invoice_router.get("/invoices", response_model=list[InvoiceOut]) +async def list_invoices( + skip: int = 0, + limit: int = 50, + db: AsyncSession = Depends(get_db), + current_user=Depends(require_admin_or_pm), +): + return await get_invoices(db, skip=skip, limit=limit) + + +@invoice_router.post("/invoices", response_model=InvoiceOut, status_code=status.HTTP_201_CREATED) +async def create_invoice_endpoint( + body: InvoiceCreate, + db: AsyncSession = Depends(get_db), + current_user=Depends(require_admin_or_pm), +): + tenant_id = getattr(current_user, 'tenant_id', None) + return await create_invoice( + db, + tenant_id=tenant_id, + order_line_ids=body.order_line_ids, + notes=body.notes, + issued_at=body.issued_at, + due_at=body.due_at, + vat_rate=body.vat_rate, + currency=body.currency, + ) + + +@invoice_router.get("/invoices/{invoice_id}", response_model=InvoiceOut) +async def get_invoice_endpoint( + invoice_id: uuid.UUID, + db: AsyncSession = Depends(get_db), + current_user=Depends(require_admin_or_pm), +): + inv = await get_invoice(db, invoice_id) + if not inv: + raise HTTPException(status_code=404, detail="Invoice not found") + return inv + + 
+@invoice_router.patch("/invoices/{invoice_id}", response_model=InvoiceOut) +async def update_invoice_status_endpoint( + invoice_id: uuid.UUID, + body: InvoiceStatusUpdate, + db: AsyncSession = Depends(get_db), + current_user=Depends(require_admin_or_pm), +): + inv = await update_invoice_status(db, invoice_id, body.status) + if not inv: + raise HTTPException(status_code=404, detail="Invoice not found") + return inv + + +@invoice_router.get("/invoices/{invoice_id}/pdf") +async def download_invoice_pdf( + invoice_id: uuid.UUID, + db: AsyncSession = Depends(get_db), + current_user=Depends(require_admin_or_pm), +): + key = await render_pdf(db, invoice_id) + if not key: + raise HTTPException(status_code=503, detail="PDF generation unavailable (WeasyPrint not installed)") + from app.core.storage import get_storage + url = get_storage().get_url(key) + return RedirectResponse(url=url) + + +@invoice_router.delete("/invoices/{invoice_id}", status_code=status.HTTP_204_NO_CONTENT) +async def delete_invoice_endpoint( + invoice_id: uuid.UUID, + db: AsyncSession = Depends(get_db), + current_user=Depends(require_admin_or_pm), +): + ok = await delete_invoice(db, invoice_id) + if not ok: + raise HTTPException(status_code=400, detail="Only draft invoices can be deleted") + + +__all__ = ["invoice_router", "pricing_router"] diff --git a/backend/app/domains/billing/schemas.py b/backend/app/domains/billing/schemas.py new file mode 100644 index 0000000..8b24994 --- /dev/null +++ b/backend/app/domains/billing/schemas.py @@ -0,0 +1,57 @@ +"""Billing schemas — Invoice + InvoiceLine Pydantic models.""" +from __future__ import annotations +import uuid +from datetime import date, datetime +from decimal import Decimal +from pydantic import BaseModel + + +class InvoiceLineCreate(BaseModel): + order_line_id: uuid.UUID | None = None + description: str + quantity: int = 1 + unit_price: Decimal | None = None + + +class InvoiceLineOut(BaseModel): + id: uuid.UUID + invoice_id: uuid.UUID + 
order_line_id: uuid.UUID | None + description: str + quantity: int + unit_price: Decimal | None + total: Decimal | None + + model_config = {"from_attributes": True} + + +class InvoiceCreate(BaseModel): + order_line_ids: list[uuid.UUID] = [] + notes: str | None = None + issued_at: date | None = None + due_at: date | None = None + vat_rate: Decimal = Decimal("0.19") + currency: str = "EUR" + + +class InvoiceStatusUpdate(BaseModel): + status: str # draft|sent|paid|cancelled + + +class InvoiceOut(BaseModel): + id: uuid.UUID + tenant_id: uuid.UUID | None + invoice_number: str + status: str + issued_at: date | None + due_at: date | None + total_net: Decimal | None + total_vat: Decimal | None + vat_rate: Decimal + currency: str + notes: str | None + pdf_key: str | None + created_at: datetime + lines: list[InvoiceLineOut] = [] + + model_config = {"from_attributes": True} diff --git a/backend/app/domains/billing/service.py b/backend/app/domains/billing/service.py index ded52c7..e0ef713 100644 --- a/backend/app/domains/billing/service.py +++ b/backend/app/domains/billing/service.py @@ -1,4 +1,4 @@ -"""Pricing service — price lookup and order price computation. +"""Billing service — pricing (price lookup + order price computation) and invoice CRUD + PDF generation. Price resolution cascade for order lines: 1. OutputType's linked pricing_tier (if active) → use its price_per_item @@ -6,14 +6,23 @@ Price resolution cascade for order lines: 3. "default" category tier → global fallback 4. 
None if nothing configured """ +from __future__ import annotations + +import logging +import os +import tempfile +import uuid +from datetime import date, datetime from decimal import Decimal from typing import Any -from sqlalchemy import select, update as sql_update +from sqlalchemy import func, select, update as sql_update from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import selectinload -from app.domains.billing.models import PricingTier +from app.domains.billing.models import Invoice, InvoiceLine, PricingTier + +logger = logging.getLogger(__name__) async def get_price_for( @@ -181,3 +190,181 @@ async def refresh_order_price(db: AsyncSession, order_id) -> Decimal | None: ) await db.commit() return new_price + + +# --------------------------------------------------------------------------- +# Invoice CRUD +# --------------------------------------------------------------------------- + +VALID_STATUSES = {"draft", "sent", "paid", "cancelled"} + + +async def generate_invoice_number(db: AsyncSession, tenant_id: uuid.UUID | None) -> str: + """Generate sequential invoice number: INV-YYYY-NNNN.""" + year = datetime.utcnow().year + count_result = await db.execute( + select(func.count()).select_from(Invoice).where( + func.extract("year", Invoice.created_at) == year + ) + ) + seq = (count_result.scalar() or 0) + 1 + return f"INV-{year}-{seq:04d}" + + +async def create_invoice( + db: AsyncSession, + tenant_id: uuid.UUID | None, + order_line_ids: list[uuid.UUID], + notes: str | None = None, + issued_at: date | None = None, + due_at: date | None = None, + vat_rate: Decimal = Decimal("0.19"), + currency: str = "EUR", +) -> Invoice: + """Create invoice with lines derived from order lines.""" + from app.domains.orders.models import OrderLine + + invoice_number = await generate_invoice_number(db, tenant_id) + invoice = Invoice( + tenant_id=tenant_id, + invoice_number=invoice_number, + status="draft", + issued_at=issued_at or date.today(), + due_at=due_at, + 
notes=notes, + vat_rate=vat_rate, + currency=currency, + ) + db.add(invoice) + await db.flush() # get invoice.id + + total_net = Decimal("0") + for ol_id in order_line_ids: + result = await db.execute(select(OrderLine).where(OrderLine.id == ol_id)) + ol = result.scalar_one_or_none() + if not ol: + continue + unit_price = ol.unit_price or Decimal("0") + line = InvoiceLine( + invoice_id=invoice.id, + order_line_id=ol.id, + description=f"Render: {ol.id}", + quantity=1, + unit_price=unit_price, + total=unit_price, + ) + db.add(line) + total_net += unit_price + + invoice.total_net = total_net + invoice.total_vat = (total_net * vat_rate).quantize(Decimal("0.01")) + await db.commit() + await db.refresh(invoice) + return invoice + + +async def get_invoices( + db: AsyncSession, + tenant_id: uuid.UUID | None = None, + skip: int = 0, + limit: int = 50, +) -> list[Invoice]: + q = ( + select(Invoice) + .options(selectinload(Invoice.lines)) + .order_by(Invoice.created_at.desc()) + .offset(skip) + .limit(limit) + ) + result = await db.execute(q) + return list(result.scalars().all()) + + +async def get_invoice(db: AsyncSession, invoice_id: uuid.UUID) -> Invoice | None: + result = await db.execute( + select(Invoice) + .options(selectinload(Invoice.lines)) + .where(Invoice.id == invoice_id) + ) + return result.scalar_one_or_none() + + +async def update_invoice_status(db: AsyncSession, invoice_id: uuid.UUID, status: str) -> Invoice | None: + invoice = await get_invoice(db, invoice_id) + if not invoice: + return None + invoice.status = status + invoice.updated_at = datetime.utcnow() + await db.commit() + await db.refresh(invoice) + return invoice + + +async def delete_invoice(db: AsyncSession, invoice_id: uuid.UUID) -> bool: + invoice = await get_invoice(db, invoice_id) + if not invoice or invoice.status != "draft": + return False + await db.delete(invoice) + await db.commit() + return True + + +async def render_pdf(db: AsyncSession, invoice_id: uuid.UUID) -> str | None: + """Generate 
PDF via WeasyPrint, upload to storage, return storage key.""" + try: + from weasyprint import HTML + except ImportError: + logger.warning("WeasyPrint not installed — PDF generation skipped") + return None + + invoice = await get_invoice(db, invoice_id) + if not invoice: + return None + + html_content = _build_invoice_html(invoice) + pdf_bytes = HTML(string=html_content).write_pdf() + + from app.core.storage import get_storage + storage = get_storage() + key = f"invoices/{invoice_id}.pdf" + with tempfile.NamedTemporaryFile(suffix=".pdf", delete=False) as tmp: + tmp.write(pdf_bytes) + tmp_path = tmp.name + try: + storage.upload(tmp_path, key) + finally: + os.unlink(tmp_path) + + invoice.pdf_key = key + invoice.updated_at = datetime.utcnow() + await db.commit() + return key + + +def _build_invoice_html(invoice: Invoice) -> str: + lines_html = "".join( + f"{l.description}{l.quantity}" + f"{l.unit_price or 0:.2f} {invoice.currency}" + f"{l.total or 0:.2f} {invoice.currency}" + for l in invoice.lines + ) + return f""" + +

+<h1>Invoice {invoice.invoice_number}</h1>
+<p>Status: {invoice.status} | Currency: {invoice.currency}</p>
+<p>Issued: {invoice.issued_at} | Due: {invoice.due_at or "—"}</p>
+<table>
+<tr><th>Description</th><th>Qty</th><th>Unit Price</th><th>Total</th></tr>
+{lines_html}
+</table>
+<p>Net: {invoice.total_net or 0:.2f} {invoice.currency}</p>
+<p>VAT ({float(invoice.vat_rate) * 100:.0f}%): {invoice.total_vat or 0:.2f} {invoice.currency}</p>
+<p>Gross: {(invoice.total_net or 0) + (invoice.total_vat or 0):.2f} {invoice.currency}</p>
+{f'<p>Notes: {invoice.notes}</p>
' if invoice.notes else ''} +""" diff --git a/backend/app/domains/imports/models.py b/backend/app/domains/imports/models.py index c6e23ea..231ab9c 100644 --- a/backend/app/domains/imports/models.py +++ b/backend/app/domains/imports/models.py @@ -9,6 +9,21 @@ if TYPE_CHECKING: from app.domains.tenants.models import Tenant +class ImportValidation(Base): + __tablename__ = "import_validations" + + id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + tenant_id: Mapped[uuid.UUID | None] = mapped_column( + UUID(as_uuid=True), ForeignKey("tenants.id", ondelete="CASCADE"), nullable=True, index=True + ) + excel_path: Mapped[str] = mapped_column(Text, nullable=False) + status: Mapped[str] = mapped_column(String(20), nullable=False, default="pending") + summary: Mapped[dict | None] = mapped_column(JSONB, nullable=True) + rows: Mapped[list | None] = mapped_column(JSONB, nullable=True) + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False) + completed_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) + + class Template(Base): __tablename__ = "templates" diff --git a/backend/app/domains/imports/schemas.py b/backend/app/domains/imports/schemas.py index 3ba1537..0b0c42d 100644 --- a/backend/app/domains/imports/schemas.py +++ b/backend/app/domains/imports/schemas.py @@ -1,3 +1,6 @@ +from __future__ import annotations +import uuid +from datetime import datetime from pydantic import BaseModel from typing import Any @@ -41,3 +44,26 @@ class StepUploadResponse(BaseModel): file_hash: str status: str matched_items: list[str] = [] + + +# ── Import Validation ────────────────────────────────────────────────────── + +class ValidationIssue(BaseModel): + type: str # "missing_material" | "material_suggestion" | "no_step" | "duplicate" + field: str | None = None + value: str | None = None + suggestion: str | None = None + message: str + + +class ImportValidationOut(BaseModel): + id: 
uuid.UUID + tenant_id: uuid.UUID | None + excel_path: str + status: str + summary: dict | None + rows: list | None + created_at: datetime + completed_at: datetime | None + + model_config = {"from_attributes": True} diff --git a/backend/app/domains/imports/service.py b/backend/app/domains/imports/service.py index 258ada1..a5d1bc4 100644 --- a/backend/app/domains/imports/service.py +++ b/backend/app/domains/imports/service.py @@ -1,12 +1,180 @@ """Import services — Excel parsing and product import.""" +from __future__ import annotations +import difflib +import logging +import uuid +from datetime import datetime # Re-export from original service files for backward compatibility. from app.services.excel_parser import parse_excel, parsed_excel_to_dict from app.services.excel_import import import_excel_to_products, preview_excel_rows +logger = logging.getLogger(__name__) + __all__ = [ "parse_excel", "parsed_excel_to_dict", "import_excel_to_products", "preview_excel_rows", + "run_sanity_check", ] + + +def run_sanity_check(validation_id: str, excel_path: str, tenant_id: str | None) -> dict: + """Run sanity check on an imported Excel file. + + Returns result dict with summary + rows (stored in ImportValidation). + Uses sync DB access (Celery context). 
+ """ + from sqlalchemy import create_engine, select + from sqlalchemy.orm import Session, selectinload + from app.config import settings as app_settings + from app.domains.imports.models import ImportValidation + from app.domains.materials.models import Material, MaterialAlias + from app.domains.products.models import Product, CadFile + + sync_url = app_settings.database_url.replace("+asyncpg", "") + engine = create_engine(sync_url) + + with Session(engine) as db: + # Update status to running + val = db.get(ImportValidation, validation_id) + if not val: + logger.warning("ImportValidation %s not found", validation_id) + return {} + val.status = "running" + db.commit() + + # Load all known material names + aliases for fuzzy matching + materials = db.execute( + select(Material).options(selectinload(Material.aliases)) + ).scalars().all() + known_names: list[str] = [] + for m in materials: + known_names.append(m.name.lower()) + for a in m.aliases: + known_names.append(a.alias.lower()) + + # Parse Excel + try: + parsed = parse_excel(excel_path) + except Exception as exc: + logger.error("Failed to parse excel %s: %s", excel_path, exc) + val.status = "failed" + val.completed_at = datetime.utcnow() + db.commit() + return {} + + rows_out = [] + seen_pim_ids: dict[str, int] = {} + counts = {"ok": 0, "warnings": 0, "errors": 0, "missing_materials": 0, "no_step": 0, "duplicates": 0} + + for row in parsed: + issues = [] + pim_id = getattr(row, "pim_id", None) or "" + produkt_baureihe = getattr(row, "produkt_baureihe", None) or "" + components = getattr(row, "components", []) or [] + + # Duplicate check + key = pim_id or produkt_baureihe + if key: + if key in seen_pim_ids: + issues.append({ + "type": "duplicate", + "field": "pim_id", + "value": key, + "suggestion": None, + "message": f"Duplicate of row {seen_pim_ids[key]}", + }) + counts["duplicates"] += 1 + else: + seen_pim_ids[key] = row.row_index + + # STEP availability check + product_id = None + if pim_id or 
produkt_baureihe: + q = select(Product) + if pim_id: + q = q.where(Product.pim_id == pim_id) + elif produkt_baureihe: + q = q.where(Product.produkt_baureihe == produkt_baureihe) + product = db.execute(q).scalar_one_or_none() + if product: + product_id = str(product.id) + has_cad = db.execute( + select(CadFile).where(CadFile.id.in_( + [item.cad_file_id for item in product.order_items if hasattr(item, 'cad_file_id')] + )) + ).first() if hasattr(product, 'order_items') else None + # Simple check: product exists but may have no CAD + if hasattr(product, 'cad_file_id') and not product.cad_file_id: + issues.append({ + "type": "no_step", + "field": "cad_file", + "value": None, + "suggestion": None, + "message": "No STEP file linked to this product", + }) + counts["no_step"] += 1 + + # Material check + for comp in components: + mat_name = getattr(comp, "material", None) or "" + if not mat_name: + continue + mat_lower = mat_name.lower() + if mat_lower in known_names: + continue # exact match + matches = difflib.get_close_matches(mat_lower, known_names, n=1, cutoff=0.8) + if matches: + issues.append({ + "type": "material_suggestion", + "field": "material", + "value": mat_name, + "suggestion": matches[0], + "message": f"Material '{mat_name}' not found; closest: '{matches[0]}'", + }) + else: + issues.append({ + "type": "missing_material", + "field": "material", + "value": mat_name, + "suggestion": None, + "message": f"Material '{mat_name}' not found in library", + }) + counts["missing_materials"] += 1 + + # Row status + has_error = any(i["type"] in ("missing_material",) for i in issues) + has_warning = any(i["type"] in ("duplicate", "no_step", "material_suggestion") for i in issues) + if has_error: + row_status = "error" + counts["errors"] += 1 + elif has_warning: + row_status = "warning" + counts["warnings"] += 1 + else: + row_status = "ok" + counts["ok"] += 1 + + rows_out.append({ + "row_index": row.row_index, + "product_id": product_id, + "pim_id": pim_id or None, 
+ "produkt_baureihe": produkt_baureihe or None, + "issues": issues, + "status": row_status, + }) + + summary = { + "total": len(rows_out), + **counts, + } + + val.status = "completed" + val.summary = summary + val.rows = rows_out + val.completed_at = datetime.utcnow() + db.commit() + + return {"summary": summary, "rows": rows_out} diff --git a/backend/app/domains/imports/tasks.py b/backend/app/domains/imports/tasks.py new file mode 100644 index 0000000..3bdc852 --- /dev/null +++ b/backend/app/domains/imports/tasks.py @@ -0,0 +1,38 @@ +"""Celery tasks for import validation.""" +from __future__ import annotations +import logging + +from celery import shared_task + +logger = logging.getLogger(__name__) + + +@shared_task(name="imports.validate_excel_import", queue="step_processing", bind=True) +def validate_excel_import(self, validation_id: str, excel_path: str, tenant_id: str | None = None): + """Run sanity check on imported Excel file and store results.""" + logger.info("Running import validation %s for %s", validation_id, excel_path) + try: + from app.domains.imports.service import run_sanity_check + result = run_sanity_check(validation_id, excel_path, tenant_id) + logger.info("Validation %s completed: %s", validation_id, result.get("summary", {})) + return result + except Exception as exc: + logger.error("Validation %s failed: %s", validation_id, exc) + # Mark as failed in DB + try: + from sqlalchemy import create_engine + from sqlalchemy.orm import Session + from app.config import settings as app_settings + from app.domains.imports.models import ImportValidation + from datetime import datetime + sync_url = app_settings.database_url.replace("+asyncpg", "") + engine = create_engine(sync_url) + with Session(engine) as db: + val = db.get(ImportValidation, validation_id) + if val: + val.status = "failed" + val.completed_at = datetime.utcnow() + db.commit() + except Exception: + pass + raise diff --git a/backend/app/domains/notifications/models.py 
b/backend/app/domains/notifications/models.py index d6cdbd2..cb51f89 100644 --- a/backend/app/domains/notifications/models.py +++ b/backend/app/domains/notifications/models.py @@ -32,3 +32,27 @@ class AuditLog(Base): user: Mapped["User"] = relationship("User", back_populates="audit_logs", foreign_keys=[user_id]) target_user: Mapped["User"] = relationship("User", foreign_keys=[target_user_id]) + + +# Event type constants +class NotificationEvent: + ORDER_SUBMITTED = "order.submitted" + ORDER_COMPLETED = "order.completed" + RENDER_COMPLETED = "render.completed" + RENDER_FAILED = "render.failed" + EXCEL_IMPORTED = "excel.imported" + + ALL = [ORDER_SUBMITTED, ORDER_COMPLETED, RENDER_COMPLETED, RENDER_FAILED, EXCEL_IMPORTED] + + +class NotificationConfig(Base): + __tablename__ = "notification_configs" + + id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + user_id: Mapped[uuid.UUID] = mapped_column( + UUID(as_uuid=True), ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True + ) + event_type: Mapped[str] = mapped_column(String(100), nullable=False) + channel: Mapped[str] = mapped_column(String(20), nullable=False) # "in_app" | "email" + enabled: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False) + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False) diff --git a/backend/app/domains/notifications/schemas.py b/backend/app/domains/notifications/schemas.py new file mode 100644 index 0000000..8c77707 --- /dev/null +++ b/backend/app/domains/notifications/schemas.py @@ -0,0 +1,20 @@ +"""Notification schemas.""" +from __future__ import annotations +import uuid +from datetime import datetime +from pydantic import BaseModel + + +class NotificationConfigOut(BaseModel): + id: uuid.UUID + user_id: uuid.UUID + event_type: str + channel: str + enabled: bool + created_at: datetime + + model_config = {"from_attributes": True} + + +class 
NotificationConfigUpdate(BaseModel): + enabled: bool diff --git a/backend/app/domains/notifications/service.py b/backend/app/domains/notifications/service.py index c093726..b6759c8 100644 --- a/backend/app/domains/notifications/service.py +++ b/backend/app/domains/notifications/service.py @@ -7,7 +7,7 @@ import logging import uuid from datetime import datetime -from sqlalchemy import create_engine +from sqlalchemy import create_engine, select from sqlalchemy.orm import Session from sqlalchemy.ext.asyncio import AsyncSession @@ -82,3 +82,91 @@ def emit_notification_sync( session.commit() except Exception: logger.exception("Failed to emit notification (sync)") + + +# ── Notification config helpers ───────────────────────────────────────────── + +def _is_channel_enabled_sync(user_id: str | None, event_type: str, channel: str) -> bool: + """Check if a notification channel is enabled for a user (sync, for Celery).""" + if not user_id: + return channel == "in_app" # default: in_app on, email off + engine = _get_engine() + from app.domains.notifications.models import NotificationConfig + with Session(engine) as session: + cfg = session.execute( + select(NotificationConfig).where( + NotificationConfig.user_id == user_id, + NotificationConfig.event_type == event_type, + NotificationConfig.channel == channel, + ) + ).scalar_one_or_none() + if cfg is None: + return channel == "in_app" # default + return cfg.enabled + + +def send_email_notification_stub( + *, + to_user_id: str | None, + event_type: str, + subject: str, + body: str, +) -> None: + """Email notification stub — logs only, email sending not yet active.""" + logger.info( + "[EMAIL STUB] Would send email to user=%s event=%s subject=%s", + to_user_id, event_type, subject + ) + + +async def get_notification_configs(db: AsyncSession, user_id: uuid.UUID) -> list: + from app.domains.notifications.models import NotificationConfig + from sqlalchemy import select as sa_select + result = await db.execute( + 
sa_select(NotificationConfig).where(NotificationConfig.user_id == user_id) + .order_by(NotificationConfig.event_type, NotificationConfig.channel) + ) + return list(result.scalars().all()) + + +async def upsert_notification_config( + db: AsyncSession, + user_id: uuid.UUID, + event_type: str, + channel: str, + enabled: bool, +) -> object: + from app.domains.notifications.models import NotificationConfig + from sqlalchemy import select as sa_select + result = await db.execute( + sa_select(NotificationConfig).where( + NotificationConfig.user_id == user_id, + NotificationConfig.event_type == event_type, + NotificationConfig.channel == channel, + ) + ) + cfg = result.scalar_one_or_none() + if cfg is None: + cfg = NotificationConfig(user_id=user_id, event_type=event_type, channel=channel, enabled=enabled) + db.add(cfg) + else: + cfg.enabled = enabled + await db.commit() + await db.refresh(cfg) + return cfg + + +async def reset_notification_configs(db: AsyncSession, user_id: uuid.UUID) -> list: + from app.domains.notifications.models import NotificationConfig, NotificationEvent + from sqlalchemy import delete as sa_delete + await db.execute(sa_delete(NotificationConfig).where(NotificationConfig.user_id == user_id)) + configs = [] + for event in NotificationEvent.ALL: + for channel, default_enabled in [("in_app", True), ("email", False)]: + cfg = NotificationConfig( + user_id=user_id, event_type=event, channel=channel, enabled=default_enabled + ) + db.add(cfg) + configs.append(cfg) + await db.commit() + return configs diff --git a/backend/app/domains/products/cache_service.py b/backend/app/domains/products/cache_service.py new file mode 100644 index 0000000..8eb2b81 --- /dev/null +++ b/backend/app/domains/products/cache_service.py @@ -0,0 +1,48 @@ +"""SHA256-based STL conversion cache using MinIO.""" +from __future__ import annotations +import hashlib +import logging +from pathlib import Path + +logger = logging.getLogger(__name__) + +CACHE_PREFIX = "conversion-cache" + + 
+def compute_step_hash(file_path: str) -> str: + """Compute SHA256 hash of a STEP file.""" + h = hashlib.sha256() + with open(file_path, "rb") as f: + for chunk in iter(lambda: f.read(65536), b""): + h.update(chunk) + return h.hexdigest() + + +def _cache_key(step_hash: str, quality: str) -> str: + return f"{CACHE_PREFIX}/{step_hash}_{quality}.stl" + + +def check_stl_cache(step_hash: str, quality: str) -> bytes | None: + """Return STL bytes from MinIO cache if present, else None.""" + from app.core.storage import get_storage + storage = get_storage() + key = _cache_key(step_hash, quality) + try: + if storage.exists(key): + return storage.download_bytes(key) + return None + except Exception as exc: + logger.warning("Cache check failed for %s: %s", key, exc) + return None + + +def store_stl_cache(step_hash: str, quality: str, stl_path: str) -> None: + """Upload local STL file to MinIO cache.""" + from app.core.storage import get_storage + storage = get_storage() + key = _cache_key(step_hash, quality) + try: + storage.upload(stl_path, key) + logger.info("Stored STL cache: %s", key) + except Exception as exc: + logger.warning("Failed to store STL cache %s: %s", key, exc) diff --git a/backend/app/domains/products/models.py b/backend/app/domains/products/models.py index ca7da9f..758c8f3 100644 --- a/backend/app/domains/products/models.py +++ b/backend/app/domains/products/models.py @@ -31,6 +31,7 @@ class CadFile(Base): error_message: Mapped[str] = mapped_column(String(2000), nullable=True) render_log: Mapped[dict] = mapped_column(JSONB, nullable=True) mesh_attributes: Mapped[dict | None] = mapped_column(JSONB, nullable=True) + step_file_hash: Mapped[str | None] = mapped_column(String(64), nullable=True, index=True) tenant_id: Mapped[uuid.UUID | None] = mapped_column( UUID(as_uuid=True), ForeignKey("tenants.id"), nullable=True, index=True ) diff --git a/backend/app/main.py b/backend/app/main.py index 4eb7eb7..21bd804 100644 --- a/backend/app/main.py +++ 
b/backend/app/main.py @@ -16,7 +16,7 @@ from app.domains.products.router import products_router, cad_router from app.domains.materials.router import router as materials_router from app.domains.rendering.router import render_templates_router, output_types_router from app.domains.notifications.router import router as notifications_router -from app.domains.billing.router import router as pricing_router +from app.domains.billing.router import pricing_router, invoice_router from app.domains.tenants.router import router as tenants_router from app.domains.rendering.workflow_router import router as workflows_router from app.domains.media.router import router as media_router @@ -73,6 +73,7 @@ app.include_router(materials_router, prefix="/api") app.include_router(worker_router, prefix="/api") app.include_router(analytics_router, prefix="/api") app.include_router(pricing_router, prefix="/api") +app.include_router(invoice_router, prefix="/api") app.include_router(products_router, prefix="/api") app.include_router(output_types_router, prefix="/api") app.include_router(render_templates_router, prefix="/api") diff --git a/backend/app/tasks/celery_app.py b/backend/app/tasks/celery_app.py index 77145f2..9764d2e 100644 --- a/backend/app/tasks/celery_app.py +++ b/backend/app/tasks/celery_app.py @@ -10,6 +10,7 @@ celery_app = Celery( "app.tasks.ai_tasks", "app.domains.rendering.tasks", "app.domains.products.tasks", + "app.domains.imports.tasks", ], ) diff --git a/backend/app/tasks/step_tasks.py b/backend/app/tasks/step_tasks.py index 24c5a6d..c9efa30 100644 --- a/backend/app/tasks/step_tasks.py +++ b/backend/app/tasks/step_tasks.py @@ -132,6 +132,28 @@ def render_step_thumbnail(self, cad_file_id: str): On success, also auto-populates materials and marks the CadFile as completed. 
""" logger.info(f"Rendering thumbnail for CAD file: {cad_file_id}") + + # Compute and persist STEP file hash for STL cache lookups + try: + from sqlalchemy import create_engine + from sqlalchemy.orm import Session + from app.config import settings as app_settings + from app.models.cad_file import CadFile + from app.domains.products.cache_service import compute_step_hash + + sync_url = app_settings.database_url.replace("+asyncpg", "") + _eng = create_engine(sync_url) + with Session(_eng) as _sess: + _cad = _sess.get(CadFile, cad_file_id) + if _cad and _cad.stored_path and not _cad.step_file_hash: + _hash = compute_step_hash(_cad.stored_path) + _cad.step_file_hash = _hash + _sess.commit() + logger.info(f"Saved step_file_hash for {cad_file_id}: {_hash[:12]}…") + _eng.dispose() + except Exception: + logger.warning(f"step_file_hash computation failed for {cad_file_id} (non-fatal)") + try: from app.services.step_processor import regenerate_cad_thumbnail success = regenerate_cad_thumbnail(cad_file_id, part_colors={}) @@ -172,13 +194,24 @@ def generate_stl_cache(self, cad_file_id: str, quality: str): try: from app.services.render_blender import convert_step_to_stl, export_per_part_stls + from app.domains.products.cache_service import compute_step_hash, check_stl_cache, store_stl_cache from pathlib import Path as _Path step = _Path(step_path) stl_out = step.parent / f"{step.stem}_{quality}.stl" parts_dir = step.parent / f"{step.stem}_{quality}_parts" if not stl_out.exists() or stl_out.stat().st_size == 0: - convert_step_to_stl(step, stl_out, quality) + # Check MinIO cache before running cadquery conversion + step_hash = compute_step_hash(step_path) + cached_bytes = check_stl_cache(step_hash, quality) + if cached_bytes: + stl_out.write_bytes(cached_bytes) + logger.info(f"STL cache hit for {cad_file_id} ({quality}), skipped conversion") + else: + convert_step_to_stl(step, stl_out, quality) + # Store result in MinIO for future workers + if stl_out.exists() and 
stl_out.stat().st_size > 0: + store_stl_cache(step_hash, quality, str(stl_out)) if not (parts_dir / "manifest.json").exists(): try: export_per_part_stls(step, parts_dir, quality) diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 19e3890..6e5b02d 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -31,6 +31,7 @@ dependencies = [ "python-dotenv>=1.0.1", "aiofiles>=23.2.1", "boto3>=1.34.0", + "weasyprint>=62.0", ] [project.optional-dependencies] diff --git a/docker-compose.yml b/docker-compose.yml index 117d7eb..a42c1e8 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -112,6 +112,37 @@ services: redis: condition: service_healthy + worker-thumbnail: + build: + context: ./backend + dockerfile: Dockerfile + command: celery -A app.tasks.celery_app worker --loglevel=info -Q thumbnail_rendering --concurrency=1 + environment: + - POSTGRES_DB=${POSTGRES_DB:-schaeffler} + - POSTGRES_USER=${POSTGRES_USER:-schaeffler} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-schaeffler} + - POSTGRES_HOST=postgres + - POSTGRES_PORT=5432 + - REDIS_URL=${REDIS_URL:-redis://redis:6379/0} + - JWT_SECRET_KEY=${JWT_SECRET_KEY:-changeme-in-production} + - AZURE_OPENAI_API_KEY=${AZURE_OPENAI_API_KEY:-} + - AZURE_OPENAI_ENDPOINT=${AZURE_OPENAI_ENDPOINT:-} + - AZURE_OPENAI_DEPLOYMENT=${AZURE_OPENAI_DEPLOYMENT:-gpt-4o} + - AZURE_OPENAI_API_VERSION=${AZURE_OPENAI_API_VERSION:-2024-02-01} + - UPLOAD_DIR=/app/uploads + - MINIO_URL=${MINIO_URL:-http://minio:9000} + - MINIO_USER=${MINIO_USER:-minioadmin} + - MINIO_PASSWORD=${MINIO_PASSWORD:-minioadmin} + - MINIO_BUCKET=${MINIO_BUCKET:-uploads} + volumes: + - ./backend:/app + - uploads:/app/uploads + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_healthy + render-worker: build: context: . 
diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index 736f5a7..a0a26e4 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -15,10 +15,12 @@ import ProductLibraryPage from './pages/ProductLibrary' import ProductDetailPage from './pages/ProductDetail' import NewProductOrderPage from './pages/NewProductOrder' import NotificationsPage from './pages/Notifications' +import NotificationSettingsPage from './pages/NotificationSettings' import PreferencesPage from './pages/Preferences' import TenantsPage from './pages/Tenants' import WorkflowEditorPage from './pages/WorkflowEditor' import MediaBrowserPage from './pages/MediaBrowser' +import BillingPage from './pages/Billing' function ProtectedRoute({ children }: { children: React.ReactNode }) { const token = useAuthStore((s) => s.token) @@ -81,6 +83,7 @@ export default function App() { } /> } /> } /> + } /> } /> } /> } /> + + + + } + /> diff --git a/frontend/src/api/billing.ts b/frontend/src/api/billing.ts new file mode 100644 index 0000000..5b86373 --- /dev/null +++ b/frontend/src/api/billing.ts @@ -0,0 +1,65 @@ +import api from './client' + +export interface InvoiceLine { + id: string + invoice_id: string + order_line_id: string | null + description: string + quantity: number + unit_price: number | null + total: number | null +} + +export interface Invoice { + id: string + tenant_id: string | null + invoice_number: string + status: 'draft' | 'sent' | 'paid' | 'cancelled' + issued_at: string | null + due_at: string | null + total_net: number | null + total_vat: number | null + vat_rate: number + currency: string + notes: string | null + pdf_key: string | null + created_at: string + lines: InvoiceLine[] +} + +export interface InvoiceCreate { + order_line_ids: string[] + notes?: string + issued_at?: string + due_at?: string + vat_rate?: number + currency?: string +} + +export async function getInvoices(skip = 0, limit = 50): Promise { + const res = await api.get('/billing/invoices', { params: { skip, 
limit } }) + return res.data +} + +export async function getInvoice(id: string): Promise { + const res = await api.get(`/billing/invoices/${id}`) + return res.data +} + +export async function createInvoice(data: InvoiceCreate): Promise { + const res = await api.post('/billing/invoices', data) + return res.data +} + +export async function updateInvoiceStatus(id: string, status: string): Promise { + const res = await api.patch(`/billing/invoices/${id}`, { status }) + return res.data +} + +export async function deleteInvoice(id: string): Promise { + await api.delete(`/billing/invoices/${id}`) +} + +export function getInvoicePdfUrl(id: string): string { + return `/api/billing/invoices/${id}/pdf` +} diff --git a/frontend/src/api/notifications.ts b/frontend/src/api/notifications.ts index c506580..e994e8d 100644 --- a/frontend/src/api/notifications.ts +++ b/frontend/src/api/notifications.ts @@ -37,3 +37,36 @@ export async function markAsRead(ids?: string[]): Promise { export async function markOneAsRead(id: string): Promise { await api.post(`/notifications/${id}/mark-read`) } + +// ── Notification Config ─────────────────────────────────────────────────── + +export interface NotificationConfig { + id: string + user_id: string + event_type: string + channel: 'in_app' | 'email' + enabled: boolean + created_at: string +} + +export async function getNotificationConfigs(): Promise { + const res = await api.get('/notifications/config') + return res.data +} + +export async function updateNotificationConfig( + eventType: string, + channel: string, + enabled: boolean +): Promise { + const res = await api.put( + `/notifications/config/${encodeURIComponent(eventType)}/${channel}`, + { enabled } + ) + return res.data +} + +export async function resetNotificationConfigs(): Promise { + const res = await api.post('/notifications/config/reset') + return res.data +} diff --git a/frontend/src/api/uploads.ts b/frontend/src/api/uploads.ts index 8c2cda0..ce4f390 100644 --- 
a/frontend/src/api/uploads.ts +++ b/frontend/src/api/uploads.ts @@ -30,6 +30,7 @@ export interface ExcelPreviewResult { rows: ExcelPreviewRow[] column_headers: string[] template_name: string | null + validation_id?: string | null } export interface ParsedComponent { @@ -100,3 +101,46 @@ export async function uploadStep(file: File) { }) return res.data } + +// ── Import Validation ───────────────────────────────────────────────────── + +export interface ValidationIssue { + type: 'missing_material' | 'material_suggestion' | 'no_step' | 'duplicate' + field: string | null + value: string | null + suggestion: string | null + message: string +} + +export interface ValidationRow { + row_index: number + product_id: string | null + pim_id: string | null + produkt_baureihe: string | null + issues: ValidationIssue[] + status: 'ok' | 'warning' | 'error' +} + +export interface ImportValidation { + id: string + tenant_id: string | null + excel_path: string + status: 'pending' | 'running' | 'completed' | 'failed' + summary: { + total: number + ok: number + warnings: number + errors: number + missing_materials: number + no_step: number + duplicates: number + } | null + rows: ValidationRow[] | null + created_at: string + completed_at: string | null +} + +export async function getImportValidation(id: string): Promise { + const res = await api.get(`/uploads/validations/${id}`) + return res.data +} diff --git a/frontend/src/components/layout/Layout.tsx b/frontend/src/components/layout/Layout.tsx index 0882f14..a95705a 100644 --- a/frontend/src/components/layout/Layout.tsx +++ b/frontend/src/components/layout/Layout.tsx @@ -1,5 +1,5 @@ import { Outlet, NavLink, useNavigate, Link } from 'react-router-dom' -import { LayoutDashboard, Package, Settings, LogOut, FlaskConical, Activity, Library, Plus, SlidersHorizontal, Building2, GitBranch, Image } from 'lucide-react' +import { LayoutDashboard, Package, Settings, LogOut, FlaskConical, Activity, Library, Plus, SlidersHorizontal, Building2, 
GitBranch, Image, BellRing, Receipt } from 'lucide-react' import { useAuthStore } from '../../store/auth' import { clsx } from 'clsx' import { useQuery } from '@tanstack/react-query' @@ -120,6 +120,22 @@ export default function Layout() { Admin )} + {(user?.role === 'admin' || user?.role === 'project_manager') && ( + + clsx( + 'flex items-center gap-3 px-3 py-2 rounded-md text-sm font-medium transition-colors', + isActive + ? 'bg-accent-light text-accent' + : 'text-content-secondary hover:bg-surface-hover', + ) + } + > + + Billing + + )} {(user?.role === 'admin' || user?.role === 'project_manager') && ( )} + {user?.role === 'admin' && ( + + clsx( + 'flex items-center gap-3 px-3 py-2 rounded-md text-sm font-medium transition-colors', + isActive + ? 'bg-accent-light text-accent' + : 'text-content-secondary hover:bg-surface-hover', + ) + } + > + + Notification Settings + + )} {user?.role === 'admin' && ( { + if (amount == null) return '—' + return new Intl.NumberFormat('de-DE', { style: 'currency', currency }).format(amount) +} + +const formatDate = (iso: string | null) => + iso ? new Date(iso).toLocaleDateString('de-DE') : '—' + +const STATUS_COLORS: Record = { + draft: 'bg-gray-100 text-gray-700', + sent: 'bg-blue-100 text-blue-700', + paid: 'bg-green-100 text-green-700', + cancelled: 'bg-red-100 text-red-700', +} + +// ── New Invoice Modal ───────────────────────────────────────────────────── + +function NewInvoiceModal({ onClose, onCreate }: { onClose: () => void; onCreate: (data: InvoiceCreate) => void }) { + const [notes, setNotes] = useState('') + const [issuedAt, setIssuedAt] = useState(new Date().toISOString().split('T')[0]) + + const handleSubmit = (e: React.FormEvent) => { + e.preventDefault() + onCreate({ order_line_ids: [], notes: notes || undefined, issued_at: issuedAt || undefined }) + } + + return ( +
+
+
+

New Invoice

+ +
+
+
+ + setIssuedAt(e.target.value)} + className="w-full px-3 py-2 text-sm border border-gray-200 rounded-lg focus:outline-none focus:ring-1 focus:ring-blue-500" + /> +
+
+ +