feat: sharp edge pipeline V02, tessellation presets, media cache-bust, GMSH plan
Sharp Edge Pipeline V02:
- export_step_to_gltf.py: replace BRep_Tool.Polygon3D_s (returns None in XCAF) with
GCPnts_UniformAbscissa curve sampling at 0.3mm step — extracts 17,129 segment pairs
- Inject sharp_edge_pairs + sharp_threshold_deg into GLB extras (scenes[0].extras)
via binary GLB JSON-chunk patching (no extra dependency)
- export_gltf.py: read schaeffler_sharp_edge_pairs from Blender scene custom props,
apply via KD-tree to mark edges sharp=True + seam=True (OCC mm Z-up → Blender transform)
- tools/restore_sharp_marks.py: dual-pass (dihedral angle + OCC pairs), updated coordinate
transform (X, -Z, Y) * 0.001
Tessellation:
- Admin UI: Draft / Standard / Fine preset buttons with active-state highlighting
- Default angular deflection: preview 0.5→0.1 rad, production 0.2→0.05 rad
- export_glb.py: read updated defaults from system_settings
Media / Cache:
- media/service.py: get_download_url appends ?v={file_size_bytes} cache-buster
- media/router.py: Cache-Control: no-cache for all download/thumbnail endpoints
Render pipeline:
- still_render.py / turntable_render.py: shared GPU activation + camera improvements
- render_order_line.py: global render position support
- render_thumbnail.py: updated defaults
Frontend:
- InlineCadViewer: file_size_bytes-aware URL update triggers re-fetch on regeneration
- ThreeDViewer: material panel, part selection, PBR mode improvements
- Admin.tsx: tessellation preset cards, GMSH setting dropdown
- MediaBrowser, ProductDetail, OrderDetail, Orders: various UI improvements
- New: MaterialPanel, GlobalRenderPositionsPanel, StepIndicator components
- New: renderPositions.ts API client
Plans / Docs:
- plan.md: GMSH Frontal-Delaunay tessellation plan (6 tasks)
- LEARNINGS.md: OCC Polygon3D_s None issue + GCPnts fix
- .gitignore: add backend/core (core dump from root process)
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -0,0 +1,32 @@
|
||||
"""Add global_render_positions table.
|
||||
|
||||
Revision ID: 055
|
||||
Revises: 054
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
|
||||
revision = "055"
|
||||
down_revision = "ce21c8a67543"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.create_table(
|
||||
"global_render_positions",
|
||||
sa.Column("id", UUID(as_uuid=True), primary_key=True, server_default=sa.text("gen_random_uuid()")),
|
||||
sa.Column("name", sa.String(200), nullable=False),
|
||||
sa.Column("rotation_x", sa.Float, nullable=False, server_default="0.0"),
|
||||
sa.Column("rotation_y", sa.Float, nullable=False, server_default="0.0"),
|
||||
sa.Column("rotation_z", sa.Float, nullable=False, server_default="0.0"),
|
||||
sa.Column("is_default", sa.Boolean, nullable=False, server_default="false"),
|
||||
sa.Column("sort_order", sa.Integer, nullable=False, server_default="0"),
|
||||
sa.Column("created_at", sa.DateTime, nullable=False, server_default=sa.text("now()")),
|
||||
sa.Column("updated_at", sa.DateTime, nullable=False, server_default=sa.text("now()")),
|
||||
)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_table("global_render_positions")
|
||||
@@ -0,0 +1,29 @@
|
||||
"""Add global_render_position_id FK to order_lines.
|
||||
|
||||
Revision ID: 056
|
||||
Revises: 055
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
|
||||
revision = "056"
|
||||
down_revision = "055"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.add_column(
|
||||
"order_lines",
|
||||
sa.Column(
|
||||
"global_render_position_id",
|
||||
UUID(as_uuid=True),
|
||||
sa.ForeignKey("global_render_positions.id", ondelete="SET NULL"),
|
||||
nullable=True,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_column("order_lines", "global_render_position_id")
|
||||
@@ -0,0 +1,38 @@
|
||||
"""Seed default global render positions (Beauty, 3/4 Front, 3/4 Back).
|
||||
|
||||
Revision ID: 057
|
||||
Revises: 056
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
revision = "057"
|
||||
down_revision = "056"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
_DEFAULT_POSITIONS = [
|
||||
{"name": "Beauty", "rotation_x": 0.0, "rotation_y": 0.0, "rotation_z": 0.0, "is_default": True, "sort_order": 0},
|
||||
{"name": "3/4 Front", "rotation_x": -15.0, "rotation_y": 45.0, "rotation_z": 0.0, "is_default": False, "sort_order": 1},
|
||||
{"name": "3/4 Back", "rotation_x": -15.0, "rotation_y": -135.0, "rotation_z": 0.0, "is_default": False, "sort_order": 2},
|
||||
]
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
for pos in _DEFAULT_POSITIONS:
|
||||
conn.execute(
|
||||
sa.text(
|
||||
"INSERT INTO global_render_positions (id, name, rotation_x, rotation_y, rotation_z, is_default, sort_order, created_at, updated_at) "
|
||||
"VALUES (gen_random_uuid(), :name, :rx, :ry, :rz, :is_default, :sort_order, now(), now())"
|
||||
),
|
||||
{"name": pos["name"], "rx": pos["rotation_x"], "ry": pos["rotation_y"], "rz": pos["rotation_z"],
|
||||
"is_default": pos["is_default"], "sort_order": pos["sort_order"]},
|
||||
)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
conn.execute(
|
||||
sa.text("DELETE FROM global_render_positions WHERE name IN ('Beauty', '3/4 Front', '3/4 Back')")
|
||||
)
|
||||
@@ -0,0 +1,37 @@
|
||||
"""Seed additional global render positions (3/4 Front mirrored, 3/4 Back mirrored).
|
||||
|
||||
Revision ID: 058
|
||||
Revises: 057
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
revision = "058"
|
||||
down_revision = "057"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
_ADDITIONAL_POSITIONS = [
|
||||
{"name": "3/4 Front (mirrored)", "rotation_x": -15.0, "rotation_y": -45.0, "rotation_z": 0.0, "is_default": False, "sort_order": 3},
|
||||
{"name": "3/4 Back (mirrored)", "rotation_x": -15.0, "rotation_y": 135.0, "rotation_z": 0.0, "is_default": False, "sort_order": 4},
|
||||
]
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
for pos in _ADDITIONAL_POSITIONS:
|
||||
conn.execute(
|
||||
sa.text(
|
||||
"INSERT INTO global_render_positions (id, name, rotation_x, rotation_y, rotation_z, is_default, sort_order, created_at, updated_at) "
|
||||
"VALUES (gen_random_uuid(), :name, :rx, :ry, :rz, :is_default, :sort_order, now(), now())"
|
||||
),
|
||||
{"name": pos["name"], "rx": pos["rotation_x"], "ry": pos["rotation_y"], "rz": pos["rotation_z"],
|
||||
"is_default": pos["is_default"], "sort_order": pos["sort_order"]},
|
||||
)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
conn.execute(
|
||||
sa.text("DELETE FROM global_render_positions WHERE name IN ('3/4 Front (mirrored)', '3/4 Back (mirrored)')")
|
||||
)
|
||||
@@ -0,0 +1,31 @@
|
||||
"""Add Top global render position and clear all per-product positions.
|
||||
|
||||
Revision ID: 059
|
||||
Revises: 058
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
revision = "059"
|
||||
down_revision = "058"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
# Add Top perspective
|
||||
conn.execute(
|
||||
sa.text(
|
||||
"INSERT INTO global_render_positions (id, name, rotation_x, rotation_y, rotation_z, is_default, sort_order, created_at, updated_at) "
|
||||
"VALUES (gen_random_uuid(), 'Top', -90.0, 0.0, 0.0, false, 5, now(), now())"
|
||||
)
|
||||
)
|
||||
# Remove all per-product render positions (now redundant with global ones)
|
||||
conn.execute(sa.text("DELETE FROM product_render_positions"))
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
conn.execute(sa.text("DELETE FROM global_render_positions WHERE name = 'Top'"))
|
||||
# Per-product positions are not restored on downgrade (data was intentionally cleared)
|
||||
@@ -0,0 +1,186 @@
|
||||
"""add part_materials json column to cad_files
|
||||
|
||||
Revision ID: ce21c8a67543
|
||||
Revises: 054
|
||||
Create Date: 2026-03-09 20:11:52.201187
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = 'ce21c8a67543'
|
||||
down_revision: Union[str, None] = '054'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_table('app_config')
|
||||
op.drop_index(op.f('ix_audit_log_channel'), table_name='audit_log')
|
||||
op.drop_index(op.f('ix_audit_log_notification_ts'), table_name='audit_log', postgresql_where='(notification = true)')
|
||||
op.drop_index(op.f('ix_audit_log_target_notification'), table_name='audit_log')
|
||||
op.add_column('cad_files', sa.Column('part_materials', postgresql.JSONB(astext_type=sa.Text()), nullable=True))
|
||||
op.alter_column('cad_files', 'tenant_id',
|
||||
existing_type=sa.UUID(),
|
||||
nullable=True)
|
||||
op.drop_constraint(op.f('cad_files_file_hash_key'), 'cad_files', type_='unique')
|
||||
op.drop_index(op.f('ix_cad_files_file_hash'), table_name='cad_files')
|
||||
op.create_index(op.f('ix_cad_files_file_hash'), 'cad_files', ['file_hash'], unique=True)
|
||||
op.drop_index(op.f('uq_dashboard_config_tenant_default'), table_name='dashboard_configs', postgresql_where='(is_tenant_default = true)')
|
||||
op.drop_index(op.f('uq_dashboard_config_user'), table_name='dashboard_configs', postgresql_where='(user_id IS NOT NULL)')
|
||||
op.create_index(op.f('ix_dashboard_configs_tenant_id'), 'dashboard_configs', ['tenant_id'], unique=False)
|
||||
op.create_index(op.f('ix_dashboard_configs_user_id'), 'dashboard_configs', ['user_id'], unique=False)
|
||||
op.drop_index(op.f('ix_import_validations_status'), table_name='import_validations')
|
||||
op.drop_index(op.f('ix_import_validations_tenant'), table_name='import_validations')
|
||||
op.create_index(op.f('ix_import_validations_tenant_id'), 'import_validations', ['tenant_id'], unique=False)
|
||||
op.drop_index(op.f('ix_invoice_lines_invoice'), table_name='invoice_lines')
|
||||
op.drop_index(op.f('ix_invoices_status'), table_name='invoices')
|
||||
op.drop_index(op.f('ix_invoices_tenant'), table_name='invoices')
|
||||
op.create_index(op.f('ix_invoices_tenant_id'), 'invoices', ['tenant_id'], unique=False)
|
||||
op.drop_constraint(op.f('invoices_tenant_id_fkey'), 'invoices', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'invoices', 'tenants', ['tenant_id'], ['id'])
|
||||
op.drop_index(op.f('ix_material_aliases_material_id'), table_name='material_aliases')
|
||||
op.drop_index(op.f('uq_material_aliases_alias_lower'), table_name='material_aliases')
|
||||
op.drop_index(op.f('ix_media_assets_asset_type'), table_name='media_assets')
|
||||
op.drop_index(op.f('ix_media_assets_asset_type_created'), table_name='media_assets')
|
||||
op.drop_index(op.f('ix_media_assets_order_line'), table_name='media_assets')
|
||||
op.drop_index(op.f('ix_media_assets_product'), table_name='media_assets')
|
||||
op.drop_index(op.f('ix_media_assets_tenant'), table_name='media_assets')
|
||||
op.create_index(op.f('ix_media_assets_order_line_id'), 'media_assets', ['order_line_id'], unique=False)
|
||||
op.create_index(op.f('ix_media_assets_product_id'), 'media_assets', ['product_id'], unique=False)
|
||||
op.create_index(op.f('ix_media_assets_tenant_id'), 'media_assets', ['tenant_id'], unique=False)
|
||||
op.drop_index(op.f('ix_notification_configs_user'), table_name='notification_configs')
|
||||
op.drop_constraint(op.f('uq_notification_config_user_event_channel'), 'notification_configs', type_='unique')
|
||||
op.create_index(op.f('ix_notification_configs_user_id'), 'notification_configs', ['user_id'], unique=False)
|
||||
op.alter_column('order_items', 'tenant_id',
|
||||
existing_type=sa.UUID(),
|
||||
nullable=True)
|
||||
op.alter_column('order_lines', 'tenant_id',
|
||||
existing_type=sa.UUID(),
|
||||
nullable=True)
|
||||
op.drop_index(op.f('uq_order_lines_render'), table_name='order_lines', postgresql_where='(output_type_id IS NOT NULL)')
|
||||
op.drop_index(op.f('uq_order_lines_tracking'), table_name='order_lines', postgresql_where='(output_type_id IS NULL)')
|
||||
op.alter_column('orders', 'tenant_id',
|
||||
existing_type=sa.UUID(),
|
||||
nullable=True)
|
||||
op.drop_constraint(op.f('orders_order_number_key'), 'orders', type_='unique')
|
||||
op.drop_index(op.f('ix_orders_order_number'), table_name='orders')
|
||||
op.create_index(op.f('ix_orders_order_number'), 'orders', ['order_number'], unique=True)
|
||||
op.drop_index(op.f('ix_render_positions_product_id'), table_name='product_render_positions')
|
||||
op.drop_index(op.f('uq_render_positions_product_name'), table_name='product_render_positions')
|
||||
op.create_index(op.f('ix_product_render_positions_product_id'), 'product_render_positions', ['product_id'], unique=False)
|
||||
op.alter_column('products', 'tenant_id',
|
||||
existing_type=sa.UUID(),
|
||||
nullable=True)
|
||||
op.drop_index(op.f('ix_products_category_lagertyp'), table_name='products')
|
||||
op.drop_index(op.f('uq_products_produkt_baureihe'), table_name='products', postgresql_where='((produkt_baureihe IS NOT NULL) AND (is_active = true))')
|
||||
op.drop_index(op.f('ix_render_templates_active_unique'), table_name='render_templates', postgresql_where='(is_active = true)')
|
||||
op.drop_constraint(op.f('templates_category_key_key'), 'templates', type_='unique')
|
||||
op.drop_index(op.f('ix_templates_category_key'), table_name='templates')
|
||||
op.create_index(op.f('ix_templates_category_key'), 'templates', ['category_key'], unique=True)
|
||||
op.alter_column('users', 'tenant_id',
|
||||
existing_type=sa.UUID(),
|
||||
nullable=True)
|
||||
op.drop_constraint(op.f('users_email_key'), 'users', type_='unique')
|
||||
op.drop_index(op.f('ix_users_email'), table_name='users')
|
||||
op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
|
||||
op.drop_index(op.f('ix_workflow_node_results_run'), table_name='workflow_node_results')
|
||||
op.create_index(op.f('ix_workflow_node_results_run_id'), 'workflow_node_results', ['run_id'], unique=False)
|
||||
op.drop_index(op.f('ix_workflow_runs_order_line'), table_name='workflow_runs')
|
||||
op.drop_index(op.f('ix_workflow_runs_status'), table_name='workflow_runs')
|
||||
op.create_index(op.f('ix_workflow_runs_order_line_id'), 'workflow_runs', ['order_line_id'], unique=False)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index(op.f('ix_workflow_runs_order_line_id'), table_name='workflow_runs')
|
||||
op.create_index(op.f('ix_workflow_runs_status'), 'workflow_runs', ['status'], unique=False)
|
||||
op.create_index(op.f('ix_workflow_runs_order_line'), 'workflow_runs', ['order_line_id'], unique=False)
|
||||
op.drop_index(op.f('ix_workflow_node_results_run_id'), table_name='workflow_node_results')
|
||||
op.create_index(op.f('ix_workflow_node_results_run'), 'workflow_node_results', ['run_id'], unique=False)
|
||||
op.drop_index(op.f('ix_users_email'), table_name='users')
|
||||
op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=False)
|
||||
op.create_unique_constraint(op.f('users_email_key'), 'users', ['email'], postgresql_nulls_not_distinct=False)
|
||||
op.alter_column('users', 'tenant_id',
|
||||
existing_type=sa.UUID(),
|
||||
nullable=False)
|
||||
op.drop_index(op.f('ix_templates_category_key'), table_name='templates')
|
||||
op.create_index(op.f('ix_templates_category_key'), 'templates', ['category_key'], unique=False)
|
||||
op.create_unique_constraint(op.f('templates_category_key_key'), 'templates', ['category_key'], postgresql_nulls_not_distinct=False)
|
||||
op.create_index(op.f('ix_render_templates_active_unique'), 'render_templates', ['category_key', 'output_type_id'], unique=True, postgresql_where='(is_active = true)')
|
||||
op.create_index(op.f('uq_products_produkt_baureihe'), 'products', [sa.literal_column('lower(produkt_baureihe::text)')], unique=True, postgresql_where='((produkt_baureihe IS NOT NULL) AND (is_active = true))')
|
||||
op.create_index(op.f('ix_products_category_lagertyp'), 'products', ['category_key', 'lagertyp'], unique=False)
|
||||
op.alter_column('products', 'tenant_id',
|
||||
existing_type=sa.UUID(),
|
||||
nullable=False)
|
||||
op.drop_index(op.f('ix_product_render_positions_product_id'), table_name='product_render_positions')
|
||||
op.create_index(op.f('uq_render_positions_product_name'), 'product_render_positions', ['product_id', sa.literal_column('lower(name::text)')], unique=True)
|
||||
op.create_index(op.f('ix_render_positions_product_id'), 'product_render_positions', ['product_id'], unique=False)
|
||||
op.drop_index(op.f('ix_orders_order_number'), table_name='orders')
|
||||
op.create_index(op.f('ix_orders_order_number'), 'orders', ['order_number'], unique=False)
|
||||
op.create_unique_constraint(op.f('orders_order_number_key'), 'orders', ['order_number'], postgresql_nulls_not_distinct=False)
|
||||
op.alter_column('orders', 'tenant_id',
|
||||
existing_type=sa.UUID(),
|
||||
nullable=False)
|
||||
op.create_index(op.f('uq_order_lines_tracking'), 'order_lines', ['order_id', 'product_id', sa.literal_column("COALESCE(render_position_id, '00000000-0000-0000-0000-000000000000'::uuid)")], unique=True, postgresql_where='(output_type_id IS NULL)')
|
||||
op.create_index(op.f('uq_order_lines_render'), 'order_lines', ['order_id', 'product_id', 'output_type_id', sa.literal_column("COALESCE(render_position_id, '00000000-0000-0000-0000-000000000000'::uuid)")], unique=True, postgresql_where='(output_type_id IS NOT NULL)')
|
||||
op.alter_column('order_lines', 'tenant_id',
|
||||
existing_type=sa.UUID(),
|
||||
nullable=False)
|
||||
op.alter_column('order_items', 'tenant_id',
|
||||
existing_type=sa.UUID(),
|
||||
nullable=False)
|
||||
op.drop_index(op.f('ix_notification_configs_user_id'), table_name='notification_configs')
|
||||
op.create_unique_constraint(op.f('uq_notification_config_user_event_channel'), 'notification_configs', ['user_id', 'event_type', 'channel'], postgresql_nulls_not_distinct=False)
|
||||
op.create_index(op.f('ix_notification_configs_user'), 'notification_configs', ['user_id'], unique=False)
|
||||
op.drop_index(op.f('ix_media_assets_tenant_id'), table_name='media_assets')
|
||||
op.drop_index(op.f('ix_media_assets_product_id'), table_name='media_assets')
|
||||
op.drop_index(op.f('ix_media_assets_order_line_id'), table_name='media_assets')
|
||||
op.create_index(op.f('ix_media_assets_tenant'), 'media_assets', ['tenant_id'], unique=False)
|
||||
op.create_index(op.f('ix_media_assets_product'), 'media_assets', ['product_id'], unique=False)
|
||||
op.create_index(op.f('ix_media_assets_order_line'), 'media_assets', ['order_line_id'], unique=False)
|
||||
op.create_index(op.f('ix_media_assets_asset_type_created'), 'media_assets', ['asset_type', 'created_at'], unique=False)
|
||||
op.create_index(op.f('ix_media_assets_asset_type'), 'media_assets', ['asset_type'], unique=False)
|
||||
op.create_index(op.f('uq_material_aliases_alias_lower'), 'material_aliases', [sa.literal_column('lower(alias::text)')], unique=True)
|
||||
op.create_index(op.f('ix_material_aliases_material_id'), 'material_aliases', ['material_id'], unique=False)
|
||||
op.drop_constraint(None, 'invoices', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('invoices_tenant_id_fkey'), 'invoices', 'tenants', ['tenant_id'], ['id'], ondelete='CASCADE')
|
||||
op.drop_index(op.f('ix_invoices_tenant_id'), table_name='invoices')
|
||||
op.create_index(op.f('ix_invoices_tenant'), 'invoices', ['tenant_id'], unique=False)
|
||||
op.create_index(op.f('ix_invoices_status'), 'invoices', ['status'], unique=False)
|
||||
op.create_index(op.f('ix_invoice_lines_invoice'), 'invoice_lines', ['invoice_id'], unique=False)
|
||||
op.drop_index(op.f('ix_import_validations_tenant_id'), table_name='import_validations')
|
||||
op.create_index(op.f('ix_import_validations_tenant'), 'import_validations', ['tenant_id'], unique=False)
|
||||
op.create_index(op.f('ix_import_validations_status'), 'import_validations', ['status'], unique=False)
|
||||
op.drop_index(op.f('ix_dashboard_configs_user_id'), table_name='dashboard_configs')
|
||||
op.drop_index(op.f('ix_dashboard_configs_tenant_id'), table_name='dashboard_configs')
|
||||
op.create_index(op.f('uq_dashboard_config_user'), 'dashboard_configs', ['user_id'], unique=True, postgresql_where='(user_id IS NOT NULL)')
|
||||
op.create_index(op.f('uq_dashboard_config_tenant_default'), 'dashboard_configs', ['tenant_id'], unique=True, postgresql_where='(is_tenant_default = true)')
|
||||
op.drop_index(op.f('ix_cad_files_file_hash'), table_name='cad_files')
|
||||
op.create_index(op.f('ix_cad_files_file_hash'), 'cad_files', ['file_hash'], unique=False)
|
||||
op.create_unique_constraint(op.f('cad_files_file_hash_key'), 'cad_files', ['file_hash'], postgresql_nulls_not_distinct=False)
|
||||
op.alter_column('cad_files', 'tenant_id',
|
||||
existing_type=sa.UUID(),
|
||||
nullable=False)
|
||||
op.drop_column('cad_files', 'part_materials')
|
||||
op.create_index(op.f('ix_audit_log_target_notification'), 'audit_log', ['target_user_id', 'notification', 'read_at'], unique=False)
|
||||
op.create_index(op.f('ix_audit_log_notification_ts'), 'audit_log', ['notification', 'timestamp'], unique=False, postgresql_where='(notification = true)')
|
||||
op.create_index(op.f('ix_audit_log_channel'), 'audit_log', ['channel'], unique=False)
|
||||
op.create_table('app_config',
|
||||
sa.Column('id', sa.UUID(), server_default=sa.text('gen_random_uuid()'), autoincrement=False, nullable=False),
|
||||
sa.Column('version', sa.INTEGER(), server_default=sa.text('1'), autoincrement=False, nullable=False),
|
||||
sa.Column('render', postgresql.JSONB(astext_type=sa.Text()), server_default=sa.text("'{}'::jsonb"), autoincrement=False, nullable=False),
|
||||
sa.Column('storage', postgresql.JSONB(astext_type=sa.Text()), server_default=sa.text("'{}'::jsonb"), autoincrement=False, nullable=False),
|
||||
sa.Column('notifications', postgresql.JSONB(astext_type=sa.Text()), server_default=sa.text("'{}'::jsonb"), autoincrement=False, nullable=False),
|
||||
sa.Column('worker', postgresql.JSONB(astext_type=sa.Text()), server_default=sa.text("'{}'::jsonb"), autoincrement=False, nullable=False),
|
||||
sa.Column('billing', postgresql.JSONB(astext_type=sa.Text()), server_default=sa.text("'{}'::jsonb"), autoincrement=False, nullable=False),
|
||||
sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('now()'), autoincrement=False, nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('app_config_pkey'))
|
||||
)
|
||||
# ### end Alembic commands ###
|
||||
@@ -43,9 +43,9 @@ SETTINGS_DEFAULTS: dict[str, str] = {
|
||||
"smtp_from_address": "",
|
||||
# glTF tessellation quality (OCC BRepMesh)
|
||||
"gltf_preview_linear_deflection": "0.1", # mm — geometry GLB for viewer
|
||||
"gltf_preview_angular_deflection": "0.5", # rad
|
||||
"gltf_preview_angular_deflection": "0.1", # rad — Standard preset
|
||||
"gltf_production_linear_deflection": "0.03", # mm — production GLB
|
||||
"gltf_production_angular_deflection": "0.2", # rad
|
||||
"gltf_production_angular_deflection": "0.05", # rad — Standard preset
|
||||
# 3D viewer / glTF export settings
|
||||
"gltf_scale_factor": "0.001",
|
||||
"gltf_smooth_normals": "true",
|
||||
@@ -77,9 +77,9 @@ class SettingsOut(BaseModel):
|
||||
smtp_password: str = ""
|
||||
smtp_from_address: str = ""
|
||||
gltf_preview_linear_deflection: float = 0.1
|
||||
gltf_preview_angular_deflection: float = 0.5
|
||||
gltf_preview_angular_deflection: float = 0.1
|
||||
gltf_production_linear_deflection: float = 0.03
|
||||
gltf_production_angular_deflection: float = 0.2
|
||||
gltf_production_angular_deflection: float = 0.05
|
||||
gltf_scale_factor: float = 0.001
|
||||
gltf_smooth_normals: bool = True
|
||||
viewer_max_distance: float = 50.0
|
||||
@@ -420,9 +420,12 @@ async def regenerate_thumbnails(
|
||||
admin: User = Depends(require_admin),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""Re-queue all completed CAD files for thumbnail regeneration."""
|
||||
"""Re-queue completed CAD files that are linked to a product for thumbnail regeneration."""
|
||||
from app.domains.products.models import Product
|
||||
result = await db.execute(
|
||||
select(CadFile).where(CadFile.processing_status == ProcessingStatus.completed)
|
||||
select(CadFile)
|
||||
.join(Product, Product.cad_file_id == CadFile.id)
|
||||
.where(CadFile.processing_status == ProcessingStatus.completed)
|
||||
)
|
||||
cad_files = result.scalars().all()
|
||||
|
||||
@@ -435,6 +438,71 @@ async def regenerate_thumbnails(
|
||||
return {"queued": queued, "message": f"Re-queued {queued} CAD file(s) for thumbnail regeneration"}
|
||||
|
||||
|
||||
@router.get("/settings/orphaned-cad-files")
|
||||
async def get_orphaned_cad_files(
|
||||
admin: User = Depends(require_admin),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""Return count and total disk size of CadFiles not linked to any product."""
|
||||
from sqlalchemy import func
|
||||
from app.domains.products.models import Product
|
||||
result = await db.execute(
|
||||
select(func.count(CadFile.id), func.sum(CadFile.file_size))
|
||||
.outerjoin(Product, Product.cad_file_id == CadFile.id)
|
||||
.where(Product.id.is_(None))
|
||||
)
|
||||
count, total_bytes = result.one()
|
||||
return {
|
||||
"count": count or 0,
|
||||
"total_mb": round((total_bytes or 0) / 1024 / 1024, 1),
|
||||
}
|
||||
|
||||
|
||||
@router.post("/settings/cleanup-orphaned-cad-files")
|
||||
async def cleanup_orphaned_cad_files(
|
||||
admin: User = Depends(require_admin),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""Delete CadFile DB records and associated files on disk for all orphaned CadFiles.
|
||||
|
||||
A CadFile is orphaned if no product currently references it via products.cad_file_id.
|
||||
"""
|
||||
import os
|
||||
from app.domains.products.models import Product
|
||||
|
||||
result = await db.execute(
|
||||
select(CadFile)
|
||||
.outerjoin(Product, Product.cad_file_id == CadFile.id)
|
||||
.where(Product.id.is_(None))
|
||||
)
|
||||
orphans = result.scalars().all()
|
||||
|
||||
deleted_files = 0
|
||||
deleted_bytes = 0
|
||||
|
||||
for cad_file in orphans:
|
||||
# Remove files from disk (non-fatal if missing)
|
||||
for path_attr in ("stored_path", "thumbnail_path", "gltf_path"):
|
||||
path = getattr(cad_file, path_attr, None)
|
||||
if path:
|
||||
try:
|
||||
if os.path.isfile(path):
|
||||
size = os.path.getsize(path)
|
||||
os.remove(path)
|
||||
deleted_files += 1
|
||||
deleted_bytes += size
|
||||
except OSError:
|
||||
pass
|
||||
await db.delete(cad_file)
|
||||
|
||||
await db.commit()
|
||||
return {
|
||||
"deleted_records": len(orphans),
|
||||
"deleted_files": deleted_files,
|
||||
"freed_mb": round(deleted_bytes / 1024 / 1024, 1),
|
||||
}
|
||||
|
||||
|
||||
@router.post("/settings/reextract-metadata", status_code=status.HTTP_202_ACCEPTED)
|
||||
async def reextract_all_metadata(
|
||||
admin: User = Depends(require_admin),
|
||||
@@ -445,8 +513,11 @@ async def reextract_all_metadata(
|
||||
Updates mesh_attributes without re-rendering thumbnails or changing processing status.
|
||||
Use this after deploying bbox/edge extraction improvements.
|
||||
"""
|
||||
from app.domains.products.models import Product
|
||||
result = await db.execute(
|
||||
select(CadFile).where(
|
||||
select(CadFile)
|
||||
.join(Product, Product.cad_file_id == CadFile.id)
|
||||
.where(
|
||||
CadFile.processing_status == ProcessingStatus.completed,
|
||||
CadFile.stored_path.isnot(None),
|
||||
)
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Literal
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from fastapi.responses import FileResponse
|
||||
@@ -15,12 +16,26 @@ from app.models.cad_file import CadFile, ProcessingStatus
|
||||
from app.models.order import Order
|
||||
from app.models.order_item import OrderItem
|
||||
from app.models.user import User
|
||||
from app.utils.auth import get_current_user
|
||||
from app.utils.auth import get_current_user, is_privileged
|
||||
from app.services.product_service import link_cad_to_product, lookup_product
|
||||
|
||||
router = APIRouter(prefix="/cad", tags=["cad"])
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Part-materials schemas
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class PartMaterialEntry(BaseModel):
    """One material assignment for a named part of a CAD file."""

    # "library": value names a material from the material library.
    # "hex": value is a raw hex colour string (format is not validated here).
    type: Literal["library", "hex"]
    value: str  # material name or hex color string


class PartMaterialsResponse(BaseModel):
    """Payload returned by the part-materials GET/PUT endpoints."""

    # CAD file UUID, serialised to a string.
    cad_file_id: str
    # Part name -> assignment; None when no assignments have been saved yet.
    part_materials: dict[str, PartMaterialEntry] | None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Schemas for match-to-order
|
||||
# ---------------------------------------------------------------------------
|
||||
@@ -273,7 +288,7 @@ async def generate_gltf_geometry(
|
||||
Stores the result as a MediaAsset with asset_type='gltf_geometry'.
|
||||
Uses export_step_to_gltf.py (OCP/pythonocc) — no Blender needed.
|
||||
"""
|
||||
if user.role.value not in ("admin", "project_manager"):
|
||||
if not is_privileged(user):
|
||||
raise HTTPException(status_code=403, detail="Insufficient permissions")
|
||||
|
||||
cad = await _get_cad_file(id, db)
|
||||
@@ -296,7 +311,7 @@ async def generate_gltf_production(
|
||||
Requires a gltf_geometry MediaAsset to already exist (run generate-gltf-geometry first).
|
||||
Stores result as a MediaAsset with asset_type='gltf_production'.
|
||||
"""
|
||||
if user.role.value not in ("admin", "project_manager"):
|
||||
if not is_privileged(user):
|
||||
raise HTTPException(status_code=403, detail="Insufficient permissions")
|
||||
|
||||
cad = await _get_cad_file(id, db)
|
||||
@@ -359,7 +374,7 @@ async def reset_stuck_processing(
|
||||
Use when a file shows 'processing' indefinitely due to a worker crash.
|
||||
After resetting, click 'Regen thumbnail' to retry.
|
||||
"""
|
||||
if user.role.value not in ("admin", "project_manager"):
|
||||
if not is_privileged(user):
|
||||
raise HTTPException(status_code=403, detail="Insufficient permissions")
|
||||
|
||||
cad = await _get_cad_file(id, db)
|
||||
@@ -377,3 +392,45 @@ async def reset_stuck_processing(
|
||||
return {"cad_file_id": str(cad.id), "status": "failed", "message": "Reset to 'failed'. Use 'Regen thumbnail' to retry."}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Part-material assignment endpoints
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@router.get("/{id}/part-materials", response_model=PartMaterialsResponse)
|
||||
async def get_part_materials(
|
||||
id: uuid.UUID,
|
||||
user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""Return the saved part-material assignments for a CAD file."""
|
||||
cad = await _get_cad_file(id, db)
|
||||
return PartMaterialsResponse(
|
||||
cad_file_id=str(cad.id),
|
||||
part_materials=cad.part_materials,
|
||||
)
|
||||
|
||||
|
||||
@router.put("/{id}/part-materials", response_model=PartMaterialsResponse)
|
||||
async def save_part_materials(
|
||||
id: uuid.UUID,
|
||||
body: dict[str, PartMaterialEntry],
|
||||
user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""Replace the part-material assignment map for a CAD file.
|
||||
|
||||
Accepts a full dict of part-name -> {type, value} and overwrites the existing
|
||||
assignment. Pass an empty dict to clear all assignments.
|
||||
"""
|
||||
if not is_privileged(user):
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Insufficient permissions")
|
||||
cad = await _get_cad_file(id, db)
|
||||
# Serialise Pydantic models to plain dicts for JSONB storage
|
||||
cad.part_materials = {name: entry.model_dump() for name, entry in body.items()}
|
||||
cad.updated_at = datetime.utcnow()
|
||||
await db.commit()
|
||||
await db.refresh(cad)
|
||||
return PartMaterialsResponse(
|
||||
cad_file_id=str(cad.id),
|
||||
part_materials=cad.part_materials,
|
||||
)
|
||||
|
||||
@@ -0,0 +1,75 @@
|
||||
import uuid
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.database import get_db
|
||||
from app.models import GlobalRenderPosition
|
||||
from app.domains.rendering.schemas import (
|
||||
GlobalRenderPositionCreate,
|
||||
GlobalRenderPositionPatch,
|
||||
GlobalRenderPositionOut,
|
||||
)
|
||||
from app.utils.auth import require_admin, get_current_user
|
||||
|
||||
router = APIRouter(prefix="/render-positions/global", tags=["global-render-positions"])
|
||||
|
||||
|
||||
@router.get("", response_model=list[GlobalRenderPositionOut])
|
||||
async def list_global_render_positions(
|
||||
db: AsyncSession = Depends(get_db),
|
||||
_user=Depends(get_current_user),
|
||||
):
|
||||
"""List all global render positions (available to all authenticated users)."""
|
||||
result = await db.execute(
|
||||
select(GlobalRenderPosition).order_by(GlobalRenderPosition.sort_order, GlobalRenderPosition.name)
|
||||
)
|
||||
return result.scalars().all()
|
||||
|
||||
|
||||
@router.post("", response_model=GlobalRenderPositionOut, status_code=status.HTTP_201_CREATED)
|
||||
async def create_global_render_position(
|
||||
body: GlobalRenderPositionCreate,
|
||||
db: AsyncSession = Depends(get_db),
|
||||
_user=Depends(require_admin),
|
||||
):
|
||||
"""Create a new global render position (admin only)."""
|
||||
pos = GlobalRenderPosition(**body.model_dump())
|
||||
db.add(pos)
|
||||
await db.commit()
|
||||
await db.refresh(pos)
|
||||
return pos
|
||||
|
||||
|
||||
@router.patch("/{pos_id}", response_model=GlobalRenderPositionOut)
|
||||
async def update_global_render_position(
|
||||
pos_id: uuid.UUID,
|
||||
body: GlobalRenderPositionPatch,
|
||||
db: AsyncSession = Depends(get_db),
|
||||
_user=Depends(require_admin),
|
||||
):
|
||||
"""Update a global render position (admin only)."""
|
||||
result = await db.execute(select(GlobalRenderPosition).where(GlobalRenderPosition.id == pos_id))
|
||||
pos = result.scalar_one_or_none()
|
||||
if not pos:
|
||||
raise HTTPException(status_code=404, detail="Global render position not found")
|
||||
for field, value in body.model_dump(exclude_unset=True).items():
|
||||
setattr(pos, field, value)
|
||||
await db.commit()
|
||||
await db.refresh(pos)
|
||||
return pos
|
||||
|
||||
|
||||
@router.delete("/{pos_id}", status_code=status.HTTP_204_NO_CONTENT)
|
||||
async def delete_global_render_position(
|
||||
pos_id: uuid.UUID,
|
||||
db: AsyncSession = Depends(get_db),
|
||||
_user=Depends(require_admin),
|
||||
):
|
||||
"""Delete a global render position (admin only)."""
|
||||
result = await db.execute(select(GlobalRenderPosition).where(GlobalRenderPosition.id == pos_id))
|
||||
pos = result.scalar_one_or_none()
|
||||
if not pos:
|
||||
raise HTTPException(status_code=404, detail="Global render position not found")
|
||||
await db.delete(pos)
|
||||
await db.commit()
|
||||
@@ -93,6 +93,9 @@ def _build_line_out(line: OrderLine) -> OrderLineOut:
|
||||
unit_price=float(line.unit_price) if line.unit_price is not None else None,
|
||||
render_position_id=line.render_position_id,
|
||||
render_position_name=rp_name,
|
||||
render_log=line.render_log if hasattr(line, 'render_log') else None,
|
||||
render_started_at=line.render_started_at if hasattr(line, 'render_started_at') else None,
|
||||
render_completed_at=line.render_completed_at if hasattr(line, 'render_completed_at') else None,
|
||||
notes=line.notes,
|
||||
created_at=line.created_at,
|
||||
updated_at=line.updated_at,
|
||||
@@ -384,6 +387,7 @@ async def create_order(
|
||||
product_id=line_data.product_id,
|
||||
output_type_id=line_data.output_type_id,
|
||||
render_position_id=line_data.render_position_id,
|
||||
global_render_position_id=line_data.global_render_position_id,
|
||||
gewuenschte_bildnummer=line_data.gewuenschte_bildnummer,
|
||||
notes=line_data.notes,
|
||||
tenant_id=getattr(user, 'tenant_id', None),
|
||||
@@ -827,6 +831,7 @@ async def add_order_line(
|
||||
product_id=body.product_id,
|
||||
output_type_id=body.output_type_id,
|
||||
render_position_id=body.render_position_id,
|
||||
global_render_position_id=body.global_render_position_id,
|
||||
gewuenschte_bildnummer=body.gewuenschte_bildnummer,
|
||||
notes=body.notes,
|
||||
tenant_id=getattr(user, 'tenant_id', None),
|
||||
|
||||
@@ -76,6 +76,7 @@ def _product_out(product: Product, priority: list[str] | None = None) -> Product
|
||||
out.processing_status = product.processing_status
|
||||
out.cad_parsed_objects = product.cad_parsed_objects
|
||||
out.cad_mesh_attributes = product.cad_file.mesh_attributes if product.cad_file else None
|
||||
out.cad_render_log = product.cad_file.render_log if product.cad_file else None
|
||||
out.render_image_url = _best_render_url(product, priority or ["latest_render", "cad_thumbnail"])
|
||||
return out
|
||||
|
||||
@@ -662,6 +663,8 @@ async def get_product_renders(
|
||||
.options(
|
||||
joinedload(OrderLine.output_type),
|
||||
joinedload(OrderLine.order),
|
||||
joinedload(OrderLine.render_position),
|
||||
joinedload(OrderLine.global_render_position),
|
||||
)
|
||||
.where(
|
||||
OrderLine.product_id == product_id,
|
||||
@@ -681,6 +684,11 @@ async def get_product_renders(
|
||||
if disk is None or not disk.exists():
|
||||
continue
|
||||
ext = Path(url).suffix.lower()
|
||||
position_name = (
|
||||
line.render_position.name if line.render_position
|
||||
else line.global_render_position.name if line.global_render_position
|
||||
else None
|
||||
)
|
||||
renders.append({
|
||||
"order_line_id": str(line.id),
|
||||
"order_number": line.order.order_number if line.order else None,
|
||||
@@ -689,6 +697,7 @@ async def get_product_renders(
|
||||
"is_video": ext in VIDEO_EXTENSIONS,
|
||||
"render_backend": line.render_backend_used,
|
||||
"completed_at": line.render_completed_at.isoformat() if line.render_completed_at else None,
|
||||
"render_position_name": position_name,
|
||||
})
|
||||
return renders
|
||||
|
||||
|
||||
@@ -60,7 +60,7 @@ async def _resolve_thumbnails_bulk(db: AsyncSession, assets: list) -> None:
|
||||
for a in needs:
|
||||
pid = str(a.product_id)
|
||||
if pid in best_still:
|
||||
a.thumbnail_url = f"/api/media/{best_still[pid]}/download"
|
||||
a.thumbnail_url = f"/api/media/{best_still[pid]}/thumbnail"
|
||||
elif pid in product_cad:
|
||||
a.thumbnail_url = f"/api/cad/{product_cad[pid]}/thumbnail"
|
||||
|
||||
@@ -105,6 +105,7 @@ async def browse_media_assets(
|
||||
category_key: str | None = None,
|
||||
render_status: str | None = None,
|
||||
q: str | None = None,
|
||||
exclude_technical: bool = Query(True, description="Exclude GLB/STL/Blend technical assets"),
|
||||
page: int = Query(1, ge=1),
|
||||
page_size: int = Query(50, ge=1, le=200),
|
||||
_user: User = Depends(get_current_user),
|
||||
@@ -125,6 +126,12 @@ async def browse_media_assets(
|
||||
Product.pim_id.label("product_pim_id"),
|
||||
Product.category_key.label("category_key"),
|
||||
OrderLine.render_status.label("render_status"),
|
||||
Product.ebene1.label("product_ebene1"),
|
||||
Product.ebene2.label("product_ebene2"),
|
||||
Product.baureihe.label("product_baureihe"),
|
||||
Product.produkt_baureihe.label("product_produkt_baureihe"),
|
||||
Product.lagertyp.label("product_lagertyp"),
|
||||
Product.name_cad_modell.label("product_name_cad_modell"),
|
||||
)
|
||||
.outerjoin(Product, MediaAsset.product_id == Product.id)
|
||||
.outerjoin(OrderLine, MediaAsset.order_line_id == OrderLine.id)
|
||||
@@ -133,12 +140,21 @@ async def browse_media_assets(
|
||||
)
|
||||
|
||||
# Apply filters
|
||||
_TECHNICAL_TYPES = (
|
||||
MediaAssetType.gltf_geometry,
|
||||
MediaAssetType.gltf_production,
|
||||
MediaAssetType.blend_production,
|
||||
MediaAssetType.stl_low,
|
||||
MediaAssetType.stl_high,
|
||||
)
|
||||
if asset_type:
|
||||
try:
|
||||
at_enum = MediaAssetType(asset_type)
|
||||
stmt = stmt.where(MediaAsset.asset_type == at_enum)
|
||||
except ValueError:
|
||||
pass # invalid type → ignore filter
|
||||
elif exclude_technical:
|
||||
stmt = stmt.where(MediaAsset.asset_type.notin_(_TECHNICAL_TYPES))
|
||||
|
||||
if category_key:
|
||||
stmt = stmt.where(Product.category_key == category_key)
|
||||
@@ -153,6 +169,12 @@ async def browse_media_assets(
|
||||
or_(
|
||||
Product.name.ilike(pattern),
|
||||
Product.pim_id.ilike(pattern),
|
||||
Product.ebene1.ilike(pattern),
|
||||
Product.ebene2.ilike(pattern),
|
||||
Product.baureihe.ilike(pattern),
|
||||
Product.produkt_baureihe.ilike(pattern),
|
||||
Product.lagertyp.ilike(pattern),
|
||||
Product.name_cad_modell.ilike(pattern),
|
||||
)
|
||||
)
|
||||
|
||||
@@ -165,15 +187,30 @@ async def browse_media_assets(
|
||||
offset = (page - 1) * page_size
|
||||
stmt = stmt.offset(offset).limit(page_size)
|
||||
|
||||
rows = await db.execute(stmt)
|
||||
all_rows = (await db.execute(stmt)).all()
|
||||
|
||||
# Pre-assign thumbnail_url so _resolve_thumbnails_bulk can check it
|
||||
raw_assets = [row[0] for row in all_rows]
|
||||
for a in raw_assets:
|
||||
a.thumbnail_url = service.get_thumbnail_url(a)
|
||||
# Resolve fallback thumbnails for non-image assets via product→cad lookup
|
||||
await _resolve_thumbnails_bulk(db, raw_assets)
|
||||
|
||||
items: list[MediaAssetBrowseItem] = []
|
||||
for row in rows.all():
|
||||
for row in all_rows:
|
||||
asset: MediaAsset = row[0]
|
||||
product_name: str | None = row[1]
|
||||
product_pim_id: str | None = row[2]
|
||||
cat_key: str | None = row[3]
|
||||
r_status: str | None = row[4]
|
||||
ebene1: str | None = row[5]
|
||||
ebene2: str | None = row[6]
|
||||
baureihe: str | None = row[7]
|
||||
produkt_baureihe: str | None = row[8]
|
||||
lagertyp: str | None = row[9]
|
||||
name_cad_modell: str | None = row[10]
|
||||
|
||||
thumb = asset.thumbnail_url
|
||||
item = MediaAssetBrowseItem(
|
||||
id=asset.id,
|
||||
asset_type=asset.asset_type,
|
||||
@@ -187,8 +224,14 @@ async def browse_media_assets(
|
||||
product_pim_id=product_pim_id,
|
||||
category_key=cat_key,
|
||||
render_status=r_status,
|
||||
product_ebene1=ebene1,
|
||||
product_ebene2=ebene2,
|
||||
product_baureihe=baureihe,
|
||||
product_produkt_baureihe=produkt_baureihe,
|
||||
product_lagertyp=lagertyp,
|
||||
product_name_cad_modell=name_cad_modell,
|
||||
download_url=f"/api/media/{asset.id}/download",
|
||||
thumbnail_url=service.get_thumbnail_url(asset),
|
||||
thumbnail_url=thumb,
|
||||
)
|
||||
items.append(item)
|
||||
|
||||
@@ -213,6 +256,48 @@ async def get_asset(asset_id: uuid.UUID, db: AsyncSession = Depends(get_db)):
|
||||
return asset
|
||||
|
||||
|
||||
@router.get("/{asset_id}/thumbnail")
|
||||
async def thumbnail_asset(
|
||||
asset_id: uuid.UUID,
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""Serve asset as an inline image — no auth required (UUID is opaque enough).
|
||||
|
||||
Only serves image/video MIME types; returns 404 for binary files.
|
||||
"""
|
||||
from fastapi.responses import FileResponse, Response
|
||||
from pathlib import Path
|
||||
asset = await service.get_media_asset(db, asset_id)
|
||||
if not asset:
|
||||
raise HTTPException(404, "Asset not found")
|
||||
|
||||
mime = asset.mime_type or ""
|
||||
if not (mime.startswith("image/") or mime.startswith("video/")):
|
||||
raise HTTPException(404, "Not a previewable asset")
|
||||
|
||||
key = asset.storage_key
|
||||
from app.config import settings
|
||||
candidate = Path(key) if Path(key).is_absolute() else Path(settings.upload_dir) / key
|
||||
if not candidate.exists() and "/shared/renders/" in key:
|
||||
parts = key.split("/")
|
||||
if len(parts) >= 2:
|
||||
remapped = Path(settings.upload_dir) / "renders" / parts[-2] / parts[-1]
|
||||
if remapped.exists():
|
||||
candidate = remapped
|
||||
if candidate.exists():
|
||||
return FileResponse(
|
||||
str(candidate), media_type=mime,
|
||||
headers={"Cache-Control": "max-age=86400, public"},
|
||||
)
|
||||
try:
|
||||
from app.core.storage import get_storage
|
||||
data = get_storage().download_bytes(key)
|
||||
return Response(content=data, media_type=mime,
|
||||
headers={"Cache-Control": "max-age=86400, public"})
|
||||
except Exception:
|
||||
raise HTTPException(404, "File not available")
|
||||
|
||||
|
||||
@router.api_route("/{asset_id}/download", methods=["GET", "HEAD"])
|
||||
async def download_asset(
|
||||
asset_id: uuid.UUID,
|
||||
@@ -250,7 +335,7 @@ async def download_asset(
|
||||
fname = f"{asset.asset_type.value}_{asset_id}.{ext or 'bin'}"
|
||||
return FileResponse(
|
||||
str(candidate), media_type=mime, filename=fname,
|
||||
headers={"Cache-Control": "max-age=3600, public"},
|
||||
headers={"Cache-Control": "no-cache"},
|
||||
)
|
||||
|
||||
# Fall back to MinIO
|
||||
@@ -264,7 +349,7 @@ async def download_asset(
|
||||
media_type=mime,
|
||||
headers={
|
||||
"Content-Disposition": f"attachment; filename={fname}",
|
||||
"Cache-Control": "max-age=3600, public",
|
||||
"Cache-Control": "no-cache",
|
||||
},
|
||||
)
|
||||
except Exception:
|
||||
@@ -346,3 +431,58 @@ async def delete_asset_permanent(asset_id: uuid.UUID, db: AsyncSession = Depends
|
||||
if not deleted:
|
||||
raise HTTPException(404, "Asset not found")
|
||||
return {"ok": True}
|
||||
|
||||
|
||||
@router.post("/cleanup-orphaned")
|
||||
async def cleanup_orphaned_assets(
|
||||
_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""Delete all MediaAsset DB records whose backing file doesn't exist on disk or in MinIO.
|
||||
|
||||
Returns counts of checked/deleted records. Admin only.
|
||||
"""
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from app.config import settings
|
||||
from app.core.storage import get_storage
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
storage = get_storage()
|
||||
|
||||
def _file_exists(key: str) -> bool:
|
||||
candidate = Path(key) if Path(key).is_absolute() else Path(settings.upload_dir) / key
|
||||
if candidate.exists():
|
||||
return True
|
||||
# Legacy path remapping
|
||||
if "/shared/renders/" in key:
|
||||
parts = key.split("/")
|
||||
if len(parts) >= 2:
|
||||
remapped = Path(settings.upload_dir) / "renders" / parts[-2] / parts[-1]
|
||||
if remapped.exists():
|
||||
return True
|
||||
# Check MinIO
|
||||
try:
|
||||
storage.download_bytes(key)
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
result = await db.execute(select(MediaAsset).where(MediaAsset.is_archived == False)) # noqa: E712
|
||||
all_assets = result.scalars().all()
|
||||
|
||||
deleted_ids = []
|
||||
for asset in all_assets:
|
||||
if not _file_exists(asset.storage_key):
|
||||
logger.info("Cleanup: deleting orphaned asset %s (%s)", asset.id, asset.storage_key)
|
||||
await db.delete(asset)
|
||||
deleted_ids.append(str(asset.id))
|
||||
|
||||
if deleted_ids:
|
||||
await db.commit()
|
||||
|
||||
return {
|
||||
"checked": len(all_assets),
|
||||
"deleted": len(deleted_ids),
|
||||
"deleted_ids": deleted_ids,
|
||||
}
|
||||
|
||||
@@ -41,6 +41,13 @@ class MediaAssetBrowseItem(BaseModel):
|
||||
product_pim_id: str | None
|
||||
category_key: str | None
|
||||
render_status: str | None
|
||||
# Extended product metadata fields
|
||||
product_ebene1: str | None = None
|
||||
product_ebene2: str | None = None
|
||||
product_baureihe: str | None = None
|
||||
product_produkt_baureihe: str | None = None
|
||||
product_lagertyp: str | None = None
|
||||
product_name_cad_modell: str | None = None
|
||||
download_url: str | None = None
|
||||
thumbnail_url: str | None = None
|
||||
|
||||
|
||||
@@ -77,12 +77,26 @@ async def delete_media_asset(db: AsyncSession, asset_id: uuid.UUID) -> bool:
|
||||
|
||||
|
||||
def get_download_url(asset: MediaAsset) -> str | None:
    """Return a backend proxy URL so the browser can always download the file.

    Appends ?v={file_size_bytes} as a cache-buster: when a file is regenerated
    in-place (same asset UUID, new content), the size changes and the URL
    changes, which triggers a fresh fetch in InlineCadViewer's useEffect.
    A missing size falls back to 0 so the URL shape stays stable.
    """
    # Defect fixed: the span contained both the pre-diff bare return and the
    # post-diff body fused together (unreachable docstring + duplicate return);
    # only the cache-busting implementation is kept.
    v = asset.file_size_bytes or 0
    return f"/api/media/{asset.id}/download?v={v}"
|
||||
|
||||
|
||||
def get_thumbnail_url(asset: MediaAsset) -> str | None:
    """Return a no-auth preview URL for the asset.

    Priority:
    1. For image-type assets (still, thumbnail): the no-auth /thumbnail endpoint.
    2. For any asset with a cad_file_id: the CAD thumbnail (also no-auth).
    3. Otherwise None (caller may use _resolve_thumbnails_bulk for fallback).
    """
    # Defect fixed: the span fused the old one-line docstring with the new
    # docstring and body; only the new implementation is kept.
    if asset.asset_type in (MediaAssetType.still, MediaAssetType.thumbnail):
        return f"/api/media/{asset.id}/thumbnail"
    if asset.cad_file_id:
        return f"/api/cad/{asset.cad_file_id}/thumbnail"
    return None
|
||||
|
||||
@@ -145,6 +145,11 @@ class OrderLine(Base):
|
||||
ForeignKey("product_render_positions.id", ondelete="SET NULL"),
|
||||
nullable=True,
|
||||
)
|
||||
global_render_position_id: Mapped[uuid.UUID | None] = mapped_column(
|
||||
UUID(as_uuid=True),
|
||||
ForeignKey("global_render_positions.id", ondelete="SET NULL"),
|
||||
nullable=True,
|
||||
)
|
||||
notes: Mapped[str | None] = mapped_column(Text, nullable=True)
|
||||
tenant_id: Mapped[uuid.UUID | None] = mapped_column(
|
||||
UUID(as_uuid=True), ForeignKey("tenants.id"), nullable=True, index=True
|
||||
@@ -160,3 +165,6 @@ class OrderLine(Base):
|
||||
render_position: Mapped["ProductRenderPosition | None"] = relationship(
|
||||
"ProductRenderPosition", back_populates="order_lines"
|
||||
)
|
||||
global_render_position: Mapped["GlobalRenderPosition | None"] = relationship(
|
||||
"GlobalRenderPosition", back_populates="order_lines"
|
||||
)
|
||||
|
||||
@@ -64,6 +64,7 @@ class OrderLineCreate(BaseModel):
|
||||
product_id: uuid.UUID
|
||||
output_type_id: uuid.UUID | None = None
|
||||
render_position_id: uuid.UUID | None = None
|
||||
global_render_position_id: uuid.UUID | None = None
|
||||
gewuenschte_bildnummer: str | None = None
|
||||
notes: str | None = None
|
||||
|
||||
@@ -87,6 +88,9 @@ class OrderLineOut(BaseModel):
|
||||
unit_price: float | None = None
|
||||
render_position_id: uuid.UUID | None = None
|
||||
render_position_name: str | None = None
|
||||
render_log: dict | None = None
|
||||
render_started_at: datetime | None = None
|
||||
render_completed_at: datetime | None = None
|
||||
notes: str | None
|
||||
created_at: datetime
|
||||
updated_at: datetime
|
||||
|
||||
@@ -69,7 +69,7 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
|
||||
eng.dispose()
|
||||
|
||||
linear_deflection = float(sys_settings.get("gltf_preview_linear_deflection", "0.1"))
|
||||
angular_deflection = float(sys_settings.get("gltf_preview_angular_deflection", "0.5"))
|
||||
angular_deflection = float(sys_settings.get("gltf_preview_angular_deflection", "0.1"))
|
||||
|
||||
step = _Path(step_path_str)
|
||||
|
||||
@@ -230,7 +230,7 @@ def generate_gltf_production_task(self, cad_file_id: str, product_id: str | None
|
||||
|
||||
smooth_angle = float(sys_settings.get("blender_smooth_angle", "30"))
|
||||
prod_linear = float(sys_settings.get("gltf_production_linear_deflection", "0.03"))
|
||||
prod_angular = float(sys_settings.get("gltf_production_angular_deflection", "0.2"))
|
||||
prod_angular = float(sys_settings.get("gltf_production_angular_deflection", "0.05"))
|
||||
|
||||
scripts_dir = _Path(_os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts"))
|
||||
occ_script = scripts_dir / "export_step_to_gltf.py"
|
||||
@@ -239,12 +239,14 @@ def generate_gltf_production_task(self, cad_file_id: str, product_id: str | None
|
||||
|
||||
prod_geom_glb = step_path.parent / f"{step_path.stem}_production_geom.glb"
|
||||
python_bin = _sys.executable
|
||||
sharp_threshold = float(sys_settings.get("sharp_edge_threshold", "20.0"))
|
||||
occ_cmd = [
|
||||
python_bin, str(occ_script),
|
||||
"--step_path", str(step_path),
|
||||
"--output_path", str(prod_geom_glb),
|
||||
"--linear_deflection", str(prod_linear),
|
||||
"--angular_deflection", str(prod_angular),
|
||||
"--sharp_threshold", str(sharp_threshold),
|
||||
]
|
||||
log_task_event(
|
||||
self.request.id,
|
||||
|
||||
@@ -130,7 +130,7 @@ def render_order_line_task(self, order_line_id: str):
|
||||
logger.info(f"No render template for category_key={category_key!r}, output_type_id={ot_id!r}")
|
||||
|
||||
cad_name = cad_file.original_name if cad_file else "?"
|
||||
# Load render_position for rotation values
|
||||
# Load render_position for rotation values (per-product takes priority, falls back to global)
|
||||
rotation_x = rotation_y = rotation_z = 0.0
|
||||
if line.render_position_id:
|
||||
from app.models.render_position import ProductRenderPosition
|
||||
@@ -138,6 +138,12 @@ def render_order_line_task(self, order_line_id: str):
|
||||
if rp:
|
||||
rotation_x, rotation_y, rotation_z = rp.rotation_x, rp.rotation_y, rp.rotation_z
|
||||
emit(order_line_id, f"Render position: '{rp.name}' ({rotation_x}°, {rotation_y}°, {rotation_z}°)")
|
||||
elif line.global_render_position_id:
|
||||
from app.models import GlobalRenderPosition
|
||||
grp = session.get(GlobalRenderPosition, line.global_render_position_id)
|
||||
if grp:
|
||||
rotation_x, rotation_y, rotation_z = grp.rotation_x, grp.rotation_y, grp.rotation_z
|
||||
emit(order_line_id, f"Global render position: '{grp.name}' ({rotation_x}°, {rotation_y}°, {rotation_z}°)")
|
||||
|
||||
emit(order_line_id, f"Starting render for {cad_name} ({len(part_colors)} coloured parts)")
|
||||
|
||||
@@ -345,6 +351,7 @@ def render_order_line_task(self, order_line_id: str):
|
||||
if success:
|
||||
# Create MediaAsset so the render appears in the Media Browser
|
||||
try:
|
||||
import os as _os
|
||||
from app.domains.media.models import MediaAsset, MediaAssetType as MAT
|
||||
from app.config import settings as _cfg2
|
||||
_ext = str(output_path).rsplit(".", 1)[-1].lower() if "." in str(output_path) else "bin"
|
||||
@@ -360,6 +367,33 @@ def render_order_line_task(self, order_line_id: str):
|
||||
select(MediaAsset.id).where(MediaAsset.storage_key == _norm_key).limit(1)
|
||||
).scalar_one_or_none()
|
||||
if not _existing:
|
||||
# Probe output file for metadata
|
||||
_file_size = None
|
||||
_width = None
|
||||
_height = None
|
||||
if _os.path.exists(output_path):
|
||||
try:
|
||||
_file_size = _os.path.getsize(output_path)
|
||||
except OSError:
|
||||
pass
|
||||
if _ext in ("png", "jpg", "jpeg"):
|
||||
try:
|
||||
from PIL import Image as _PILImage
|
||||
with _PILImage.open(output_path) as _im:
|
||||
_width, _height = _im.size
|
||||
except Exception:
|
||||
pass
|
||||
# Snapshot key render settings into render_config
|
||||
_render_config = None
|
||||
if isinstance(render_log, dict):
|
||||
_render_config = {
|
||||
k: render_log[k]
|
||||
for k in (
|
||||
"renderer", "engine_used", "engine", "samples",
|
||||
"device_used", "compute_type", "total_duration_s",
|
||||
)
|
||||
if k in render_log
|
||||
}
|
||||
_asset = MediaAsset(
|
||||
tenant_id=_tenant_id,
|
||||
order_line_id=line.id,
|
||||
@@ -367,6 +401,10 @@ def render_order_line_task(self, order_line_id: str):
|
||||
asset_type=_at,
|
||||
storage_key=_norm_key,
|
||||
mime_type=_mime,
|
||||
file_size_bytes=_file_size,
|
||||
width=_width,
|
||||
height=_height,
|
||||
render_config=_render_config,
|
||||
)
|
||||
session.add(_asset)
|
||||
session.commit()
|
||||
|
||||
@@ -95,6 +95,38 @@ def render_step_thumbnail(self, cad_file_id: str):
|
||||
except Exception:
|
||||
logger.exception(f"bbox extraction failed for {cad_file_id} (non-fatal)")
|
||||
|
||||
# Extract sharp edge topology (PCurve-based) if not already present.
|
||||
# This runs on render-worker which has OCP (cadquery's OCC fork).
|
||||
try:
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import Session
|
||||
from app.config import settings as _cfg3
|
||||
from app.models.cad_file import CadFile as _CadFile3
|
||||
from app.services.step_processor import extract_mesh_edge_data
|
||||
|
||||
_sync_url3 = _cfg3.database_url.replace("+asyncpg", "")
|
||||
_eng3 = create_engine(_sync_url3)
|
||||
with Session(_eng3) as _sess3:
|
||||
_cad3 = _sess3.get(_CadFile3, cad_file_id)
|
||||
_attrs = _cad3.mesh_attributes or {} if _cad3 else {}
|
||||
_step_path3 = _cad3.stored_path if _cad3 else None
|
||||
_eng3.dispose()
|
||||
|
||||
if _step_path3 and "sharp_edge_pairs" not in _attrs:
|
||||
edge_data = extract_mesh_edge_data(_step_path3)
|
||||
if edge_data:
|
||||
_eng3 = create_engine(_sync_url3)
|
||||
with Session(_eng3) as _sess3:
|
||||
_cad3 = _sess3.get(_CadFile3, cad_file_id)
|
||||
if _cad3:
|
||||
_cad3.mesh_attributes = {**(_cad3.mesh_attributes or {}), **edge_data}
|
||||
_sess3.commit()
|
||||
n_pairs = len(edge_data.get("sharp_edge_pairs", []))
|
||||
logger.info(f"Sharp edge data extracted for {cad_file_id}: {n_pairs} sharp edges")
|
||||
_eng3.dispose()
|
||||
except Exception:
|
||||
logger.exception(f"Sharp edge extraction failed for {cad_file_id} (non-fatal)")
|
||||
|
||||
# Auto-populate materials now that parsed_objects are available
|
||||
try:
|
||||
from app.domains.pipeline.tasks.extract_metadata import _auto_populate_materials_for_cad
|
||||
|
||||
@@ -31,6 +31,7 @@ class CadFile(Base):
|
||||
error_message: Mapped[str] = mapped_column(String(2000), nullable=True)
|
||||
render_log: Mapped[dict] = mapped_column(JSONB, nullable=True)
|
||||
mesh_attributes: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
|
||||
part_materials: Mapped[dict | None] = mapped_column(JSONB, nullable=True, default=None)
|
||||
step_file_hash: Mapped[str | None] = mapped_column(String(64), nullable=True, index=True)
|
||||
tenant_id: Mapped[uuid.UUID | None] = mapped_column(
|
||||
UUID(as_uuid=True), ForeignKey("tenants.id"), nullable=True, index=True
|
||||
|
||||
@@ -62,6 +62,7 @@ class ProductOut(BaseModel):
|
||||
cad_parsed_objects: list[str] | None = None
|
||||
cad_mesh_attributes: dict | None = None
|
||||
arbeitspaket: str | None = None
|
||||
cad_render_log: dict | None = None
|
||||
notes: str | None
|
||||
is_active: bool
|
||||
source_excel: str | None
|
||||
|
||||
@@ -103,6 +103,24 @@ class ProductRenderPosition(Base):
|
||||
order_lines: Mapped[list["OrderLine"]] = relationship("OrderLine", back_populates="render_position")
|
||||
|
||||
|
||||
class GlobalRenderPosition(Base):
    """A named rotation preset selectable on any order line's render.

    Unlike ProductRenderPosition there is no product foreign key, so these
    act as a shared catalogue; the render task applies the per-product
    position first and falls back to the global one.
    """

    __tablename__ = "global_render_positions"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    # Display name shown in admin UI and order-line dropdowns
    name: Mapped[str] = mapped_column(String(200), nullable=False)
    # Euler rotation applied to the model before rendering (degrees)
    rotation_x: Mapped[float] = mapped_column(Float, nullable=False, default=0.0)
    rotation_y: Mapped[float] = mapped_column(Float, nullable=False, default=0.0)
    rotation_z: Mapped[float] = mapped_column(Float, nullable=False, default=0.0)
    # NOTE(review): nothing here enforces a single default row — confirm the
    # admin endpoints unset other defaults when one is flagged.
    is_default: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
    # Manual ordering for list endpoints (sorted by sort_order, then name)
    sort_order: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False)
    updated_at: Mapped[datetime] = mapped_column(
        DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False
    )

    # Order lines referencing this preset (FK is ON DELETE SET NULL)
    order_lines: Mapped[list["OrderLine"]] = relationship("OrderLine", back_populates="global_render_position")
|
||||
|
||||
|
||||
class WorkflowDefinition(Base):
|
||||
__tablename__ = "workflow_definitions"
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
# Re-export from original routers.
#
# Defect fixed: the module contained two consecutive __all__ assignments (the
# pre-diff and post-diff lists fused together); only the complete list is kept.
from app.api.routers.render_templates import router as render_templates_router
from app.api.routers.output_types import router as output_types_router
from app.api.routers.global_render_positions import router as global_render_positions_router

__all__ = ["render_templates_router", "output_types_router", "global_render_positions_router"]
|
||||
|
||||
@@ -94,6 +94,38 @@ class RenderPositionOut(BaseModel):
|
||||
model_config = {"from_attributes": True}
|
||||
|
||||
|
||||
class GlobalRenderPositionCreate(BaseModel):
    """Request body for creating a global render position (admin endpoint)."""

    name: str
    # Euler rotation applied before rendering (degrees)
    rotation_x: float = 0.0
    rotation_y: float = 0.0
    rotation_z: float = 0.0
    is_default: bool = False
    # Manual ordering used by the list endpoint
    sort_order: int = 0
|
||||
|
||||
|
||||
class GlobalRenderPositionPatch(BaseModel):
    """Partial-update body: only fields the client sends are applied.

    Every field defaults to None so the PATCH handler can distinguish
    "not provided" via model_dump(exclude_unset=True).
    """

    name: str | None = None
    rotation_x: float | None = None
    rotation_y: float | None = None
    rotation_z: float | None = None
    is_default: bool | None = None
    sort_order: int | None = None
|
||||
|
||||
|
||||
class GlobalRenderPositionOut(BaseModel):
    """API representation of a GlobalRenderPosition ORM row."""

    id: uuid.UUID
    name: str
    # Euler rotation applied before rendering (degrees)
    rotation_x: float
    rotation_y: float
    rotation_z: float
    is_default: bool
    sort_order: int
    created_at: datetime
    updated_at: datetime

    # Allow construction directly from the SQLAlchemy model instance
    model_config = {"from_attributes": True}
|
||||
|
||||
|
||||
class WorkflowDefinitionCreate(BaseModel):
|
||||
name: str
|
||||
output_type_id: uuid.UUID | None = None
|
||||
|
||||
+2
-1
@@ -17,7 +17,7 @@ from app.domains.orders.router import orders_router, order_items_router
|
||||
from app.domains.admin.router import admin_router, analytics_router, worker_router
|
||||
from app.domains.products.router import products_router, cad_router
|
||||
from app.domains.materials.router import router as materials_router
|
||||
from app.domains.rendering.router import render_templates_router, output_types_router
|
||||
from app.domains.rendering.router import render_templates_router, output_types_router, global_render_positions_router
|
||||
from app.domains.notifications.router import router as notifications_router
|
||||
from app.domains.billing.router import pricing_router, invoice_router
|
||||
from app.domains.tenants.router import router as tenants_router
|
||||
@@ -94,6 +94,7 @@ app.include_router(media_router)
|
||||
app.include_router(asset_libraries_router, prefix="/api")
|
||||
app.include_router(dashboard_router, prefix="/api")
|
||||
app.include_router(task_logs_router, prefix="/api")
|
||||
app.include_router(global_render_positions_router, prefix="/api")
|
||||
|
||||
|
||||
@app.get("/health")
|
||||
|
||||
@@ -10,7 +10,7 @@ from app.domains.products.models import CadFile, Product
|
||||
from app.domains.orders.models import Order, OrderItem, OrderLine
|
||||
from app.domains.notifications.models import AuditLog
|
||||
from app.domains.billing.models import PricingTier
|
||||
from app.domains.rendering.models import OutputType, RenderTemplate, ProductRenderPosition, WorkflowDefinition, WorkflowRun, WorkflowNodeResult
|
||||
from app.domains.rendering.models import OutputType, RenderTemplate, ProductRenderPosition, GlobalRenderPosition, WorkflowDefinition, WorkflowRun, WorkflowNodeResult
|
||||
from app.domains.materials.models import Material, MaterialAlias, AssetLibrary
|
||||
from app.domains.media.models import MediaAsset, MediaAssetType
|
||||
from app.domains.admin.models import DashboardConfig
|
||||
@@ -21,7 +21,7 @@ from app.models.worker_config import WorkerConfig
|
||||
|
||||
__all__ = [
|
||||
"Tenant", "User", "Template", "CadFile", "Product", "Order", "OrderItem", "OrderLine",
|
||||
"AuditLog", "PricingTier", "OutputType", "RenderTemplate", "ProductRenderPosition",
|
||||
"AuditLog", "PricingTier", "OutputType", "RenderTemplate", "ProductRenderPosition", "GlobalRenderPosition",
|
||||
"WorkflowDefinition", "WorkflowRun", "WorkflowNodeResult",
|
||||
"Material", "MaterialAlias", "AssetLibrary", "MediaAsset", "MediaAssetType", "SystemSetting",
|
||||
"DashboardConfig", "WorkerConfig",
|
||||
|
||||
@@ -196,26 +196,60 @@ def process_cad_file(cad_file_id: str) -> None:
|
||||
|
||||
|
||||
def extract_mesh_edge_data(step_path: str) -> dict:
|
||||
"""Extract sharp edge metrics and suggested smooth angle from STEP topology.
|
||||
"""Extract sharp edge data and suggested smooth angle from STEP topology.
|
||||
|
||||
Uses PCurve-based normal evaluation: for each shared edge, the 2D curve of
|
||||
the edge on each adjacent face (BRep_Tool.CurveOnSurface) is evaluated at
|
||||
its midpoint to get the exact UV coordinates on that face. BRepLProp_SLProps
|
||||
then computes the surface normal at that precise location — far more accurate
|
||||
than sampling at the face's UV center.
|
||||
|
||||
Returns dict with:
|
||||
- suggested_smooth_angle: float (degrees) — recommended auto-smooth angle
|
||||
- has_mechanical_edges: bool — True if part has distinct hard edges (bearings etc.)
|
||||
- sharp_edge_midpoints: list of [x, y, z] — midpoints of sharp edges in mm (max 500)
|
||||
- has_mechanical_edges: bool — True if part has distinct hard edges
|
||||
- sharp_edge_pairs: list of [[x0,y0,z0],[x1,y1,z1]] — vertex pairs of
|
||||
sharp edges in mm (no artificial cap)
|
||||
"""
|
||||
try:
|
||||
from OCC.Core.STEPControl import STEPControl_Reader
|
||||
from OCC.Core.IFSelect import IFSelect_RetDone
|
||||
from OCC.Core.TopExp import TopExp_Explorer
|
||||
from OCC.Core.TopAbs import TopAbs_EDGE, TopAbs_FACE
|
||||
from OCC.Core.BRepAdaptor import BRepAdaptor_Surface
|
||||
from OCC.Core.BRep import BRep_Tool
|
||||
from OCC.Core.BRepGProp import brepgprop
|
||||
from OCC.Core.GProp import GProp_GProps
|
||||
from OCC.Core.BRepMesh import BRepMesh_IncrementalMesh
|
||||
from OCC.Core.gp import gp_Pnt
|
||||
# Try OCP first (cadquery's fork, available in render-worker).
|
||||
# Fall back to OCC.Core (standard pythonocc, if installed elsewhere).
|
||||
_using_ocp = False
|
||||
try:
|
||||
from OCP.STEPControl import STEPControl_Reader
|
||||
from OCP.IFSelect import IFSelect_RetDone
|
||||
from OCP.TopAbs import TopAbs_EDGE, TopAbs_FACE, TopAbs_FORWARD
|
||||
from OCP.BRepAdaptor import BRepAdaptor_Surface, BRepAdaptor_Curve, BRepAdaptor_Curve2d
|
||||
from OCP.BRepLProp import BRepLProp_SLProps
|
||||
from OCP.BRepMesh import BRepMesh_IncrementalMesh
|
||||
from OCP.TopTools import TopTools_IndexedDataMapOfShapeListOfShape
|
||||
from OCP.TopExp import TopExp as _TopExp
|
||||
from OCP.TopoDS import TopoDS as _TopoDS
|
||||
_using_ocp = True
|
||||
except ImportError:
|
||||
from OCC.Core.STEPControl import STEPControl_Reader
|
||||
from OCC.Core.IFSelect import IFSelect_RetDone
|
||||
from OCC.Core.TopAbs import TopAbs_EDGE, TopAbs_FACE, TopAbs_FORWARD
|
||||
from OCC.Core.BRepAdaptor import BRepAdaptor_Surface, BRepAdaptor_Curve, BRepAdaptor_Curve2d
|
||||
from OCC.Core.BRepLProp import BRepLProp_SLProps
|
||||
from OCC.Core.BRepMesh import BRepMesh_IncrementalMesh
|
||||
from OCC.Core.TopTools import TopTools_IndexedDataMapOfShapeListOfShape
|
||||
from OCC.Core.TopExp import topexp as _TopExp
|
||||
from OCC.Core.TopoDS import TopoDS as _TopoDS
|
||||
import math
|
||||
|
||||
# OCP uses _s suffix for static methods; OCC.Core uses module-level callables.
|
||||
def _map_shapes(shape, edge_type, face_type, out_map):
|
||||
if _using_ocp:
|
||||
_TopExp.MapShapesAndAncestors_s(shape, edge_type, face_type, out_map)
|
||||
else:
|
||||
_TopExp.MapShapesAndAncestors(shape, edge_type, face_type, out_map)
|
||||
|
||||
def _to_edge(s):
|
||||
return _TopoDS.Edge_s(s) if _using_ocp else _TopoDS.Edge(s)
|
||||
|
||||
def _to_face(s):
|
||||
return _TopoDS.Face_s(s) if _using_ocp else _TopoDS.Face(s)
|
||||
|
||||
reader = STEPControl_Reader()
|
||||
status = reader.ReadFile(step_path)
|
||||
if status != IFSelect_RetDone:
|
||||
@@ -223,71 +257,88 @@ def extract_mesh_edge_data(step_path: str) -> dict:
|
||||
reader.TransferRoots()
|
||||
shape = reader.OneShape()
|
||||
|
||||
# Mesh the shape for geometry access
|
||||
# Mesh at 0.5 mm deflection
|
||||
BRepMesh_IncrementalMesh(shape, 0.5, False, 0.5)
|
||||
|
||||
# Collect face normals per edge (for dihedral angle computation)
|
||||
from OCC.Core.TopTools import TopTools_IndexedDataMapOfShapeListOfShape
|
||||
from OCC.Core.TopExp import topexp
|
||||
|
||||
# Build edge → adjacent faces map
|
||||
edge_face_map = TopTools_IndexedDataMapOfShapeListOfShape()
|
||||
topexp.MapShapesAndAncestors(shape, TopAbs_EDGE, TopAbs_FACE, edge_face_map)
|
||||
_map_shapes(shape, TopAbs_EDGE, TopAbs_FACE, edge_face_map)
|
||||
|
||||
dihedral_angles = []
|
||||
sharp_midpoints = []
|
||||
sharp_pairs = []
|
||||
SHARP_THRESHOLD_DEG = 20.0
|
||||
|
||||
for i in range(1, edge_face_map.Extent() + 1):
|
||||
edge = edge_face_map.FindKey(i)
|
||||
edge_shape = edge_face_map.FindKey(i)
|
||||
faces = edge_face_map.FindFromIndex(i)
|
||||
if faces.Size() < 2:
|
||||
continue
|
||||
|
||||
# Get the two adjacent faces
|
||||
face_list = list(faces)
|
||||
if len(face_list) < 2:
|
||||
face_shapes = list(faces)
|
||||
if len(face_shapes) < 2:
|
||||
continue
|
||||
|
||||
try:
|
||||
surf1 = BRepAdaptor_Surface(face_list[0])
|
||||
surf2 = BRepAdaptor_Surface(face_list[1])
|
||||
edge = _to_edge(edge_shape)
|
||||
face1 = _to_face(face_shapes[0])
|
||||
face2 = _to_face(face_shapes[1])
|
||||
|
||||
# Get normals at midpoint of edge
|
||||
from OCC.Core.BRepAdaptor import BRepAdaptor_Curve
|
||||
curve = BRepAdaptor_Curve(edge)
|
||||
mid_u = (curve.FirstParameter() + curve.LastParameter()) / 2
|
||||
mid_pt = curve.Value(mid_u)
|
||||
# 3D edge endpoints in mm
|
||||
curve3d = BRepAdaptor_Curve(edge)
|
||||
pt_start = curve3d.Value(curve3d.FirstParameter())
|
||||
pt_end = curve3d.Value(curve3d.LastParameter())
|
||||
|
||||
# Sample face normals at UV center
|
||||
u1 = (surf1.FirstUParameter() + surf1.LastUParameter()) / 2
|
||||
v1 = (surf1.FirstVParameter() + surf1.LastVParameter()) / 2
|
||||
n1 = surf1.DN(u1, v1, 0, 1).Crossed(surf1.DN(u1, v1, 1, 0))
|
||||
# PCurve-based normal evaluation: BRepAdaptor_Curve2d gives UV at the
|
||||
# edge's actual location on the face — far more accurate than UV center.
|
||||
c2d_1 = BRepAdaptor_Curve2d(edge, face1)
|
||||
uv1 = c2d_1.Value((c2d_1.FirstParameter() + c2d_1.LastParameter()) / 2)
|
||||
surf1 = BRepAdaptor_Surface(face1)
|
||||
props1 = BRepLProp_SLProps(surf1, uv1.X(), uv1.Y(), 1, 1e-6)
|
||||
if not props1.IsNormalDefined():
|
||||
continue
|
||||
n1 = props1.Normal()
|
||||
if face1.Orientation() != TopAbs_FORWARD:
|
||||
n1.Reverse()
|
||||
|
||||
u2 = (surf2.FirstUParameter() + surf2.LastUParameter()) / 2
|
||||
v2 = (surf2.FirstVParameter() + surf2.LastVParameter()) / 2
|
||||
n2 = surf2.DN(u2, v2, 0, 1).Crossed(surf2.DN(u2, v2, 1, 0))
|
||||
c2d_2 = BRepAdaptor_Curve2d(edge, face2)
|
||||
uv2 = c2d_2.Value((c2d_2.FirstParameter() + c2d_2.LastParameter()) / 2)
|
||||
surf2 = BRepAdaptor_Surface(face2)
|
||||
props2 = BRepLProp_SLProps(surf2, uv2.X(), uv2.Y(), 1, 1e-6)
|
||||
if not props2.IsNormalDefined():
|
||||
continue
|
||||
n2 = props2.Normal()
|
||||
if face2.Orientation() != TopAbs_FORWARD:
|
||||
n2.Reverse()
|
||||
|
||||
if n1.Magnitude() > 1e-10 and n2.Magnitude() > 1e-10:
|
||||
n1.Normalize()
|
||||
n2.Normalize()
|
||||
cos_angle = max(-1.0, min(1.0, n1.Dot(n2)))
|
||||
angle_deg = math.degrees(math.acos(abs(cos_angle)))
|
||||
dihedral_angles.append(angle_deg)
|
||||
cos_angle = max(-1.0, min(1.0, n1.Dot(n2)))
|
||||
angle_deg = math.degrees(math.acos(cos_angle))
|
||||
# Use exterior angle (supplement when normals point same side)
|
||||
if angle_deg > 90:
|
||||
angle_deg = 180.0 - angle_deg
|
||||
dihedral_angles.append(angle_deg)
|
||||
|
||||
if angle_deg > 20 and len(sharp_midpoints) < 500:
|
||||
sharp_midpoints.append([
|
||||
round(mid_pt.X(), 3),
|
||||
round(mid_pt.Y(), 3),
|
||||
round(mid_pt.Z(), 3),
|
||||
])
|
||||
if angle_deg > SHARP_THRESHOLD_DEG:
|
||||
sharp_pairs.append([
|
||||
[round(pt_start.X(), 3), round(pt_start.Y(), 3), round(pt_start.Z(), 3)],
|
||||
[round(pt_end.X(), 3), round(pt_end.Y(), 3), round(pt_end.Z(), 3)],
|
||||
])
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
# Bounding box extraction (OCC Bnd_Box)
|
||||
from OCC.Core.Bnd import Bnd_Box
|
||||
from OCC.Core.BRepBndLib import brepbndlib
|
||||
# Bounding box
|
||||
if _using_ocp:
|
||||
from OCP.Bnd import Bnd_Box
|
||||
from OCP.BRepBndLib import BRepBndLib as _brepbndlib_mod
|
||||
def _brepbndlib_add(shape, bbox):
|
||||
_brepbndlib_mod.Add_s(shape, bbox)
|
||||
else:
|
||||
from OCC.Core.Bnd import Bnd_Box
|
||||
from OCC.Core.BRepBndLib import brepbndlib as _brepbndlib_mod
|
||||
def _brepbndlib_add(shape, bbox):
|
||||
_brepbndlib_mod.Add(shape, bbox)
|
||||
try:
|
||||
bbox = Bnd_Box()
|
||||
brepbndlib.Add(shape, bbox)
|
||||
_brepbndlib_add(shape, bbox)
|
||||
xmin, ymin, zmin, xmax, ymax, zmax = bbox.Get()
|
||||
dimensions_mm = {
|
||||
"x": round(xmax - xmin, 2),
|
||||
@@ -311,11 +362,8 @@ def extract_mesh_edge_data(step_path: str) -> dict:
|
||||
return result
|
||||
|
||||
import statistics
|
||||
median_angle = statistics.median(dihedral_angles)
|
||||
max_angle = max(dihedral_angles)
|
||||
|
||||
# Suggest smooth angle: slightly below the median of hard edges
|
||||
hard_edges = [a for a in dihedral_angles if a > 20]
|
||||
hard_edges = [a for a in dihedral_angles if a > SHARP_THRESHOLD_DEG]
|
||||
if hard_edges:
|
||||
suggested = max(15.0, min(60.0, statistics.median(hard_edges) * 0.8))
|
||||
else:
|
||||
@@ -324,7 +372,7 @@ def extract_mesh_edge_data(step_path: str) -> dict:
|
||||
result = {
|
||||
"suggested_smooth_angle": round(suggested, 1),
|
||||
"has_mechanical_edges": max_angle > 45,
|
||||
"sharp_edge_midpoints": sharp_midpoints[:500],
|
||||
"sharp_edge_pairs": sharp_pairs,
|
||||
}
|
||||
if dimensions_mm:
|
||||
result["dimensions_mm"] = dimensions_mm
|
||||
|
||||
Reference in New Issue
Block a user