feat: per-position camera settings, material alias dialog, product delete, media browser links

- Per-render-position focal_length_mm/sensor_width_mm (DB → pipeline → Blender)
- FOV-based camera distance with min clamp fix for wide-angle lenses
- Unmapped materials blocking dialog on "Dispatch Renders" with batch alias creation
- Material check endpoint (GET /orders/{id}/check-materials)
- Batch alias endpoint (POST /materials/batch-aliases)
- Quick-map "No alias" badges on Materials page
- Full product hard-delete with storage cleanup (MinIO + disk files + orphaned CadFile)
- Delete button on ProductDetail page with confirmation
- Clickable product names in Media Browser (links to product page)
- Single-line render dispatch/retry (POST /orders/{id}/lines/{id}/dispatch-render)

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
2026-03-14 12:16:37 +01:00
parent 0020376702
commit b583b0d7a2
48 changed files with 1827 additions and 376 deletions
+57
View File
@@ -825,3 +825,60 @@ async def import_existing_media_assets(
await db.commit()
return {"created": created, "skipped": skipped}
@router.delete("/settings/purge-render-media", status_code=status.HTTP_200_OK)
async def purge_render_media(
admin: User = Depends(require_global_admin),
db: AsyncSession = Depends(get_db),
):
"""Delete all still and turntable MediaAsset records and their backing files.
This removes rendered images and animations but leaves thumbnails, GLBs,
STLs, and USD masters intact.
"""
import logging
from pathlib import Path
from app.config import settings
from app.core.storage import get_storage
from app.domains.media.models import MediaAsset, MediaAssetType
logger = logging.getLogger(__name__)
storage = get_storage()
result = await db.execute(
select(MediaAsset).where(
MediaAsset.asset_type.in_([MediaAssetType.still, MediaAssetType.turntable])
)
)
assets = result.scalars().all()
deleted_db = 0
deleted_files = 0
freed_bytes = 0
for asset in assets:
# Delete backing file
key = asset.storage_key
try:
candidate = Path(key) if Path(key).is_absolute() else Path(settings.upload_dir) / key
if candidate.exists():
freed_bytes += candidate.stat().st_size
candidate.unlink()
deleted_files += 1
elif hasattr(storage, 'delete'):
storage.delete(key)
deleted_files += 1
except Exception as exc:
logger.warning("Could not delete file for asset %s (%s): %s", asset.id, key, exc)
await db.delete(asset)
deleted_db += 1
await db.commit()
return {
"deleted_records": deleted_db,
"deleted_files": deleted_files,
"freed_mb": round(freed_bytes / 1024 / 1024, 1),
"message": f"Purged {deleted_db} still/turntable asset(s), freed {round(freed_bytes / 1024 / 1024, 1)} MB",
}
+1 -1
View File
@@ -313,7 +313,7 @@ async def regenerate_thumbnail(
db: AsyncSession = Depends(get_db),
):
"""Queue a Celery task to reprocess the STEP file and regenerate its thumbnail."""
if user.role.value != "admin":
if user.role.value not in ("admin", "global_admin", "tenant_admin"):
raise HTTPException(
status_code=403,
detail="Only admins can trigger thumbnail regeneration",
+56
View File
@@ -174,6 +174,62 @@ async def seed_aliases(
return {"inserted": inserted, "total": total}
class BatchAliasMapping(BaseModel):
    """One alias → library material pairing in a batch-alias request."""

    alias: str
    material_id: uuid.UUID


class BatchAliasCreate(BaseModel):
    """Request body for POST /materials/batch-aliases: the mappings to create."""

    mappings: list[BatchAliasMapping]
@router.post("/batch-aliases")
async def batch_create_aliases(
body: BatchAliasCreate,
user: User = Depends(require_admin_or_pm),
db: AsyncSession = Depends(get_db),
):
"""Create multiple material aliases in one request.
Skips aliases that already exist (case-insensitive). Validates that
each material_id exists.
"""
created = 0
skipped = 0
for mapping in body.mappings:
alias_str = mapping.alias.strip()
if not alias_str:
skipped += 1
continue
# Verify material exists
mat_result = await db.execute(
select(Material).where(Material.id == mapping.material_id)
)
if not mat_result.scalar_one_or_none():
raise HTTPException(
status.HTTP_404_NOT_FOUND,
detail=f"Material {mapping.material_id} not found",
)
# Check if alias already exists (case-insensitive)
existing = await db.execute(
select(MaterialAlias).where(
func.lower(MaterialAlias.alias) == alias_str.lower()
)
)
if existing.scalar_one_or_none():
skipped += 1
continue
db.add(MaterialAlias(material_id=mapping.material_id, alias=alias_str))
created += 1
await db.commit()
return {"created": created, "skipped": skipped}
@router.delete("/aliases/{alias_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_alias(
alias_id: uuid.UUID,
+96
View File
@@ -865,6 +865,50 @@ async def add_order_line(
return _build_line_out(line_loaded)
@router.get("/{order_id}/check-materials")
async def check_materials(
order_id: uuid.UUID,
user: User = Depends(get_current_user),
db: AsyncSession = Depends(get_db),
):
"""Check if all materials in this order's products are mapped to library materials."""
from app.domains.materials.service import find_unmapped_materials
result = await db.execute(select(Order).where(Order.id == order_id))
order = result.scalar_one_or_none()
if not order:
raise HTTPException(404, detail="Order not found")
lines_result = await db.execute(
select(OrderLine)
.options(selectinload(OrderLine.product))
.where(OrderLine.order_id == order_id)
)
lines = lines_result.scalars().all()
# Collect all unique material names from all products
all_material_names: list[str] = []
seen: set[str] = set()
for line in lines:
if not line.product or not line.product.cad_part_materials:
continue
for entry in line.product.cad_part_materials:
mat_name = entry.get("material", "")
if mat_name and mat_name.lower() not in seen:
seen.add(mat_name.lower())
all_material_names.append(mat_name)
unmapped = await find_unmapped_materials(all_material_names, db)
total = len(all_material_names)
mapped = total - len(unmapped)
return {
"unmapped": unmapped,
"total_materials": total,
"mapped_count": mapped,
}
@router.post("/{order_id}/dispatch-renders")
async def dispatch_renders(
order_id: uuid.UUID,
@@ -1000,6 +1044,58 @@ async def cancel_line_render(
}
@router.post("/{order_id}/lines/{line_id}/dispatch-render")
async def dispatch_single_line_render(
order_id: uuid.UUID,
line_id: uuid.UUID,
user: User = Depends(require_admin_or_pm),
db: AsyncSession = Depends(get_db),
):
"""Dispatch (or retry) a render for a single order line (admin/PM only)."""
result = await db.execute(select(Order).where(Order.id == order_id))
order = result.scalar_one_or_none()
if not order:
raise HTTPException(404, detail="Order not found")
line_result = await db.execute(
select(OrderLine).where(OrderLine.id == line_id, OrderLine.order_id == order.id)
)
line = line_result.scalar_one_or_none()
if not line:
raise HTTPException(404, detail="Order line not found")
if line.render_status not in ("pending", "failed", "cancelled"):
raise HTTPException(400, detail=f"Cannot dispatch line in {line.render_status} status")
# Reset to pending
from sqlalchemy import update as sql_update
await db.execute(
sql_update(OrderLine)
.where(OrderLine.id == line.id)
.values(render_status="pending", render_completed_at=None, render_log=None)
)
# Auto-advance order to processing if needed
if order.status in (OrderStatus.submitted, OrderStatus.completed):
now = datetime.utcnow()
order.status = OrderStatus.processing
order.processing_started_at = now
order.completed_at = None
order.updated_at = now
await db.commit()
from app.domains.rendering.dispatch_service import dispatch_render_with_workflow
try:
dispatch_render_with_workflow(str(line.id))
except Exception as exc:
logger.warning("dispatch_render_with_workflow failed for %s: %s", line.id, exc)
from app.tasks.step_tasks import dispatch_order_line_render
dispatch_order_line_render.delay(str(line.id))
return {"dispatched": True, "line_id": str(line.id)}
class RejectLineBody(BaseModel):
    """Request body for rejecting an order line; reason is optional free text."""

    reason: str = ""
+63 -2
View File
@@ -270,12 +270,73 @@ async def delete_product(
raise HTTPException(404, detail="Product not found")
if hard:
from sqlalchemy import delete as sql_delete
# Delete order_lines referencing this product
from app.domains.media.models import MediaAsset
from app.core.storage import get_storage
# 1. Collect storage keys from MediaAssets before cascade deletes them
media_result = await db.execute(
select(MediaAsset.storage_key).where(MediaAsset.product_id == product_id)
)
storage_keys = [row[0] for row in media_result.all() if row[0]]
# 2. Collect render result paths from order lines
ol_result = await db.execute(
select(OrderLine.result_path).where(
OrderLine.product_id == product_id,
OrderLine.result_path.isnot(None),
)
)
result_paths = [row[0] for row in ol_result.all() if row[0]]
# 3. Check if CadFile is used by other products
cad_file_id = product.cad_file_id
orphan_cad = False
if cad_file_id:
other_count = await db.execute(
select(func.count(Product.id)).where(
Product.cad_file_id == cad_file_id,
Product.id != product_id,
)
)
orphan_cad = (other_count.scalar() or 0) == 0
# 4. Delete order_lines referencing this product
await db.execute(sql_delete(OrderLine).where(OrderLine.product_id == product_id))
# 5. Delete orphaned CadFile if no other products reference it
if orphan_cad and cad_file_id:
from app.models.cad_file import CadFile
# Collect CadFile media assets too
cad_media_result = await db.execute(
select(MediaAsset.storage_key).where(MediaAsset.cad_file_id == cad_file_id)
)
storage_keys.extend(row[0] for row in cad_media_result.all() if row[0])
product.cad_file_id = None
await db.flush()
await db.execute(sql_delete(CadFile).where(CadFile.id == cad_file_id))
# 6. Delete product (cascades MediaAsset + ProductRenderPosition)
await db.delete(product)
await db.commit()
# 7. Clean up storage files (best-effort, after commit)
storage = get_storage()
for key in storage_keys:
try:
storage.delete(key)
except Exception:
pass
# Clean up render result files on disk
import os
for path in result_paths:
try:
if os.path.isfile(path):
os.unlink(path)
except Exception:
pass
else:
product.is_active = False
await db.commit()
await db.commit()
@router.post("/{product_id}/cad", status_code=status.HTTP_201_CREATED)
+56 -16
View File
@@ -116,9 +116,11 @@ async def list_render_templates(
@router.post("/render-templates", response_model=RenderTemplateOut, status_code=status.HTTP_201_CREATED)
async def create_render_template(
name: str = Form(...),
file: UploadFile = File(...),
file: UploadFile | None = File(None),
clone_blend_from: str | None = Form(None),
category_key: str | None = Form(None),
output_type_id: str | None = Form(None),
output_type_ids: str | None = Form(None),
target_collection: str = Form("Product"),
material_replace_enabled: bool = Form(False),
lighting_only: bool = Form(False),
@@ -127,30 +129,54 @@ async def create_render_template(
user: User = Depends(require_admin_or_pm),
db: AsyncSession = Depends(get_db),
):
if not file.filename or not file.filename.endswith(".blend"):
raise HTTPException(400, detail="File must be a .blend file")
# Normalise empty strings from form data to None
if category_key == "" or category_key == "null":
category_key = None
if output_type_id == "" or output_type_id == "null":
output_type_id = None
if clone_blend_from == "" or clone_blend_from == "null":
clone_blend_from = None
template_id = uuid.uuid4()
blend_path = _blend_dir() / f"{template_id}.blend"
with open(blend_path, "wb") as f:
shutil.copyfileobj(file.file, f)
if file and file.filename:
if not file.filename.endswith(".blend"):
raise HTTPException(400, detail="File must be a .blend file")
with open(blend_path, "wb") as f:
shutil.copyfileobj(file.file, f)
original_filename = file.filename
final_blend_path = str(blend_path)
elif clone_blend_from:
# Share the same .blend file (no copy — just reference the same path)
source = await db.execute(
select(RenderTemplate).where(RenderTemplate.id == uuid.UUID(clone_blend_from))
)
source_tmpl = source.unique().scalar_one_or_none()
if not source_tmpl:
raise HTTPException(404, detail="Source template not found")
source_path = Path(source_tmpl.blend_file_path)
if not source_path.exists():
raise HTTPException(404, detail="Source .blend file not found on disk")
final_blend_path = source_tmpl.blend_file_path
original_filename = source_tmpl.original_filename
else:
raise HTTPException(400, detail="Provide either a .blend file or clone_blend_from template ID")
ot_uuid = uuid.UUID(output_type_id) if output_type_id else None
# Parse M2M output_type_ids (comma-separated string from FormData)
m2m_ot_ids: list[str] = []
if output_type_ids and output_type_ids.strip():
m2m_ot_ids = [s.strip() for s in output_type_ids.split(",") if s.strip()]
tmpl = RenderTemplate(
id=template_id,
name=name,
category_key=category_key,
output_type_id=ot_uuid,
blend_file_path=str(blend_path),
original_filename=file.filename,
blend_file_path=final_blend_path,
original_filename=original_filename,
target_collection=target_collection,
material_replace_enabled=material_replace_enabled,
lighting_only=lighting_only,
@@ -160,12 +186,13 @@ async def create_render_template(
db.add(tmpl)
await db.flush()
# Sync M2M from initial output_type_id
if ot_uuid:
from app.domains.rendering.models import render_template_output_types
# Sync M2M output types
from app.domains.rendering.models import render_template_output_types
ot_ids_to_link = m2m_ot_ids if m2m_ot_ids else ([str(ot_uuid)] if ot_uuid else [])
for ot_id_str in ot_ids_to_link:
await db.execute(
render_template_output_types.insert().values(
template_id=template_id, output_type_id=ot_uuid,
template_id=template_id, output_type_id=uuid.UUID(ot_id_str),
)
)
@@ -250,9 +277,15 @@ async def delete_render_template(
if not tmpl:
raise HTTPException(404, detail="Render template not found")
# Delete .blend file
# Only delete .blend file if no other template shares it
blend_path = Path(tmpl.blend_file_path)
if blend_path.exists():
other_refs = await db.execute(
select(RenderTemplate.id).where(
RenderTemplate.blend_file_path == tmpl.blend_file_path,
RenderTemplate.id != template_id,
)
)
if not other_refs.first() and blend_path.exists():
blend_path.unlink(missing_ok=True)
await db.execute(sql_delete(RenderTemplate).where(RenderTemplate.id == template_id))
@@ -277,10 +310,17 @@ async def upload_blend_file(
blend_path = _blend_dir() / f"{template_id}.blend"
# Remove old file if path changed
# Only remove old file if no other template shares it
old_path = Path(tmpl.blend_file_path)
if old_path.exists() and old_path != blend_path:
old_path.unlink(missing_ok=True)
other_refs = await db.execute(
select(RenderTemplate.id).where(
RenderTemplate.blend_file_path == tmpl.blend_file_path,
RenderTemplate.id != template_id,
)
)
if not other_refs.first():
old_path.unlink(missing_ok=True)
with open(blend_path, "wb") as f:
shutil.copyfileobj(file.file, f)
+6
View File
@@ -429,14 +429,20 @@ async def scale_workers(
compose_dir = os.environ.get("COMPOSE_PROJECT_DIR", "/compose")
compose_file = os.path.join(compose_dir, "docker-compose.yml")
# Derive project name from compose dir on host (directory name = project name).
# Inside the container the compose file is at /compose, but the host project
# dir name determines the container naming prefix (e.g. "schaefflerautomat").
compose_project = os.environ.get("COMPOSE_PROJECT_NAME", "schaefflerautomat")
def _scale() -> subprocess.CompletedProcess:
return subprocess.run(
[
"docker", "compose",
"-f", compose_file,
"-p", compose_project,
"up",
"--scale", f"{body.service}={body.count}",
"--no-build",
"--no-recreate",
"-d",
],
+64
View File
@@ -9,6 +9,7 @@ Resolution chain:
3. Pass through unchanged → Blender will show FailedMaterial magenta
"""
import logging
from difflib import SequenceMatcher
from sqlalchemy import create_engine, select, func
from sqlalchemy.orm import Session, selectinload
@@ -138,3 +139,66 @@ async def seed_material_aliases_from_mappings(
await db.flush()
return {"created": created, "skipped": skipped}
async def find_unmapped_materials(
    material_names: list[str], db: AsyncSession
) -> list[dict]:
    """Find material names that have no alias or library match.

    Returns a list of {"raw_name": str, "suggestions": [...]} for each
    unmapped name. Suggestions are the top 5 SCHAEFFLER library materials
    by string similarity.
    """
    if not material_names:
        return []

    # Case-insensitive set of all known aliases.
    alias_rows = (await db.execute(select(MaterialAlias))).scalars().all()
    known_aliases = {row.alias.lower() for row in alias_rows}

    # All materials, split into library entries (those with a schaeffler_code)
    # plus a case-insensitive name → material lookup for exact matches.
    all_materials = (await db.execute(select(Material))).scalars().all()
    library_materials = [m for m in all_materials if m.schaeffler_code is not None]
    by_name = {m.name.lower(): m for m in all_materials}

    unmapped: list[dict] = []
    processed: set[str] = set()
    for raw_name in material_names:
        lowered = raw_name.lower()
        if lowered in processed:
            continue
        processed.add(lowered)

        # 1. Covered by an existing alias → mapped.
        if lowered in known_aliases:
            continue

        # 2. Exact (case-insensitive) name match with a library material → mapped.
        exact = by_name.get(lowered)
        if exact is not None and exact.schaeffler_code is not None:
            continue

        # Unmapped — rank library materials by string similarity; ties keep
        # their original relative order (stable sort), matching a filter-first
        # approach.
        ranked = sorted(
            (
                (SequenceMatcher(None, lowered, lib.name.lower()).ratio(), lib)
                for lib in library_materials
            ),
            key=lambda pair: pair[0],
            reverse=True,
        )
        suggestions = [
            {
                "id": str(mat.id),
                "name": mat.name,
                "schaeffler_code": str(mat.schaeffler_code),
            }
            for score, mat in ranked
            if score > 0.3
        ][:5]
        unmapped.append({"raw_name": raw_name, "suggestions": suggestions})

    return unmapped
+18
View File
@@ -432,6 +432,24 @@ async def delete_asset_permanent(asset_id: uuid.UUID, db: AsyncSession = Depends
return {"ok": True}
@router.post("/batch-delete")
async def batch_delete_assets(
asset_ids: list[uuid.UUID],
_user: User = Depends(get_current_user),
db: AsyncSession = Depends(get_db),
):
"""Permanently delete multiple MediaAsset records."""
from app.utils.auth import require_global_admin
require_global_admin(_user)
deleted = 0
for aid in asset_ids:
ok = await service.delete_media_asset(db, aid)
if ok:
deleted += 1
return {"deleted": deleted, "requested": len(asset_ids)}
@router.post("/cleanup-orphaned")
async def cleanup_orphaned_assets(
_user: User = Depends(get_current_user),
@@ -95,9 +95,9 @@ def generate_gltf_geometry_task(self, cad_file_id: str):
_cache_hit_asset_id = None
# Composite cache key includes deflection settings so changing them invalidates cache
# v2: tessellation now happens after mm→m scaling (fixes destroyed tessellation)
# v3: removed BRepBuilderAPI_Transform, writer handles mm→m from STEP unit metadata
effective_cache_key = (
f"v2:{_current_hash}:{linear_deflection}:{angular_deflection}:{tessellation_engine}"
f"v3:{_current_hash}:{linear_deflection}:{angular_deflection}:{tessellation_engine}"
if _current_hash else None
)
@@ -208,18 +208,26 @@ def render_order_line_task(self, order_line_id: str):
cad_name = cad_file.original_name if cad_file else "?"
# Load render_position for rotation values (per-product takes priority, falls back to global)
rotation_x = rotation_y = rotation_z = 0.0
focal_length_mm = None
sensor_width_mm = None
if line.render_position_id:
from app.models.render_position import ProductRenderPosition
rp = session.get(ProductRenderPosition, line.render_position_id)
if rp:
rotation_x, rotation_y, rotation_z = rp.rotation_x, rp.rotation_y, rp.rotation_z
emit(order_line_id, f"Render position: '{rp.name}' ({rotation_x}°, {rotation_y}°, {rotation_z}°)")
focal_length_mm = rp.focal_length_mm
sensor_width_mm = rp.sensor_width_mm
emit(order_line_id, f"Render position: '{rp.name}' ({rotation_x}°, {rotation_y}°, {rotation_z}°)" +
(f" focal_length={focal_length_mm}mm" if focal_length_mm else ""))
elif line.global_render_position_id:
from app.models import GlobalRenderPosition
grp = session.get(GlobalRenderPosition, line.global_render_position_id)
if grp:
rotation_x, rotation_y, rotation_z = grp.rotation_x, grp.rotation_y, grp.rotation_z
emit(order_line_id, f"Global render position: '{grp.name}' ({rotation_x}°, {rotation_y}°, {rotation_z}°)")
focal_length_mm = grp.focal_length_mm
sensor_width_mm = grp.sensor_width_mm
emit(order_line_id, f"Global render position: '{grp.name}' ({rotation_x}°, {rotation_y}°, {rotation_z}°)" +
(f" focal_length={focal_length_mm}mm" if focal_length_mm else ""))
emit(order_line_id, f"Starting render for {cad_name} ({len(part_colors)} coloured parts)")
@@ -334,7 +342,10 @@ def render_order_line_task(self, order_line_id: str):
rotation_x=rotation_x,
rotation_y=rotation_y,
rotation_z=rotation_z,
camera_orbit=bool(template.camera_orbit) if template else True,
usd_path=usd_render_path,
focal_length_mm=focal_length_mm,
sensor_width_mm=sensor_width_mm,
)
success = True
render_log = {
@@ -391,6 +402,8 @@ def render_order_line_task(self, order_line_id: str):
rotation_x=rotation_x,
rotation_y=rotation_y,
rotation_z=rotation_z,
focal_length_mm=focal_length_mm,
sensor_width_mm=sensor_width_mm,
job_id=order_line_id,
order_line_id=order_line_id,
noise_threshold=noise_threshold,
+4
View File
@@ -94,6 +94,8 @@ class ProductRenderPosition(Base):
rotation_z: Mapped[float] = mapped_column(Float, nullable=False, default=0.0)
is_default: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
sort_order: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
focal_length_mm: Mapped[float | None] = mapped_column(Float, nullable=True, default=None)
sensor_width_mm: Mapped[float | None] = mapped_column(Float, nullable=True, default=None)
created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False)
updated_at: Mapped[datetime] = mapped_column(
DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False
@@ -113,6 +115,8 @@ class GlobalRenderPosition(Base):
rotation_z: Mapped[float] = mapped_column(Float, nullable=False, default=0.0)
is_default: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
sort_order: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
focal_length_mm: Mapped[float | None] = mapped_column(Float, nullable=True, default=None)
sensor_width_mm: Mapped[float | None] = mapped_column(Float, nullable=True, default=None)
created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False)
updated_at: Mapped[datetime] = mapped_column(
DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False
+12
View File
@@ -68,6 +68,8 @@ class RenderPositionCreate(BaseModel):
rotation_z: float = 0.0
is_default: bool = False
sort_order: int = 0
focal_length_mm: float | None = None
sensor_width_mm: float | None = None
class RenderPositionPatch(BaseModel):
@@ -77,6 +79,8 @@ class RenderPositionPatch(BaseModel):
rotation_z: float | None = None
is_default: bool | None = None
sort_order: int | None = None
focal_length_mm: float | None = None
sensor_width_mm: float | None = None
class RenderPositionOut(BaseModel):
@@ -88,6 +92,8 @@ class RenderPositionOut(BaseModel):
rotation_z: float
is_default: bool
sort_order: int
focal_length_mm: float | None = None
sensor_width_mm: float | None = None
created_at: datetime
updated_at: datetime
@@ -101,6 +107,8 @@ class GlobalRenderPositionCreate(BaseModel):
rotation_z: float = 0.0
is_default: bool = False
sort_order: int = 0
focal_length_mm: float | None = None
sensor_width_mm: float | None = None
class GlobalRenderPositionPatch(BaseModel):
@@ -110,6 +118,8 @@ class GlobalRenderPositionPatch(BaseModel):
rotation_z: float | None = None
is_default: bool | None = None
sort_order: int | None = None
focal_length_mm: float | None = None
sensor_width_mm: float | None = None
class GlobalRenderPositionOut(BaseModel):
@@ -120,6 +130,8 @@ class GlobalRenderPositionOut(BaseModel):
rotation_z: float
is_default: bool
sort_order: int
focal_length_mm: float | None = None
sensor_width_mm: float | None = None
created_at: datetime
updated_at: datetime
+2 -2
View File
@@ -1,3 +1,3 @@
# Compat shim — use app.domains.materials.service instead
from app.domains.materials.service import resolve_material_map, seed_material_aliases_from_mappings
__all__ = ["resolve_material_map", "seed_material_aliases_from_mappings"]
from app.domains.materials.service import resolve_material_map, seed_material_aliases_from_mappings, find_unmapped_materials
__all__ = ["resolve_material_map", "seed_material_aliases_from_mappings", "find_unmapped_materials"]
+15
View File
@@ -92,6 +92,8 @@ def render_still(
log_callback: "Callable[[str], None] | None" = None,
usd_path: "Path | None" = None,
tessellation_engine: str = "occ",
focal_length_mm: float | None = None,
sensor_width_mm: float | None = None,
) -> dict:
"""Convert STEP → GLB (OCC or GMSH) → PNG (Blender subprocess).
@@ -179,6 +181,10 @@ def render_still(
logger.debug("[render_blender] usd_path active — mesh_attributes ignored")
elif mesh_attributes:
cmd += ["--mesh-attributes", json.dumps(mesh_attributes)]
if focal_length_mm is not None:
cmd += ["--focal-length", str(focal_length_mm)]
if sensor_width_mm is not None:
cmd += ["--sensor-width", str(sensor_width_mm)]
return cmd
def _run(eng: str) -> tuple[int, list[str], list[str]]:
@@ -311,8 +317,11 @@ def render_turntable_to_file(
rotation_x: float = 0.0,
rotation_y: float = 0.0,
rotation_z: float = 0.0,
camera_orbit: bool = True,
usd_path: "Path | None" = None,
tessellation_engine: str = "occ",
focal_length_mm: float | None = None,
sensor_width_mm: float | None = None,
) -> dict:
"""Render a turntable animation: STEP → STL → N frames (Blender) → mp4 (ffmpeg).
@@ -391,8 +400,14 @@ def render_turntable_to_file(
bg_color or "",
"1" if transparent_bg else "0",
]
if camera_orbit:
cmd += ["--camera-orbit"]
if use_usd:
cmd += ["--usd-path", str(usd_path)]
if focal_length_mm is not None:
cmd += ["--focal-length", str(focal_length_mm)]
if sensor_width_mm is not None:
cmd += ["--sensor-width", str(sensor_width_mm)]
log_lines: list[str] = []
+4
View File
@@ -891,6 +891,8 @@ def render_to_file(
order_line_id: str | None = None,
usd_path: "Path | None" = None,
tessellation_engine: str | None = None,
focal_length_mm: float | None = None,
sensor_width_mm: float | None = None,
) -> tuple[bool, dict]:
"""Render a STEP file to a specific output path using current system settings.
@@ -1027,6 +1029,8 @@ def render_to_file(
log_callback=_log_cb,
usd_path=usd_path,
tessellation_engine=tessellation_engine or settings["tessellation_engine"],
focal_length_mm=focal_length_mm,
sensor_width_mm=sensor_width_mm,
)
rendered_png = tmp_png if tmp_png.exists() else None
except Exception as exc: