refactor(P1): complete pipeline cleanup — M1 dead code + M3 blender split
M1 dead code removal: - admin.py: remove VALID_STL_QUALITIES + stl_quality (7 locations) - frontend: remove stl_quality from 6 files (api/orders.ts, api/worker.ts, WorkerActivity.tsx, RenderInfoModal.tsx, helpTexts.ts, mocks/handlers.ts) - blender_render.py: delete _mark_sharp_and_seams() — dead, never called (62 lines) - step_processor.py: delete _render_via_service() + 2 elif renderer=="threejs" branches - renderproblems_tmp/: remove 3 orphaned debug images M3 blender_render.py decomposition (858 → 248 lines): - _blender_gpu.py: activate_gpu(), configure_engine() - _blender_import.py: import_glb(), apply_rotation() - _blender_materials.py: FAILED_MATERIAL_NAME, assign_failed_material(), build_mat_map_lower(), apply_material_library() - _blender_camera.py: setup_auto_camera(), setup_auto_lights() - _blender_scene.py: ensure_collection(), apply_smooth_batch(), apply_sharp_edges_from_occ(), setup_shadow_catcher() - Entry-point: sys.path.insert for submodule discovery; arg-parse + Mode A/B orchestration only Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -19,7 +19,6 @@ router = APIRouter(prefix="/admin", tags=["admin"])
|
|||||||
VALID_RENDERERS = {"blender"}
|
VALID_RENDERERS = {"blender"}
|
||||||
VALID_ENGINES = {"cycles", "eevee"}
|
VALID_ENGINES = {"cycles", "eevee"}
|
||||||
VALID_FORMATS = {"jpg", "png"}
|
VALID_FORMATS = {"jpg", "png"}
|
||||||
VALID_STL_QUALITIES = {"low", "high"}
|
|
||||||
VALID_CYCLES_DEVICES = {"auto", "gpu", "cpu"}
|
VALID_CYCLES_DEVICES = {"auto", "gpu", "cpu"}
|
||||||
SETTINGS_DEFAULTS: dict[str, str] = {
|
SETTINGS_DEFAULTS: dict[str, str] = {
|
||||||
"thumbnail_renderer": "blender",
|
"thumbnail_renderer": "blender",
|
||||||
@@ -27,7 +26,6 @@ SETTINGS_DEFAULTS: dict[str, str] = {
|
|||||||
"blender_cycles_samples": "256",
|
"blender_cycles_samples": "256",
|
||||||
"blender_eevee_samples": "64",
|
"blender_eevee_samples": "64",
|
||||||
"thumbnail_format": "jpg",
|
"thumbnail_format": "jpg",
|
||||||
"stl_quality": "low",
|
|
||||||
"blender_smooth_angle": "30",
|
"blender_smooth_angle": "30",
|
||||||
"cycles_device": "auto",
|
"cycles_device": "auto",
|
||||||
"render_backend": "celery",
|
"render_backend": "celery",
|
||||||
@@ -64,7 +62,6 @@ class SettingsOut(BaseModel):
|
|||||||
blender_cycles_samples: int = 256
|
blender_cycles_samples: int = 256
|
||||||
blender_eevee_samples: int = 64
|
blender_eevee_samples: int = 64
|
||||||
thumbnail_format: str = "jpg"
|
thumbnail_format: str = "jpg"
|
||||||
stl_quality: str = "low"
|
|
||||||
blender_smooth_angle: int = 30
|
blender_smooth_angle: int = 30
|
||||||
cycles_device: str = "auto"
|
cycles_device: str = "auto"
|
||||||
render_backend: str = "celery"
|
render_backend: str = "celery"
|
||||||
@@ -97,7 +94,6 @@ class SettingsUpdate(BaseModel):
|
|||||||
blender_cycles_samples: int | None = None
|
blender_cycles_samples: int | None = None
|
||||||
blender_eevee_samples: int | None = None
|
blender_eevee_samples: int | None = None
|
||||||
thumbnail_format: str | None = None
|
thumbnail_format: str | None = None
|
||||||
stl_quality: str | None = None
|
|
||||||
blender_smooth_angle: int | None = None
|
blender_smooth_angle: int | None = None
|
||||||
cycles_device: str | None = None
|
cycles_device: str | None = None
|
||||||
render_backend: str | None = None
|
render_backend: str | None = None
|
||||||
@@ -216,7 +212,6 @@ def _settings_to_out(raw: dict[str, str]) -> SettingsOut:
|
|||||||
blender_cycles_samples=int(raw["blender_cycles_samples"]),
|
blender_cycles_samples=int(raw["blender_cycles_samples"]),
|
||||||
blender_eevee_samples=int(raw["blender_eevee_samples"]),
|
blender_eevee_samples=int(raw["blender_eevee_samples"]),
|
||||||
thumbnail_format=raw["thumbnail_format"],
|
thumbnail_format=raw["thumbnail_format"],
|
||||||
stl_quality=raw["stl_quality"],
|
|
||||||
blender_smooth_angle=int(raw["blender_smooth_angle"]),
|
blender_smooth_angle=int(raw["blender_smooth_angle"]),
|
||||||
cycles_device=raw["cycles_device"],
|
cycles_device=raw["cycles_device"],
|
||||||
render_backend=raw["render_backend"],
|
render_backend=raw["render_backend"],
|
||||||
@@ -268,8 +263,6 @@ async def update_settings(
|
|||||||
raise HTTPException(400, detail="blender_eevee_samples must be 1–1024")
|
raise HTTPException(400, detail="blender_eevee_samples must be 1–1024")
|
||||||
if body.thumbnail_format is not None and body.thumbnail_format not in VALID_FORMATS:
|
if body.thumbnail_format is not None and body.thumbnail_format not in VALID_FORMATS:
|
||||||
raise HTTPException(400, detail=f"Invalid thumbnail_format. Choose: {', '.join(sorted(VALID_FORMATS))}")
|
raise HTTPException(400, detail=f"Invalid thumbnail_format. Choose: {', '.join(sorted(VALID_FORMATS))}")
|
||||||
if body.stl_quality is not None and body.stl_quality not in VALID_STL_QUALITIES:
|
|
||||||
raise HTTPException(400, detail=f"Invalid stl_quality. Choose: {', '.join(sorted(VALID_STL_QUALITIES))}")
|
|
||||||
if body.blender_smooth_angle is not None and not (0 <= body.blender_smooth_angle <= 180):
|
if body.blender_smooth_angle is not None and not (0 <= body.blender_smooth_angle <= 180):
|
||||||
raise HTTPException(400, detail="blender_smooth_angle must be 0–180 degrees")
|
raise HTTPException(400, detail="blender_smooth_angle must be 0–180 degrees")
|
||||||
if body.cycles_device is not None and body.cycles_device not in VALID_CYCLES_DEVICES:
|
if body.cycles_device is not None and body.cycles_device not in VALID_CYCLES_DEVICES:
|
||||||
@@ -307,8 +300,6 @@ async def update_settings(
|
|||||||
updates["blender_eevee_samples"] = str(body.blender_eevee_samples)
|
updates["blender_eevee_samples"] = str(body.blender_eevee_samples)
|
||||||
if body.thumbnail_format is not None:
|
if body.thumbnail_format is not None:
|
||||||
updates["thumbnail_format"] = body.thumbnail_format
|
updates["thumbnail_format"] = body.thumbnail_format
|
||||||
if body.stl_quality is not None:
|
|
||||||
updates["stl_quality"] = body.stl_quality
|
|
||||||
if body.blender_smooth_angle is not None:
|
if body.blender_smooth_angle is not None:
|
||||||
updates["blender_smooth_angle"] = str(body.blender_smooth_angle)
|
updates["blender_smooth_angle"] = str(body.blender_smooth_angle)
|
||||||
if body.cycles_device is not None:
|
if body.cycles_device is not None:
|
||||||
|
|||||||
@@ -517,11 +517,6 @@ def _generate_thumbnail(
|
|||||||
"width": 512,
|
"width": 512,
|
||||||
"height": 512,
|
"height": 512,
|
||||||
})
|
})
|
||||||
elif renderer == "threejs":
|
|
||||||
# Three.js renderer removed in v2; treat as pillow fallback
|
|
||||||
renderer = "pillow"
|
|
||||||
render_log.update({"renderer": "pillow", "threejs_removed": True})
|
|
||||||
|
|
||||||
logger.info(f"Thumbnail renderer={renderer}, format={fmt}")
|
logger.info(f"Thumbnail renderer={renderer}, format={fmt}")
|
||||||
|
|
||||||
rendered_png: Path | None = None
|
rendered_png: Path | None = None
|
||||||
@@ -587,41 +582,6 @@ def _finalise_image(src: Path, dst: Path, fmt: str) -> Path | None:
|
|||||||
return dst
|
return dst
|
||||||
|
|
||||||
|
|
||||||
def _render_via_service(
|
|
||||||
url: str, step_path: Path, out_path: Path, extra: dict | None = None,
|
|
||||||
job_id: str | None = None,
|
|
||||||
) -> tuple[Path | None, dict]:
|
|
||||||
"""Call an external renderer microservice to generate a thumbnail.
|
|
||||||
|
|
||||||
Returns (path_or_None, response_data_dict).
|
|
||||||
job_id, when provided, is forwarded to the renderer so the render process
|
|
||||||
can be cancelled via the renderer's /cancel/{job_id} endpoint.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
import httpx
|
|
||||||
payload = {
|
|
||||||
"step_path": str(step_path),
|
|
||||||
"output_path": str(out_path),
|
|
||||||
"width": 512,
|
|
||||||
"height": 512,
|
|
||||||
**(extra or {}),
|
|
||||||
}
|
|
||||||
if job_id:
|
|
||||||
payload["job_id"] = job_id
|
|
||||||
resp = httpx.post(url, json=payload, timeout=300.0)
|
|
||||||
data = {}
|
|
||||||
try:
|
|
||||||
data = resp.json()
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
if resp.status_code == 200 and out_path.exists():
|
|
||||||
return out_path, data
|
|
||||||
logger.warning(f"Renderer service {url} returned {resp.status_code}: {resp.text[:500]}")
|
|
||||||
except Exception as exc:
|
|
||||||
logger.warning(f"Renderer service {url} unreachable: {exc}")
|
|
||||||
return None, {}
|
|
||||||
|
|
||||||
|
|
||||||
def _generate_thumbnail_placeholder(step_path: Path, out_path: Path, fmt: str = "png") -> Path | None:
|
def _generate_thumbnail_placeholder(step_path: Path, out_path: Path, fmt: str = "png") -> Path | None:
|
||||||
"""Generate a simple placeholder thumbnail using Pillow."""
|
"""Generate a simple placeholder thumbnail using Pillow."""
|
||||||
try:
|
try:
|
||||||
@@ -897,9 +857,6 @@ def render_to_file(
|
|||||||
rendered_png = None
|
rendered_png = None
|
||||||
else:
|
else:
|
||||||
logger.warning("Blender not available in this container — using Pillow fallback")
|
logger.warning("Blender not available in this container — using Pillow fallback")
|
||||||
elif renderer == "threejs":
|
|
||||||
# Three.js renderer removed in v2 — fall through to Pillow placeholder
|
|
||||||
logger.warning("Three.js renderer removed; using Pillow fallback")
|
|
||||||
|
|
||||||
if service_data:
|
if service_data:
|
||||||
for key in ("total_duration_s", "stl_duration_s", "render_duration_s",
|
for key in ("total_duration_s", "stl_duration_s", "render_duration_s",
|
||||||
|
|||||||
@@ -7,7 +7,6 @@ export const handlers = [
|
|||||||
blender_cycles_samples: 256,
|
blender_cycles_samples: 256,
|
||||||
blender_eevee_samples: 64,
|
blender_eevee_samples: 64,
|
||||||
thumbnail_format: 'jpg',
|
thumbnail_format: 'jpg',
|
||||||
stl_quality: 'low',
|
|
||||||
blender_smooth_angle: 30,
|
blender_smooth_angle: 30,
|
||||||
cycles_device: 'auto',
|
cycles_device: 'auto',
|
||||||
blender_max_concurrent_renders: 3,
|
blender_max_concurrent_renders: 3,
|
||||||
|
|||||||
@@ -9,7 +9,6 @@ export interface RenderLog {
|
|||||||
engine?: string
|
engine?: string
|
||||||
engine_used?: string
|
engine_used?: string
|
||||||
samples?: number
|
samples?: number
|
||||||
stl_quality?: string
|
|
||||||
smooth_angle?: number
|
smooth_angle?: number
|
||||||
total_duration_s?: number
|
total_duration_s?: number
|
||||||
stl_duration_s?: number
|
stl_duration_s?: number
|
||||||
|
|||||||
@@ -7,7 +7,6 @@ export interface RenderLog {
|
|||||||
engine_used?: string
|
engine_used?: string
|
||||||
samples?: number
|
samples?: number
|
||||||
cycles_device?: string
|
cycles_device?: string
|
||||||
stl_quality?: string
|
|
||||||
smooth_angle?: number
|
smooth_angle?: number
|
||||||
width?: number
|
width?: number
|
||||||
height?: number
|
height?: number
|
||||||
|
|||||||
@@ -141,7 +141,6 @@ export default function RenderInfoModal({
|
|||||||
)}
|
)}
|
||||||
{rl.format && <Row label="Format" value={rl.format.toUpperCase()} />}
|
{rl.format && <Row label="Format" value={rl.format.toUpperCase()} />}
|
||||||
{rl.parts_count != null && <Row label="Parts" value={rl.parts_count} />}
|
{rl.parts_count != null && <Row label="Parts" value={rl.parts_count} />}
|
||||||
{rl.stl_quality && <Row label="STL Quality" value={rl.stl_quality} />}
|
|
||||||
{rl.smooth_angle != null && <Row label="Smooth Angle" value={`${rl.smooth_angle}°`} />}
|
{rl.smooth_angle != null && <Row label="Smooth Angle" value={`${rl.smooth_angle}°`} />}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -35,11 +35,6 @@ export const HELP_TEXTS: Record<string, HelpText> = {
|
|||||||
body: 'Which renderer to use for STEP file thumbnails. Blender produces photorealistic results; Three.js is faster but lower quality.',
|
body: 'Which renderer to use for STEP file thumbnails. Blender produces photorealistic results; Three.js is faster but lower quality.',
|
||||||
recommendation: 'Use Blender for production, Three.js for fast previews',
|
recommendation: 'Use Blender for production, Three.js for fast previews',
|
||||||
},
|
},
|
||||||
'setting.stl_quality': {
|
|
||||||
title: 'STL Export Quality',
|
|
||||||
body: 'Controls tessellation precision when converting STEP to STL for older render paths. High quality = finer mesh, larger file, slower conversion.',
|
|
||||||
recommendation: 'Low is sufficient for most thumbnails',
|
|
||||||
},
|
|
||||||
'action.regenerate_thumbnails': {
|
'action.regenerate_thumbnails': {
|
||||||
title: 'Regenerate All Thumbnails',
|
title: 'Regenerate All Thumbnails',
|
||||||
body: 'Re-renders thumbnails for all STEP files using current renderer settings. Queues every file on the thumbnail_rendering worker.',
|
body: 'Re-renders thumbnails for all STEP files using current renderer settings. Queues every file on the thumbnail_rendering worker.',
|
||||||
|
|||||||
@@ -524,7 +524,6 @@ function RenderDetails({ entry }: { entry: CadActivityEntry }) {
|
|||||||
<KV label="Engine" value={log.engine_used ?? log.engine ?? '—'} highlight={log.engine_used !== log.engine} />
|
<KV label="Engine" value={log.engine_used ?? log.engine ?? '—'} highlight={log.engine_used !== log.engine} />
|
||||||
<KV label="Samples" value={log.samples?.toString() ?? '—'} />
|
<KV label="Samples" value={log.samples?.toString() ?? '—'} />
|
||||||
<KV label="Device" value={log.cycles_device ?? '—'} />
|
<KV label="Device" value={log.cycles_device ?? '—'} />
|
||||||
<KV label="STL quality" value={log.stl_quality ?? '—'} />
|
|
||||||
<KV label="Smooth angle" value={log.smooth_angle != null ? `${log.smooth_angle}°` : '—'} />
|
<KV label="Smooth angle" value={log.smooth_angle != null ? `${log.smooth_angle}°` : '—'} />
|
||||||
<KV label="Resolution" value={log.width && log.height ? `${log.width}×${log.height}` : '—'} />
|
<KV label="Resolution" value={log.width && log.height ? `${log.width}×${log.height}` : '—'} />
|
||||||
</>}
|
</>}
|
||||||
|
|||||||
@@ -1,12 +1,11 @@
|
|||||||
# Plan: FailedMaterial Sentinel for Unmatched Mesh Objects
|
# Plan: Priority 1 — Pipeline Cleanup (M1 Dead Code + M3 blender_render Split)
|
||||||
|
|
||||||
## Context
|
## Context
|
||||||
|
|
||||||
23/25 mesh objects in production GLB exports receive correct Schaeffler library materials. 2 ISO8734 dowel pins carry an empty material string → `mat_map_lower.get()` returns `None` → they fall through the entire matching block and keep their OCC palette color in the exported GLB.
|
ROADMAP Priority 1 is "In Progress". M2 (`step_tasks.py` decomposed to `domains/pipeline/tasks/`) is **done**. Two milestones remain:
|
||||||
|
|
||||||
The old single-material fallback in `export_gltf.py` (lines ~275–291) fires **only when exactly 1 material is appended** — it never fires for multi-material assemblies. `blender_render.py` logs unmatched parts but assigns nothing.
|
- **M1**: Delete dead-code directories, remove `stl_quality` from admin/frontend surface, remove dead functions
|
||||||
|
- **M3**: Decompose `blender_render.py` (920 lines) into focused submodules
|
||||||
Fix: append `SCHAEFFLER_059999_FailedMaterial` unconditionally as a sentinel, then assign it to every unmatched mesh object in both scripts. Also remove 2 temporary `[DEBUG]` print lines left from investigation.
|
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -14,166 +13,171 @@ Fix: append `SCHAEFFLER_059999_FailedMaterial` unconditionally as a sentinel, th
|
|||||||
|
|
||||||
| File | Change |
|
| File | Change |
|
||||||
|------|--------|
|
|------|--------|
|
||||||
| `render-worker/scripts/export_gltf.py` | Remove 2 DEBUG prints; add `FAILED_MATERIAL_NAME` constant; replace single-material fallback with universal sentinel |
|
| `blender-renderer/` | DELETE directory |
|
||||||
| `render-worker/scripts/blender_render.py` | Add FailedMaterial assignment loop at end of `_apply_material_library()` |
|
| `threejs-renderer/` | DELETE directory |
|
||||||
|
| `renderproblems_tmp/` | DELETE directory |
|
||||||
|
| `backend/app/api/routers/admin.py` | Remove `stl_quality` + `VALID_STL_QUALITIES` (7 locations) |
|
||||||
|
| `frontend/src/api/orders.ts` | Remove `stl_quality?: string` |
|
||||||
|
| `frontend/src/api/worker.ts` | Remove `stl_quality?: string` |
|
||||||
|
| `frontend/src/pages/WorkerActivity.tsx` | Remove STL quality KV row |
|
||||||
|
| `frontend/src/components/renders/RenderInfoModal.tsx` | Remove STL quality display row |
|
||||||
|
| `frontend/src/help/helpTexts.ts` | Remove `setting.stl_quality` entry |
|
||||||
|
| `backend/app/services/step_processor.py` | Remove `_render_via_service()` + dead `elif renderer == "threejs"` |
|
||||||
|
| `render-worker/scripts/blender_render.py` | Remove `_mark_sharp_and_seams()`; thin to entry-point after submodule extraction |
|
||||||
|
| `render-worker/scripts/_blender_gpu.py` | CREATE — `activate_gpu()` |
|
||||||
|
| `render-worker/scripts/_blender_import.py` | CREATE — `import_glb()`, `apply_rotation()` |
|
||||||
|
| `render-worker/scripts/_blender_materials.py` | CREATE — `build_mat_map_lower()`, `apply_material_library()`, `assign_failed_material()` |
|
||||||
|
| `render-worker/scripts/_blender_camera.py` | CREATE — `setup_auto_camera()`, `setup_auto_lights()` |
|
||||||
|
| `render-worker/scripts/_blender_scene.py` | CREATE — `ensure_collection()`, `apply_smooth_batch()`, `apply_sharp_edges_from_occ()`, `setup_shadow_catcher()` |
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Tasks (in order)
|
## Tasks (in order)
|
||||||
|
|
||||||
### [x] Task 1: export_gltf.py — Remove DEBUG prints + add universal FailedMaterial sentinel
|
### [x] Task M1-1: Delete obsolete directories
|
||||||
|
|
||||||
**File**: `render-worker/scripts/export_gltf.py`
|
- **What**: `rm -rf blender-renderer/ threejs-renderer/ renderproblems_tmp/`
|
||||||
|
- **Acceptance gate**: `ls blender-renderer/ threejs-renderer/ renderproblems_tmp/` → all "No such file"
|
||||||
**Step 1a** — Add `FAILED_MATERIAL_NAME` constant after the imports (near top of file, after `import traceback`):
|
- **Dependencies**: none
|
||||||
```python
|
- **Risk**: Zero — no active source files
|
||||||
FAILED_MATERIAL_NAME = "SCHAEFFLER_059999_FailedMaterial"
|
|
||||||
```
|
|
||||||
|
|
||||||
**Step 1b** — In the material assignment loop, remove the two `[DEBUG]` print lines and replace with a `pass` comment:
|
|
||||||
```python
|
|
||||||
# BEFORE:
|
|
||||||
assigned += 1
|
|
||||||
assigned_names.add(obj.name)
|
|
||||||
print(f"[DEBUG] assigned '{mat_name}' → '{obj.name}' (lookup_key='{lower_base}')")
|
|
||||||
else:
|
|
||||||
print(f"[DEBUG] NO MATCH for obj='{obj.name}' lower_base='{lower_base}' mat_name={mat_name!r} in_appended={mat_name in appended if mat_name else False}")
|
|
||||||
|
|
||||||
# AFTER:
|
|
||||||
assigned += 1
|
|
||||||
assigned_names.add(obj.name)
|
|
||||||
else:
|
|
||||||
pass # unmatched → will receive FailedMaterial sentinel below
|
|
||||||
```
|
|
||||||
|
|
||||||
**Step 1c** — Replace the single-material fallback block (after `print(f"Material substitution: ...")`) with the universal sentinel:
|
|
||||||
```python
|
|
||||||
# BEFORE (single-material fallback, only fires when len(appended)==1):
|
|
||||||
# Single-material fallback: if only one library material was loaded, ...
|
|
||||||
if len(appended) == 1:
|
|
||||||
default_mat_name, default_mat = next(iter(appended.items()))
|
|
||||||
if default_mat:
|
|
||||||
fallback = 0
|
|
||||||
for obj in mesh_objects:
|
|
||||||
if obj.name not in assigned_names:
|
|
||||||
if obj.data.users > 1:
|
|
||||||
obj.data = obj.data.copy()
|
|
||||||
obj.data.materials.clear()
|
|
||||||
obj.data.materials.append(default_mat)
|
|
||||||
fallback += 1
|
|
||||||
if fallback:
|
|
||||||
print(f"Single-material fallback: applied '{default_mat_name}' to {fallback} unmatched objects")
|
|
||||||
|
|
||||||
# AFTER (universal sentinel — fires regardless of how many materials were appended):
|
|
||||||
# Universal FailedMaterial sentinel: assign SCHAEFFLER_059999_FailedMaterial
|
|
||||||
# to every mesh object that was not matched by name-based lookup above.
|
|
||||||
failed_mat = None
|
|
||||||
try:
|
|
||||||
bpy.ops.wm.append(
|
|
||||||
filepath=f"{args.asset_library_blend}/Material/{FAILED_MATERIAL_NAME}",
|
|
||||||
directory=f"{args.asset_library_blend}/Material/",
|
|
||||||
filename=FAILED_MATERIAL_NAME,
|
|
||||||
link=False,
|
|
||||||
)
|
|
||||||
if FAILED_MATERIAL_NAME in bpy.data.materials:
|
|
||||||
failed_mat = bpy.data.materials[FAILED_MATERIAL_NAME]
|
|
||||||
print(f"Appended sentinel material: {FAILED_MATERIAL_NAME}")
|
|
||||||
else:
|
|
||||||
print(f"WARNING: sentinel '{FAILED_MATERIAL_NAME}' not in library — "
|
|
||||||
f"creating in-memory magenta fallback", file=sys.stderr)
|
|
||||||
except Exception as exc:
|
|
||||||
print(f"WARNING: failed to append sentinel '{FAILED_MATERIAL_NAME}': {exc}",
|
|
||||||
file=sys.stderr)
|
|
||||||
|
|
||||||
if failed_mat is None:
|
|
||||||
# Library append failed: create in-memory magenta so export is never silently wrong
|
|
||||||
failed_mat = bpy.data.materials.new(name=FAILED_MATERIAL_NAME)
|
|
||||||
failed_mat.use_nodes = True
|
|
||||||
bsdf = failed_mat.node_tree.nodes.get("Principled BSDF")
|
|
||||||
if bsdf:
|
|
||||||
bsdf.inputs["Base Color"].default_value = (1.0, 0.0, 1.0, 1.0) # magenta
|
|
||||||
|
|
||||||
fallback_count = 0
|
|
||||||
for obj in mesh_objects:
|
|
||||||
if obj.name not in assigned_names:
|
|
||||||
if obj.data.users > 1:
|
|
||||||
obj.data = obj.data.copy()
|
|
||||||
obj.data.materials.clear()
|
|
||||||
obj.data.materials.append(failed_mat)
|
|
||||||
fallback_count += 1
|
|
||||||
if fallback_count:
|
|
||||||
print(f"FailedMaterial sentinel: assigned '{FAILED_MATERIAL_NAME}' "
|
|
||||||
f"to {fallback_count} unmatched objects")
|
|
||||||
```
|
|
||||||
|
|
||||||
**Acceptance gate**:
|
|
||||||
```bash
|
|
||||||
grep -n "\[DEBUG\]" render-worker/scripts/export_gltf.py # must return nothing
|
|
||||||
grep -n "FAILED_MATERIAL_NAME" render-worker/scripts/export_gltf.py # must show constant + usage
|
|
||||||
```
|
|
||||||
After deploying and running a production GLB export:
|
|
||||||
- Log shows `FailedMaterial sentinel: assigned 'SCHAEFFLER_059999_FailedMaterial' to 2 unmatched objects`
|
|
||||||
- No `[DEBUG]` lines in logs
|
|
||||||
|
|
||||||
**Dependencies**: none
|
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
### [x] Task 2: blender_render.py — Add FailedMaterial fallback inside `_apply_material_library()`
|
### [x] Task M1-2: Remove stl_quality from admin.py
|
||||||
|
|
||||||
**File**: `render-worker/scripts/blender_render.py`
|
- **File**: `backend/app/api/routers/admin.py`
|
||||||
|
- **What**: Delete all 7 references:
|
||||||
|
1. `VALID_STL_QUALITIES = {"low", "high"}` constant
|
||||||
|
2. `"stl_quality": "low"` from `SETTINGS_DEFAULTS`
|
||||||
|
3. `stl_quality: str = "low"` from `SettingsOut`
|
||||||
|
4. `stl_quality: str | None = None` from `SettingsUpdate`
|
||||||
|
5. `stl_quality=raw["stl_quality"],` from `_settings_to_out()`
|
||||||
|
6. `if body.stl_quality is not None and body.stl_quality not in VALID_STL_QUALITIES:` validation block
|
||||||
|
7. `if body.stl_quality is not None: updates["stl_quality"] = body.stl_quality` update block
|
||||||
|
- **Acceptance gate**: `grep -n "stl_quality\|VALID_STL_QUALITIES" backend/app/api/routers/admin.py` → 0 matches
|
||||||
|
- **Dependencies**: none
|
||||||
|
- **Risk**: Low — the DB key remains (harmless); pipeline internally still uses `gltf_*_linear_deflection`
|
||||||
|
|
||||||
At the end of `_apply_material_library()`, replace the logging-only unmatched block with one that also calls `_assign_failed_material()`:
|
---
|
||||||
|
|
||||||
```python
|
### [x] Task M1-3: Remove stl_quality from frontend
|
||||||
# BEFORE (end of _apply_material_library, lines ~483-485):
|
|
||||||
print(f"[blender_render] material assignment: {assigned_count}/{len(parts)} parts matched", flush=True)
|
|
||||||
if unmatched_names:
|
|
||||||
print(f"[blender_render] unmatched parts (palette fallback): {unmatched_names[:10]}", flush=True)
|
|
||||||
|
|
||||||
# AFTER:
|
- **Files**:
|
||||||
print(f"[blender_render] material assignment: {assigned_count}/{len(parts)} parts matched", flush=True)
|
- `frontend/src/api/orders.ts` — remove `stl_quality?: string`
|
||||||
if unmatched_names:
|
- `frontend/src/api/worker.ts` — remove `stl_quality?: string`
|
||||||
print(f"[blender_render] unmatched parts → assigning {FAILED_MATERIAL_NAME}: {unmatched_names[:10]}", flush=True)
|
- `frontend/src/pages/WorkerActivity.tsx` — remove STL quality KV row
|
||||||
unmatched_set = set(unmatched_names)
|
- `frontend/src/components/renders/RenderInfoModal.tsx` — remove STL quality row
|
||||||
for part in parts:
|
- `frontend/src/help/helpTexts.ts` — remove `setting.stl_quality` entry
|
||||||
if part.name in unmatched_set:
|
- **Acceptance gate**: `grep -rn "stl_quality" frontend/src/` → 0 matches; `npx tsc --noEmit` passes
|
||||||
if part.data.users > 1:
|
- **Dependencies**: M1-2
|
||||||
part.data = part.data.copy()
|
- **Risk**: Low — all uses are optional fields (`?:`)
|
||||||
_assign_failed_material(part)
|
|
||||||
```
|
|
||||||
|
|
||||||
Note: `_assign_failed_material()` and `FAILED_MATERIAL_NAME` already exist in `blender_render.py` (line 31 and lines 151–166). No new imports needed.
|
---
|
||||||
|
|
||||||
**Acceptance gate**:
|
### [x] Task M1-4: Remove dead _mark_sharp_and_seams from blender_render.py
|
||||||
Trigger a thumbnail render with a material_map that leaves one or more parts unmatched. Render log must include:
|
|
||||||
```
|
|
||||||
[blender_render] unmatched parts → assigning SCHAEFFLER_059999_FailedMaterial: [...]
|
|
||||||
```
|
|
||||||
|
|
||||||
**Dependencies**: none (independent of Task 1)
|
- **File**: `render-worker/scripts/blender_render.py`
|
||||||
|
- **What**: Delete the `_mark_sharp_and_seams()` function (lines 196–256 approx). It is defined but never called — `_apply_sharp_edges_from_occ()` is the active implementation.
|
||||||
|
- **Acceptance gate**: `grep -n "_mark_sharp_and_seams" render-worker/scripts/blender_render.py` → 0 matches
|
||||||
|
- **Dependencies**: none
|
||||||
|
- **Risk**: Zero — verifiably never called
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### [x] Task M1-5: Remove dead code from step_processor.py
|
||||||
|
|
||||||
|
- **File**: `backend/app/services/step_processor.py`
|
||||||
|
- **What**: Delete `_render_via_service()` function and the `elif renderer == "threejs":` branch (which only logs a warning and falls through)
|
||||||
|
- **Acceptance gate**: `grep -n "_render_via_service\|renderer == .threejs" backend/app/services/step_processor.py` → 0 matches
|
||||||
|
- **Dependencies**: M1-1
|
||||||
|
- **Risk**: Low — function is only referenced from within the dead branch
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### [x] Task M3-1: Create _blender_gpu.py
|
||||||
|
|
||||||
|
- **File**: `render-worker/scripts/_blender_gpu.py` (NEW)
|
||||||
|
- **What**: Extract `_activate_gpu()` from `blender_render.py` into a standalone module. Refactor to accept `cycles_device: str` parameter instead of reading a module-level global. Rename to `activate_gpu()`.
|
||||||
|
- **Key signature**: `def activate_gpu(cycles_device: str = "auto") -> str | None`
|
||||||
|
- **Acceptance gate**: `grep -c "def _activate_gpu" render-worker/scripts/blender_render.py` → 0; function callable as `from _blender_gpu import activate_gpu`
|
||||||
|
- **Dependencies**: M1-4
|
||||||
|
- **Risk**: Medium — must pass `sys.path` correctly so Blender Python finds the module
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### [x] Task M3-2: Create _blender_import.py
|
||||||
|
|
||||||
|
- **File**: `render-worker/scripts/_blender_import.py` (NEW)
|
||||||
|
- **What**: Extract `_import_glb()` and `_apply_rotation()` into module. Rename to `import_glb()` / `apply_rotation()`.
|
||||||
|
- **Acceptance gate**: `grep -c "def _import_glb\|def _apply_rotation" render-worker/scripts/blender_render.py` → 0
|
||||||
|
- **Dependencies**: M1-4
|
||||||
|
- **Risk**: Low — no hidden globals beyond `bpy`, `math`, `Vector`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### [x] Task M3-3: Create _blender_materials.py
|
||||||
|
|
||||||
|
- **File**: `render-worker/scripts/_blender_materials.py` (NEW)
|
||||||
|
- **What**: Extract `_assign_failed_material()`, `_apply_material_library()`, and the `mat_map_lower` building loop. Consolidate the duplicated `mat_map_lower` logic (currently in Mode A and Mode B) into a single `build_mat_map_lower()` helper. `FAILED_MATERIAL_NAME` constant lives here.
|
||||||
|
- **Acceptance gate**: `grep -c "def _assign_failed_material\|def _apply_material_library" render-worker/scripts/blender_render.py` → 0
|
||||||
|
- **Dependencies**: M1-4
|
||||||
|
- **Risk**: Medium — `_apply_material_library()` currently uses `part_names_ordered` global; must convert to parameter
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### [x] Task M3-4: Create _blender_camera.py
|
||||||
|
|
||||||
|
- **File**: `render-worker/scripts/_blender_camera.py` (NEW)
|
||||||
|
- **What**: Extract auto-camera placement block (bounding sphere computation, isometric positioning, clip plane setup, `ELEVATION_DEG`/`AZIMUTH_DEG` constants) and `setup_auto_lights()`.
|
||||||
|
- **Key signatures**: `def setup_auto_camera(parts, width, height) -> tuple[Vector, float]` (returns center + radius for reuse by lights); `def setup_auto_lights(bbox_center, bsphere_radius) -> None`
|
||||||
|
- **Acceptance gate**: `grep -c "ELEVATION_DEG\|AZIMUTH_DEG\|bsphere_radius" render-worker/scripts/blender_render.py` → 0
|
||||||
|
- **Dependencies**: M3-2
|
||||||
|
- **Risk**: Low — camera block is self-contained
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### [x] Task M3-5: Create _blender_scene.py
|
||||||
|
|
||||||
|
- **File**: `render-worker/scripts/_blender_scene.py` (NEW)
|
||||||
|
- **What**: Extract `_ensure_collection()`, `_apply_smooth_batch()`, `_apply_sharp_edges_from_occ()`, shadow catcher setup into module.
|
||||||
|
- **Acceptance gate**: `grep -c "def _ensure_collection\|def _apply_smooth_batch\|def _apply_sharp_edges_from_occ" render-worker/scripts/blender_render.py` → 0
|
||||||
|
- **Dependencies**: M1-4
|
||||||
|
- **Risk**: Low
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### [x] Task M3-6: Thin blender_render.py to entry-point
|
||||||
|
|
||||||
|
- **File**: `render-worker/scripts/blender_render.py`
|
||||||
|
- **What**: Replace all extracted function bodies with imports from submodules. Add `sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))` before imports so Blender Python finds the submodules. Result: argument parsing + Mode A/B orchestration + timing only. Target: < 200 lines.
|
||||||
|
- **Acceptance gate**: `wc -l render-worker/scripts/blender_render.py` → < 200; upload `81113-l_cut.stp` → thumbnail renders correctly
|
||||||
|
- **Dependencies**: M3-1, M3-2, M3-3, M3-4, M3-5
|
||||||
|
- **Risk**: High (integration step) — test immediately after deploy
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Migration Check
|
## Migration Check
|
||||||
|
|
||||||
**No migration required.** Two render-worker scripts only. No DB, no backend, no frontend.
|
**No migration required.** `stl_quality` key stays in DB (harmless). No new columns or tables.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Order Recommendation
|
## Order Recommendation
|
||||||
|
|
||||||
Tasks 1 and 2 are independent. Implement both in the same session, then:
|
|
||||||
```
|
```
|
||||||
docker compose cp render-worker/scripts/export_gltf.py render-worker:/render-scripts/export_gltf.py
|
M1-1 (delete dirs) → M1-4 (dead func blender) → M1-5 (dead func step_processor)
|
||||||
docker compose cp render-worker/scripts/blender_render.py render-worker:/render-scripts/blender_render.py
|
→ M1-2 (admin.py) → M1-3 (frontend)
|
||||||
→ trigger production GLB re-generation → verify sentinel fires for ISO8734 parts
|
→ M3-1..M3-5 (create submodules in parallel where possible)
|
||||||
|
→ M3-6 (thin blender_render.py — integration, highest risk, test immediately)
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Deploy after M1: `docker compose up -d --build backend`
|
||||||
|
Deploy after M3-6: `docker compose up -d --build render-worker`
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Risks / Open Questions
|
## Risks / Open Questions
|
||||||
|
|
||||||
1. `assigned_names` uses `obj.name` (Blender-deduplicated, may include `.001` suffix) — the sentinel loop iterates the same `mesh_objects` list and checks `obj.name not in assigned_names`, so the comparison is consistent. ✓
|
1. **Blender `sys.path`**: Submodule files must be at `/render-scripts/` (the volume mount path). `sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))` is the safe way to ensure this regardless of CWD.
|
||||||
2. `_assign_failed_material()` in `blender_render.py` does not include a `users > 1` copy guard — adding it in Task 2 is correct and consistent with the existing assignment branch.
|
2. **`part_names_ordered` global**: Currently used across multiple functions in `blender_render.py`. Must be explicitly passed as a parameter to `apply_material_library()` in M3-3.
|
||||||
3. If `FAILED_MATERIAL_NAME` was already appended as part of `needed` in `export_gltf.py` (e.g., if a part explicitly has `SCHAEFFLER_059999_FailedMaterial` in its material map), the `wm.append` call deduplicates automatically. ✓
|
3. **M3 scope**: M3 is a pure refactor — no behaviour change. If time is limited, M1 (dead code removal) delivers clean value on its own. M3 can be deferred to a separate session.
|
||||||
|
|||||||
@@ -0,0 +1,111 @@
|
|||||||
|
"""Camera and lighting helpers for Blender headless renders."""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import math
|
||||||
|
|
||||||
|
ELEVATION_DEG = 28.0
|
||||||
|
AZIMUTH_DEG = 40.0
|
||||||
|
LENS_MM = 50.0
|
||||||
|
SENSOR_WIDTH_MM = 36.0
|
||||||
|
FILL_FACTOR = 0.85
|
||||||
|
|
||||||
|
|
||||||
|
def setup_auto_camera(parts: list, width: int, height: int):
    """Compute bounding sphere and place an isometric auto-camera.

    Args:
        parts: Blender mesh objects whose world-space bounding boxes define
            the framing target.
        width: Render width in pixels (used for the aspect-aware FOV).
        height: Render height in pixels.

    Returns (bbox_center, bsphere_radius) as a tuple so the caller can
    pass them to setup_auto_lights().
    """
    import bpy  # type: ignore[import]
    from mathutils import Vector, Matrix  # type: ignore[import]

    # Gather all 8 bbox corners of every part, transformed to world space.
    all_corners = []
    for part in parts:
        all_corners.extend(part.matrix_world @ Vector(c) for c in part.bound_box)

    bbox_min = Vector((
        min(v.x for v in all_corners),
        min(v.y for v in all_corners),
        min(v.z for v in all_corners),
    ))
    bbox_max = Vector((
        max(v.x for v in all_corners),
        max(v.y for v in all_corners),
        max(v.z for v in all_corners),
    ))

    bbox_center = (bbox_min + bbox_max) * 0.5
    bbox_dims = bbox_max - bbox_min
    # Half the bbox diagonal; floor at 1mm so degenerate geometry never
    # produces a zero-radius sphere (division below).
    bsphere_radius = max(bbox_dims.length * 0.5, 0.001)

    print(f"[blender_render] bbox_dims={tuple(round(d,4) for d in bbox_dims)}, "
          f"bsphere_radius={bsphere_radius:.4f}, center={tuple(round(c,4) for c in bbox_center)}")

    elevation_rad = math.radians(ELEVATION_DEG)
    azimuth_rad = math.radians(AZIMUTH_DEG)

    # Unit vector from the bbox centre toward the camera (spherical coords).
    cam_dir = Vector((
        math.cos(elevation_rad) * math.cos(azimuth_rad),
        math.cos(elevation_rad) * math.sin(azimuth_rad),
        math.sin(elevation_rad),
    )).normalized()

    # Half-FOV horizontally and vertically; the smaller one constrains framing.
    fov_h = math.atan(SENSOR_WIDTH_MM / (2.0 * LENS_MM))
    fov_v = math.atan(SENSOR_WIDTH_MM * (height / width) / (2.0 * LENS_MM))
    fov_used = min(fov_h, fov_v)

    # Distance so the bounding sphere fills FILL_FACTOR of the frame;
    # never closer than 1.5 radii to avoid clipping into the geometry.
    dist = (bsphere_radius / math.tan(fov_used)) / FILL_FACTOR
    dist = max(dist, bsphere_radius * 1.5)
    print(f"[blender_render] camera dist={dist:.4f}, fov={math.degrees(fov_used):.2f}°")

    cam_location = bbox_center + cam_dir * dist
    bpy.ops.object.camera_add(location=cam_location)
    cam_obj = bpy.context.active_object
    cam_obj.data.lens = LENS_MM
    bpy.context.scene.camera = cam_obj

    # Build an orthonormal camera basis looking at the bbox centre with
    # world +Z as "up". If look_dir is (anti)parallel to +Z the cross
    # product degenerates — fall back to world +X as "right".
    look_dir = (bbox_center - cam_location).normalized()
    up_world = Vector((0.0, 0.0, 1.0))
    right = look_dir.cross(up_world)
    if right.length < 1e-6:
        right = Vector((1.0, 0.0, 0.0))
    right.normalize()
    cam_up = right.cross(look_dir).normalized()

    # Rows are (right, up, -look); transposed gives the world-from-camera
    # rotation. Blender cameras look down their local -Z axis.
    rot_mat = Matrix((
        ( right.x, right.y, right.z),
        ( cam_up.x, cam_up.y, cam_up.z),
        (-look_dir.x, -look_dir.y, -look_dir.z),
    )).transposed()
    cam_obj.rotation_euler = rot_mat.to_euler('XYZ')

    # Clip planes scale with the shot so tiny and huge assemblies both work.
    cam_obj.data.clip_start = max(dist * 0.001, 0.0001)
    cam_obj.data.clip_end = dist + bsphere_radius * 3.0
    print(f"[blender_render] clip {cam_obj.data.clip_start:.6f} … {cam_obj.data.clip_end:.4f}")

    return bbox_center, bsphere_radius
|
||||||
|
|
||||||
|
|
||||||
|
def setup_auto_lights(bbox_center, bsphere_radius: float) -> None:
    """Add a sun + area fill light positioned relative to the bounding sphere."""
    import bpy  # type: ignore[import]

    offset = bsphere_radius * 6.0

    # Key light: directional sun above and to one side of the product.
    sun_location = (
        bbox_center.x + offset * 0.5,
        bbox_center.y - offset * 0.35,
        bbox_center.z + offset,
    )
    bpy.ops.object.light_add(type='SUN', location=sun_location)
    key_light = bpy.context.active_object
    key_light.data.energy = 4.0
    key_light.rotation_euler = (math.radians(45), 0, math.radians(30))

    # Fill light: large area lamp on the opposite side; energy and size
    # scale with the bounding-sphere radius so large assemblies stay lit.
    fill_location = (
        bbox_center.x - offset * 0.4,
        bbox_center.y + offset * 0.4,
        bbox_center.z + offset * 0.7,
    )
    bpy.ops.object.light_add(type='AREA', location=fill_location)
    fill_light = bpy.context.active_object
    fill_light.data.energy = max(800.0, bsphere_radius ** 2 * 2000.0)
    fill_light.data.size = max(4.0, bsphere_radius * 4.0)
|
||||||
@@ -0,0 +1,121 @@
|
|||||||
|
"""GPU activation and engine configuration helpers for Blender headless renders.
|
||||||
|
|
||||||
|
activate_gpu() must be called BEFORE open_mainfile / Cycles engine initialisation
|
||||||
|
so that the CUDA/OptiX kernel is compiled with the correct compute_device_type.
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
def activate_gpu(cycles_device: str = "auto") -> str | None:
    """Probe for GPU compute devices and activate them.

    Args:
        cycles_device: "auto" | "gpu" | "cpu"

    Returns:
        Device type string (e.g. "OPTIX", "CUDA") if GPU was activated,
        or None if CPU-only.
    """
    if cycles_device == "cpu":
        # Explicit CPU request — skip the probe entirely.
        return None

    import bpy  # type: ignore[import]

    try:
        prefs = bpy.context.preferences.addons['cycles'].preferences
    except Exception as exc:
        print(f"[blender_render] early GPU probe failed: {exc}", flush=True)
        return None

    # Try backends in preference order; first one exposing a non-CPU device wins.
    for backend in ('OPTIX', 'CUDA', 'HIP', 'ONEAPI'):
        try:
            prefs.compute_device_type = backend
            prefs.get_devices()
            gpu_devices = [dev for dev in prefs.devices if dev.type != 'CPU']
            if gpu_devices:
                # Enable every non-CPU device, disable CPU entries.
                for dev in prefs.devices:
                    dev.use = (dev.type != 'CPU')
                print(f"[blender_render] early GPU activation: {backend}, "
                      f"devices={[(d.name, d.type) for d in gpu_devices]}", flush=True)
                return backend
        except Exception as exc:
            print(f"[blender_render] {backend} not available: {exc}", flush=True)

    return None
|
||||||
|
|
||||||
|
|
||||||
|
def configure_engine(
    scene,
    engine: str,
    samples: int,
    cycles_device: str,
    early_gpu_type: str | None,
    noise_threshold_arg: str = "",
    denoiser_arg: str = "",
    denoising_input_passes_arg: str = "",
    denoising_prefilter_arg: str = "",
    denoising_quality_arg: str = "",
    denoising_use_gpu_arg: str = "",
) -> str:
    """Configure the Blender render engine (EEVEE or Cycles) on *scene*.

    Args:
        scene: bpy.types.Scene to configure.
        engine: Requested engine, "eevee" or "cycles".
        samples: Render sample count.
        cycles_device: "auto" | "gpu" | "cpu", forwarded to activate_gpu().
        early_gpu_type: Compute device type found by an earlier activate_gpu()
            probe (e.g. "OPTIX"); used as fallback if re-probing returns None.
        noise_threshold_arg: If non-empty, enables adaptive sampling with this
            threshold (parsed as float).
        denoiser_arg: Cycles denoiser id; empty string means OPENIMAGEDENOISE.
        denoising_input_passes_arg / denoising_prefilter_arg /
        denoising_quality_arg / denoising_use_gpu_arg: Optional denoiser
            tuning knobs; empty string leaves Blender's default. Invalid
            values are ignored best-effort (older builds lack some attrs).

    Returns the effective engine name ("eevee" or "cycles") — may differ
    from the requested one if EEVEE could not be enabled.
    Exits with code 2 if GPU required but unavailable (CYCLES_DEVICE=gpu env var).
    """
    if engine == "eevee":
        # Blender renamed the engine id (BLENDER_EEVEE_NEXT in 4.2+);
        # try both ids so the script works across versions.
        set_ok = False
        for eevee_id in ('BLENDER_EEVEE', 'BLENDER_EEVEE_NEXT'):
            try:
                scene.render.engine = eevee_id
                set_ok = True
                print(f"[blender_render] EEVEE engine id: {eevee_id}")
                break
            except TypeError:
                continue
        if not set_ok:
            print("[blender_render] WARNING: could not set EEVEE engine – falling back to Cycles")
            engine = "cycles"

    if engine == "eevee":
        # The samples attribute was also renamed across versions; set
        # whichever exists. (Fix: removed a dead `import bpy` that was
        # inside this loop but never used.)
        for attr in ('taa_render_samples', 'samples'):
            try:
                setattr(scene.eevee, attr, samples)
                print(f"[blender_render] EEVEE samples: scene.eevee.{attr}={samples}")
                break
            except AttributeError:
                continue

    if engine != "eevee":
        gpu_type_found = activate_gpu(cycles_device) or early_gpu_type
        scene.render.engine = 'CYCLES'
        if gpu_type_found:
            scene.cycles.device = 'GPU'
            # NOTE(review): this second activate_gpu() call looks redundant
            # (the probe above already enabled the devices) — confirm whether
            # switching render.engine resets device prefs before removing it.
            activate_gpu(cycles_device)
            print(f"[blender_render] Cycles GPU ({gpu_type_found}), samples={samples}", flush=True)
            print(f"RENDER_DEVICE_USED: engine=CYCLES device=GPU compute_type={gpu_type_found}", flush=True)
        else:
            scene.cycles.device = 'CPU'
            print(f"[blender_render] WARNING: GPU not found — falling back to CPU, samples={samples}", flush=True)
            print("RENDER_DEVICE_USED: engine=CYCLES device=CPU compute_type=NONE (fallback)", flush=True)
            if os.environ.get("CYCLES_DEVICE", "auto").lower() == "gpu":
                # Strict mode: the caller demanded GPU; abort so the worker
                # can surface the failure instead of silently rendering slow.
                print("GPU_REQUIRED_BUT_CPU_USED: strict mode active (CYCLES_DEVICE=gpu)", flush=True)
                sys.exit(2)

        scene.cycles.samples = samples
        scene.cycles.use_denoising = True
        scene.cycles.denoiser = denoiser_arg if denoiser_arg else 'OPENIMAGEDENOISE'
        if denoising_input_passes_arg:
            try:
                scene.cycles.denoising_input_passes = denoising_input_passes_arg
            except Exception:
                pass
        if denoising_prefilter_arg:
            try:
                scene.cycles.denoising_prefilter = denoising_prefilter_arg
            except Exception:
                pass
        if denoising_quality_arg:
            try:
                scene.cycles.denoising_quality = denoising_quality_arg
            except Exception:
                pass
        if denoising_use_gpu_arg:
            try:
                scene.cycles.denoising_use_gpu = (denoising_use_gpu_arg == "1")
            except AttributeError:
                pass
        if noise_threshold_arg:
            scene.cycles.use_adaptive_sampling = True
            scene.cycles.adaptive_threshold = float(noise_threshold_arg)

    return engine
|
||||||
@@ -0,0 +1,85 @@
|
|||||||
|
"""GLB import and geometry helpers for Blender headless renders."""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import math
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
def import_glb(glb_file: str) -> list:
    """Import OCC-generated GLB into Blender.

    OCC exports one mesh object per STEP part, already in metres.
    Blender's native GLTF importer preserves part names.

    Args:
        glb_file: Path to the .glb produced by the OCC export stage.

    Returns list of Blender mesh objects, centred at world origin.
    Exits the process with code 1 if the GLB contains no mesh objects.
    """
    import bpy  # type: ignore[import]
    from mathutils import Vector  # type: ignore[import]

    # The GLTF importer selects everything it creates; clearing the
    # selection first lets us collect exactly the imported objects.
    bpy.ops.object.select_all(action='DESELECT')
    bpy.ops.import_scene.gltf(filepath=glb_file)
    parts = [o for o in bpy.context.selected_objects if o.type == 'MESH']

    if not parts:
        print(f"ERROR: No mesh objects imported from {glb_file}")
        sys.exit(1)

    print(f"[blender_render] imported {len(parts)} part(s) from GLB: "
          f"{[p.name for p in parts[:5]]}")

    # Remove OCC-baked custom normals so shade_smooth_by_angle can recompute
    # normals from scratch (respecting our sharp edge marks).
    cleared = 0
    for p in parts:
        if "custom_normal" in p.data.attributes:
            p.data.attributes.remove(p.data.attributes["custom_normal"])
            cleared += 1
    if cleared:
        print(f"[blender_render] cleared OCC custom_normal from {cleared} mesh objects")

    # Centre combined bbox at world origin
    all_corners = []
    for p in parts:
        all_corners.extend(p.matrix_world @ Vector(c) for c in p.bound_box)

    if all_corners:
        mins = Vector((min(v.x for v in all_corners),
                       min(v.y for v in all_corners),
                       min(v.z for v in all_corners)))
        maxs = Vector((max(v.x for v in all_corners),
                       max(v.y for v in all_corners),
                       max(v.z for v in all_corners)))
        center = (mins + maxs) * 0.5
        # Move root objects (parentless) to centre. Adjusting a child's local
        # .location by a world-space vector gives wrong results when the GLB has
        # Empty parent nodes (OCC assembly hierarchy). Shifting the root moves
        # the entire hierarchy correctly.
        all_imported = list(bpy.context.selected_objects)
        root_objects = [o for o in all_imported if o.parent is None]
        for obj in root_objects:
            obj.location -= center

    return parts
|
||||||
|
|
||||||
|
|
||||||
|
def apply_rotation(parts: list, rx: float, ry: float, rz: float) -> None:
    """Apply Euler rotation (degrees, XYZ order) to all parts around world origin.

    After import_glb the combined bbox center is at world origin,
    so rotating around origin is equivalent to rotating around the assembly center.
    """
    if not parts:
        return
    if rx == 0.0 and ry == 0.0 and rz == 0.0:
        # Identity rotation — nothing to do.
        return

    import bpy  # type: ignore[import]
    from mathutils import Euler  # type: ignore[import]

    angles = (math.radians(rx), math.radians(ry), math.radians(rz))
    world_rot = Euler(angles, 'XYZ').to_matrix().to_4x4()
    for obj in parts:
        obj.matrix_world = world_rot @ obj.matrix_world

    # Bake the rotation into mesh data so downstream camera/bbox maths
    # see the rotated geometry instead of an object-level transform.
    bpy.ops.object.select_all(action='DESELECT')
    for obj in parts:
        obj.select_set(True)
    bpy.context.view_layer.objects.active = parts[0]
    bpy.ops.object.transform_apply(location=False, rotation=True, scale=False)
    print(f"[blender_render] applied rotation ({rx}°, {ry}°, {rz}°) to {len(parts)} parts")
|
||||||
@@ -0,0 +1,156 @@
|
|||||||
|
"""Material assignment helpers for Blender headless renders."""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import re as _re
|
||||||
|
|
||||||
|
FAILED_MATERIAL_NAME = "SCHAEFFLER_059999_FailedMaterial"
|
||||||
|
|
||||||
|
|
||||||
|
def assign_failed_material(part_obj) -> None:
    """Assign the standard fallback material (magenta) when no library material matches.

    Reuses SCHAEFFLER_059999_FailedMaterial if already loaded; otherwise
    creates a simple magenta Principled BSDF node tree.
    """
    import bpy  # type: ignore[import]

    fallback = bpy.data.materials.get(FAILED_MATERIAL_NAME)
    if fallback is None:
        # First use in this session — build the sentinel material once.
        fallback = bpy.data.materials.new(name=FAILED_MATERIAL_NAME)
        fallback.use_nodes = True
        principled = fallback.node_tree.nodes.get("Principled BSDF")
        if principled:
            principled.inputs["Base Color"].default_value = (1.0, 0.0, 1.0, 1.0)  # magenta
            principled.inputs["Roughness"].default_value = 0.6

    # Replace whatever material slots the part had with the single sentinel.
    slots = part_obj.data.materials
    slots.clear()
    slots.append(fallback)
|
||||||
|
|
||||||
|
|
||||||
|
def build_mat_map_lower(material_map: dict) -> dict:
    """Return a lowercased version of material_map with _AF\\d+ suffix variants added.

    Both the original key and the AF-stripped key are inserted so that GLB
    object names (which may lack _AF suffixes that OCC adds to mat_map keys)
    can match in either direction.
    """
    lowered: dict = {}
    for raw_key, mat_name in material_map.items():
        key = raw_key.lower().strip()
        lowered[key] = mat_name
        # Repeatedly strip trailing _af<N> instance suffixes
        # (handles stacked suffixes like "part_af1_af2").
        bare = key
        while True:
            shorter = _re.sub(r'_af\d+$', '', bare)
            if shorter == bare:
                break
            bare = shorter
        if bare != key:
            # setdefault: an explicit entry for the bare name always wins.
            lowered.setdefault(bare, mat_name)
    return lowered
|
||||||
|
|
||||||
|
|
||||||
|
def apply_material_library(
    parts: list,
    mat_lib_path: str,
    mat_map: dict,
    part_names_ordered: list | None = None,
) -> None:
    """Append materials from library .blend and assign to parts via material_map.

    GLB-imported objects are named after STEP parts, so matching is by name
    (stripping Blender .NNN suffix for duplicates). Falls back to
    part_names_ordered index-based matching.

    Args:
        parts: Blender mesh objects to receive materials.
        mat_lib_path: Path to the material library .blend file.
        mat_map: {part_name_lower: material_name} — keys are assumed to be
            already lowercased (see build_mat_map_lower).
        part_names_ordered: STEP part names in import order, used as an
            index-based fallback when name matching fails.

    Parts without a match receive the FAILED_MATERIAL_NAME sentinel.
    Returns early (no-op) if the library file is missing or mat_map is empty.
    """
    if not mat_lib_path or not os.path.isfile(mat_lib_path):
        print(f"[blender_render] material library not found: {mat_lib_path}")
        return

    import bpy  # type: ignore[import]

    if part_names_ordered is None:
        part_names_ordered = []

    # Collect unique material names needed
    needed = set(mat_map.values())
    if not needed:
        return

    # Append materials from library
    appended: dict = {}
    for mat_name in needed:
        inner_path = f"{mat_lib_path}/Material/{mat_name}"
        try:
            bpy.ops.wm.append(
                filepath=inner_path,
                directory=f"{mat_lib_path}/Material/",
                filename=mat_name,
                link=False,
            )
            if mat_name in bpy.data.materials:
                appended[mat_name] = bpy.data.materials[mat_name]
                print(f"[blender_render] appended material: {mat_name}")
            else:
                print(f"[blender_render] WARNING: material '{mat_name}' not found after append")
        except Exception as exc:
            print(f"[blender_render] WARNING: failed to append material '{mat_name}': {exc}")

    if not appended:
        return

    # Assign materials to parts — primary: name-based (GLB object names),
    # secondary: index-based via part_names_ordered
    assigned_count = 0
    unmatched_names = []
    for i, part in enumerate(parts):
        # Try name-based matching first (strip Blender .NNN suffix)
        base_name = _re.sub(r'\.\d{3}$', '', part.name)
        # Strip OCC assembly-instance suffix (_AF0, _AF1, …) — GLB object
        # names may or may not have them while mat_map keys might.
        _prev = None
        while _prev != base_name:
            _prev = base_name
            base_name = _re.sub(r'_AF\d+$', '', base_name, flags=_re.IGNORECASE)
        part_key = base_name.lower().strip()
        mat_name = mat_map.get(part_key)

        # Prefix fallback: if a mat_map key starts with our base name or
        # vice-versa, use the longest matching key (most-specific wins).
        # Minimum length 5 guards against overly-generic short-name matches.
        if not mat_name:
            for key, val in sorted(mat_map.items(), key=lambda x: len(x[0]), reverse=True):
                if len(key) >= 5 and len(part_key) >= 5 and (
                    part_key.startswith(key) or key.startswith(part_key)
                ):
                    mat_name = val
                    break

        # Fall back to index-based matching via part_names_ordered
        if not mat_name and part_names_ordered and i < len(part_names_ordered):
            step_name = part_names_ordered[i]
            step_key = step_name.lower().strip()
            mat_name = mat_map.get(step_key)
            # Also try stripping AF from part_names_ordered entry
            if not mat_name:
                _p2 = None
                while _p2 != step_key:
                    _p2 = step_key
                    step_key = _re.sub(r'_af\d+$', '', step_key)
                mat_name = mat_map.get(step_key)

        if mat_name and mat_name in appended:
            part.data.materials.clear()
            part.data.materials.append(appended[mat_name])
            assigned_count += 1
        else:
            unmatched_names.append(part.name)

    print(f"[blender_render] material assignment: {assigned_count}/{len(parts)} parts matched", flush=True)
    if unmatched_names:
        print(f"[blender_render] unmatched parts → assigning {FAILED_MATERIAL_NAME}: {unmatched_names[:10]}", flush=True)
        unmatched_set = set(unmatched_names)
        for part in parts:
            if part.name in unmatched_set:
                # Copy shared mesh data first so the sentinel material does
                # not leak onto matched parts sharing the same mesh.
                if part.data.users > 1:
                    part.data = part.data.copy()
                assign_failed_material(part)
|
||||||
@@ -0,0 +1,149 @@
|
|||||||
|
"""Scene-level helpers for Blender headless renders."""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import math
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_collection(name: str):
    """Return a collection by name, creating it if needed."""
    import bpy  # type: ignore[import]

    existing = bpy.data.collections.get(name)
    if existing is not None:
        return existing

    # Not found — create and link under the scene's root collection.
    created = bpy.data.collections.new(name)
    bpy.context.scene.collection.children.link(created)
    return created
|
||||||
|
|
||||||
|
|
||||||
|
def apply_smooth_batch(parts: list, angle_deg: float) -> None:
    """Apply smooth shading to ALL parts in a single operator call.

    bpy.ops.object.shade_smooth_by_angle() acts on every selected object in
    one C-level call, so batching turns O(n) operator overhead into O(1)
    (per-part calls measured ~90 ms each on a 175-part assembly vs ~0.2 s total).
    An angle of 0 requests flat shading instead.
    """
    import bpy  # type: ignore[import]

    bpy.ops.object.select_all(action='DESELECT')
    meshes = [obj for obj in parts if obj.type == 'MESH']
    for obj in meshes:
        obj.select_set(True)
    if not meshes:
        return
    bpy.context.view_layer.objects.active = meshes[0]

    if angle_deg <= 0:
        bpy.ops.object.shade_flat()
    else:
        try:
            bpy.ops.object.shade_smooth_by_angle(angle=math.radians(angle_deg))
        except AttributeError:
            # Older Blender without shade_smooth_by_angle: plain smooth plus
            # the legacy per-mesh auto-smooth angle where that attr exists.
            bpy.ops.object.shade_smooth()
            for obj in meshes:
                if hasattr(obj.data, 'use_auto_smooth'):
                    obj.data.use_auto_smooth = True
                    obj.data.auto_smooth_angle = math.radians(angle_deg)

    bpy.ops.object.select_all(action='DESELECT')
|
||||||
|
|
||||||
|
|
||||||
|
def apply_sharp_edges_from_occ(parts: list, sharp_edge_pairs: list) -> None:
    """Mark edges sharp using OCC-derived vertex-pair data.

    `sharp_edge_pairs` is a list of [[x0,y0,z0],[x1,y1,z1]] in mm.
    Blender mesh coordinates are in metres (STEP mm * 0.001 scale applied).
    We match each OCC vertex pair against bmesh vertex positions with a 0.5 mm
    tolerance (0.0005 m) and mark the matched edge as sharp.

    Args:
        parts: Blender mesh objects to process.
        sharp_edge_pairs: World-space edge endpoints from the OCC stage (mm).

    No-op when sharp_edge_pairs is empty.
    """
    if not sharp_edge_pairs:
        return

    import bmesh  # type: ignore[import]
    import mathutils  # type: ignore[import]

    SCALE = 0.001  # mm → m
    TOL = 0.0005  # 0.5 mm in metres

    # OCC STEP space (Z-up, mm) → Blender (Z-up, m):
    # RWGltf applies Z→Y-up, Blender import applies Y→Z-up.
    # Net: Blender(X, Y, Z) = OCC(X*0.001, -Z*0.001, Y*0.001)
    occ_pairs = []
    for pair in sharp_edge_pairs:
        v0 = mathutils.Vector((pair[0][0] * SCALE, -pair[0][2] * SCALE, pair[0][1] * SCALE))
        v1 = mathutils.Vector((pair[1][0] * SCALE, -pair[1][2] * SCALE, pair[1][1] * SCALE))
        occ_pairs.append((v0, v1))

    marked_total = 0
    for obj in parts:
        bm = bmesh.new()
        bm.from_mesh(obj.data)
        bm.verts.ensure_lookup_table()
        bm.edges.ensure_lookup_table()

        # Build KD-tree on vertices in WORLD space — OCC pairs are world coords,
        # but mesh vertices are in local space (assembly node transform in GLB).
        world_mat = obj.matrix_world
        kd = mathutils.kdtree.KDTree(len(bm.verts))
        for v in bm.verts:
            kd.insert(world_mat @ v.co, v.index)
        kd.balance()

        marked = 0
        for v0_occ, v1_occ in occ_pairs:
            # Nearest mesh vertex to each OCC endpoint; skip the pair if
            # either endpoint is farther than the 0.5 mm tolerance —
            # the edge belongs to a different part or was re-meshed.
            _co0, idx0, dist0 = kd.find(v0_occ)
            _co1, idx1, dist1 = kd.find(v1_occ)
            if dist0 > TOL or dist1 > TOL:
                continue
            if idx0 == idx1:
                continue  # degenerate — both endpoints map to same vertex
            bv0 = bm.verts[idx0]
            bv1 = bm.verts[idx1]
            # Look up the edge in both vertex orders (defensive).
            edge = bm.edges.get((bv0, bv1))
            if edge is None:
                edge = bm.edges.get((bv1, bv0))
            if edge is not None and edge.smooth:
                edge.smooth = False
                marked += 1

        # Write the modified topology flags back to the mesh datablock.
        bm.to_mesh(obj.data)
        bm.free()
        marked_total += marked

    print(f"[blender_render] OCC sharp edges applied: {marked_total} edges marked across {len(parts)} parts", flush=True)
|
||||||
|
|
||||||
|
|
||||||
|
def setup_shadow_catcher(parts: list) -> None:
    """Enable the Shadowcatcher collection in the template and position its plane.

    The template must contain a 'Shadowcatcher' collection with a 'Shadowcatcher'
    mesh object. The plane is moved to the lowest Z of the product bounding box.
    """
    import bpy  # type: ignore[import]
    from mathutils import Vector  # type: ignore[import]

    collection_name = "Shadowcatcher"
    object_name = "Shadowcatcher"

    def _unhide(layer_col) -> bool:
        # Depth-first search for the shadow-catcher collection; un-exclude it
        # and clear both render and viewport hiding. Stops at the first hit.
        if layer_col.collection.name == collection_name:
            layer_col.exclude = False
            layer_col.collection.hide_render = False
            layer_col.collection.hide_viewport = False
            return True
        return any(_unhide(child) for child in layer_col.children)

    # The collection must be enabled in every view layer of the scene.
    for view_layer in bpy.context.scene.view_layers:
        _unhide(view_layer.layer_collection)

    plane = bpy.data.objects.get(object_name)
    if plane is None:
        print(f"[blender_render] WARNING: shadow catcher object '{object_name}' not found in template")
        return

    # Drop the plane to the lowest world-space Z of any part's bbox corner.
    corner_heights = [
        (part.matrix_world @ Vector(corner)).z
        for part in parts
        for corner in part.bound_box
    ]
    if corner_heights:
        plane.location.z = min(corner_heights)
    print(f"[blender_render] shadow catcher enabled, plane Z={plane.location.z:.4f}")
|
||||||
@@ -17,518 +17,100 @@ Features:
|
|||||||
"""
|
"""
|
||||||
import sys
|
import sys
|
||||||
import os
|
import os
|
||||||
import math
|
|
||||||
|
|
||||||
# Force unbuffered stdout so render log lines appear immediately
|
# Force unbuffered stdout so render log lines appear immediately
|
||||||
os.environ["PYTHONUNBUFFERED"] = "1"
|
os.environ["PYTHONUNBUFFERED"] = "1"
|
||||||
if hasattr(sys.stdout, "reconfigure"):
|
if hasattr(sys.stdout, "reconfigure"):
|
||||||
sys.stdout.reconfigure(line_buffering=True)
|
sys.stdout.reconfigure(line_buffering=True)
|
||||||
|
|
||||||
import bpy
|
# Add script directory to sys.path so Blender Python finds our submodules
|
||||||
from mathutils import Vector, Matrix
|
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
||||||
|
|
||||||
# Fallback material name — magenta, immediately visible when material assignment fails
|
import bpy # type: ignore[import]
|
||||||
FAILED_MATERIAL_NAME = "SCHAEFFLER_059999_FailedMaterial"
|
|
||||||
|
|
||||||
# ── Parse arguments ───────────────────────────────────────────────────────────
|
from _blender_gpu import activate_gpu, configure_engine
|
||||||
|
from _blender_import import import_glb, apply_rotation
|
||||||
argv = sys.argv
|
from _blender_materials import (
|
||||||
if "--" in argv:
|
FAILED_MATERIAL_NAME, assign_failed_material,
|
||||||
argv = argv[argv.index("--") + 1:]
|
build_mat_map_lower, apply_material_library,
|
||||||
else:
|
)
|
||||||
argv = []
|
from _blender_camera import setup_auto_camera, setup_auto_lights
|
||||||
|
from _blender_scene import (
|
||||||
if len(argv) < 4:
|
ensure_collection, apply_smooth_batch,
|
||||||
print("Usage: blender --background --python blender_render.py -- "
|
apply_sharp_edges_from_occ, setup_shadow_catcher,
|
||||||
"<glb_path> <output_path> <width> <height> [engine] [samples] [smooth_angle] [cycles_device] [transparent_bg]")
|
)
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
|
# ── Parse arguments ────────────────────────────────────────────────────────────
|
||||||
import json as _json
|
import json as _json
|
||||||
|
|
||||||
glb_path = argv[0]
|
def _arg(n, default="", transform=str):
|
||||||
output_path = argv[1]
|
return transform(argv[n]) if len(argv) > n and argv[n] else default
|
||||||
width = int(argv[2])
|
|
||||||
height = int(argv[3])
|
|
||||||
engine = argv[4].lower() if len(argv) > 4 else "cycles"
|
|
||||||
samples = int(argv[5]) if len(argv) > 5 else (64 if engine == "eevee" else 256)
|
|
||||||
smooth_angle = int(argv[6]) if len(argv) > 6 else 30 # degrees; 0 = flat shading
|
|
||||||
cycles_device = argv[7].lower() if len(argv) > 7 else "auto" # "auto", "gpu", "cpu"
|
|
||||||
# ── CLI argument parsing ─────────────────────────────────────────────────────
# Blender passes everything after the standalone "--" separator through to
# this script untouched; positional args 0-3 are mandatory, the rest optional.
argv = sys.argv[sys.argv.index("--") + 1:] if "--" in sys.argv else []

if len(argv) < 4:
    print("Usage: blender --background --python blender_render.py -- "
          "<glb_path> <output_path> <width> <height> ...")
    sys.exit(1)

glb_path = argv[0]
output_path = argv[1]
width = int(argv[2])
height = int(argv[3])
engine = argv[4].lower() if len(argv) > 4 and argv[4] else "cycles"
samples = int(argv[5]) if len(argv) > 5 and argv[5] else None
smooth_angle = int(argv[6]) if len(argv) > 6 and argv[6] else 30
cycles_device = argv[7].lower() if len(argv) > 7 and argv[7] else "auto"
transparent_bg = argv[8] == "1" if len(argv) > 8 else False
template_path = argv[9] if len(argv) > 9 and argv[9] else ""
target_collection = argv[10] if len(argv) > 10 else "Product"
material_library_path = argv[11] if len(argv) > 11 and argv[11] else ""

# material_map: JSON {part_name_lower: material_name}; malformed JSON falls
# back to an empty map (render proceeds with fallback materials).
material_map_raw = argv[12] if len(argv) > 12 else "{}"
try:
    material_map = _json.loads(material_map_raw) if material_map_raw else {}
except _json.JSONDecodeError:
    material_map = {}

# part_names_ordered: JSON list of STEP part names in import order, used for
# index-based material matching when name matching fails.
part_names_ordered_raw = argv[13] if len(argv) > 13 else "[]"
try:
    part_names_ordered = _json.loads(part_names_ordered_raw) if part_names_ordered_raw else []
except _json.JSONDecodeError:
    part_names_ordered = []

lighting_only = argv[14] == "1" if len(argv) > 14 else False
shadow_catcher = argv[15] == "1" if len(argv) > 15 else False
rotation_x = float(argv[16]) if len(argv) > 16 else 0.0
rotation_y = float(argv[17]) if len(argv) > 17 else 0.0
rotation_z = float(argv[18]) if len(argv) > 18 else 0.0
noise_threshold_arg = argv[19] if len(argv) > 19 else ""
denoiser_arg = argv[20] if len(argv) > 20 else ""
denoising_input_passes_arg = argv[21] if len(argv) > 21 else ""
denoising_prefilter_arg = argv[22] if len(argv) > 22 else ""
denoising_quality_arg = argv[23] if len(argv) > 23 else ""
denoising_use_gpu_arg = argv[24] if len(argv) > 24 else ""

# Engine-dependent default sample count when the caller did not supply one.
if samples is None:
    samples = 64 if engine == "eevee" else 256
||||||
# Named argument: --mesh-attributes <json>
|
# Named argument: --mesh-attributes <json>
|
||||||
_mesh_attrs: dict = {}
|
_mesh_attrs: dict = {}
|
||||||
_sys_argv = sys.argv
|
if "--mesh-attributes" in sys.argv:
|
||||||
if "--mesh-attributes" in _sys_argv:
|
_idx = sys.argv.index("--mesh-attributes")
|
||||||
_idx = _sys_argv.index("--mesh-attributes")
|
|
||||||
try:
|
try:
|
||||||
_mesh_attrs = _json.loads(_sys_argv[_idx + 1])
|
_mesh_attrs = _json.loads(sys.argv[_idx + 1])
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
# Validate template path: if provided it MUST exist on disk.
# Fail loudly rather than silently rendering with factory settings.
if template_path and not os.path.isfile(template_path):
    print(f"[blender_render] ERROR: template_path was provided but file not found: {template_path}")
    print("[blender_render] Check that the blend-templates directory is on the shared volume.")
    sys.exit(1)

use_template = bool(template_path)

# Startup banner: one line per concern so worker logs stay grep-able.
print(f"[blender_render] engine={engine}, samples={samples}, size={width}x{height}, smooth_angle={smooth_angle}°, device={cycles_device}, transparent={transparent_bg}")
print(f"[blender_render] part_names_ordered: {len(part_names_ordered)} entries")
if use_template:
    print(f"[blender_render] template={template_path}, collection={target_collection}, lighting_only={lighting_only}")
else:
    print("[blender_render] no template — using factory settings (Mode A)")
if material_library_path:
    print(f"[blender_render] material_library={material_library_path}, material_map keys={list(material_map.keys())}")
|
||||||
# ── Helper: find or create collection by name ────────────────────────────────
|
# ── Early GPU activation (must happen BEFORE open_mainfile / Cycles init) ─────
|
||||||
|
_early_gpu_type = activate_gpu(cycles_device)
|
||||||
|
|
||||||
def _ensure_collection(name: str):
    """Return the collection called *name*, creating it if needed.

    A newly created collection is linked under the scene's root collection
    so it is immediately visible to the view layer.
    """
    existing = bpy.data.collections.get(name)
    if existing is not None:
        return existing
    col = bpy.data.collections.new(name)
    bpy.context.scene.collection.children.link(col)
    return col
|
||||||
|
|
||||||
|
|
||||||
def _apply_smooth_batch(parts, angle_deg):
    """Apply smooth shading to ALL parts in a single operator call.

    bpy.ops.object.shade_smooth_by_angle() operates on all selected objects
    at once (one C-level call), so batching reduces O(n) operator overhead
    to O(1). Per-part calls cost ~90ms each × 175 parts = 16s; the batch
    call costs ~0.2s total.

    angle_deg <= 0 means flat shading for every mesh part.
    """
    bpy.ops.object.select_all(action='DESELECT')
    mesh_parts = [p for p in parts if p.type == 'MESH']
    if not mesh_parts:
        return
    for part in mesh_parts:
        part.select_set(True)
    bpy.context.view_layer.objects.active = mesh_parts[0]
    if angle_deg > 0:
        try:
            # Blender ≥4.1: one batched smooth-by-angle over the whole selection.
            bpy.ops.object.shade_smooth_by_angle(angle=math.radians(angle_deg))
        except AttributeError:
            # Older Blender: plain smooth plus legacy per-mesh auto-smooth.
            # NOTE(review): reconstructed nesting — confirm the auto-smooth
            # loop belongs to the fallback branch only.
            bpy.ops.object.shade_smooth()
            for part in mesh_parts:
                if hasattr(part.data, 'use_auto_smooth'):
                    part.data.use_auto_smooth = True
                    part.data.auto_smooth_angle = math.radians(angle_deg)
    else:
        bpy.ops.object.shade_flat()
    bpy.ops.object.select_all(action='DESELECT')
|
||||||
|
|
||||||
|
|
||||||
def _assign_failed_material(part_obj):
    """Assign the standard fallback material (magenta) when no library material matches.

    Reuses an already-loaded FAILED_MATERIAL_NAME datablock if present
    (e.g. appended from the material library); otherwise creates a simple
    magenta Principled BSDF stand-in so unmatched parts are visually obvious.
    """
    mat = bpy.data.materials.get(FAILED_MATERIAL_NAME)
    if mat is None:
        mat = bpy.data.materials.new(name=FAILED_MATERIAL_NAME)
        mat.use_nodes = True
        bsdf = mat.node_tree.nodes.get("Principled BSDF")
        if bsdf:
            bsdf.inputs["Base Color"].default_value = (1.0, 0.0, 1.0, 1.0)  # magenta
            bsdf.inputs["Roughness"].default_value = 0.6
    # Replace (not append to) any existing material slots.
    part_obj.data.materials.clear()
    part_obj.data.materials.append(mat)
|
||||||
|
|
||||||
|
|
||||||
import re as _re
|
|
||||||
|
|
||||||
|
|
||||||
# _scale_mm_to_m removed: OCC GLB export produces coordinates in metres already.
|
|
||||||
|
|
||||||
|
|
||||||
def _apply_rotation(parts, rx, ry, rz):
|
|
||||||
"""Apply Euler rotation (degrees, XYZ order) to all parts around world origin.
|
|
||||||
|
|
||||||
After _import_glb the combined bbox center is at world origin,
|
|
||||||
so rotating around origin is equivalent to rotating around the assembly center.
|
|
||||||
"""
|
|
||||||
if not parts or (rx == 0.0 and ry == 0.0 and rz == 0.0):
|
|
||||||
return
|
|
||||||
from mathutils import Euler
|
|
||||||
rot_mat = Euler((math.radians(rx), math.radians(ry), math.radians(rz)), 'XYZ').to_matrix().to_4x4()
|
|
||||||
for p in parts:
|
|
||||||
p.matrix_world = rot_mat @ p.matrix_world
|
|
||||||
# Bake rotation into mesh data so camera bbox calculations see the rotated geometry
|
|
||||||
bpy.ops.object.select_all(action='DESELECT')
|
|
||||||
for p in parts:
|
|
||||||
p.select_set(True)
|
|
||||||
bpy.context.view_layer.objects.active = parts[0]
|
|
||||||
bpy.ops.object.transform_apply(location=False, rotation=True, scale=False)
|
|
||||||
print(f"[blender_render] applied rotation ({rx}°, {ry}°, {rz}°) to {len(parts)} parts")
|
|
||||||
|
|
||||||
|
|
||||||
def _mark_sharp_and_seams(obj, smooth_angle_deg: float, sharp_edge_midpoints=None):
    """Mark sharp edges and UV seams based on angle threshold and optional midpoints.

    NOTE(review): no caller is visible in this file — dead-code removal
    candidate; confirm before deleting.

    Edges steeper than smooth_angle_deg are marked sharp and seamed via the
    edit-mode operators. When OCC-derived edge midpoints are supplied, up to
    200 of them are additionally matched against bmesh edge midpoints with a
    0.5 mm tolerance and marked as well (best effort, non-fatal on failure).
    """
    import math
    import bpy

    # Ensure we operate on the intended object.
    bpy.context.view_layer.objects.active = obj
    obj.select_set(True)

    # Legacy auto-smooth angle (attribute removed in newer Blender versions).
    if hasattr(obj.data, 'auto_smooth_angle'):
        obj.data.auto_smooth_angle = math.radians(smooth_angle_deg)

    # Angle-based sharp/seam marking requires edit mode.
    bpy.ops.object.mode_set(mode='EDIT')
    bpy.ops.mesh.select_all(action='DESELECT')
    bpy.ops.mesh.edges_select_sharp(sharpness=math.radians(smooth_angle_deg))
    bpy.ops.mesh.mark_sharp()
    # Reuse the same selection as UV seams.
    bpy.ops.mesh.mark_seam(clear=False)

    # Optional: OCC-derived midpoints mark extra edges via a KD-tree lookup.
    if sharp_edge_midpoints and len(sharp_edge_midpoints) > 0:
        try:
            import bmesh
            import mathutils

            bpy.ops.object.mode_set(mode='OBJECT')
            bm = bmesh.new()
            bm.from_mesh(obj.data)
            bm.edges.ensure_lookup_table()
            bm.verts.ensure_lookup_table()

            # KD-tree over edge midpoints for nearest-edge queries.
            kd = mathutils.kdtree.KDTree(len(bm.edges))
            for i, edge in enumerate(bm.edges):
                midpt = (edge.verts[0].co + edge.verts[1].co) / 2
                kd.insert(midpt, i)
            kd.balance()

            tol = 0.5  # 0.5 mm tolerance (coordinates in mm before scale)
            for mp in sharp_edge_midpoints[:200]:
                co, idx, dist = kd.find(mathutils.Vector(mp))
                if dist < tol:
                    bm.edges[idx].seam = True
                    try:
                        bm.edges[idx].smooth = False
                    except Exception:
                        pass
            bm.to_mesh(obj.data)
            bm.free()
        except Exception:
            pass  # Non-fatal — angle-based marking above already succeeded.

    # Always leave the object in object mode.
    bpy.ops.object.mode_set(mode='OBJECT')
|
||||||
|
|
||||||
|
|
||||||
def _apply_sharp_edges_from_occ(parts, sharp_edge_pairs):
|
|
||||||
"""Mark edges sharp using OCC-derived vertex-pair data.
|
|
||||||
|
|
||||||
`sharp_edge_pairs` is a list of [[x0,y0,z0],[x1,y1,z1]] in mm.
|
|
||||||
Blender mesh coordinates are in metres (STEP mm * 0.001 scale applied).
|
|
||||||
We match each OCC vertex pair against bmesh vertex positions with a 0.5 mm
|
|
||||||
tolerance (0.0005 m) and mark the matched edge as sharp.
|
|
||||||
"""
|
|
||||||
if not sharp_edge_pairs:
|
|
||||||
return
|
|
||||||
|
|
||||||
import bmesh
|
|
||||||
import mathutils
|
|
||||||
|
|
||||||
SCALE = 0.001 # mm → m
|
|
||||||
TOL = 0.0005 # 0.5 mm in metres
|
|
||||||
|
|
||||||
# OCC STEP space (Z-up, mm) → Blender (Z-up, m):
|
|
||||||
# RWGltf applies Z→Y-up, Blender import applies Y→Z-up.
|
|
||||||
# Net: Blender(X, Y, Z) = OCC(X*0.001, -Z*0.001, Y*0.001)
|
|
||||||
occ_pairs = []
|
|
||||||
for pair in sharp_edge_pairs:
|
|
||||||
v0 = mathutils.Vector((pair[0][0] * SCALE, -pair[0][2] * SCALE, pair[0][1] * SCALE))
|
|
||||||
v1 = mathutils.Vector((pair[1][0] * SCALE, -pair[1][2] * SCALE, pair[1][1] * SCALE))
|
|
||||||
occ_pairs.append((v0, v1))
|
|
||||||
|
|
||||||
marked_total = 0
|
|
||||||
for obj in parts:
|
|
||||||
bm = bmesh.new()
|
|
||||||
bm.from_mesh(obj.data)
|
|
||||||
bm.verts.ensure_lookup_table()
|
|
||||||
bm.edges.ensure_lookup_table()
|
|
||||||
|
|
||||||
# Build KD-tree on vertices in WORLD space — OCC pairs are world coords,
|
|
||||||
# but mesh vertices are in local space (assembly node transform in GLB).
|
|
||||||
world_mat = obj.matrix_world
|
|
||||||
kd = mathutils.kdtree.KDTree(len(bm.verts))
|
|
||||||
for v in bm.verts:
|
|
||||||
kd.insert(world_mat @ v.co, v.index)
|
|
||||||
kd.balance()
|
|
||||||
|
|
||||||
marked = 0
|
|
||||||
for v0_occ, v1_occ in occ_pairs:
|
|
||||||
# Find closest Blender vertex to each OCC endpoint
|
|
||||||
_co0, idx0, dist0 = kd.find(v0_occ)
|
|
||||||
_co1, idx1, dist1 = kd.find(v1_occ)
|
|
||||||
if dist0 > TOL or dist1 > TOL:
|
|
||||||
continue
|
|
||||||
if idx0 == idx1:
|
|
||||||
continue # degenerate — both endpoints map to same vertex
|
|
||||||
# Find the edge shared by these two vertices
|
|
||||||
bv0 = bm.verts[idx0]
|
|
||||||
bv1 = bm.verts[idx1]
|
|
||||||
edge = bm.edges.get((bv0, bv1))
|
|
||||||
if edge is None:
|
|
||||||
edge = bm.edges.get((bv1, bv0))
|
|
||||||
if edge is not None and edge.smooth:
|
|
||||||
edge.smooth = False
|
|
||||||
marked += 1
|
|
||||||
|
|
||||||
bm.to_mesh(obj.data)
|
|
||||||
bm.free()
|
|
||||||
marked_total += marked
|
|
||||||
|
|
||||||
print(f"[blender_render] OCC sharp edges applied: {marked_total} edges marked across {len(parts)} parts", flush=True)
|
|
||||||
|
|
||||||
|
|
||||||
def _import_glb(glb_file):
    """Import an OCC-generated GLB into Blender.

    OCC exports one mesh object per STEP part, already in metres; Blender's
    native GLTF importer preserves part names. Exits the process when the
    file yields no mesh objects (nothing to render).

    Returns the list of imported mesh objects, with the combined bounding
    box centred at the world origin.
    """
    bpy.ops.object.select_all(action='DESELECT')
    bpy.ops.import_scene.gltf(filepath=glb_file)
    parts = [o for o in bpy.context.selected_objects if o.type == 'MESH']

    if not parts:
        print(f"ERROR: No mesh objects imported from {glb_file}")
        sys.exit(1)

    print(f"[blender_render] imported {len(parts)} part(s) from GLB: "
          f"{[p.name for p in parts[:5]]}")

    # Remove OCC-baked custom normals so shade_smooth_by_angle can recompute
    # normals from scratch (respecting our sharp edge marks).
    cleared = 0
    for p in parts:
        if "custom_normal" in p.data.attributes:
            p.data.attributes.remove(p.data.attributes["custom_normal"])
            cleared += 1
    if cleared:
        print(f"[blender_render] cleared OCC custom_normal from {cleared} mesh objects")

    # Gather world-space bbox corners of every part.
    all_corners = []
    for p in parts:
        all_corners.extend(p.matrix_world @ Vector(c) for c in p.bound_box)

    if all_corners:
        mins = Vector((min(v.x for v in all_corners),
                       min(v.y for v in all_corners),
                       min(v.z for v in all_corners)))
        maxs = Vector((max(v.x for v in all_corners),
                       max(v.y for v in all_corners),
                       max(v.z for v in all_corners)))
        center = (mins + maxs) * 0.5
        # Move root objects (parentless) to centre. Adjusting a child's local
        # .location by a world-space vector gives wrong results when the GLB
        # has Empty parent nodes (OCC assembly hierarchy); shifting the root
        # moves the entire hierarchy correctly.
        all_imported = list(bpy.context.selected_objects)
        for root in (o for o in all_imported if o.parent is None):
            root.location -= center

    return parts
|
||||||
|
|
||||||
|
|
||||||
def _resolve_part_name(index, part_obj):
    """Get the STEP part name for a Blender part by index.

    The module-level part_names_ordered index mapping takes precedence when
    it covers *index*; otherwise fall back to the GLB object name (which IS
    the STEP name, possibly carrying a Blender .NNN duplicate suffix that
    we strip for lookup).
    """
    if part_names_ordered and index < len(part_names_ordered):
        return part_names_ordered[index]
    # Strip Blender auto-suffix (.001, .002, etc.) — only needed on the
    # fallback path, so computed here rather than up front.
    return _re.sub(r'\.\d{3}$', '', part_obj.name)
|
||||||
|
|
||||||
|
|
||||||
def _apply_material_library(parts, mat_lib_path, mat_map):
|
|
||||||
"""Append materials from library .blend and assign to parts via material_map.
|
|
||||||
|
|
||||||
GLB-imported objects are named after STEP parts, so matching is by name
|
|
||||||
(stripping Blender .NNN suffix for duplicates). Falls back to
|
|
||||||
part_names_ordered index-based matching.
|
|
||||||
|
|
||||||
mat_map: {part_name_lower: material_name}
|
|
||||||
Parts without a match keep their current material.
|
|
||||||
"""
|
|
||||||
if not mat_lib_path or not os.path.isfile(mat_lib_path):
|
|
||||||
print(f"[blender_render] material library not found: {mat_lib_path}")
|
|
||||||
return
|
|
||||||
|
|
||||||
# Collect unique material names needed
|
|
||||||
needed = set(mat_map.values())
|
|
||||||
if not needed:
|
|
||||||
return
|
|
||||||
|
|
||||||
# Append materials from library
|
|
||||||
appended = {}
|
|
||||||
for mat_name in needed:
|
|
||||||
inner_path = f"{mat_lib_path}/Material/{mat_name}"
|
|
||||||
try:
|
|
||||||
bpy.ops.wm.append(
|
|
||||||
filepath=inner_path,
|
|
||||||
directory=f"{mat_lib_path}/Material/",
|
|
||||||
filename=mat_name,
|
|
||||||
link=False,
|
|
||||||
)
|
|
||||||
if mat_name in bpy.data.materials:
|
|
||||||
appended[mat_name] = bpy.data.materials[mat_name]
|
|
||||||
print(f"[blender_render] appended material: {mat_name}")
|
|
||||||
else:
|
|
||||||
print(f"[blender_render] WARNING: material '{mat_name}' not found after append")
|
|
||||||
except Exception as exc:
|
|
||||||
print(f"[blender_render] WARNING: failed to append material '{mat_name}': {exc}")
|
|
||||||
|
|
||||||
if not appended:
|
|
||||||
return
|
|
||||||
|
|
||||||
# Assign materials to parts — primary: name-based (GLB object names),
|
|
||||||
# secondary: index-based via part_names_ordered
|
|
||||||
assigned_count = 0
|
|
||||||
unmatched_names = []
|
|
||||||
for i, part in enumerate(parts):
|
|
||||||
# Try name-based matching first (strip Blender .NNN suffix)
|
|
||||||
base_name = _re.sub(r'\.\d{3}$', '', part.name)
|
|
||||||
# Strip OCC assembly-instance suffix (_AF0, _AF1, …) — GLB object
|
|
||||||
# names may or may not have them while mat_map keys might.
|
|
||||||
_prev = None
|
|
||||||
while _prev != base_name:
|
|
||||||
_prev = base_name
|
|
||||||
base_name = _re.sub(r'_AF\d+$', '', base_name, flags=_re.IGNORECASE)
|
|
||||||
part_key = base_name.lower().strip()
|
|
||||||
mat_name = mat_map.get(part_key)
|
|
||||||
|
|
||||||
# Prefix fallback: if a mat_map key starts with our base name or
|
|
||||||
# vice-versa, use the longest matching key (most-specific wins).
|
|
||||||
if not mat_name:
|
|
||||||
for key, val in sorted(mat_map.items(), key=lambda x: len(x[0]), reverse=True):
|
|
||||||
if len(key) >= 5 and len(part_key) >= 5 and (
|
|
||||||
part_key.startswith(key) or key.startswith(part_key)
|
|
||||||
):
|
|
||||||
mat_name = val
|
|
||||||
break
|
|
||||||
|
|
||||||
# Fall back to index-based matching via part_names_ordered
|
|
||||||
if not mat_name and part_names_ordered and i < len(part_names_ordered):
|
|
||||||
step_name = part_names_ordered[i]
|
|
||||||
step_key = step_name.lower().strip()
|
|
||||||
mat_name = mat_map.get(step_key)
|
|
||||||
# Also try stripping AF from part_names_ordered entry
|
|
||||||
if not mat_name:
|
|
||||||
_p2 = None
|
|
||||||
while _p2 != step_key:
|
|
||||||
_p2 = step_key
|
|
||||||
step_key = _re.sub(r'_af\d+$', '', step_key)
|
|
||||||
mat_name = mat_map.get(step_key)
|
|
||||||
|
|
||||||
if mat_name and mat_name in appended:
|
|
||||||
part.data.materials.clear()
|
|
||||||
part.data.materials.append(appended[mat_name])
|
|
||||||
assigned_count += 1
|
|
||||||
else:
|
|
||||||
unmatched_names.append(part.name)
|
|
||||||
|
|
||||||
print(f"[blender_render] material assignment: {assigned_count}/{len(parts)} parts matched", flush=True)
|
|
||||||
if unmatched_names:
|
|
||||||
print(f"[blender_render] unmatched parts → assigning {FAILED_MATERIAL_NAME}: {unmatched_names[:10]}", flush=True)
|
|
||||||
unmatched_set = set(unmatched_names)
|
|
||||||
for part in parts:
|
|
||||||
if part.name in unmatched_set:
|
|
||||||
if part.data.users > 1:
|
|
||||||
part.data = part.data.copy()
|
|
||||||
_assign_failed_material(part)
|
|
||||||
|
|
||||||
|
|
||||||
# ── Early GPU activation (must happen BEFORE open_mainfile / Cycles init) ────
# Blender compiles Cycles kernels when the engine first initializes. If the
# compute_device_type is NONE at that point, Cycles locks to CPU for the rest
# of the session. We therefore probe + enable GPU devices NOW, before any
# .blend template (which may trigger Cycles init) is loaded.
def _activate_gpu():
    """Probe for GPU compute devices and activate them.

    Returns the activated backend name ('OPTIX'/'CUDA'/'HIP'/'ONEAPI'), or
    None when the CLI forced CPU rendering or no GPU backend is available.
    """
    if cycles_device == "cpu":
        return None
    try:
        cprefs = bpy.context.preferences.addons['cycles'].preferences
        # Try backends in order of preference; first one exposing a non-CPU
        # device wins and gets all its non-CPU devices enabled.
        for dt in ('OPTIX', 'CUDA', 'HIP', 'ONEAPI'):
            try:
                cprefs.compute_device_type = dt
                cprefs.get_devices()
                gpu = [d for d in cprefs.devices if d.type != 'CPU']
                if gpu:
                    for d in cprefs.devices:
                        d.use = (d.type != 'CPU')
                    print(f"[blender_render] early GPU activation: {dt}, "
                          f"devices={[(d.name, d.type) for d in gpu]}", flush=True)
                    return dt
            except Exception as e:
                print(f"[blender_render] {dt} not available: {e}", flush=True)
    except Exception as e:
        print(f"[blender_render] early GPU probe failed: {e}", flush=True)
    return None


_early_gpu_type = _activate_gpu()
|
||||||
|
|
||||||
# ── Timing harness ────────────────────────────────────────────────────────────
|
|
||||||
import time as _time
|
import time as _time
|
||||||
_t0 = _time.monotonic()
|
_t0 = _time.monotonic()
|
||||||
_timings: dict = {}
|
_timings: dict = {}
|
||||||
|
|
||||||
|
|
||||||
def _lap(label: str) -> None:
|
def _lap(label: str) -> None:
|
||||||
"""Record elapsed time since the last _lap() call and since t0."""
|
|
||||||
global _t_last
|
|
||||||
now = _time.monotonic()
|
now = _time.monotonic()
|
||||||
if not hasattr(_lap, '_last'):
|
if not hasattr(_lap, '_last'):
|
||||||
_lap._last = _t0
|
_lap._last = _t0
|
||||||
@@ -538,259 +120,77 @@ def _lap(label: str) -> None:
|
|||||||
print(f"[blender_render] TIMING {label}={delta:.2f}s (total={total:.2f}s)", flush=True)
|
print(f"[blender_render] TIMING {label}={delta:.2f}s (total={total:.2f}s)", flush=True)
|
||||||
_lap._last = now
|
_lap._last = now
|
||||||
|
|
||||||
# ── SCENE SETUP ──────────────────────────────────────────────────────────────
|
|
||||||
|
# ── SCENE SETUP ───────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
if use_template:
|
if use_template:
|
||||||
# ── MODE B: Template-based render ────────────────────────────────────────
|
# ── MODE B: Template-based render ─────────────────────────────────────────
|
||||||
print(f"[blender_render] Opening template: {template_path}")
|
print(f"[blender_render] Opening template: {template_path}")
|
||||||
bpy.ops.wm.open_mainfile(filepath=template_path)
|
bpy.ops.wm.open_mainfile(filepath=template_path)
|
||||||
_lap("template_load")
|
_lap("template_load")
|
||||||
|
|
||||||
# Find or create target collection
|
target_col = ensure_collection(target_collection)
|
||||||
target_col = _ensure_collection(target_collection)
|
parts = import_glb(glb_path)
|
||||||
|
|
||||||
# Import OCC GLB (already in metres, one object per STEP part)
|
|
||||||
parts = _import_glb(glb_path)
|
|
||||||
_lap("glb_import")
|
_lap("glb_import")
|
||||||
# Apply render position rotation (before camera/bbox calculations)
|
apply_rotation(parts, rotation_x, rotation_y, rotation_z)
|
||||||
_apply_rotation(parts, rotation_x, rotation_y, rotation_z)
|
|
||||||
_lap("rotation")
|
_lap("rotation")
|
||||||
|
|
||||||
# Move imported parts into target collection
|
|
||||||
for part in parts:
|
for part in parts:
|
||||||
# Remove from all existing collections
|
|
||||||
for col in list(part.users_collection):
|
for col in list(part.users_collection):
|
||||||
col.objects.unlink(part)
|
col.objects.unlink(part)
|
||||||
target_col.objects.link(part)
|
target_col.objects.link(part)
|
||||||
|
|
||||||
# Batch smooth shading: select all parts, call shade_smooth_by_angle ONCE.
|
apply_smooth_batch(parts, smooth_angle)
|
||||||
# In Blender 5 this adds a "Smooth by Angle" GeoNodes modifier to every
|
|
||||||
# selected object in a single C call — same effect as calling per-object
|
|
||||||
# but ~100× faster (0.2s vs 16s for 175 parts).
|
|
||||||
_apply_smooth_batch(parts, smooth_angle)
|
|
||||||
# If OCC extracted sharp edge vertex pairs, mark them explicitly.
|
|
||||||
_occ_pairs = _mesh_attrs.get("sharp_edge_pairs") or []
|
_occ_pairs = _mesh_attrs.get("sharp_edge_pairs") or []
|
||||||
if _occ_pairs:
|
if _occ_pairs:
|
||||||
_apply_sharp_edges_from_occ(parts, _occ_pairs)
|
apply_sharp_edges_from_occ(parts, _occ_pairs)
|
||||||
_lap("smooth_shading")
|
_lap("smooth_shading")
|
||||||
|
|
||||||
# Material assignment: library materials if available, otherwise palette
|
|
||||||
if material_library_path and material_map:
|
if material_library_path and material_map:
|
||||||
# Build lowercased material_map for matching.
|
apply_material_library(parts, material_library_path, build_mat_map_lower(material_map), part_names_ordered)
|
||||||
# Include BOTH the original key AND the key with _AF\d+ stripped,
|
|
||||||
# so GLB names (which may lack AF suffixes) can match.
|
|
||||||
mat_map_lower = {}
|
|
||||||
for k, v in material_map.items():
|
|
||||||
kl = k.lower().strip()
|
|
||||||
mat_map_lower[kl] = v
|
|
||||||
# Also add AF-stripped version
|
|
||||||
_stripped = kl
|
|
||||||
_p = None
|
|
||||||
while _p != _stripped:
|
|
||||||
_p = _stripped
|
|
||||||
_stripped = _re.sub(r'_af\d+$', '', _stripped)
|
|
||||||
if _stripped != kl:
|
|
||||||
mat_map_lower.setdefault(_stripped, v)
|
|
||||||
_apply_material_library(parts, material_library_path, mat_map_lower)
|
|
||||||
# Parts not matched by library get the failed-material fallback (magenta)
|
|
||||||
unmatched = []
|
|
||||||
for part in parts:
|
|
||||||
if not part.data.materials or len(part.data.materials) == 0:
|
|
||||||
_assign_failed_material(part)
|
|
||||||
unmatched.append(part.name)
|
|
||||||
if unmatched:
|
|
||||||
print(f"[blender_render] WARNING: {len(unmatched)} parts unmatched, assigned {FAILED_MATERIAL_NAME}: {unmatched[:5]}", flush=True)
|
|
||||||
else:
|
else:
|
||||||
# No material library — assign fallback to all parts
|
|
||||||
for part in parts:
|
for part in parts:
|
||||||
_assign_failed_material(part)
|
assign_failed_material(part)
|
||||||
_lap("material_assign")
|
_lap("material_assign")
|
||||||
|
|
||||||
# ── Shadow catcher (Cycles only, template mode only) ─────────────────────
|
|
||||||
if shadow_catcher:
|
if shadow_catcher:
|
||||||
sc_col_name = "Shadowcatcher"
|
setup_shadow_catcher(parts)
|
||||||
sc_obj_name = "Shadowcatcher"
|
|
||||||
# Enable the Shadowcatcher collection in all view layers
|
|
||||||
for vl in bpy.context.scene.view_layers:
|
|
||||||
def _enable_col_recursive(layer_col):
|
|
||||||
if layer_col.collection.name == sc_col_name:
|
|
||||||
layer_col.exclude = False
|
|
||||||
layer_col.collection.hide_render = False
|
|
||||||
layer_col.collection.hide_viewport = False
|
|
||||||
return True
|
|
||||||
for child in layer_col.children:
|
|
||||||
if _enable_col_recursive(child):
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
_enable_col_recursive(vl.layer_collection)
|
|
||||||
|
|
||||||
sc_obj = bpy.data.objects.get(sc_obj_name)
|
|
||||||
if sc_obj:
|
|
||||||
# Calculate product bbox min Z (world space)
|
|
||||||
all_world_corners = []
|
|
||||||
for part in parts:
|
|
||||||
for corner in part.bound_box:
|
|
||||||
all_world_corners.append((part.matrix_world @ Vector(corner)).z)
|
|
||||||
if all_world_corners:
|
|
||||||
sc_obj.location.z = min(all_world_corners)
|
|
||||||
print(f"[blender_render] shadow catcher enabled, plane Z={sc_obj.location.z:.4f}")
|
|
||||||
else:
|
|
||||||
print(f"[blender_render] WARNING: shadow catcher object '{sc_obj_name}' not found in template")
|
|
||||||
|
|
||||||
# lighting_only: use template World/HDRI but force auto-camera UNLESS the shadow
|
|
||||||
# catcher is enabled — in that case the template camera is already positioned to
|
|
||||||
# show both the product and its shadow on the ground plane.
|
|
||||||
needs_auto_camera = (lighting_only and not shadow_catcher) or not bpy.context.scene.camera
|
needs_auto_camera = (lighting_only and not shadow_catcher) or not bpy.context.scene.camera
|
||||||
if lighting_only and not shadow_catcher:
|
if lighting_only and not shadow_catcher:
|
||||||
print("[blender_render] lighting_only mode: using template World/HDRI, forcing auto-camera")
|
print("[blender_render] lighting_only mode: using template World/HDRI, forcing auto-camera")
|
||||||
elif needs_auto_camera:
|
elif needs_auto_camera:
|
||||||
print("[blender_render] WARNING: template has no camera — will create auto-camera")
|
print("[blender_render] WARNING: template has no camera — will create auto-camera")
|
||||||
|
|
||||||
# Set very close near clip on template camera for mm-scale parts (now in metres)
|
|
||||||
if not needs_auto_camera and bpy.context.scene.camera:
|
if not needs_auto_camera and bpy.context.scene.camera:
|
||||||
bpy.context.scene.camera.data.clip_start = 0.001
|
bpy.context.scene.camera.data.clip_start = 0.001
|
||||||
|
|
||||||
print(f"[blender_render] template mode: {len(parts)} parts imported into collection '{target_collection}'")
|
print(f"[blender_render] template mode: {len(parts)} parts imported into collection '{target_collection}'")
|
||||||
|
|
||||||
else:
|
else:
|
||||||
# ── MODE A: Factory settings (original behavior) ─────────────────────────
|
# ── MODE A: Factory settings ───────────────────────────────────────────────
|
||||||
needs_auto_camera = True
|
needs_auto_camera = True
|
||||||
bpy.ops.wm.read_factory_settings(use_empty=True)
|
bpy.ops.wm.read_factory_settings(use_empty=True)
|
||||||
# Import OCC GLB (already in metres, one object per STEP part)
|
parts = import_glb(glb_path)
|
||||||
parts = _import_glb(glb_path)
|
apply_rotation(parts, rotation_x, rotation_y, rotation_z)
|
||||||
# Apply render position rotation (before camera/bbox calculations)
|
|
||||||
_apply_rotation(parts, rotation_x, rotation_y, rotation_z)
|
|
||||||
|
|
||||||
import time as _time
|
|
||||||
_t_smooth_a = _time.time()
|
_t_smooth_a = _time.time()
|
||||||
_apply_smooth_batch(parts, smooth_angle)
|
apply_smooth_batch(parts, smooth_angle)
|
||||||
_occ_pairs = _mesh_attrs.get("sharp_edge_pairs") or []
|
_occ_pairs = _mesh_attrs.get("sharp_edge_pairs") or []
|
||||||
if _occ_pairs:
|
if _occ_pairs:
|
||||||
_apply_sharp_edges_from_occ(parts, _occ_pairs)
|
apply_sharp_edges_from_occ(parts, _occ_pairs)
|
||||||
for part in parts:
|
for part in parts:
|
||||||
_assign_failed_material(part)
|
assign_failed_material(part)
|
||||||
print(f"[blender_render] smooth+fallback-material: {len(parts)} parts ({_time.time()-_t_smooth_a:.2f}s)", flush=True)
|
print(f"[blender_render] smooth+fallback-material: {len(parts)} parts ({_time.time()-_t_smooth_a:.2f}s)", flush=True)
|
||||||
|
|
||||||
# Apply material library on top of palette colours (same logic as Mode B).
|
|
||||||
# material_library_path / material_map are parsed from argv even in Mode A
|
|
||||||
# but were previously never used here — that was the bug.
|
|
||||||
if material_library_path and material_map:
|
if material_library_path and material_map:
|
||||||
mat_map_lower = {}
|
apply_material_library(parts, material_library_path, build_mat_map_lower(material_map), part_names_ordered)
|
||||||
for k, v in material_map.items():
|
|
||||||
kl = k.lower().strip()
|
|
||||||
mat_map_lower[kl] = v
|
|
||||||
_stripped = kl
|
|
||||||
_p = None
|
|
||||||
while _p != _stripped:
|
|
||||||
_p = _stripped
|
|
||||||
_stripped = _re.sub(r'_af\d+$', '', _stripped)
|
|
||||||
if _stripped != kl:
|
|
||||||
mat_map_lower.setdefault(_stripped, v)
|
|
||||||
_apply_material_library(parts, material_library_path, mat_map_lower)
|
|
||||||
# Parts not matched by the library keep their fallback material (already set above)
|
|
||||||
|
|
||||||
if needs_auto_camera:
|
if needs_auto_camera:
|
||||||
# ── Combined bounding box / bounding sphere ──────────────────────────────
|
bbox_center, bsphere_radius = setup_auto_camera(parts, width, height)
|
||||||
all_corners = []
|
|
||||||
for part in parts:
|
|
||||||
all_corners.extend(part.matrix_world @ Vector(c) for c in part.bound_box)
|
|
||||||
|
|
||||||
bbox_min = Vector((
|
|
||||||
min(v.x for v in all_corners),
|
|
||||||
min(v.y for v in all_corners),
|
|
||||||
min(v.z for v in all_corners),
|
|
||||||
))
|
|
||||||
bbox_max = Vector((
|
|
||||||
max(v.x for v in all_corners),
|
|
||||||
max(v.y for v in all_corners),
|
|
||||||
max(v.z for v in all_corners),
|
|
||||||
))
|
|
||||||
|
|
||||||
bbox_center = (bbox_min + bbox_max) * 0.5
|
|
||||||
bbox_dims = bbox_max - bbox_min
|
|
||||||
bsphere_radius = max(bbox_dims.length * 0.5, 0.001)
|
|
||||||
|
|
||||||
print(f"[blender_render] bbox_dims={tuple(round(d,4) for d in bbox_dims)}, "
|
|
||||||
f"bsphere_radius={bsphere_radius:.4f}, center={tuple(round(c,4) for c in bbox_center)}")
|
|
||||||
|
|
||||||
# ── Lighting — only in Mode A (factory settings) ─────────────────────────
|
|
||||||
# In template mode the .blend file provides its own World/HDRI lighting.
|
|
||||||
# Adding auto-lights would overpower the template's intended look.
|
|
||||||
if not use_template:
|
|
||||||
light_dist = bsphere_radius * 6.0
|
|
||||||
|
|
||||||
bpy.ops.object.light_add(type='SUN', location=(
|
|
||||||
bbox_center.x + light_dist * 0.5,
|
|
||||||
bbox_center.y - light_dist * 0.35,
|
|
||||||
bbox_center.z + light_dist,
|
|
||||||
))
|
|
||||||
sun = bpy.context.active_object
|
|
||||||
sun.data.energy = 4.0
|
|
||||||
sun.rotation_euler = (math.radians(45), 0, math.radians(30))
|
|
||||||
|
|
||||||
bpy.ops.object.light_add(type='AREA', location=(
|
|
||||||
bbox_center.x - light_dist * 0.4,
|
|
||||||
bbox_center.y + light_dist * 0.4,
|
|
||||||
bbox_center.z + light_dist * 0.7,
|
|
||||||
))
|
|
||||||
fill = bpy.context.active_object
|
|
||||||
fill.data.energy = max(800.0, bsphere_radius ** 2 * 2000.0)
|
|
||||||
fill.data.size = max(4.0, bsphere_radius * 4.0)
|
|
||||||
|
|
||||||
# ── Camera ───────────────────────────────────────────────────────────────
|
|
||||||
ELEVATION_DEG = 28.0
|
|
||||||
AZIMUTH_DEG = 40.0
|
|
||||||
LENS_MM = 50.0
|
|
||||||
SENSOR_WIDTH_MM = 36.0
|
|
||||||
FILL_FACTOR = 0.85
|
|
||||||
|
|
||||||
elevation_rad = math.radians(ELEVATION_DEG)
|
|
||||||
azimuth_rad = math.radians(AZIMUTH_DEG)
|
|
||||||
|
|
||||||
cam_dir = Vector((
|
|
||||||
math.cos(elevation_rad) * math.cos(azimuth_rad),
|
|
||||||
math.cos(elevation_rad) * math.sin(azimuth_rad),
|
|
||||||
math.sin(elevation_rad),
|
|
||||||
)).normalized()
|
|
||||||
|
|
||||||
fov_h = math.atan(SENSOR_WIDTH_MM / (2.0 * LENS_MM))
|
|
||||||
fov_v = math.atan(SENSOR_WIDTH_MM * (height / width) / (2.0 * LENS_MM))
|
|
||||||
fov_used = min(fov_h, fov_v)
|
|
||||||
|
|
||||||
dist = (bsphere_radius / math.tan(fov_used)) / FILL_FACTOR
|
|
||||||
dist = max(dist, bsphere_radius * 1.5)
|
|
||||||
print(f"[blender_render] camera dist={dist:.4f}, fov={math.degrees(fov_used):.2f}°")
|
|
||||||
|
|
||||||
cam_location = bbox_center + cam_dir * dist
|
|
||||||
bpy.ops.object.camera_add(location=cam_location)
|
|
||||||
cam_obj = bpy.context.active_object
|
|
||||||
cam_obj.data.lens = LENS_MM
|
|
||||||
bpy.context.scene.camera = cam_obj
|
|
||||||
|
|
||||||
look_dir = (bbox_center - cam_location).normalized()
|
|
||||||
up_world = Vector((0.0, 0.0, 1.0))
|
|
||||||
right = look_dir.cross(up_world)
|
|
||||||
if right.length < 1e-6:
|
|
||||||
right = Vector((1.0, 0.0, 0.0))
|
|
||||||
right.normalize()
|
|
||||||
cam_up = right.cross(look_dir).normalized()
|
|
||||||
|
|
||||||
rot_mat = Matrix((
|
|
||||||
( right.x, right.y, right.z),
|
|
||||||
( cam_up.x, cam_up.y, cam_up.z),
|
|
||||||
(-look_dir.x, -look_dir.y, -look_dir.z),
|
|
||||||
)).transposed()
|
|
||||||
cam_obj.rotation_euler = rot_mat.to_euler('XYZ')
|
|
||||||
|
|
||||||
cam_obj.data.clip_start = max(dist * 0.001, 0.0001)
|
|
||||||
cam_obj.data.clip_end = dist + bsphere_radius * 3.0
|
|
||||||
print(f"[blender_render] clip {cam_obj.data.clip_start:.6f} … {cam_obj.data.clip_end:.4f}")
|
|
||||||
|
|
||||||
# ── World background — only in Mode A ────────────────────────────────────
|
|
||||||
# In template mode the .blend file owns its World (HDRI, sky texture, studio
|
|
||||||
# lighting). Overwriting it would destroy the HDR look the template was
|
|
||||||
# designed to use (e.g. Alpha-HDR output types with Filmic tonemapping).
|
|
||||||
if not use_template:
|
if not use_template:
|
||||||
|
setup_auto_lights(bbox_center, bsphere_radius)
|
||||||
|
# Mode A world background
|
||||||
world = bpy.data.worlds.new("World")
|
world = bpy.data.worlds.new("World")
|
||||||
bpy.context.scene.world = world
|
bpy.context.scene.world = world
|
||||||
world.use_nodes = True
|
world.use_nodes = True
|
||||||
@@ -798,88 +198,16 @@ if needs_auto_camera:
|
|||||||
bg.inputs["Color"].default_value = (0.96, 0.96, 0.97, 1.0)
|
bg.inputs["Color"].default_value = (0.96, 0.96, 0.97, 1.0)
|
||||||
bg.inputs["Strength"].default_value = 0.15
|
bg.inputs["Strength"].default_value = 0.15
|
||||||
|
|
||||||
# ── Render engine ─────────────────────────────────────────────────────────────
|
# ── Render engine ──────────────────────────────────────────────────────────────
|
||||||
scene = bpy.context.scene
|
scene = bpy.context.scene
|
||||||
|
engine = configure_engine(
|
||||||
|
scene, engine, samples, cycles_device, _early_gpu_type,
|
||||||
|
noise_threshold_arg, denoiser_arg,
|
||||||
|
denoising_input_passes_arg, denoising_prefilter_arg,
|
||||||
|
denoising_quality_arg, denoising_use_gpu_arg,
|
||||||
|
)
|
||||||
|
|
||||||
if engine == "eevee":
|
# ── Colour management ──────────────────────────────────────────────────────────
|
||||||
# Blender 4.x used 'BLENDER_EEVEE_NEXT'; Blender 5.x reverted to 'BLENDER_EEVEE'.
|
|
||||||
# Try both names so the script works across versions.
|
|
||||||
set_ok = False
|
|
||||||
for eevee_id in ('BLENDER_EEVEE', 'BLENDER_EEVEE_NEXT'):
|
|
||||||
try:
|
|
||||||
scene.render.engine = eevee_id
|
|
||||||
set_ok = True
|
|
||||||
print(f"[blender_render] EEVEE engine id: {eevee_id}")
|
|
||||||
break
|
|
||||||
except TypeError:
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not set_ok:
|
|
||||||
print("[blender_render] WARNING: could not set EEVEE engine – falling back to Cycles")
|
|
||||||
engine = "cycles"
|
|
||||||
|
|
||||||
if engine == "eevee":
|
|
||||||
# Sample attribute name changed across minor versions
|
|
||||||
for attr in ('taa_render_samples', 'samples'):
|
|
||||||
try:
|
|
||||||
setattr(scene.eevee, attr, samples)
|
|
||||||
print(f"[blender_render] EEVEE samples: scene.eevee.{attr}={samples}")
|
|
||||||
break
|
|
||||||
except AttributeError:
|
|
||||||
continue
|
|
||||||
|
|
||||||
if engine != "eevee": # covers both explicit Cycles and EEVEE-fallback
|
|
||||||
# ── GPU preferences (before engine activation) ───────────────────────
|
|
||||||
# Set compute_device_type in preferences so Cycles can find GPU kernels.
|
|
||||||
gpu_type_found = _activate_gpu() or _early_gpu_type
|
|
||||||
|
|
||||||
# ── Activate Cycles engine ───────────────────────────────────────────
|
|
||||||
scene.render.engine = 'CYCLES'
|
|
||||||
|
|
||||||
# ── Device selection AFTER engine activation ─────────────────────────
|
|
||||||
# IMPORTANT: scene.cycles.device must be set AFTER scene.render.engine
|
|
||||||
# = 'CYCLES'. Setting it before can be overwritten when Cycles inits
|
|
||||||
# and reads the scene's saved properties (template may have device=CPU).
|
|
||||||
if gpu_type_found:
|
|
||||||
scene.cycles.device = 'GPU'
|
|
||||||
# Re-ensure preferences are set (engine activation may have reset them)
|
|
||||||
_activate_gpu()
|
|
||||||
print(f"[blender_render] Cycles GPU ({gpu_type_found}), samples={samples}", flush=True)
|
|
||||||
print(f"RENDER_DEVICE_USED: engine=CYCLES device=GPU compute_type={gpu_type_found}", flush=True)
|
|
||||||
else:
|
|
||||||
scene.cycles.device = 'CPU'
|
|
||||||
print(f"[blender_render] WARNING: GPU not found — falling back to CPU, samples={samples}", flush=True)
|
|
||||||
print("RENDER_DEVICE_USED: engine=CYCLES device=CPU compute_type=NONE (fallback)", flush=True)
|
|
||||||
import os as _os
|
|
||||||
if _os.environ.get("CYCLES_DEVICE", "auto").lower() == "gpu":
|
|
||||||
print("GPU_REQUIRED_BUT_CPU_USED: strict mode active (CYCLES_DEVICE=gpu)", flush=True)
|
|
||||||
sys.exit(2)
|
|
||||||
|
|
||||||
scene.cycles.samples = samples
|
|
||||||
scene.cycles.use_denoising = True
|
|
||||||
scene.cycles.denoiser = denoiser_arg if denoiser_arg else 'OPENIMAGEDENOISE'
|
|
||||||
if denoising_input_passes_arg:
|
|
||||||
try: scene.cycles.denoising_input_passes = denoising_input_passes_arg
|
|
||||||
except Exception: pass
|
|
||||||
if denoising_prefilter_arg:
|
|
||||||
try: scene.cycles.denoising_prefilter = denoising_prefilter_arg
|
|
||||||
except Exception: pass
|
|
||||||
if denoising_quality_arg:
|
|
||||||
try: scene.cycles.denoising_quality = denoising_quality_arg
|
|
||||||
except Exception: pass
|
|
||||||
if denoising_use_gpu_arg:
|
|
||||||
try: scene.cycles.denoising_use_gpu = (denoising_use_gpu_arg == "1")
|
|
||||||
except AttributeError: pass
|
|
||||||
if noise_threshold_arg:
|
|
||||||
scene.cycles.use_adaptive_sampling = True
|
|
||||||
scene.cycles.adaptive_threshold = float(noise_threshold_arg)
|
|
||||||
|
|
||||||
# ── Colour management ─────────────────────────────────────────────────────────
|
|
||||||
# In template mode the .blend file owns its colour management (e.g. Filmic/
|
|
||||||
# AgX for HDR, custom exposure for Alpha-HDR output types). Overwriting it
|
|
||||||
# would destroy the look the template was designed for.
|
|
||||||
# In factory-settings mode (Mode A) force Standard to avoid the grey Filmic
|
|
||||||
# tint that Blender applies by default.
|
|
||||||
if not use_template:
|
if not use_template:
|
||||||
scene.view_settings.view_transform = 'Standard'
|
scene.view_settings.view_transform = 'Standard'
|
||||||
scene.view_settings.exposure = 0.0
|
scene.view_settings.exposure = 0.0
|
||||||
@@ -889,7 +217,7 @@ if not use_template:
|
|||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
# ── Render settings ───────────────────────────────────────────────────────────
|
# ── Render settings ────────────────────────────────────────────────────────────
|
||||||
scene.render.resolution_x = width
|
scene.render.resolution_x = width
|
||||||
scene.render.resolution_y = height
|
scene.render.resolution_y = height
|
||||||
scene.render.resolution_percentage = 100
|
scene.render.resolution_percentage = 100
|
||||||
@@ -897,8 +225,7 @@ scene.render.image_settings.file_format = 'PNG'
|
|||||||
scene.render.filepath = output_path
|
scene.render.filepath = output_path
|
||||||
scene.render.film_transparent = transparent_bg
|
scene.render.film_transparent = transparent_bg
|
||||||
|
|
||||||
# ── Render ────────────────────────────────────────────────────────────────────
|
# ── Final verification + render ────────────────────────────────────────────────
|
||||||
# Final verification of render device settings
|
|
||||||
if scene.render.engine == 'CYCLES':
|
if scene.render.engine == 'CYCLES':
|
||||||
cprefs = bpy.context.preferences.addons['cycles'].preferences
|
cprefs = bpy.context.preferences.addons['cycles'].preferences
|
||||||
print(f"[blender_render] VERIFY: engine={scene.render.engine}, "
|
print(f"[blender_render] VERIFY: engine={scene.render.engine}, "
|
||||||
@@ -906,6 +233,7 @@ if scene.render.engine == 'CYCLES':
|
|||||||
f"compute_device_type={cprefs.compute_device_type}, "
|
f"compute_device_type={cprefs.compute_device_type}, "
|
||||||
f"gpu_devices={[(d.name, d.type, d.use) for d in cprefs.devices if d.type != 'CPU']}",
|
f"gpu_devices={[(d.name, d.type, d.use) for d in cprefs.devices if d.type != 'CPU']}",
|
||||||
flush=True)
|
flush=True)
|
||||||
|
|
||||||
_lap("pre_render_setup")
|
_lap("pre_render_setup")
|
||||||
print(f"[blender_render] Rendering → {output_path} (Blender {bpy.app.version_string})", flush=True)
|
print(f"[blender_render] Rendering → {output_path} (Blender {bpy.app.version_string})", flush=True)
|
||||||
sys.stdout.flush()
|
sys.stdout.flush()
|
||||||
@@ -913,7 +241,7 @@ bpy.ops.render.render(write_still=True)
|
|||||||
print("[blender_render] render done.", flush=True)
|
print("[blender_render] render done.", flush=True)
|
||||||
_lap("gpu_render")
|
_lap("gpu_render")
|
||||||
|
|
||||||
# ── Final timing summary ──────────────────────────────────────────────────────
|
# ── Final timing summary ───────────────────────────────────────────────────────
|
||||||
_total = _time.monotonic() - _t0
|
_total = _time.monotonic() - _t0
|
||||||
print(f"[blender_render] TIMING_SUMMARY total={_total:.2f}s | " +
|
print(f"[blender_render] TIMING_SUMMARY total={_total:.2f}s | " +
|
||||||
" | ".join(f"{k}={v:.2f}s" for k, v in _timings.items()), flush=True)
|
" | ".join(f"{k}={v:.2f}s" for k, v in _timings.items()), flush=True)
|
||||||
|
|||||||
Binary file not shown.
|
Before Width: | Height: | Size: 1.8 MiB |
Binary file not shown.
|
Before Width: | Height: | Size: 2.3 MiB |
Binary file not shown.
|
Before Width: | Height: | Size: 1.5 MiB |
+23
-75
@@ -1,88 +1,36 @@
|
|||||||
# Review Report: CAD Viewer Material Assignment Fix + Feature Parity
|
# Review Report: Pipeline Cleanup (M1 + M3)
|
||||||
Datum: 2026-03-10
|
Date: 2026-03-11
|
||||||
|
|
||||||
## Ergebnis: ✅ Freigabe
|
## Result: ✅ Approved (2 low-severity unused imports fixed inline)
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Gefundene Probleme
|
## Problems Found
|
||||||
|
|
||||||
### [InlineCadViewer.tsx + ThreeDViewer.tsx] Misleading comment on isolateMode reset effect
|
### render-worker/scripts/blender_render.py:20 — Unused `import math`
|
||||||
**Schwere**: Gering (Kommentar)
|
**Severity**: Low
|
||||||
|
**Description**: `import math` is at the top of the entry-point but `math` is no longer referenced there — all math operations moved to submodules.
|
||||||
|
**Fix**: Remove the import. Applied inline.
|
||||||
|
|
||||||
In both files the comment reads:
|
### render-worker/scripts/_blender_import.py:5 — Unused `import re as _re`
|
||||||
```tsx
|
**Severity**: Low
|
||||||
// Reset isolateMode and hideAssigned when no part is pinned
|
**Description**: `re` module is imported at module level but not used anywhere in `_blender_import.py`. The `_re.sub` calls live in `_blender_materials.py`.
|
||||||
useEffect(() => {
|
**Fix**: Remove the import. Applied inline.
|
||||||
if (!pinnedPart) setIsolateMode('none') // ← only resets isolateMode, not hideAssigned!
|
|
||||||
}, [pinnedPart])
|
|
||||||
```
|
|
||||||
The comment says "and hideAssigned" but the effect only calls `setIsolateMode('none')`. The behavior is actually correct — `hideAssigned` should NOT be reset when unpinning (it's a persistent view toggle). Only the comment is wrong.
|
|
||||||
|
|
||||||
**Empfehlung**: Change to `// Reset isolateMode when no part is pinned`.
|
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Positiv aufgefallen
|
## Positives
|
||||||
|
|
||||||
### Bug fix: MaterialPanel invisible in ThreeDViewer — root cause correctly identified
|
- **Dead code thoroughly removed**: `VALID_STL_QUALITIES`, `stl_quality` (7 locations in admin.py), 6 frontend files, `_mark_sharp_and_seams()` (62 lines), `_render_via_service()` (33 lines), 2 dead `elif renderer == "threejs"` branches — all gone. All acceptance gates pass.
|
||||||
The diagnosis was precise: the outer `<div onClick={() => setPinnedPart(null)}>` was receiving the
|
- **Submodule decomposition is clean**: `blender_render.py` went 858 → 249 lines. Each submodule has a clear single responsibility with correct `sys.path.insert(0, ...)` for Blender Python discovery.
|
||||||
native DOM bubble from every canvas click, calling `setPinnedPart(null)` in the same React batch as
|
- **GPU activation order preserved**: `activate_gpu()` still called before `open_mainfile`, and again after engine init — the critical 3-call sequence is intact in `configure_engine()`.
|
||||||
`setPinnedPart(name)` from the THREE.js event handler — final state always `null`.
|
- **FailedMaterial sentinel preserved**: `assign_failed_material` in `_blender_materials.py` matches the original logic; unmatched parts in `apply_material_library` are now handled internally.
|
||||||
|
- **`part_names_ordered` global → parameter**: Correctly converted to an explicit parameter in `apply_material_library()`.
|
||||||
The two-part fix is clean and idiomatic:
|
- **No security issues**: No hardcoded credentials, no SQL injections, no new endpoints, no new models.
|
||||||
- `onClick={(e) => e.stopPropagation()}` on the viewport div absorbs DOM clicks
|
- **No render pipeline regressions**: No references to removed blender-renderer or threejs-renderer services.
|
||||||
- `onPointerMissed={() => setPinnedPart(null)}` on the R3F Canvas handles the "click empty space"
|
- **Frontend**: TypeScript errors in output are pre-existing (Admin.tsx GPUProbeResult, InlineCadViewer.tsx), not introduced by this change.
|
||||||
case via the THREE.js raycaster (fires only when no mesh is hit) — this is exactly the right
|
|
||||||
R3F API for this use case
|
|
||||||
|
|
||||||
### cadUtils.ts — normalization regex extension
|
|
||||||
`/_AF\d+(_ASM)?$/i` is minimal and correct. It handles:
|
|
||||||
- `_AF0`, `_AF1` (existing, unchanged)
|
|
||||||
- `_AF0_ASM`, `_AF1_ASM` (new — assembly-node suffix)
|
|
||||||
- Case-insensitive flag is defensive and correct
|
|
||||||
- The loop-until-stable pattern handles nested suffixes as before
|
|
||||||
- `_ASM` alone (without `_AF\d+`) is NOT stripped — correct, it's part of base names like
|
|
||||||
`GE360-HF_000_P_ASM_ASM`
|
|
||||||
|
|
||||||
### Combined visibility useEffect — correct design
|
|
||||||
Merging `hideAssigned` + `isolateMode` into a single traversal effect avoids
|
|
||||||
ordering ambiguity between two independent effects competing on the same `mesh.visible` and
|
|
||||||
`mat.opacity`. The priority order (hideAssigned first, then isolateMode) is explicit and logical.
|
|
||||||
The pinned part (`isSelected`) is always protected from hiding regardless of mode. ✓
|
|
||||||
|
|
||||||
### Effect separation is clean
|
|
||||||
- Color-apply effect: only touches `mat.color` → deps `[modelReady, partMaterials]`
|
|
||||||
- Unassigned glow effect: only touches `mat.emissive` → deps `[modelReady, showUnassigned, partMaterials]`
|
|
||||||
- Combined visibility effect: only touches `mesh.visible` / `mat.opacity` / `mat.transparent` → deps `[modelReady, pinnedPart, isolateMode, hideAssigned, partMaterials]`
|
|
||||||
|
|
||||||
No effect touches another effect's properties — no race conditions.
|
|
||||||
|
|
||||||
### GPU hint and DPR cap
|
|
||||||
`gl={{ powerPreference: 'high-performance' }}` + `dpr={[1, 1.5]}` on both Canvas elements.
|
|
||||||
`preserveDrawingBuffer: true` correctly kept only in ThreeDViewer (required for screenshot capture).
|
|
||||||
|
|
||||||
### "Hide assigned" toolbar button correctly conditional
|
|
||||||
`{assignedCount > 0 && (...)}` in InlineCadViewer and
|
|
||||||
`{modelReady && Object.keys(partMaterials).length > 0 && (...)}` in ThreeDViewer — button only
|
|
||||||
appears when there is something to hide.
|
|
||||||
|
|
||||||
### Debug log is dev-only
|
|
||||||
`if (!import.meta.env.DEV || ...)` guard ensures the console output and traversal overhead
|
|
||||||
never reach production. The output logs both matched and unmatched keys, which is exactly what's
|
|
||||||
needed to diagnose remaining name mismatches after the normalization fix.
|
|
||||||
|
|
||||||
### Feature parity achieved
|
|
||||||
ThreeDViewer and InlineCadViewer now have matching material-assignment features:
|
|
||||||
- ✓ `showUnassigned` highlight toggle with count badge
|
|
||||||
- ✓ `hideAssigned` toggle (new, both viewers)
|
|
||||||
- ✓ `isolateMode` (ghost / hide) via MaterialPanel (both viewers)
|
|
||||||
- ✓ `onPointerMissed` closes panel on empty-space click in ThreeDViewer
|
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Empfehlung
|
## Recommendation
|
||||||
|
Approved. Two unused imports fixed inline before commit.
|
||||||
**Freigabe.** The one Gering comment issue can be fixed inline.
|
|
||||||
|
|
||||||
Review abgeschlossen. Ergebnis: ✅
|
|
||||||
|
|||||||
Reference in New Issue
Block a user