# Commit 8933d0be17
#
# Render pipeline:
# - Replace per-object _apply_smooth() loop with _apply_smooth_batch(): selects all
#   175 parts, calls shade_smooth_by_angle() ONCE in C → reduces 16s to ~0.2s
# - Remove 175 per-part "assigned material to part" log lines (replace with summary)
# - Add TIMING_SUMMARY log line at end of every render showing all step durations
# - _lap() helper records split times for: template_load, glb_import, rotation,
#   smooth_shading, material_assign, pre_render_setup, gpu_render
#
# Frontend role checks:
# - Add global_admin + tenant_admin to User role type in auth store
# - Add isAdmin() and isPrivileged() helper functions
# - Fix Admin.tsx, Layout.tsx, Notifications.tsx, OrderDetail.tsx, ProductDetail.tsx,
#   CostOverviewWidget.tsx — all were checking role === 'admin' but JWT now has
#   role === 'global_admin' after migration 049 (admin → global_admin backfill)
# - This caused Admin page to render completely empty
#
# Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
"""
|
||
Blender Python script for rendering a GLB file to PNG.
|
||
Targets Blender 5.0+ (EEVEE / Cycles).
|
||
|
||
Called by Blender:
|
||
blender --background --python blender_render.py -- \
|
||
<glb_path> <output_path> <width> <height> [engine] [samples]
|
||
|
||
engine: "cycles" (default) | "eevee"
|
||
|
||
Features:
|
||
- OCC-generated GLB: one mesh per STEP part, already in metres.
|
||
- Bounding-box-aware camera: object fills ~85 % of the frame.
|
||
- Isometric-style angle (elevation 28°, azimuth 40°).
|
||
- Dynamic clip planes.
|
||
- Standard (non-Filmic) colour management → no grey tint.
|
||
"""
|
||
import sys
import os
import math

# Force unbuffered stdout so render log lines appear immediately
# (the env var covers any child processes; reconfigure() covers this one —
# guarded with hasattr because not every stdout object supports it, e.g.
# when stdout is replaced by a wrapper).
os.environ["PYTHONUNBUFFERED"] = "1"
if hasattr(sys.stdout, "reconfigure"):
    sys.stdout.reconfigure(line_buffering=True)

# bpy / mathutils are only importable inside a Blender Python interpreter.
import bpy
from mathutils import Vector, Matrix

# Fallback material name — magenta, immediately visible when material assignment fails
FAILED_MATERIAL_NAME = "SCHAEFFLER_059999_FailedMaterial"
|
||
|
||
# ── Parse arguments ───────────────────────────────────────────────────────────
# Blender passes everything after "--" untouched to the script; everything
# before it belongs to Blender itself.

argv = sys.argv
if "--" in argv:
    argv = argv[argv.index("--") + 1:]
else:
    argv = []

# The first four positional arguments are mandatory.
if len(argv) < 4:
    print("Usage: blender --background --python blender_render.py -- "
          "<glb_path> <output_path> <width> <height> [engine] [samples] [smooth_angle] [cycles_device] [transparent_bg]")
    sys.exit(1)

import json as _json

# Positional arguments — ORDER IS PART OF THE CLI CONTRACT; do not reorder.
glb_path = argv[0]
output_path = argv[1]
width = int(argv[2])
height = int(argv[3])
engine = argv[4].lower() if len(argv) > 4 else "cycles"
# Default sample count differs per engine: EEVEE needs far fewer samples.
samples = int(argv[5]) if len(argv) > 5 else (64 if engine == "eevee" else 256)
smooth_angle = int(argv[6]) if len(argv) > 6 else 30  # degrees; 0 = flat shading
cycles_device = argv[7].lower() if len(argv) > 7 else "auto"  # "auto", "gpu", "cpu"
transparent_bg = argv[8] == "1" if len(argv) > 8 else False
template_path = argv[9] if len(argv) > 9 and argv[9] else ""
target_collection = argv[10] if len(argv) > 10 else "Product"
material_library_path = argv[11] if len(argv) > 11 and argv[11] else ""
# material_map arrives as a JSON object {part_name: material_name}.
material_map_raw = argv[12] if len(argv) > 12 else "{}"
try:
    material_map = _json.loads(material_map_raw) if material_map_raw else {}
except _json.JSONDecodeError:
    # Malformed JSON degrades to "no explicit mapping" rather than aborting.
    material_map = {}

# JSON array of STEP part names in GLB import order (index-based fallback).
part_names_ordered_raw = argv[13] if len(argv) > 13 else "[]"
try:
    part_names_ordered = _json.loads(part_names_ordered_raw) if part_names_ordered_raw else []
except _json.JSONDecodeError:
    part_names_ordered = []

lighting_only = argv[14] == "1" if len(argv) > 14 else False
shadow_catcher = argv[15] == "1" if len(argv) > 15 else False
# Render-position rotation in degrees (XYZ Euler), applied before camera fit.
rotation_x = float(argv[16]) if len(argv) > 16 else 0.0
rotation_y = float(argv[17]) if len(argv) > 17 else 0.0
rotation_z = float(argv[18]) if len(argv) > 18 else 0.0
# Optional Cycles denoising knobs — empty string means "leave Blender default".
noise_threshold_arg = argv[19] if len(argv) > 19 else ""
denoiser_arg = argv[20] if len(argv) > 20 else ""
denoising_input_passes_arg = argv[21] if len(argv) > 21 else ""
denoising_prefilter_arg = argv[22] if len(argv) > 22 else ""
denoising_quality_arg = argv[23] if len(argv) > 23 else ""
denoising_use_gpu_arg = argv[24] if len(argv) > 24 else ""

# Named argument: --mesh-attributes <json>
# NOTE(review): _mesh_attrs is parsed but not referenced anywhere in this
# file's visible code — presumably consumed by code outside this chunk.
_mesh_attrs: dict = {}
_sys_argv = sys.argv
if "--mesh-attributes" in _sys_argv:
    _idx = _sys_argv.index("--mesh-attributes")
    try:
        _mesh_attrs = _json.loads(_sys_argv[_idx + 1])
    except Exception:
        # Best-effort: missing/malformed payload simply leaves the dict empty.
        pass

# Validate template path: if provided it MUST exist on disk.
# Fail loudly rather than silently rendering with factory settings.
if template_path and not os.path.isfile(template_path):
    print(f"[blender_render] ERROR: template_path was provided but file not found: {template_path}")
    print("[blender_render] Check that the blend-templates directory is on the shared volume.")
    sys.exit(1)

# use_template selects Mode B (template .blend) vs Mode A (factory settings).
use_template = bool(template_path)
|
||
|
||
print(f"[blender_render] engine={engine}, samples={samples}, size={width}x{height}, smooth_angle={smooth_angle}°, device={cycles_device}, transparent={transparent_bg}")
|
||
print(f"[blender_render] part_names_ordered: {len(part_names_ordered)} entries")
|
||
if use_template:
|
||
print(f"[blender_render] template={template_path}, collection={target_collection}, lighting_only={lighting_only}")
|
||
else:
|
||
print("[blender_render] no template — using factory settings (Mode A)")
|
||
if material_library_path:
|
||
print(f"[blender_render] material_library={material_library_path}, material_map keys={list(material_map.keys())}")
|
||
|
||
# ── Helper: find or create collection by name ────────────────────────────────
|
||
|
||
def _ensure_collection(name: str):
    """Look up a collection by name, creating and linking a new one if absent.

    Newly created collections are linked under the scene's root collection.
    """
    existing = bpy.data.collections.get(name)
    if existing is not None:
        return existing
    created = bpy.data.collections.new(name)
    bpy.context.scene.collection.children.link(created)
    return created
|
||
|
||
|
||
def _apply_smooth_batch(parts, angle_deg):
    """Smooth-shade (or flat-shade) every mesh part with one operator call.

    bpy.ops.object.shade_smooth_by_angle() acts on the whole selection in a
    single C-level call, so selecting all meshes first turns O(n) operator
    overhead into O(1). Per-part calls cost ~90ms each × 175 parts = 16s;
    the batched call costs ~0.2s total. angle_deg == 0 means flat shading.
    """
    bpy.ops.object.select_all(action='DESELECT')
    meshes = [obj for obj in parts if obj.type == 'MESH']
    if not meshes:
        return
    for obj in meshes:
        obj.select_set(True)
    # The operator needs an active object; any selected mesh will do.
    bpy.context.view_layer.objects.active = meshes[0]
    if angle_deg <= 0:
        bpy.ops.object.shade_flat()
    else:
        try:
            bpy.ops.object.shade_smooth_by_angle(angle=math.radians(angle_deg))
        except AttributeError:
            # Older Blender without the per-angle operator: plain smooth
            # shading plus legacy auto-smooth where the mesh supports it.
            bpy.ops.object.shade_smooth()
            for obj in meshes:
                if hasattr(obj.data, 'use_auto_smooth'):
                    obj.data.use_auto_smooth = True
                    obj.data.auto_smooth_angle = math.radians(angle_deg)
    bpy.ops.object.select_all(action='DESELECT')
|
||
|
||
|
||
def _assign_failed_material(part_obj):
    """Assign the standard fallback material (magenta) when no library material matches.

    Reuses SCHAEFFLER_059999_FailedMaterial if it is already loaded (e.g.
    appended from the library); otherwise builds a simple magenta
    Principled BSDF under that name. Replaces all existing material slots.
    """
    material = bpy.data.materials.get(FAILED_MATERIAL_NAME)
    if material is None:
        material = bpy.data.materials.new(name=FAILED_MATERIAL_NAME)
        material.use_nodes = True
        principled = material.node_tree.nodes.get("Principled BSDF")
        if principled is not None:
            principled.inputs["Base Color"].default_value = (1.0, 0.0, 1.0, 1.0)  # magenta
            principled.inputs["Roughness"].default_value = 0.6
    slots = part_obj.data.materials
    slots.clear()
    slots.append(material)
|
||
|
||
|
||
import re as _re
|
||
|
||
|
||
# _scale_mm_to_m removed: OCC GLB export produces coordinates in metres already.
|
||
|
||
|
||
def _apply_rotation(parts, rx, ry, rz):
    """Apply Euler rotation (degrees, XYZ order) to all parts around world origin.

    After _import_glb the combined bbox center is at world origin,
    so rotating around origin is equivalent to rotating around the assembly center.

    The rotation is then baked into the mesh data via transform_apply so that
    later bound_box-based camera fitting sees the rotated geometry.
    """
    # Identity rotation or empty input: nothing to do.
    if not parts or (rx == 0.0 and ry == 0.0 and rz == 0.0):
        return
    from mathutils import Euler
    rot_mat = Euler((math.radians(rx), math.radians(ry), math.radians(rz)), 'XYZ').to_matrix().to_4x4()
    # Pre-multiply: rotate about the WORLD origin, not each object's own pivot.
    for p in parts:
        p.matrix_world = rot_mat @ p.matrix_world
    # Bake rotation into mesh data so camera bbox calculations see the rotated geometry
    bpy.ops.object.select_all(action='DESELECT')
    for p in parts:
        p.select_set(True)
    # transform_apply requires an active object; use the first part.
    bpy.context.view_layer.objects.active = parts[0]
    bpy.ops.object.transform_apply(location=False, rotation=True, scale=False)
    print(f"[blender_render] applied rotation ({rx}°, {ry}°, {rz}°) to {len(parts)} parts")
|
||
|
||
|
||
def _mark_sharp_and_seams(obj, smooth_angle_deg: float, sharp_edge_midpoints=None):
    """Mark sharp edges and UV seams based on angle threshold and optional midpoints.

    Two passes:
      1. Edit-mode operators select edges sharper than smooth_angle_deg and
         mark them both sharp and as UV seams.
      2. Optionally, OCC-derived sharp-edge midpoints are matched against
         this mesh's edges via a KD-tree and the nearest edges (within
         tolerance) are additionally marked. Pass 2 is best-effort.

    Leaves the object in OBJECT mode on return.
    """
    # Local re-imports shadow the module-level ones; harmless, kept as-is.
    import math
    import bpy

    # Ensure we're working with the right object
    bpy.context.view_layer.objects.active = obj
    obj.select_set(True)

    # Set auto-smooth angle (attribute only exists on older mesh API versions)
    if hasattr(obj.data, 'auto_smooth_angle'):
        obj.data.auto_smooth_angle = math.radians(smooth_angle_deg)

    # Enter edit mode to mark edges
    bpy.ops.object.mode_set(mode='EDIT')
    bpy.ops.mesh.select_all(action='DESELECT')

    # Select edges above threshold angle and mark sharp
    bpy.ops.mesh.edges_select_sharp(sharpness=math.radians(smooth_angle_deg))
    bpy.ops.mesh.mark_sharp()

    # Mark same edges as UV seams (clear=False preserves pre-existing seams)
    bpy.ops.mesh.mark_seam(clear=False)

    # If we have OCC-derived midpoints, try to mark additional edges
    if sharp_edge_midpoints and len(sharp_edge_midpoints) > 0:
        try:
            import bmesh
            # bmesh editing below requires OBJECT mode.
            bpy.ops.object.mode_set(mode='OBJECT')
            bm = bmesh.new()
            bm.from_mesh(obj.data)
            bm.edges.ensure_lookup_table()
            bm.verts.ensure_lookup_table()

            # Build KD-tree for edge midpoints
            import mathutils
            kd = mathutils.kdtree.KDTree(len(bm.edges))
            for i, edge in enumerate(bm.edges):
                midpt = (edge.verts[0].co + edge.verts[1].co) / 2
                kd.insert(midpt, i)
            kd.balance()

            # For each OCC sharp midpoint, find nearest Blender edge.
            # NOTE(review): comment says mm, but elsewhere this file states
            # OCC GLB coordinates are already in metres — confirm the unit
            # of sharp_edge_midpoints and whether 0.5 is the intended value.
            tol = 0.5  # 0.5 mm tolerance (coordinates in mm before scale)
            # Cap at 200 midpoints to bound the cost of this pass.
            for mp in sharp_edge_midpoints[:200]:
                vec = mathutils.Vector(mp)
                co, idx, dist = kd.find(vec)
                if dist < tol:
                    bm.edges[idx].seam = True
                    try:
                        # smooth=False marks the edge sharp at the bmesh level.
                        bm.edges[idx].smooth = False
                    except Exception:
                        pass

            bm.to_mesh(obj.data)
            bm.free()
        except Exception:
            pass  # Non-fatal

    # Return to object mode (no-op if the bmesh branch already switched)
    bpy.ops.object.mode_set(mode='OBJECT')
|
||
|
||
|
||
def _import_glb(glb_file):
    """Import OCC-generated GLB into Blender.

    OCC exports one mesh object per STEP part, already in metres.
    Blender's native GLTF importer preserves part names.

    Returns list of Blender mesh objects, centred at world origin.
    Exits the process with status 1 if no mesh objects were imported.
    """
    bpy.ops.object.select_all(action='DESELECT')
    # The GLTF importer leaves the newly created objects selected,
    # which is how we identify what was just imported.
    bpy.ops.import_scene.gltf(filepath=glb_file)
    parts = [o for o in bpy.context.selected_objects if o.type == 'MESH']

    if not parts:
        print(f"ERROR: No mesh objects imported from {glb_file}")
        sys.exit(1)

    print(f"[blender_render] imported {len(parts)} part(s) from GLB: "
          f"{[p.name for p in parts[:5]]}")

    # Centre combined bbox at world origin (so later rotation about the
    # origin equals rotation about the assembly center).
    all_corners = []
    for p in parts:
        all_corners.extend(p.matrix_world @ Vector(c) for c in p.bound_box)

    if all_corners:
        mins = Vector((min(v.x for v in all_corners),
                       min(v.y for v in all_corners),
                       min(v.z for v in all_corners)))
        maxs = Vector((max(v.x for v in all_corners),
                       max(v.y for v in all_corners),
                       max(v.z for v in all_corners)))
        center = (mins + maxs) * 0.5
        for p in parts:
            p.location -= center

    return parts
|
||
|
||
|
||
def _resolve_part_name(index, part_obj):
    """Resolve the STEP part name for an imported Blender object.

    The primary source is the positional mapping in part_names_ordered
    (entry i is the STEP name of the i-th imported part). Only when no
    positional entry exists do we fall back to the object's own name with
    any Blender duplicate suffix (.001, .002, …) stripped — GLB import
    preserves STEP part names, so the stripped name usually IS the STEP name.

    (Previous docstring claimed the opposite priority — name first, index
    fallback — which did not match the code; the suffix strip was also
    computed even when the index path won. Behavior is unchanged.)
    """
    if part_names_ordered and index < len(part_names_ordered):
        return part_names_ordered[index]
    # Strip Blender's auto-numbering suffix (.001, .002, …) for lookup.
    return _re.sub(r'\.\d{3}$', '', part_obj.name)
|
||
|
||
|
||
def _apply_material_library(parts, mat_lib_path, mat_map):
    """Append materials from library .blend and assign to parts via material_map.

    GLB-imported objects are named after STEP parts, so matching is by name
    (stripping Blender .NNN suffix for duplicates and OCC assembly-instance
    _AF<N> suffixes). Falls back to part_names_ordered index-based matching,
    then to a longest-prefix match against mat_map keys.

    mat_map: {part_name_lower: material_name}
    Parts without a match keep their current material (the caller assigns
    the magenta fallback to any part left without materials).
    """
    if not mat_lib_path or not os.path.isfile(mat_lib_path):
        print(f"[blender_render] material library not found: {mat_lib_path}")
        return

    # Collect unique material names needed
    needed = set(mat_map.values())
    if not needed:
        return

    # Append materials from library .blend (link=False → local copies)
    appended = {}
    for mat_name in needed:
        inner_path = f"{mat_lib_path}/Material/{mat_name}"
        try:
            bpy.ops.wm.append(
                filepath=inner_path,
                directory=f"{mat_lib_path}/Material/",
                filename=mat_name,
                link=False,
            )
            if mat_name in bpy.data.materials:
                appended[mat_name] = bpy.data.materials[mat_name]
                print(f"[blender_render] appended material: {mat_name}")
            else:
                print(f"[blender_render] WARNING: material '{mat_name}' not found after append")
        except Exception as exc:
            print(f"[blender_render] WARNING: failed to append material '{mat_name}': {exc}")

    if not appended:
        return

    # PERF: the prefix-fallback candidate list is loop-invariant; previously
    # it was re-sorted inside the per-part loop for every unmatched part
    # (O(parts × keys log keys)). Sort once, longest key first so the
    # most-specific prefix wins.
    keys_by_length = sorted(mat_map.items(), key=lambda x: len(x[0]), reverse=True)

    # Assign materials to parts — primary: name-based (GLB object names),
    # secondary: index-based via part_names_ordered
    assigned_count = 0
    unmatched_names = []
    for i, part in enumerate(parts):
        # Try name-based matching first (strip Blender .NNN suffix)
        base_name = _re.sub(r'\.\d{3}$', '', part.name)
        # Strip OCC assembly-instance suffix (_AF0, _AF1, …) — GLB object
        # names may or may not have them while mat_map keys might.
        # Loop until fixpoint: handles stacked suffixes like "_AF0_AF1".
        _prev = None
        while _prev != base_name:
            _prev = base_name
            base_name = _re.sub(r'_AF\d+$', '', base_name, flags=_re.IGNORECASE)
        part_key = base_name.lower().strip()
        mat_name = mat_map.get(part_key)

        # Prefix fallback: if a mat_map key starts with our base name or
        # vice-versa, use the longest matching key (most-specific wins).
        # Minimum length 5 guards against accidental short-prefix matches.
        if not mat_name:
            for key, val in keys_by_length:
                if len(key) >= 5 and len(part_key) >= 5 and (
                    part_key.startswith(key) or key.startswith(part_key)
                ):
                    mat_name = val
                    break

        # Fall back to index-based matching via part_names_ordered
        if not mat_name and part_names_ordered and i < len(part_names_ordered):
            step_name = part_names_ordered[i]
            step_key = step_name.lower().strip()
            mat_name = mat_map.get(step_key)
            # Also try stripping AF from part_names_ordered entry
            if not mat_name:
                _p2 = None
                while _p2 != step_key:
                    _p2 = step_key
                    step_key = _re.sub(r'_af\d+$', '', step_key)
                mat_name = mat_map.get(step_key)

        if mat_name and mat_name in appended:
            part.data.materials.clear()
            part.data.materials.append(appended[mat_name])
            assigned_count += 1
        else:
            unmatched_names.append(part.name)

    print(f"[blender_render] material assignment: {assigned_count}/{len(parts)} parts matched", flush=True)
    if unmatched_names:
        print(f"[blender_render] unmatched parts (palette fallback): {unmatched_names[:10]}", flush=True)
|
||
|
||
|
||
# ── Early GPU activation (must happen BEFORE open_mainfile / Cycles init) ────
|
||
# Blender compiles Cycles kernels when the engine first initializes. If the
|
||
# compute_device_type is NONE at that point, Cycles locks to CPU for the rest
|
||
# of the session. We therefore probe + enable GPU devices NOW, before any
|
||
# .blend template (which may trigger Cycles init) is loaded.
|
||
def _activate_gpu():
    """Probe for GPU compute devices and activate them. Returns device type or None.

    Must run BEFORE any Cycles initialization (see the comment block above):
    once Cycles initializes with compute_device_type NONE it stays on CPU
    for the rest of the session. Honors the cycles_device CLI argument:
    "cpu" skips the probe entirely.
    """
    if cycles_device == "cpu":
        return None
    try:
        cprefs = bpy.context.preferences.addons['cycles'].preferences
        # Try backends in preference order; first one with a non-CPU device wins.
        for dt in ('OPTIX', 'CUDA', 'HIP', 'ONEAPI'):
            try:
                cprefs.compute_device_type = dt
                # get_devices() refreshes the device list for the chosen backend.
                cprefs.get_devices()
                gpu = [d for d in cprefs.devices if d.type != 'CPU']
                if gpu:
                    # Enable every non-CPU device, disable CPU entries.
                    for d in cprefs.devices:
                        d.use = (d.type != 'CPU')
                    print(f"[blender_render] early GPU activation: {dt}, "
                          f"devices={[(d.name, d.type) for d in gpu]}", flush=True)
                    return dt
            except Exception as e:
                print(f"[blender_render] {dt} not available: {e}", flush=True)
    except Exception as e:
        print(f"[blender_render] early GPU probe failed: {e}", flush=True)
    return None


# Probe immediately, before any template .blend load can trigger Cycles init.
_early_gpu_type = _activate_gpu()
|
||
|
||
# ── Timing harness ────────────────────────────────────────────────────────────
|
||
import time as _time
|
||
_t0 = _time.monotonic()
|
||
_timings: dict = {}
|
||
|
||
def _lap(label: str) -> None:
|
||
"""Record elapsed time since the last _lap() call and since t0."""
|
||
global _t_last
|
||
now = _time.monotonic()
|
||
if not hasattr(_lap, '_last'):
|
||
_lap._last = _t0
|
||
delta = now - _lap._last
|
||
total = now - _t0
|
||
_timings[label] = round(delta, 3)
|
||
print(f"[blender_render] TIMING {label}={delta:.2f}s (total={total:.2f}s)", flush=True)
|
||
_lap._last = now
|
||
|
||
# ── SCENE SETUP ──────────────────────────────────────────────────────────────
|
||
|
||
if use_template:
    # ── MODE B: Template-based render ────────────────────────────────────────
    # The template .blend owns lighting, world, camera and colour management.
    print(f"[blender_render] Opening template: {template_path}")
    bpy.ops.wm.open_mainfile(filepath=template_path)
    _lap("template_load")

    # Find or create target collection
    target_col = _ensure_collection(target_collection)

    # Import OCC GLB (already in metres, one object per STEP part)
    parts = _import_glb(glb_path)
    _lap("glb_import")
    # Apply render position rotation (before camera/bbox calculations)
    _apply_rotation(parts, rotation_x, rotation_y, rotation_z)
    _lap("rotation")

    # Move imported parts into target collection
    for part in parts:
        # Remove from all existing collections (list() copies before unlinking)
        for col in list(part.users_collection):
            col.objects.unlink(part)
        target_col.objects.link(part)

    # Batch smooth shading: select all parts, call shade_smooth_by_angle ONCE.
    # In Blender 5 this adds a "Smooth by Angle" GeoNodes modifier to every
    # selected object in a single C call — same effect as calling per-object
    # but ~100× faster (0.2s vs 16s for 175 parts).
    _apply_smooth_batch(parts, smooth_angle)
    _lap("smooth_shading")

    # Material assignment: library materials if available, otherwise palette
    if material_library_path and material_map:
        # Build lowercased material_map for matching.
        # Include BOTH the original key AND the key with _AF\d+ stripped,
        # so GLB names (which may lack AF suffixes) can match.
        mat_map_lower = {}
        for k, v in material_map.items():
            kl = k.lower().strip()
            mat_map_lower[kl] = v
            # Also add AF-stripped version (loop to fixpoint for stacked suffixes)
            _stripped = kl
            _p = None
            while _p != _stripped:
                _p = _stripped
                _stripped = _re.sub(r'_af\d+$', '', _stripped)
            if _stripped != kl:
                # setdefault: an explicit un-suffixed key always wins.
                mat_map_lower.setdefault(_stripped, v)
        _apply_material_library(parts, material_library_path, mat_map_lower)
        # Parts not matched by library get the failed-material fallback (magenta)
        unmatched = []
        for part in parts:
            if not part.data.materials or len(part.data.materials) == 0:
                _assign_failed_material(part)
                unmatched.append(part.name)
        if unmatched:
            print(f"[blender_render] WARNING: {len(unmatched)} parts unmatched, assigned {FAILED_MATERIAL_NAME}: {unmatched[:5]}", flush=True)
    else:
        # No material library — assign fallback to all parts
        for part in parts:
            _assign_failed_material(part)
    _lap("material_assign")

    # ── Shadow catcher (Cycles only, template mode only) ─────────────────────
    if shadow_catcher:
        sc_col_name = "Shadowcatcher"
        sc_obj_name = "Shadowcatcher"
        # Enable the Shadowcatcher collection in all view layers
        for vl in bpy.context.scene.view_layers:
            def _enable_col_recursive(layer_col):
                # Depth-first search for the Shadowcatcher collection;
                # un-exclude and un-hide it once found.
                if layer_col.collection.name == sc_col_name:
                    layer_col.exclude = False
                    layer_col.collection.hide_render = False
                    layer_col.collection.hide_viewport = False
                    return True
                for child in layer_col.children:
                    if _enable_col_recursive(child):
                        return True
                return False
            _enable_col_recursive(vl.layer_collection)

        sc_obj = bpy.data.objects.get(sc_obj_name)
        if sc_obj:
            # Calculate product bbox min Z (world space) so the catcher
            # plane sits exactly under the product.
            all_world_corners = []
            for part in parts:
                for corner in part.bound_box:
                    all_world_corners.append((part.matrix_world @ Vector(corner)).z)
            if all_world_corners:
                sc_obj.location.z = min(all_world_corners)
            print(f"[blender_render] shadow catcher enabled, plane Z={sc_obj.location.z:.4f}")
        else:
            print(f"[blender_render] WARNING: shadow catcher object '{sc_obj_name}' not found in template")

    # lighting_only: use template World/HDRI but force auto-camera UNLESS the shadow
    # catcher is enabled — in that case the template camera is already positioned to
    # show both the product and its shadow on the ground plane.
    needs_auto_camera = (lighting_only and not shadow_catcher) or not bpy.context.scene.camera
    if lighting_only and not shadow_catcher:
        print("[blender_render] lighting_only mode: using template World/HDRI, forcing auto-camera")
    elif needs_auto_camera:
        print("[blender_render] WARNING: template has no camera — will create auto-camera")

    # Set very close near clip on template camera for mm-scale parts (now in metres)
    if not needs_auto_camera and bpy.context.scene.camera:
        bpy.context.scene.camera.data.clip_start = 0.001

    print(f"[blender_render] template mode: {len(parts)} parts imported into collection '{target_collection}'")

else:
    # ── MODE A: Factory settings (original behavior) ─────────────────────────
    needs_auto_camera = True
    bpy.ops.wm.read_factory_settings(use_empty=True)
    # Import OCC GLB (already in metres, one object per STEP part)
    parts = _import_glb(glb_path)
    # Apply render position rotation (before camera/bbox calculations)
    _apply_rotation(parts, rotation_x, rotation_y, rotation_z)

    # Local re-import; _time is already imported at module level. Harmless.
    import time as _time
    _t_smooth_a = _time.time()
    _apply_smooth_batch(parts, smooth_angle)
    for part in parts:
        _assign_failed_material(part)
    print(f"[blender_render] smooth+fallback-material: {len(parts)} parts ({_time.time()-_t_smooth_a:.2f}s)", flush=True)

    # Apply material library on top of palette colours (same logic as Mode B).
    # material_library_path / material_map are parsed from argv even in Mode A
    # but were previously never used here — that was the bug.
    if material_library_path and material_map:
        mat_map_lower = {}
        for k, v in material_map.items():
            kl = k.lower().strip()
            mat_map_lower[kl] = v
            _stripped = kl
            _p = None
            while _p != _stripped:
                _p = _stripped
                _stripped = _re.sub(r'_af\d+$', '', _stripped)
            if _stripped != kl:
                mat_map_lower.setdefault(_stripped, v)
        _apply_material_library(parts, material_library_path, mat_map_lower)
        # Parts not matched by the library keep their fallback material (already set above)
||
|
||
if needs_auto_camera:
    # ── Combined bounding box / bounding sphere ──────────────────────────────
    all_corners = []
    for part in parts:
        all_corners.extend(part.matrix_world @ Vector(c) for c in part.bound_box)

    bbox_min = Vector((
        min(v.x for v in all_corners),
        min(v.y for v in all_corners),
        min(v.z for v in all_corners),
    ))
    bbox_max = Vector((
        max(v.x for v in all_corners),
        max(v.y for v in all_corners),
        max(v.z for v in all_corners),
    ))

    bbox_center = (bbox_min + bbox_max) * 0.5
    bbox_dims = bbox_max - bbox_min
    # Half the bbox diagonal; floored at 1mm to avoid div-by-zero below.
    bsphere_radius = max(bbox_dims.length * 0.5, 0.001)

    print(f"[blender_render] bbox_dims={tuple(round(d,4) for d in bbox_dims)}, "
          f"bsphere_radius={bsphere_radius:.4f}, center={tuple(round(c,4) for c in bbox_center)}")

    # ── Lighting — only in Mode A (factory settings) ─────────────────────────
    # In template mode the .blend file provides its own World/HDRI lighting.
    # Adding auto-lights would overpower the template's intended look.
    if not use_template:
        light_dist = bsphere_radius * 6.0

        # Key light: sun above and in front of the product.
        bpy.ops.object.light_add(type='SUN', location=(
            bbox_center.x + light_dist * 0.5,
            bbox_center.y - light_dist * 0.35,
            bbox_center.z + light_dist,
        ))
        sun = bpy.context.active_object
        sun.data.energy = 4.0
        sun.rotation_euler = (math.radians(45), 0, math.radians(30))

        # Fill light: large area lamp opposite the sun; energy and size
        # scale with object size so small and large parts look alike.
        bpy.ops.object.light_add(type='AREA', location=(
            bbox_center.x - light_dist * 0.4,
            bbox_center.y + light_dist * 0.4,
            bbox_center.z + light_dist * 0.7,
        ))
        fill = bpy.context.active_object
        fill.data.energy = max(800.0, bsphere_radius ** 2 * 2000.0)
        fill.data.size = max(4.0, bsphere_radius * 4.0)

    # ── Camera ───────────────────────────────────────────────────────────────
    ELEVATION_DEG = 28.0     # isometric-style tilt above the horizon
    AZIMUTH_DEG = 40.0       # rotation around Z from +X
    LENS_MM = 50.0
    SENSOR_WIDTH_MM = 36.0   # full-frame sensor width
    FILL_FACTOR = 0.85       # object fills ~85 % of the frame

    elevation_rad = math.radians(ELEVATION_DEG)
    azimuth_rad = math.radians(AZIMUTH_DEG)

    # Unit vector from bbox center toward the camera (spherical coords).
    cam_dir = Vector((
        math.cos(elevation_rad) * math.cos(azimuth_rad),
        math.cos(elevation_rad) * math.sin(azimuth_rad),
        math.sin(elevation_rad),
    )).normalized()

    # Half-FOV horizontally and vertically; fit against the tighter one so
    # the bounding sphere fits in both frame dimensions.
    fov_h = math.atan(SENSOR_WIDTH_MM / (2.0 * LENS_MM))
    fov_v = math.atan(SENSOR_WIDTH_MM * (height / width) / (2.0 * LENS_MM))
    fov_used = min(fov_h, fov_v)

    # Distance so the bounding sphere spans FILL_FACTOR of the frame,
    # but never closer than 1.5 radii (avoids clipping into the object).
    dist = (bsphere_radius / math.tan(fov_used)) / FILL_FACTOR
    dist = max(dist, bsphere_radius * 1.5)
    print(f"[blender_render] camera dist={dist:.4f}, fov={math.degrees(fov_used):.2f}°")

    cam_location = bbox_center + cam_dir * dist
    bpy.ops.object.camera_add(location=cam_location)
    cam_obj = bpy.context.active_object
    cam_obj.data.lens = LENS_MM
    bpy.context.scene.camera = cam_obj

    # Build a look-at orientation: camera -Z points at bbox_center, +Y up.
    look_dir = (bbox_center - cam_location).normalized()
    up_world = Vector((0.0, 0.0, 1.0))
    right = look_dir.cross(up_world)
    if right.length < 1e-6:
        # Looking straight up/down: pick an arbitrary horizontal right vector.
        right = Vector((1.0, 0.0, 0.0))
    right.normalize()
    cam_up = right.cross(look_dir).normalized()

    # Rows are the camera basis vectors; transposed to map camera → world.
    rot_mat = Matrix((
        ( right.x, right.y, right.z),
        ( cam_up.x, cam_up.y, cam_up.z),
        (-look_dir.x, -look_dir.y, -look_dir.z),
    )).transposed()
    cam_obj.rotation_euler = rot_mat.to_euler('XYZ')

    # Dynamic clip planes scaled to the camera distance and object size.
    cam_obj.data.clip_start = max(dist * 0.001, 0.0001)
    cam_obj.data.clip_end = dist + bsphere_radius * 3.0
    print(f"[blender_render] clip {cam_obj.data.clip_start:.6f} … {cam_obj.data.clip_end:.4f}")
|
||
|
||
# ── World background — only in Mode A ────────────────────────────────────────
# In template mode the .blend file owns its World (HDRI, sky texture, studio
# lighting). Overwriting it would destroy the HDR look the template was
# designed to use (e.g. Alpha-HDR output types with Filmic tonemapping).
if not use_template:
    world = bpy.data.worlds.new("World")
    bpy.context.scene.world = world
    world.use_nodes = True
    bg = world.node_tree.nodes["Background"]
    # Near-white, very dim backdrop: neutral colour, low ambient contribution.
    bg.inputs["Color"].default_value = (0.96, 0.96, 0.97, 1.0)
    bg.inputs["Strength"].default_value = 0.15
|
||
|
||
# ── Render engine ─────────────────────────────────────────────────────────────
scene = bpy.context.scene

if engine == "eevee":
    # Blender 4.x used 'BLENDER_EEVEE_NEXT'; Blender 5.x reverted to 'BLENDER_EEVEE'.
    # Try both names so the script works across versions.
    set_ok = False
    for eevee_id in ('BLENDER_EEVEE', 'BLENDER_EEVEE_NEXT'):
        try:
            scene.render.engine = eevee_id
            set_ok = True
            print(f"[blender_render] EEVEE engine id: {eevee_id}")
            break
        except TypeError:
            # Unknown enum value for this Blender version — try the next id.
            continue

    if not set_ok:
        print("[blender_render] WARNING: could not set EEVEE engine – falling back to Cycles")
        engine = "cycles"

if engine == "eevee":
    # Sample attribute name changed across minor versions
    for attr in ('taa_render_samples', 'samples'):
        try:
            setattr(scene.eevee, attr, samples)
            print(f"[blender_render] EEVEE samples: scene.eevee.{attr}={samples}")
            break
        except AttributeError:
            continue

if engine != "eevee":  # covers both explicit Cycles and EEVEE-fallback
    # ── GPU preferences (before engine activation) ───────────────────────
    # Set compute_device_type in preferences so Cycles can find GPU kernels.
    # Falls back to the result of the early probe done before template load.
    gpu_type_found = _activate_gpu() or _early_gpu_type

    # ── Activate Cycles engine ───────────────────────────────────────────
    scene.render.engine = 'CYCLES'

    # ── Device selection AFTER engine activation ─────────────────────────
    # IMPORTANT: scene.cycles.device must be set AFTER scene.render.engine
    # = 'CYCLES'. Setting it before can be overwritten when Cycles inits
    # and reads the scene's saved properties (template may have device=CPU).
    if gpu_type_found:
        scene.cycles.device = 'GPU'
        # Re-ensure preferences are set (engine activation may have reset them)
        _activate_gpu()
        print(f"[blender_render] Cycles GPU ({gpu_type_found}), samples={samples}", flush=True)
        print(f"RENDER_DEVICE_USED: engine=CYCLES device=GPU compute_type={gpu_type_found}", flush=True)
    else:
        scene.cycles.device = 'CPU'
        print(f"[blender_render] WARNING: GPU not found — falling back to CPU, samples={samples}", flush=True)
        print("RENDER_DEVICE_USED: engine=CYCLES device=CPU compute_type=NONE (fallback)", flush=True)
        import os as _os
        # Strict mode: the orchestrator demanded GPU — abort rather than
        # silently burning CPU time (exit code 2 distinguishes this case).
        if _os.environ.get("CYCLES_DEVICE", "auto").lower() == "gpu":
            print("GPU_REQUIRED_BUT_CPU_USED: strict mode active (CYCLES_DEVICE=gpu)", flush=True)
            sys.exit(2)

    scene.cycles.samples = samples
    scene.cycles.use_denoising = True
    scene.cycles.denoiser = denoiser_arg if denoiser_arg else 'OPENIMAGEDENOISE'
    # Optional denoiser knobs: each guarded because availability varies
    # across Blender versions; empty CLI arg leaves the default untouched.
    if denoising_input_passes_arg:
        try: scene.cycles.denoising_input_passes = denoising_input_passes_arg
        except Exception: pass
    if denoising_prefilter_arg:
        try: scene.cycles.denoising_prefilter = denoising_prefilter_arg
        except Exception: pass
    if denoising_quality_arg:
        try: scene.cycles.denoising_quality = denoising_quality_arg
        except Exception: pass
    if denoising_use_gpu_arg:
        try: scene.cycles.denoising_use_gpu = (denoising_use_gpu_arg == "1")
        except AttributeError: pass
    # Adaptive sampling: stop refining pixels once noise drops below threshold.
    if noise_threshold_arg:
        scene.cycles.use_adaptive_sampling = True
        scene.cycles.adaptive_threshold = float(noise_threshold_arg)
|
||
|
||
# ── Colour management ─────────────────────────────────────────────────────────
# In template mode the .blend file owns its colour management (e.g. Filmic/
# AgX for HDR, custom exposure for Alpha-HDR output types). Overwriting it
# would destroy the look the template was designed for.
# In factory-settings mode (Mode A) force Standard to avoid the grey Filmic
# tint that Blender applies by default.
if not use_template:
    scene.view_settings.view_transform = 'Standard'
    scene.view_settings.exposure = 0.0
    scene.view_settings.gamma = 1.0
    try:
        # 'None' look may not exist in every OCIO config — best-effort.
        scene.view_settings.look = 'None'
    except Exception:
        pass
|
||
|
||
# ── Render settings ───────────────────────────────────────────────────────────
scene.render.resolution_x = width
scene.render.resolution_y = height
# 100% — width/height are already the exact requested output size.
scene.render.resolution_percentage = 100
scene.render.image_settings.file_format = 'PNG'
scene.render.filepath = output_path
# film_transparent: PNG alpha channel instead of the world background.
scene.render.film_transparent = transparent_bg
|
||
|
||
# ── Render ────────────────────────────────────────────────────────────────────
# Final verification of render device settings — logged so CPU-fallback
# renders are diagnosable from the log alone.
if scene.render.engine == 'CYCLES':
    cprefs = bpy.context.preferences.addons['cycles'].preferences
    print(f"[blender_render] VERIFY: engine={scene.render.engine}, "
          f"cycles.device={scene.cycles.device}, "
          f"compute_device_type={cprefs.compute_device_type}, "
          f"gpu_devices={[(d.name, d.type, d.use) for d in cprefs.devices if d.type != 'CPU']}",
          flush=True)
_lap("pre_render_setup")
print(f"[blender_render] Rendering → {output_path} (Blender {bpy.app.version_string})", flush=True)
sys.stdout.flush()
# write_still=True saves the result to scene.render.filepath.
bpy.ops.render.render(write_still=True)
print("[blender_render] render done.", flush=True)
_lap("gpu_render")

# ── Final timing summary ──────────────────────────────────────────────────────
# One machine-parseable line with every _lap() split, for pipeline monitoring.
_total = _time.monotonic() - _t0
print(f"[blender_render] TIMING_SUMMARY total={_total:.2f}s | " +
      " | ".join(f"{k}={v:.2f}s" for k, v in _timings.items()), flush=True)
print("[blender_render] Done.")
|