refactor(P1): complete pipeline cleanup — M1 dead code + M3 blender split
M1 dead code removal: - admin.py: remove VALID_STL_QUALITIES + stl_quality (7 locations) - frontend: remove stl_quality from 6 files (api/orders.ts, api/worker.ts, WorkerActivity.tsx, RenderInfoModal.tsx, helpTexts.ts, mocks/handlers.ts) - blender_render.py: delete _mark_sharp_and_seams() — dead, never called (62 lines) - step_processor.py: delete _render_via_service() + 2 elif renderer=="threejs" branches - renderproblems_tmp/: remove 3 orphaned debug images M3 blender_render.py decomposition (858 → 248 lines): - _blender_gpu.py: activate_gpu(), configure_engine() - _blender_import.py: import_glb(), apply_rotation() - _blender_materials.py: FAILED_MATERIAL_NAME, assign_failed_material(), build_mat_map_lower(), apply_material_library() - _blender_camera.py: setup_auto_camera(), setup_auto_lights() - _blender_scene.py: ensure_collection(), apply_smooth_batch(), apply_sharp_edges_from_occ(), setup_shadow_catcher() - Entry-point: sys.path.insert for submodule discovery; arg-parse + Mode A/B orchestration only Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -0,0 +1,111 @@
|
||||
"""Camera and lighting helpers for Blender headless renders."""
|
||||
from __future__ import annotations
|
||||
|
||||
import math
|
||||
|
||||
ELEVATION_DEG = 28.0
|
||||
AZIMUTH_DEG = 40.0
|
||||
LENS_MM = 50.0
|
||||
SENSOR_WIDTH_MM = 36.0
|
||||
FILL_FACTOR = 0.85
|
||||
|
||||
|
||||
def setup_auto_camera(parts: list, width: int, height: int):
    """Compute bounding sphere and place an isometric auto-camera.

    Args:
        parts: Blender mesh objects to frame (their world-space bound_box
            corners drive the bounding sphere).
        width: render width in pixels (drives the horizontal FOV aspect).
        height: render height in pixels (drives the vertical FOV aspect).

    Returns (bbox_center, bsphere_radius) as a tuple so the caller can
    pass them to setup_auto_lights().
    """
    import bpy  # type: ignore[import]
    from mathutils import Vector, Matrix  # type: ignore[import]

    # Gather world-space corners of every part's axis-aligned bounding box.
    all_corners = []
    for part in parts:
        all_corners.extend(part.matrix_world @ Vector(c) for c in part.bound_box)

    bbox_min = Vector((
        min(v.x for v in all_corners),
        min(v.y for v in all_corners),
        min(v.z for v in all_corners),
    ))
    bbox_max = Vector((
        max(v.x for v in all_corners),
        max(v.y for v in all_corners),
        max(v.z for v in all_corners),
    ))

    bbox_center = (bbox_min + bbox_max) * 0.5
    bbox_dims = bbox_max - bbox_min
    # Half the bbox diagonal encloses the whole assembly; the floor avoids a
    # zero-radius sphere for degenerate (flat or empty) geometry.
    bsphere_radius = max(bbox_dims.length * 0.5, 0.001)

    print(f"[blender_render] bbox_dims={tuple(round(d,4) for d in bbox_dims)}, "
          f"bsphere_radius={bsphere_radius:.4f}, center={tuple(round(c,4) for c in bbox_center)}")

    elevation_rad = math.radians(ELEVATION_DEG)
    azimuth_rad = math.radians(AZIMUTH_DEG)

    # Unit vector pointing from the target towards the camera position.
    cam_dir = Vector((
        math.cos(elevation_rad) * math.cos(azimuth_rad),
        math.cos(elevation_rad) * math.sin(azimuth_rad),
        math.sin(elevation_rad),
    )).normalized()

    # Half-FOV per axis from lens/sensor geometry; the tighter axis decides
    # the distance needed to keep the whole sphere in frame.
    fov_h = math.atan(SENSOR_WIDTH_MM / (2.0 * LENS_MM))
    fov_v = math.atan(SENSOR_WIDTH_MM * (height / width) / (2.0 * LENS_MM))
    fov_used = min(fov_h, fov_v)

    # Distance so the bounding sphere fills FILL_FACTOR of the frame, but
    # never closer than 1.5 radii (keeps the camera outside the model).
    dist = (bsphere_radius / math.tan(fov_used)) / FILL_FACTOR
    dist = max(dist, bsphere_radius * 1.5)
    print(f"[blender_render] camera dist={dist:.4f}, fov={math.degrees(fov_used):.2f}°")

    cam_location = bbox_center + cam_dir * dist
    bpy.ops.object.camera_add(location=cam_location)
    cam_obj = bpy.context.active_object
    cam_obj.data.lens = LENS_MM
    bpy.context.scene.camera = cam_obj

    # Build a look-at rotation by hand: the camera's -Z axis must point at
    # the target and +Y is its up vector.
    look_dir = (bbox_center - cam_location).normalized()
    up_world = Vector((0.0, 0.0, 1.0))
    right = look_dir.cross(up_world)
    if right.length < 1e-6:
        # Looking straight up/down: world-up is parallel to the view axis,
        # so pick world X as the right vector.
        right = Vector((1.0, 0.0, 0.0))
    right.normalize()
    cam_up = right.cross(look_dir).normalized()

    # Rows are the camera basis vectors; transposing gives the
    # world-from-camera rotation expected by rotation_euler.
    rot_mat = Matrix((
        (   right.x,    right.y,    right.z),
        (  cam_up.x,   cam_up.y,   cam_up.z),
        (-look_dir.x, -look_dir.y, -look_dir.z),
    )).transposed()
    cam_obj.rotation_euler = rot_mat.to_euler('XYZ')

    # Scale clip range to the scene: tiny parts keep near-plane precision,
    # large assemblies are not truncated by the far plane.
    cam_obj.data.clip_start = max(dist * 0.001, 0.0001)
    cam_obj.data.clip_end = dist + bsphere_radius * 3.0
    print(f"[blender_render] clip {cam_obj.data.clip_start:.6f} … {cam_obj.data.clip_end:.4f}")

    return bbox_center, bsphere_radius
|
||||
|
||||
|
||||
def setup_auto_lights(bbox_center, bsphere_radius: float) -> None:
    """Create the two-light auto rig: a key sun plus a soft area fill.

    Light positions scale with the bounding sphere so small and large
    assemblies receive comparable illumination.
    """
    import bpy  # type: ignore[import]

    offset = bsphere_radius * 6.0

    # Key light: a sun above and slightly in front of the assembly.
    sun_location = (
        bbox_center.x + offset * 0.5,
        bbox_center.y - offset * 0.35,
        bbox_center.z + offset,
    )
    bpy.ops.object.light_add(type='SUN', location=sun_location)
    key = bpy.context.active_object
    key.data.energy = 4.0
    key.rotation_euler = (math.radians(45), 0, math.radians(30))

    # Fill light: a large area lamp opposite the sun to soften shadows;
    # energy and size grow with the product footprint.
    fill_location = (
        bbox_center.x - offset * 0.4,
        bbox_center.y + offset * 0.4,
        bbox_center.z + offset * 0.7,
    )
    bpy.ops.object.light_add(type='AREA', location=fill_location)
    fill = bpy.context.active_object
    fill.data.energy = max(800.0, bsphere_radius ** 2 * 2000.0)
    fill.data.size = max(4.0, bsphere_radius * 4.0)
||||
@@ -0,0 +1,121 @@
|
||||
"""GPU activation and engine configuration helpers for Blender headless renders.
|
||||
|
||||
activate_gpu() must be called BEFORE open_mainfile / Cycles engine initialisation
|
||||
so that the CUDA/OptiX kernel is compiled with the correct compute_device_type.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
def activate_gpu(cycles_device: str = "auto") -> str | None:
|
||||
"""Probe for GPU compute devices and activate them.
|
||||
|
||||
Args:
|
||||
cycles_device: "auto" | "gpu" | "cpu"
|
||||
|
||||
Returns:
|
||||
Device type string (e.g. "OPTIX", "CUDA") if GPU was activated,
|
||||
or None if CPU-only.
|
||||
"""
|
||||
if cycles_device == "cpu":
|
||||
return None
|
||||
import bpy # type: ignore[import]
|
||||
try:
|
||||
cprefs = bpy.context.preferences.addons['cycles'].preferences
|
||||
for dt in ('OPTIX', 'CUDA', 'HIP', 'ONEAPI'):
|
||||
try:
|
||||
cprefs.compute_device_type = dt
|
||||
cprefs.get_devices()
|
||||
gpu = [d for d in cprefs.devices if d.type != 'CPU']
|
||||
if gpu:
|
||||
for d in cprefs.devices:
|
||||
d.use = (d.type != 'CPU')
|
||||
print(f"[blender_render] early GPU activation: {dt}, "
|
||||
f"devices={[(d.name, d.type) for d in gpu]}", flush=True)
|
||||
return dt
|
||||
except Exception as e:
|
||||
print(f"[blender_render] {dt} not available: {e}", flush=True)
|
||||
except Exception as e:
|
||||
print(f"[blender_render] early GPU probe failed: {e}", flush=True)
|
||||
return None
|
||||
|
||||
|
||||
def configure_engine(
    scene,
    engine: str,
    samples: int,
    cycles_device: str,
    early_gpu_type: str | None,
    noise_threshold_arg: str = "",
    denoiser_arg: str = "",
    denoising_input_passes_arg: str = "",
    denoising_prefilter_arg: str = "",
    denoising_quality_arg: str = "",
    denoising_use_gpu_arg: str = "",
) -> str:
    """Configure the Blender render engine (EEVEE or Cycles) on *scene*.

    Args:
        scene: bpy.types.Scene to configure.
        engine: requested engine, "eevee" or "cycles".
        samples: render sample count.
        cycles_device: "auto" | "gpu" | "cpu" (passed to activate_gpu).
        early_gpu_type: backend returned by an earlier activate_gpu() call,
            used as fallback when the in-call probe returns None.
        noise_threshold_arg: adaptive-sampling threshold ("" = disabled).
        denoiser_arg: Cycles denoiser id ("" = OPENIMAGEDENOISE default).
        denoising_*_arg: optional denoiser tuning knobs ("" = Blender default).

    Returns the effective engine name ("eevee" or "cycles").
    Exits with code 2 if GPU required but unavailable (CYCLES_DEVICE=gpu env var).
    """
    if engine == "eevee":
        # The engine id differs between Blender versions (EEVEE vs EEVEE
        # Next); try both before falling back to Cycles.
        set_ok = False
        for eevee_id in ('BLENDER_EEVEE', 'BLENDER_EEVEE_NEXT'):
            try:
                scene.render.engine = eevee_id
                set_ok = True
                print(f"[blender_render] EEVEE engine id: {eevee_id}")
                break
            except TypeError:
                continue
        if not set_ok:
            print("[blender_render] WARNING: could not set EEVEE engine – falling back to Cycles")
            engine = "cycles"
    if engine == "eevee":
        # The sample-count attribute was renamed across Blender versions.
        # (The original also did a dead `import bpy` here; removed.)
        for attr in ('taa_render_samples', 'samples'):
            try:
                setattr(scene.eevee, attr, samples)
                print(f"[blender_render] EEVEE samples: scene.eevee.{attr}={samples}")
                break
            except AttributeError:
                continue

    if engine != "eevee":
        # One probe/activation is sufficient; the original issued a second
        # redundant activate_gpu() call on the GPU branch, doubling the
        # device probing and its log output.
        gpu_type_found = activate_gpu(cycles_device) or early_gpu_type
        scene.render.engine = 'CYCLES'
        if gpu_type_found:
            scene.cycles.device = 'GPU'
            print(f"[blender_render] Cycles GPU ({gpu_type_found}), samples={samples}", flush=True)
            print(f"RENDER_DEVICE_USED: engine=CYCLES device=GPU compute_type={gpu_type_found}", flush=True)
        else:
            scene.cycles.device = 'CPU'
            print(f"[blender_render] WARNING: GPU not found — falling back to CPU, samples={samples}", flush=True)
            print("RENDER_DEVICE_USED: engine=CYCLES device=CPU compute_type=NONE (fallback)", flush=True)
            if os.environ.get("CYCLES_DEVICE", "auto").lower() == "gpu":
                # Strict mode: caller demanded GPU — abort rather than
                # silently rendering slowly on CPU.
                print("GPU_REQUIRED_BUT_CPU_USED: strict mode active (CYCLES_DEVICE=gpu)", flush=True)
                sys.exit(2)

        scene.cycles.samples = samples
        scene.cycles.use_denoising = True
        scene.cycles.denoiser = denoiser_arg if denoiser_arg else 'OPENIMAGEDENOISE'
        # Optional denoiser knobs: each attribute may not exist on older
        # Blender APIs, so failures are deliberately ignored.
        if denoising_input_passes_arg:
            try: scene.cycles.denoising_input_passes = denoising_input_passes_arg
            except Exception: pass
        if denoising_prefilter_arg:
            try: scene.cycles.denoising_prefilter = denoising_prefilter_arg
            except Exception: pass
        if denoising_quality_arg:
            try: scene.cycles.denoising_quality = denoising_quality_arg
            except Exception: pass
        if denoising_use_gpu_arg:
            try: scene.cycles.denoising_use_gpu = (denoising_use_gpu_arg == "1")
            except AttributeError: pass
        if noise_threshold_arg:
            scene.cycles.use_adaptive_sampling = True
            scene.cycles.adaptive_threshold = float(noise_threshold_arg)

    return engine
|
||||
@@ -0,0 +1,85 @@
|
||||
"""GLB import and geometry helpers for Blender headless renders."""
|
||||
from __future__ import annotations
|
||||
|
||||
import math
|
||||
import sys
|
||||
|
||||
|
||||
def import_glb(glb_file: str) -> list:
    """Import OCC-generated GLB into Blender.

    OCC exports one mesh object per STEP part, already in metres.
    Blender's native GLTF importer preserves part names.

    Args:
        glb_file: path to the .glb file to import.

    Returns list of Blender mesh objects, centred at world origin.
    Exits the process with status 1 when the GLB yields no mesh objects.
    """
    import bpy  # type: ignore[import]
    from mathutils import Vector  # type: ignore[import]

    bpy.ops.object.select_all(action='DESELECT')
    bpy.ops.import_scene.gltf(filepath=glb_file)
    # The importer leaves everything it created selected; the meshes among
    # the selection are the STEP parts.
    parts = [o for o in bpy.context.selected_objects if o.type == 'MESH']

    if not parts:
        print(f"ERROR: No mesh objects imported from {glb_file}")
        sys.exit(1)

    print(f"[blender_render] imported {len(parts)} part(s) from GLB: "
          f"{[p.name for p in parts[:5]]}")

    # Remove OCC-baked custom normals so shade_smooth_by_angle can recompute
    # normals from scratch (respecting our sharp edge marks).
    cleared = 0
    for p in parts:
        if "custom_normal" in p.data.attributes:
            p.data.attributes.remove(p.data.attributes["custom_normal"])
            cleared += 1
    if cleared:
        print(f"[blender_render] cleared OCC custom_normal from {cleared} mesh objects")

    # Centre combined bbox at world origin
    all_corners = []
    for p in parts:
        all_corners.extend(p.matrix_world @ Vector(c) for c in p.bound_box)

    if all_corners:
        mins = Vector((min(v.x for v in all_corners),
                       min(v.y for v in all_corners),
                       min(v.z for v in all_corners)))
        maxs = Vector((max(v.x for v in all_corners),
                       max(v.y for v in all_corners),
                       max(v.z for v in all_corners)))
        center = (mins + maxs) * 0.5
        # Move root objects (parentless) to centre. Adjusting a child's local
        # .location by a world-space vector gives wrong results when the GLB has
        # Empty parent nodes (OCC assembly hierarchy). Shifting the root moves
        # the entire hierarchy correctly.
        all_imported = list(bpy.context.selected_objects)
        root_objects = [o for o in all_imported if o.parent is None]
        for obj in root_objects:
            obj.location -= center

    return parts
|
||||
|
||||
|
||||
def apply_rotation(parts: list, rx: float, ry: float, rz: float) -> None:
    """Rotate every part around the world origin by XYZ Euler angles (degrees).

    import_glb() leaves the combined bbox centred at the origin, so an
    origin rotation equals rotating about the assembly centre.
    No-op when there are no parts or all three angles are zero.
    """
    if not parts or (rx == 0.0 and ry == 0.0 and rz == 0.0):
        return
    import bpy  # type: ignore[import]
    from mathutils import Euler  # type: ignore[import]

    angles = (math.radians(rx), math.radians(ry), math.radians(rz))
    transform = Euler(angles, 'XYZ').to_matrix().to_4x4()
    for part in parts:
        part.matrix_world = transform @ part.matrix_world

    # Bake the rotation into mesh data so subsequent bbox/camera computations
    # see the rotated geometry instead of a pending object transform.
    bpy.ops.object.select_all(action='DESELECT')
    for part in parts:
        part.select_set(True)
    bpy.context.view_layer.objects.active = parts[0]
    bpy.ops.object.transform_apply(location=False, rotation=True, scale=False)
    print(f"[blender_render] applied rotation ({rx}°, {ry}°, {rz}°) to {len(parts)} parts")
||||
@@ -0,0 +1,156 @@
|
||||
"""Material assignment helpers for Blender headless renders."""
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re as _re
|
||||
|
||||
FAILED_MATERIAL_NAME = "SCHAEFFLER_059999_FailedMaterial"
|
||||
|
||||
|
||||
def assign_failed_material(part_obj) -> None:
    """Give *part_obj* the standard magenta fallback material.

    Looks up SCHAEFFLER_059999_FailedMaterial among loaded materials and
    builds a fresh magenta Principled BSDF only when it is absent; the
    part's existing material slots are replaced either way.
    """
    import bpy  # type: ignore[import]

    fallback = bpy.data.materials.get(FAILED_MATERIAL_NAME)
    if fallback is None:
        fallback = bpy.data.materials.new(name=FAILED_MATERIAL_NAME)
        fallback.use_nodes = True
        bsdf = fallback.node_tree.nodes.get("Principled BSDF")
        if bsdf:
            # Magenta is deliberately loud so failed assignments are obvious
            # in the rendered image.
            bsdf.inputs["Base Color"].default_value = (1.0, 0.0, 1.0, 1.0)
            bsdf.inputs["Roughness"].default_value = 0.6
    part_obj.data.materials.clear()
    part_obj.data.materials.append(fallback)
|
||||
|
||||
|
||||
def build_mat_map_lower(material_map: dict) -> dict:
    """Lower-case *material_map* keys and add _AF\\d+-stripped aliases.

    OCC may append assembly-instance suffixes (_AF0, _AF1, ...) to mat_map
    keys while GLB object names lack them (or vice versa). Inserting both
    the lowered key and its fully AF-stripped form lets lookups match in
    either direction. Existing entries win over stripped aliases
    (setdefault semantics).
    """
    lowered: dict = {}
    for raw_key, material in material_map.items():
        key = raw_key.lower().strip()
        lowered[key] = material
        # Peel trailing _afN suffixes until the name stops changing.
        base = key
        while True:
            shorter = _re.sub(r'_af\d+$', '', base)
            if shorter == base:
                break
            base = shorter
        if base != key:
            lowered.setdefault(base, material)
    return lowered
|
||||
|
||||
|
||||
def apply_material_library(
    parts: list,
    mat_lib_path: str,
    mat_map: dict,
    part_names_ordered: list | None = None,
) -> None:
    """Append materials from library .blend and assign to parts via material_map.

    GLB-imported objects are named after STEP parts, so matching is by name
    (stripping Blender .NNN suffix for duplicates). Falls back to
    part_names_ordered index-based matching.

    Args:
        parts: Blender mesh objects to receive materials.
        mat_lib_path: path to the material-library .blend file.
        mat_map: {part_name_lower: material_name}
        part_names_ordered: STEP part names in import order, used for the
            index-based fallback; optional.

    Parts without a match receive the FAILED_MATERIAL_NAME sentinel.
    No-op when the library file is missing or mat_map is empty.
    """
    if not mat_lib_path or not os.path.isfile(mat_lib_path):
        print(f"[blender_render] material library not found: {mat_lib_path}")
        return

    import bpy  # type: ignore[import]

    if part_names_ordered is None:
        part_names_ordered = []

    # Collect unique material names needed
    needed = set(mat_map.values())
    if not needed:
        return

    # Append materials from library
    appended: dict = {}
    for mat_name in needed:
        inner_path = f"{mat_lib_path}/Material/{mat_name}"
        try:
            bpy.ops.wm.append(
                filepath=inner_path,
                directory=f"{mat_lib_path}/Material/",
                filename=mat_name,
                link=False,
            )
            if mat_name in bpy.data.materials:
                appended[mat_name] = bpy.data.materials[mat_name]
                print(f"[blender_render] appended material: {mat_name}")
            else:
                print(f"[blender_render] WARNING: material '{mat_name}' not found after append")
        except Exception as exc:
            print(f"[blender_render] WARNING: failed to append material '{mat_name}': {exc}")

    if not appended:
        return

    # Assign materials to parts — primary: name-based (GLB object names),
    # secondary: index-based via part_names_ordered
    assigned_count = 0
    unmatched_names = []
    for i, part in enumerate(parts):
        # Try name-based matching first (strip Blender .NNN suffix)
        base_name = _re.sub(r'\.\d{3}$', '', part.name)
        # Strip OCC assembly-instance suffix (_AF0, _AF1, …) — GLB object
        # names may or may not have them while mat_map keys might.
        _prev = None
        while _prev != base_name:
            _prev = base_name
            base_name = _re.sub(r'_AF\d+$', '', base_name, flags=_re.IGNORECASE)
        part_key = base_name.lower().strip()
        mat_name = mat_map.get(part_key)

        # Prefix fallback: if a mat_map key starts with our base name or
        # vice-versa, use the longest matching key (most-specific wins).
        # The >= 5 length guard avoids false matches on trivially short names.
        if not mat_name:
            for key, val in sorted(mat_map.items(), key=lambda x: len(x[0]), reverse=True):
                if len(key) >= 5 and len(part_key) >= 5 and (
                    part_key.startswith(key) or key.startswith(part_key)
                ):
                    mat_name = val
                    break

        # Fall back to index-based matching via part_names_ordered
        if not mat_name and part_names_ordered and i < len(part_names_ordered):
            step_name = part_names_ordered[i]
            step_key = step_name.lower().strip()
            mat_name = mat_map.get(step_key)
            # Also try stripping AF from part_names_ordered entry
            if not mat_name:
                _p2 = None
                while _p2 != step_key:
                    _p2 = step_key
                    step_key = _re.sub(r'_af\d+$', '', step_key)
                    mat_name = mat_map.get(step_key)

        if mat_name and mat_name in appended:
            part.data.materials.clear()
            part.data.materials.append(appended[mat_name])
            assigned_count += 1
        else:
            unmatched_names.append(part.name)

    print(f"[blender_render] material assignment: {assigned_count}/{len(parts)} parts matched", flush=True)
    if unmatched_names:
        print(f"[blender_render] unmatched parts → assigning {FAILED_MATERIAL_NAME}: {unmatched_names[:10]}", flush=True)
        unmatched_set = set(unmatched_names)
        for part in parts:
            if part.name in unmatched_set:
                # Materials live on mesh data; duplicate shared data-blocks
                # first so the fallback doesn't leak onto matched duplicates.
                if part.data.users > 1:
                    part.data = part.data.copy()
                assign_failed_material(part)
||||
@@ -0,0 +1,149 @@
|
||||
"""Scene-level helpers for Blender headless renders."""
|
||||
from __future__ import annotations
|
||||
|
||||
import math
|
||||
|
||||
|
||||
def ensure_collection(name: str):
    """Fetch the collection called *name*, creating and linking it on demand.

    A newly created collection is linked under the scene's root collection.
    """
    import bpy  # type: ignore[import]

    existing = bpy.data.collections.get(name)
    if existing is not None:
        return existing
    created = bpy.data.collections.new(name)
    bpy.context.scene.collection.children.link(created)
    return created
||||
|
||||
|
||||
def apply_smooth_batch(parts: list, angle_deg: float) -> None:
    """Shade all mesh parts smooth (or flat) with one batched operator call.

    bpy.ops.object.shade_smooth_by_angle() acts on every selected object in
    a single C-level call, so selecting everything first turns O(n) operator
    overhead into O(1). Per-part calls cost ~90ms each × 175 parts = 16s;
    the batch call costs ~0.2s total. angle_deg <= 0 selects flat shading.
    """
    import bpy  # type: ignore[import]

    bpy.ops.object.select_all(action='DESELECT')
    meshes = [obj for obj in parts if obj.type == 'MESH']
    for obj in meshes:
        obj.select_set(True)
    if not meshes:
        return
    bpy.context.view_layer.objects.active = meshes[0]

    if angle_deg > 0:
        try:
            bpy.ops.object.shade_smooth_by_angle(angle=math.radians(angle_deg))
        except AttributeError:
            # Older Blender API: emulate via shade_smooth + auto-smooth angle
            # (the attribute only exists on those versions, hence hasattr).
            bpy.ops.object.shade_smooth()
            for obj in meshes:
                if hasattr(obj.data, 'use_auto_smooth'):
                    obj.data.use_auto_smooth = True
                    obj.data.auto_smooth_angle = math.radians(angle_deg)
    else:
        bpy.ops.object.shade_flat()
    bpy.ops.object.select_all(action='DESELECT')
||||
|
||||
|
||||
def apply_sharp_edges_from_occ(parts: list, sharp_edge_pairs: list) -> None:
    """Mark edges sharp using OCC-derived vertex-pair data.

    `sharp_edge_pairs` is a list of [[x0,y0,z0],[x1,y1,z1]] in mm.
    Blender mesh coordinates are in metres (STEP mm * 0.001 scale applied).
    We match each OCC vertex pair against bmesh vertex positions with a 0.5 mm
    tolerance (0.0005 m) and mark the matched edge as sharp.

    No-op when sharp_edge_pairs is empty.
    """
    if not sharp_edge_pairs:
        return

    import bmesh  # type: ignore[import]
    import mathutils  # type: ignore[import]

    SCALE = 0.001  # mm → m
    TOL = 0.0005   # 0.5 mm in metres

    # OCC STEP space (Z-up, mm) → Blender (Z-up, m):
    # RWGltf applies Z→Y-up, Blender import applies Y→Z-up.
    # Net: Blender(X, Y, Z) = OCC(X*0.001, -Z*0.001, Y*0.001)
    occ_pairs = []
    for pair in sharp_edge_pairs:
        v0 = mathutils.Vector((pair[0][0] * SCALE, -pair[0][2] * SCALE, pair[0][1] * SCALE))
        v1 = mathutils.Vector((pair[1][0] * SCALE, -pair[1][2] * SCALE, pair[1][1] * SCALE))
        occ_pairs.append((v0, v1))

    marked_total = 0
    for obj in parts:
        bm = bmesh.new()
        bm.from_mesh(obj.data)
        bm.verts.ensure_lookup_table()
        bm.edges.ensure_lookup_table()

        # Build KD-tree on vertices in WORLD space — OCC pairs are world coords,
        # but mesh vertices are in local space (assembly node transform in GLB).
        world_mat = obj.matrix_world
        kd = mathutils.kdtree.KDTree(len(bm.verts))
        for v in bm.verts:
            kd.insert(world_mat @ v.co, v.index)
        kd.balance()

        marked = 0
        for v0_occ, v1_occ in occ_pairs:
            # Nearest mesh vertex to each OCC endpoint; reject loose matches
            # beyond the 0.5 mm tolerance.
            _co0, idx0, dist0 = kd.find(v0_occ)
            _co1, idx1, dist1 = kd.find(v1_occ)
            if dist0 > TOL or dist1 > TOL:
                continue
            if idx0 == idx1:
                continue  # degenerate — both endpoints map to same vertex
            bv0 = bm.verts[idx0]
            bv1 = bm.verts[idx1]
            # Try both vertex orderings when looking the edge up.
            edge = bm.edges.get((bv0, bv1))
            if edge is None:
                edge = bm.edges.get((bv1, bv0))
            # Only count edges actually flipped from smooth to sharp.
            if edge is not None and edge.smooth:
                edge.smooth = False
                marked += 1

        bm.to_mesh(obj.data)
        bm.free()
        marked_total += marked

    print(f"[blender_render] OCC sharp edges applied: {marked_total} edges marked across {len(parts)} parts", flush=True)
|
||||
|
||||
|
||||
def setup_shadow_catcher(parts: list) -> None:
    """Enable the Shadowcatcher collection in the template and position its plane.

    The template must contain a 'Shadowcatcher' collection with a 'Shadowcatcher'
    mesh object. The plane is moved to the lowest Z of the product bounding box.
    Logs a warning (and changes nothing else) when the object is missing.
    """
    import bpy  # type: ignore[import]
    from mathutils import Vector  # type: ignore[import]

    sc_col_name = "Shadowcatcher"
    sc_obj_name = "Shadowcatcher"

    # Enable the Shadowcatcher collection in all view layers
    for vl in bpy.context.scene.view_layers:
        # Depth-first search of this view layer's layer-collection tree;
        # stops at the first collection whose name matches.
        def _enable_col_recursive(layer_col):
            if layer_col.collection.name == sc_col_name:
                layer_col.exclude = False
                layer_col.collection.hide_render = False
                layer_col.collection.hide_viewport = False
                return True
            for child in layer_col.children:
                if _enable_col_recursive(child):
                    return True
            return False
        _enable_col_recursive(vl.layer_collection)

    sc_obj = bpy.data.objects.get(sc_obj_name)
    if sc_obj:
        # Lowest world-space Z over every part's bounding-box corners.
        all_world_z = []
        for part in parts:
            for corner in part.bound_box:
                all_world_z.append((part.matrix_world @ Vector(corner)).z)
        if all_world_z:
            sc_obj.location.z = min(all_world_z)
        print(f"[blender_render] shadow catcher enabled, plane Z={sc_obj.location.z:.4f}")
    else:
        print(f"[blender_render] WARNING: shadow catcher object '{sc_obj_name}' not found in template")
||||
@@ -17,518 +17,100 @@ Features:
|
||||
"""
|
||||
import sys
|
||||
import os
|
||||
import math
|
||||
|
||||
# Force unbuffered stdout so render log lines appear immediately
|
||||
os.environ["PYTHONUNBUFFERED"] = "1"
|
||||
if hasattr(sys.stdout, "reconfigure"):
|
||||
sys.stdout.reconfigure(line_buffering=True)
|
||||
|
||||
import bpy
|
||||
from mathutils import Vector, Matrix
|
||||
# Add script directory to sys.path so Blender Python finds our submodules
|
||||
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
||||
|
||||
# Fallback material name — magenta, immediately visible when material assignment fails
|
||||
FAILED_MATERIAL_NAME = "SCHAEFFLER_059999_FailedMaterial"
|
||||
import bpy # type: ignore[import]
|
||||
|
||||
# ── Parse arguments ───────────────────────────────────────────────────────────
|
||||
|
||||
argv = sys.argv
|
||||
if "--" in argv:
|
||||
argv = argv[argv.index("--") + 1:]
|
||||
else:
|
||||
argv = []
|
||||
|
||||
if len(argv) < 4:
|
||||
print("Usage: blender --background --python blender_render.py -- "
|
||||
"<glb_path> <output_path> <width> <height> [engine] [samples] [smooth_angle] [cycles_device] [transparent_bg]")
|
||||
sys.exit(1)
|
||||
from _blender_gpu import activate_gpu, configure_engine
|
||||
from _blender_import import import_glb, apply_rotation
|
||||
from _blender_materials import (
|
||||
FAILED_MATERIAL_NAME, assign_failed_material,
|
||||
build_mat_map_lower, apply_material_library,
|
||||
)
|
||||
from _blender_camera import setup_auto_camera, setup_auto_lights
|
||||
from _blender_scene import (
|
||||
ensure_collection, apply_smooth_batch,
|
||||
apply_sharp_edges_from_occ, setup_shadow_catcher,
|
||||
)
|
||||
|
||||
# ── Parse arguments ────────────────────────────────────────────────────────────
|
||||
import json as _json
|
||||
|
||||
glb_path = argv[0]
|
||||
output_path = argv[1]
|
||||
width = int(argv[2])
|
||||
height = int(argv[3])
|
||||
engine = argv[4].lower() if len(argv) > 4 else "cycles"
|
||||
samples = int(argv[5]) if len(argv) > 5 else (64 if engine == "eevee" else 256)
|
||||
smooth_angle = int(argv[6]) if len(argv) > 6 else 30 # degrees; 0 = flat shading
|
||||
cycles_device = argv[7].lower() if len(argv) > 7 else "auto" # "auto", "gpu", "cpu"
|
||||
transparent_bg = argv[8] == "1" if len(argv) > 8 else False
|
||||
template_path = argv[9] if len(argv) > 9 and argv[9] else ""
|
||||
target_collection = argv[10] if len(argv) > 10 else "Product"
|
||||
material_library_path = argv[11] if len(argv) > 11 and argv[11] else ""
|
||||
material_map_raw = argv[12] if len(argv) > 12 else "{}"
|
||||
try:
|
||||
material_map = _json.loads(material_map_raw) if material_map_raw else {}
|
||||
except _json.JSONDecodeError:
|
||||
material_map = {}
|
||||
def _arg(n, default="", transform=str):
    """Positional CLI-arg helper: transform(argv[n]) when argv has a
    non-empty value at index n, else *default* (returned untransformed)."""
    return transform(argv[n]) if len(argv) > n and argv[n] else default
|
||||
|
||||
part_names_ordered_raw = argv[13] if len(argv) > 13 else "[]"
|
||||
try:
|
||||
part_names_ordered = _json.loads(part_names_ordered_raw) if part_names_ordered_raw else []
|
||||
except _json.JSONDecodeError:
|
||||
part_names_ordered = []
|
||||
argv = sys.argv[sys.argv.index("--") + 1:] if "--" in sys.argv else []
|
||||
if len(argv) < 4:
|
||||
print("Usage: blender --background --python blender_render.py -- "
|
||||
"<glb_path> <output_path> <width> <height> ...")
|
||||
sys.exit(1)
|
||||
|
||||
lighting_only = argv[14] == "1" if len(argv) > 14 else False
|
||||
shadow_catcher = argv[15] == "1" if len(argv) > 15 else False
|
||||
rotation_x = float(argv[16]) if len(argv) > 16 else 0.0
|
||||
rotation_y = float(argv[17]) if len(argv) > 17 else 0.0
|
||||
rotation_z = float(argv[18]) if len(argv) > 18 else 0.0
|
||||
noise_threshold_arg = argv[19] if len(argv) > 19 else ""
|
||||
denoiser_arg = argv[20] if len(argv) > 20 else ""
|
||||
denoising_input_passes_arg = argv[21] if len(argv) > 21 else ""
|
||||
denoising_prefilter_arg = argv[22] if len(argv) > 22 else ""
|
||||
denoising_quality_arg = argv[23] if len(argv) > 23 else ""
|
||||
denoising_use_gpu_arg = argv[24] if len(argv) > 24 else ""
|
||||
glb_path = argv[0]
|
||||
output_path = argv[1]
|
||||
width = int(argv[2])
|
||||
height = int(argv[3])
|
||||
engine = _arg(4, "cycles", str.lower)
|
||||
samples = _arg(5, None, int)
|
||||
smooth_angle = _arg(6, 30, int)
|
||||
cycles_device = _arg(7, "auto", str.lower)
|
||||
transparent_bg = argv[8] == "1" if len(argv) > 8 else False
|
||||
template_path = _arg(9, "")
|
||||
target_collection = _arg(10, "Product")
|
||||
material_library_path = _arg(11, "")
|
||||
material_map = _json.loads(_arg(12, "{}")) if _arg(12, "{}") else {}
|
||||
part_names_ordered = _json.loads(_arg(13, "[]")) if _arg(13, "[]") else []
|
||||
lighting_only = argv[14] == "1" if len(argv) > 14 else False
|
||||
shadow_catcher = argv[15] == "1" if len(argv) > 15 else False
|
||||
rotation_x = _arg(16, 0.0, float)
|
||||
rotation_y = _arg(17, 0.0, float)
|
||||
rotation_z = _arg(18, 0.0, float)
|
||||
noise_threshold_arg = _arg(19, "")
|
||||
denoiser_arg = _arg(20, "")
|
||||
denoising_input_passes_arg = _arg(21, "")
|
||||
denoising_prefilter_arg = _arg(22, "")
|
||||
denoising_quality_arg = _arg(23, "")
|
||||
denoising_use_gpu_arg = _arg(24, "")
|
||||
|
||||
if samples is None:
|
||||
samples = 64 if engine == "eevee" else 256
|
||||
|
||||
# Named argument: --mesh-attributes <json>
|
||||
_mesh_attrs: dict = {}
|
||||
_sys_argv = sys.argv
|
||||
if "--mesh-attributes" in _sys_argv:
|
||||
_idx = _sys_argv.index("--mesh-attributes")
|
||||
if "--mesh-attributes" in sys.argv:
|
||||
_idx = sys.argv.index("--mesh-attributes")
|
||||
try:
|
||||
_mesh_attrs = _json.loads(_sys_argv[_idx + 1])
|
||||
_mesh_attrs = _json.loads(sys.argv[_idx + 1])
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Validate template path: if provided it MUST exist on disk.
|
||||
# Fail loudly rather than silently rendering with factory settings.
|
||||
if template_path and not os.path.isfile(template_path):
|
||||
print(f"[blender_render] ERROR: template_path was provided but file not found: {template_path}")
|
||||
print("[blender_render] Check that the blend-templates directory is on the shared volume.")
|
||||
print(f"[blender_render] ERROR: template not found: {template_path}")
|
||||
sys.exit(1)
|
||||
|
||||
use_template = bool(template_path)
|
||||
|
||||
print(f"[blender_render] engine={engine}, samples={samples}, size={width}x{height}, smooth_angle={smooth_angle}°, device={cycles_device}, transparent={transparent_bg}")
|
||||
print(f"[blender_render] part_names_ordered: {len(part_names_ordered)} entries")
|
||||
if use_template:
|
||||
print(f"[blender_render] template={template_path}, collection={target_collection}, lighting_only={lighting_only}")
|
||||
else:
|
||||
print("[blender_render] no template — using factory settings (Mode A)")
|
||||
print(f"[blender_render] {'template='+template_path+', collection='+target_collection+', lighting_only='+str(lighting_only) if use_template else 'no template — Mode A'}")
|
||||
if material_library_path:
|
||||
print(f"[blender_render] material_library={material_library_path}, material_map keys={list(material_map.keys())}")
|
||||
|
||||
# ── Helper: find or create collection by name ────────────────────────────────
|
||||
# ── Early GPU activation (must happen BEFORE open_mainfile / Cycles init) ─────
|
||||
_early_gpu_type = activate_gpu(cycles_device)
|
||||
|
||||
def _ensure_collection(name: str):
|
||||
"""Return a collection by name, creating it if needed."""
|
||||
if name in bpy.data.collections:
|
||||
return bpy.data.collections[name]
|
||||
col = bpy.data.collections.new(name)
|
||||
bpy.context.scene.collection.children.link(col)
|
||||
return col
|
||||
|
||||
|
||||
def _apply_smooth_batch(parts, angle_deg):
|
||||
"""Apply smooth shading to ALL parts in a single operator call.
|
||||
|
||||
bpy.ops.object.shade_smooth_by_angle() operates on all selected objects
|
||||
at once (one C-level call), so batching reduces O(n) operator overhead to O(1).
|
||||
Per-part calls cost ~90ms each × 175 parts = 16s; batch call costs ~0.2s total.
|
||||
"""
|
||||
bpy.ops.object.select_all(action='DESELECT')
|
||||
mesh_parts = [p for p in parts if p.type == 'MESH']
|
||||
for part in mesh_parts:
|
||||
part.select_set(True)
|
||||
if not mesh_parts:
|
||||
return
|
||||
bpy.context.view_layer.objects.active = mesh_parts[0]
|
||||
if angle_deg > 0:
|
||||
try:
|
||||
bpy.ops.object.shade_smooth_by_angle(angle=math.radians(angle_deg))
|
||||
except AttributeError:
|
||||
bpy.ops.object.shade_smooth()
|
||||
for part in mesh_parts:
|
||||
if hasattr(part.data, 'use_auto_smooth'):
|
||||
part.data.use_auto_smooth = True
|
||||
part.data.auto_smooth_angle = math.radians(angle_deg)
|
||||
else:
|
||||
bpy.ops.object.shade_flat()
|
||||
bpy.ops.object.select_all(action='DESELECT')
|
||||
|
||||
|
||||
def _assign_failed_material(part_obj):
|
||||
"""Assign the standard fallback material (magenta) when no library material matches.
|
||||
|
||||
Tries to reuse SCHAEFFLER_059999_FailedMaterial from the library first.
|
||||
Creates a simple magenta Principled BSDF if the library material is not loaded.
|
||||
"""
|
||||
mat = bpy.data.materials.get(FAILED_MATERIAL_NAME)
|
||||
if mat is None:
|
||||
mat = bpy.data.materials.new(name=FAILED_MATERIAL_NAME)
|
||||
mat.use_nodes = True
|
||||
bsdf = mat.node_tree.nodes.get("Principled BSDF")
|
||||
if bsdf:
|
||||
bsdf.inputs["Base Color"].default_value = (1.0, 0.0, 1.0, 1.0) # magenta
|
||||
bsdf.inputs["Roughness"].default_value = 0.6
|
||||
part_obj.data.materials.clear()
|
||||
part_obj.data.materials.append(mat)
|
||||
|
||||
|
||||
import re as _re
|
||||
|
||||
|
||||
# _scale_mm_to_m removed: OCC GLB export produces coordinates in metres already.
|
||||
|
||||
|
||||
def _apply_rotation(parts, rx, ry, rz):
|
||||
"""Apply Euler rotation (degrees, XYZ order) to all parts around world origin.
|
||||
|
||||
After _import_glb the combined bbox center is at world origin,
|
||||
so rotating around origin is equivalent to rotating around the assembly center.
|
||||
"""
|
||||
if not parts or (rx == 0.0 and ry == 0.0 and rz == 0.0):
|
||||
return
|
||||
from mathutils import Euler
|
||||
rot_mat = Euler((math.radians(rx), math.radians(ry), math.radians(rz)), 'XYZ').to_matrix().to_4x4()
|
||||
for p in parts:
|
||||
p.matrix_world = rot_mat @ p.matrix_world
|
||||
# Bake rotation into mesh data so camera bbox calculations see the rotated geometry
|
||||
bpy.ops.object.select_all(action='DESELECT')
|
||||
for p in parts:
|
||||
p.select_set(True)
|
||||
bpy.context.view_layer.objects.active = parts[0]
|
||||
bpy.ops.object.transform_apply(location=False, rotation=True, scale=False)
|
||||
print(f"[blender_render] applied rotation ({rx}°, {ry}°, {rz}°) to {len(parts)} parts")
|
||||
|
||||
|
||||
def _mark_sharp_and_seams(obj, smooth_angle_deg: float, sharp_edge_midpoints=None):
|
||||
"""Mark sharp edges and UV seams based on angle threshold and optional midpoints."""
|
||||
import math
|
||||
import bpy
|
||||
|
||||
# Ensure we're working with the right object
|
||||
bpy.context.view_layer.objects.active = obj
|
||||
obj.select_set(True)
|
||||
|
||||
# Set auto-smooth angle
|
||||
if hasattr(obj.data, 'auto_smooth_angle'):
|
||||
obj.data.auto_smooth_angle = math.radians(smooth_angle_deg)
|
||||
|
||||
# Enter edit mode to mark edges
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
bpy.ops.mesh.select_all(action='DESELECT')
|
||||
|
||||
# Select edges above threshold angle and mark sharp
|
||||
bpy.ops.mesh.edges_select_sharp(sharpness=math.radians(smooth_angle_deg))
|
||||
bpy.ops.mesh.mark_sharp()
|
||||
|
||||
# Mark same edges as UV seams
|
||||
bpy.ops.mesh.mark_seam(clear=False)
|
||||
|
||||
# If we have OCC-derived midpoints, try to mark additional edges
|
||||
if sharp_edge_midpoints and len(sharp_edge_midpoints) > 0:
|
||||
try:
|
||||
import bmesh
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
bm = bmesh.new()
|
||||
bm.from_mesh(obj.data)
|
||||
bm.edges.ensure_lookup_table()
|
||||
bm.verts.ensure_lookup_table()
|
||||
|
||||
# Build KD-tree for edge midpoints
|
||||
import mathutils
|
||||
kd = mathutils.kdtree.KDTree(len(bm.edges))
|
||||
for i, edge in enumerate(bm.edges):
|
||||
midpt = (edge.verts[0].co + edge.verts[1].co) / 2
|
||||
kd.insert(midpt, i)
|
||||
kd.balance()
|
||||
|
||||
# For each OCC sharp midpoint, find nearest Blender edge
|
||||
tol = 0.5 # 0.5 mm tolerance (coordinates in mm before scale)
|
||||
for mp in sharp_edge_midpoints[:200]:
|
||||
vec = mathutils.Vector(mp)
|
||||
co, idx, dist = kd.find(vec)
|
||||
if dist < tol:
|
||||
bm.edges[idx].seam = True
|
||||
try:
|
||||
bm.edges[idx].smooth = False
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
bm.to_mesh(obj.data)
|
||||
bm.free()
|
||||
except Exception:
|
||||
pass # Non-fatal
|
||||
|
||||
# Return to object mode
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
|
||||
def _apply_sharp_edges_from_occ(parts, sharp_edge_pairs):
|
||||
"""Mark edges sharp using OCC-derived vertex-pair data.
|
||||
|
||||
`sharp_edge_pairs` is a list of [[x0,y0,z0],[x1,y1,z1]] in mm.
|
||||
Blender mesh coordinates are in metres (STEP mm * 0.001 scale applied).
|
||||
We match each OCC vertex pair against bmesh vertex positions with a 0.5 mm
|
||||
tolerance (0.0005 m) and mark the matched edge as sharp.
|
||||
"""
|
||||
if not sharp_edge_pairs:
|
||||
return
|
||||
|
||||
import bmesh
|
||||
import mathutils
|
||||
|
||||
SCALE = 0.001 # mm → m
|
||||
TOL = 0.0005 # 0.5 mm in metres
|
||||
|
||||
# OCC STEP space (Z-up, mm) → Blender (Z-up, m):
|
||||
# RWGltf applies Z→Y-up, Blender import applies Y→Z-up.
|
||||
# Net: Blender(X, Y, Z) = OCC(X*0.001, -Z*0.001, Y*0.001)
|
||||
occ_pairs = []
|
||||
for pair in sharp_edge_pairs:
|
||||
v0 = mathutils.Vector((pair[0][0] * SCALE, -pair[0][2] * SCALE, pair[0][1] * SCALE))
|
||||
v1 = mathutils.Vector((pair[1][0] * SCALE, -pair[1][2] * SCALE, pair[1][1] * SCALE))
|
||||
occ_pairs.append((v0, v1))
|
||||
|
||||
marked_total = 0
|
||||
for obj in parts:
|
||||
bm = bmesh.new()
|
||||
bm.from_mesh(obj.data)
|
||||
bm.verts.ensure_lookup_table()
|
||||
bm.edges.ensure_lookup_table()
|
||||
|
||||
# Build KD-tree on vertices in WORLD space — OCC pairs are world coords,
|
||||
# but mesh vertices are in local space (assembly node transform in GLB).
|
||||
world_mat = obj.matrix_world
|
||||
kd = mathutils.kdtree.KDTree(len(bm.verts))
|
||||
for v in bm.verts:
|
||||
kd.insert(world_mat @ v.co, v.index)
|
||||
kd.balance()
|
||||
|
||||
marked = 0
|
||||
for v0_occ, v1_occ in occ_pairs:
|
||||
# Find closest Blender vertex to each OCC endpoint
|
||||
_co0, idx0, dist0 = kd.find(v0_occ)
|
||||
_co1, idx1, dist1 = kd.find(v1_occ)
|
||||
if dist0 > TOL or dist1 > TOL:
|
||||
continue
|
||||
if idx0 == idx1:
|
||||
continue # degenerate — both endpoints map to same vertex
|
||||
# Find the edge shared by these two vertices
|
||||
bv0 = bm.verts[idx0]
|
||||
bv1 = bm.verts[idx1]
|
||||
edge = bm.edges.get((bv0, bv1))
|
||||
if edge is None:
|
||||
edge = bm.edges.get((bv1, bv0))
|
||||
if edge is not None and edge.smooth:
|
||||
edge.smooth = False
|
||||
marked += 1
|
||||
|
||||
bm.to_mesh(obj.data)
|
||||
bm.free()
|
||||
marked_total += marked
|
||||
|
||||
print(f"[blender_render] OCC sharp edges applied: {marked_total} edges marked across {len(parts)} parts", flush=True)
|
||||
|
||||
|
||||
def _import_glb(glb_file):
|
||||
"""Import OCC-generated GLB into Blender.
|
||||
|
||||
OCC exports one mesh object per STEP part, already in metres.
|
||||
Blender's native GLTF importer preserves part names.
|
||||
|
||||
Returns list of Blender mesh objects, centred at world origin.
|
||||
"""
|
||||
bpy.ops.object.select_all(action='DESELECT')
|
||||
bpy.ops.import_scene.gltf(filepath=glb_file)
|
||||
parts = [o for o in bpy.context.selected_objects if o.type == 'MESH']
|
||||
|
||||
if not parts:
|
||||
print(f"ERROR: No mesh objects imported from {glb_file}")
|
||||
sys.exit(1)
|
||||
|
||||
print(f"[blender_render] imported {len(parts)} part(s) from GLB: "
|
||||
f"{[p.name for p in parts[:5]]}")
|
||||
|
||||
# Remove OCC-baked custom normals so shade_smooth_by_angle can recompute
|
||||
# normals from scratch (respecting our sharp edge marks).
|
||||
cleared = 0
|
||||
for p in parts:
|
||||
if "custom_normal" in p.data.attributes:
|
||||
p.data.attributes.remove(p.data.attributes["custom_normal"])
|
||||
cleared += 1
|
||||
if cleared:
|
||||
print(f"[blender_render] cleared OCC custom_normal from {cleared} mesh objects")
|
||||
|
||||
# Centre combined bbox at world origin
|
||||
all_corners = []
|
||||
for p in parts:
|
||||
all_corners.extend(p.matrix_world @ Vector(c) for c in p.bound_box)
|
||||
|
||||
if all_corners:
|
||||
mins = Vector((min(v.x for v in all_corners),
|
||||
min(v.y for v in all_corners),
|
||||
min(v.z for v in all_corners)))
|
||||
maxs = Vector((max(v.x for v in all_corners),
|
||||
max(v.y for v in all_corners),
|
||||
max(v.z for v in all_corners)))
|
||||
center = (mins + maxs) * 0.5
|
||||
# Move root objects (parentless) to centre. Adjusting a child's local
|
||||
# .location by a world-space vector gives wrong results when the GLB has
|
||||
# Empty parent nodes (OCC assembly hierarchy). Shifting the root moves
|
||||
# the entire hierarchy correctly.
|
||||
all_imported = list(bpy.context.selected_objects)
|
||||
root_objects = [o for o in all_imported if o.parent is None]
|
||||
for obj in root_objects:
|
||||
obj.location -= center
|
||||
|
||||
return parts
|
||||
|
||||
|
||||
def _resolve_part_name(index, part_obj):
|
||||
"""Get the STEP part name for a Blender part by index.
|
||||
|
||||
With GLB import, part_obj.name IS the STEP name (possibly with
|
||||
Blender .NNN suffix for duplicates). Strip that suffix for lookup.
|
||||
Falls back to part_names_ordered index mapping.
|
||||
"""
|
||||
# Strip Blender auto-suffix (.001, .002, etc.)
|
||||
base_name = _re.sub(r'\.\d{3}$', '', part_obj.name)
|
||||
# If the base name looks like a real STEP part name (not generic "Cube" etc.),
|
||||
# use it directly
|
||||
if part_names_ordered and index < len(part_names_ordered):
|
||||
return part_names_ordered[index]
|
||||
return base_name
|
||||
|
||||
|
||||
def _apply_material_library(parts, mat_lib_path, mat_map):
|
||||
"""Append materials from library .blend and assign to parts via material_map.
|
||||
|
||||
GLB-imported objects are named after STEP parts, so matching is by name
|
||||
(stripping Blender .NNN suffix for duplicates). Falls back to
|
||||
part_names_ordered index-based matching.
|
||||
|
||||
mat_map: {part_name_lower: material_name}
|
||||
Parts without a match keep their current material.
|
||||
"""
|
||||
if not mat_lib_path or not os.path.isfile(mat_lib_path):
|
||||
print(f"[blender_render] material library not found: {mat_lib_path}")
|
||||
return
|
||||
|
||||
# Collect unique material names needed
|
||||
needed = set(mat_map.values())
|
||||
if not needed:
|
||||
return
|
||||
|
||||
# Append materials from library
|
||||
appended = {}
|
||||
for mat_name in needed:
|
||||
inner_path = f"{mat_lib_path}/Material/{mat_name}"
|
||||
try:
|
||||
bpy.ops.wm.append(
|
||||
filepath=inner_path,
|
||||
directory=f"{mat_lib_path}/Material/",
|
||||
filename=mat_name,
|
||||
link=False,
|
||||
)
|
||||
if mat_name in bpy.data.materials:
|
||||
appended[mat_name] = bpy.data.materials[mat_name]
|
||||
print(f"[blender_render] appended material: {mat_name}")
|
||||
else:
|
||||
print(f"[blender_render] WARNING: material '{mat_name}' not found after append")
|
||||
except Exception as exc:
|
||||
print(f"[blender_render] WARNING: failed to append material '{mat_name}': {exc}")
|
||||
|
||||
if not appended:
|
||||
return
|
||||
|
||||
# Assign materials to parts — primary: name-based (GLB object names),
|
||||
# secondary: index-based via part_names_ordered
|
||||
assigned_count = 0
|
||||
unmatched_names = []
|
||||
for i, part in enumerate(parts):
|
||||
# Try name-based matching first (strip Blender .NNN suffix)
|
||||
base_name = _re.sub(r'\.\d{3}$', '', part.name)
|
||||
# Strip OCC assembly-instance suffix (_AF0, _AF1, …) — GLB object
|
||||
# names may or may not have them while mat_map keys might.
|
||||
_prev = None
|
||||
while _prev != base_name:
|
||||
_prev = base_name
|
||||
base_name = _re.sub(r'_AF\d+$', '', base_name, flags=_re.IGNORECASE)
|
||||
part_key = base_name.lower().strip()
|
||||
mat_name = mat_map.get(part_key)
|
||||
|
||||
# Prefix fallback: if a mat_map key starts with our base name or
|
||||
# vice-versa, use the longest matching key (most-specific wins).
|
||||
if not mat_name:
|
||||
for key, val in sorted(mat_map.items(), key=lambda x: len(x[0]), reverse=True):
|
||||
if len(key) >= 5 and len(part_key) >= 5 and (
|
||||
part_key.startswith(key) or key.startswith(part_key)
|
||||
):
|
||||
mat_name = val
|
||||
break
|
||||
|
||||
# Fall back to index-based matching via part_names_ordered
|
||||
if not mat_name and part_names_ordered and i < len(part_names_ordered):
|
||||
step_name = part_names_ordered[i]
|
||||
step_key = step_name.lower().strip()
|
||||
mat_name = mat_map.get(step_key)
|
||||
# Also try stripping AF from part_names_ordered entry
|
||||
if not mat_name:
|
||||
_p2 = None
|
||||
while _p2 != step_key:
|
||||
_p2 = step_key
|
||||
step_key = _re.sub(r'_af\d+$', '', step_key)
|
||||
mat_name = mat_map.get(step_key)
|
||||
|
||||
if mat_name and mat_name in appended:
|
||||
part.data.materials.clear()
|
||||
part.data.materials.append(appended[mat_name])
|
||||
assigned_count += 1
|
||||
else:
|
||||
unmatched_names.append(part.name)
|
||||
|
||||
print(f"[blender_render] material assignment: {assigned_count}/{len(parts)} parts matched", flush=True)
|
||||
if unmatched_names:
|
||||
print(f"[blender_render] unmatched parts → assigning {FAILED_MATERIAL_NAME}: {unmatched_names[:10]}", flush=True)
|
||||
unmatched_set = set(unmatched_names)
|
||||
for part in parts:
|
||||
if part.name in unmatched_set:
|
||||
if part.data.users > 1:
|
||||
part.data = part.data.copy()
|
||||
_assign_failed_material(part)
|
||||
|
||||
|
||||
# ── Early GPU activation (must happen BEFORE open_mainfile / Cycles init) ────
|
||||
# Blender compiles Cycles kernels when the engine first initializes. If the
|
||||
# compute_device_type is NONE at that point, Cycles locks to CPU for the rest
|
||||
# of the session. We therefore probe + enable GPU devices NOW, before any
|
||||
# .blend template (which may trigger Cycles init) is loaded.
|
||||
def _activate_gpu():
|
||||
"""Probe for GPU compute devices and activate them. Returns device type or None."""
|
||||
if cycles_device == "cpu":
|
||||
return None
|
||||
try:
|
||||
cprefs = bpy.context.preferences.addons['cycles'].preferences
|
||||
for dt in ('OPTIX', 'CUDA', 'HIP', 'ONEAPI'):
|
||||
try:
|
||||
cprefs.compute_device_type = dt
|
||||
cprefs.get_devices()
|
||||
gpu = [d for d in cprefs.devices if d.type != 'CPU']
|
||||
if gpu:
|
||||
for d in cprefs.devices:
|
||||
d.use = (d.type != 'CPU')
|
||||
print(f"[blender_render] early GPU activation: {dt}, "
|
||||
f"devices={[(d.name, d.type) for d in gpu]}", flush=True)
|
||||
return dt
|
||||
except Exception as e:
|
||||
print(f"[blender_render] {dt} not available: {e}", flush=True)
|
||||
except Exception as e:
|
||||
print(f"[blender_render] early GPU probe failed: {e}", flush=True)
|
||||
return None
|
||||
|
||||
_early_gpu_type = _activate_gpu()
|
||||
|
||||
# ── Timing harness ────────────────────────────────────────────────────────────
|
||||
# ── Timing harness ─────────────────────────────────────────────────────────────
|
||||
import time as _time
|
||||
_t0 = _time.monotonic()
|
||||
_timings: dict = {}
|
||||
|
||||
|
||||
def _lap(label: str) -> None:
|
||||
"""Record elapsed time since the last _lap() call and since t0."""
|
||||
global _t_last
|
||||
now = _time.monotonic()
|
||||
if not hasattr(_lap, '_last'):
|
||||
_lap._last = _t0
|
||||
@@ -538,259 +120,77 @@ def _lap(label: str) -> None:
|
||||
print(f"[blender_render] TIMING {label}={delta:.2f}s (total={total:.2f}s)", flush=True)
|
||||
_lap._last = now
|
||||
|
||||
# ── SCENE SETUP ──────────────────────────────────────────────────────────────
|
||||
|
||||
# ── SCENE SETUP ───────────────────────────────────────────────────────────────
|
||||
|
||||
if use_template:
|
||||
# ── MODE B: Template-based render ────────────────────────────────────────
|
||||
# ── MODE B: Template-based render ─────────────────────────────────────────
|
||||
print(f"[blender_render] Opening template: {template_path}")
|
||||
bpy.ops.wm.open_mainfile(filepath=template_path)
|
||||
_lap("template_load")
|
||||
|
||||
# Find or create target collection
|
||||
target_col = _ensure_collection(target_collection)
|
||||
|
||||
# Import OCC GLB (already in metres, one object per STEP part)
|
||||
parts = _import_glb(glb_path)
|
||||
target_col = ensure_collection(target_collection)
|
||||
parts = import_glb(glb_path)
|
||||
_lap("glb_import")
|
||||
# Apply render position rotation (before camera/bbox calculations)
|
||||
_apply_rotation(parts, rotation_x, rotation_y, rotation_z)
|
||||
apply_rotation(parts, rotation_x, rotation_y, rotation_z)
|
||||
_lap("rotation")
|
||||
|
||||
# Move imported parts into target collection
|
||||
for part in parts:
|
||||
# Remove from all existing collections
|
||||
for col in list(part.users_collection):
|
||||
col.objects.unlink(part)
|
||||
target_col.objects.link(part)
|
||||
|
||||
# Batch smooth shading: select all parts, call shade_smooth_by_angle ONCE.
|
||||
# In Blender 5 this adds a "Smooth by Angle" GeoNodes modifier to every
|
||||
# selected object in a single C call — same effect as calling per-object
|
||||
# but ~100× faster (0.2s vs 16s for 175 parts).
|
||||
_apply_smooth_batch(parts, smooth_angle)
|
||||
# If OCC extracted sharp edge vertex pairs, mark them explicitly.
|
||||
apply_smooth_batch(parts, smooth_angle)
|
||||
_occ_pairs = _mesh_attrs.get("sharp_edge_pairs") or []
|
||||
if _occ_pairs:
|
||||
_apply_sharp_edges_from_occ(parts, _occ_pairs)
|
||||
apply_sharp_edges_from_occ(parts, _occ_pairs)
|
||||
_lap("smooth_shading")
|
||||
|
||||
# Material assignment: library materials if available, otherwise palette
|
||||
if material_library_path and material_map:
|
||||
# Build lowercased material_map for matching.
|
||||
# Include BOTH the original key AND the key with _AF\d+ stripped,
|
||||
# so GLB names (which may lack AF suffixes) can match.
|
||||
mat_map_lower = {}
|
||||
for k, v in material_map.items():
|
||||
kl = k.lower().strip()
|
||||
mat_map_lower[kl] = v
|
||||
# Also add AF-stripped version
|
||||
_stripped = kl
|
||||
_p = None
|
||||
while _p != _stripped:
|
||||
_p = _stripped
|
||||
_stripped = _re.sub(r'_af\d+$', '', _stripped)
|
||||
if _stripped != kl:
|
||||
mat_map_lower.setdefault(_stripped, v)
|
||||
_apply_material_library(parts, material_library_path, mat_map_lower)
|
||||
# Parts not matched by library get the failed-material fallback (magenta)
|
||||
unmatched = []
|
||||
for part in parts:
|
||||
if not part.data.materials or len(part.data.materials) == 0:
|
||||
_assign_failed_material(part)
|
||||
unmatched.append(part.name)
|
||||
if unmatched:
|
||||
print(f"[blender_render] WARNING: {len(unmatched)} parts unmatched, assigned {FAILED_MATERIAL_NAME}: {unmatched[:5]}", flush=True)
|
||||
apply_material_library(parts, material_library_path, build_mat_map_lower(material_map), part_names_ordered)
|
||||
else:
|
||||
# No material library — assign fallback to all parts
|
||||
for part in parts:
|
||||
_assign_failed_material(part)
|
||||
assign_failed_material(part)
|
||||
_lap("material_assign")
|
||||
|
||||
# ── Shadow catcher (Cycles only, template mode only) ─────────────────────
|
||||
if shadow_catcher:
|
||||
sc_col_name = "Shadowcatcher"
|
||||
sc_obj_name = "Shadowcatcher"
|
||||
# Enable the Shadowcatcher collection in all view layers
|
||||
for vl in bpy.context.scene.view_layers:
|
||||
def _enable_col_recursive(layer_col):
|
||||
if layer_col.collection.name == sc_col_name:
|
||||
layer_col.exclude = False
|
||||
layer_col.collection.hide_render = False
|
||||
layer_col.collection.hide_viewport = False
|
||||
return True
|
||||
for child in layer_col.children:
|
||||
if _enable_col_recursive(child):
|
||||
return True
|
||||
return False
|
||||
_enable_col_recursive(vl.layer_collection)
|
||||
setup_shadow_catcher(parts)
|
||||
|
||||
sc_obj = bpy.data.objects.get(sc_obj_name)
|
||||
if sc_obj:
|
||||
# Calculate product bbox min Z (world space)
|
||||
all_world_corners = []
|
||||
for part in parts:
|
||||
for corner in part.bound_box:
|
||||
all_world_corners.append((part.matrix_world @ Vector(corner)).z)
|
||||
if all_world_corners:
|
||||
sc_obj.location.z = min(all_world_corners)
|
||||
print(f"[blender_render] shadow catcher enabled, plane Z={sc_obj.location.z:.4f}")
|
||||
else:
|
||||
print(f"[blender_render] WARNING: shadow catcher object '{sc_obj_name}' not found in template")
|
||||
|
||||
# lighting_only: use template World/HDRI but force auto-camera UNLESS the shadow
|
||||
# catcher is enabled — in that case the template camera is already positioned to
|
||||
# show both the product and its shadow on the ground plane.
|
||||
needs_auto_camera = (lighting_only and not shadow_catcher) or not bpy.context.scene.camera
|
||||
if lighting_only and not shadow_catcher:
|
||||
print("[blender_render] lighting_only mode: using template World/HDRI, forcing auto-camera")
|
||||
elif needs_auto_camera:
|
||||
print("[blender_render] WARNING: template has no camera — will create auto-camera")
|
||||
|
||||
# Set very close near clip on template camera for mm-scale parts (now in metres)
|
||||
if not needs_auto_camera and bpy.context.scene.camera:
|
||||
bpy.context.scene.camera.data.clip_start = 0.001
|
||||
|
||||
print(f"[blender_render] template mode: {len(parts)} parts imported into collection '{target_collection}'")
|
||||
|
||||
else:
|
||||
# ── MODE A: Factory settings (original behavior) ─────────────────────────
|
||||
# ── MODE A: Factory settings ───────────────────────────────────────────────
|
||||
needs_auto_camera = True
|
||||
bpy.ops.wm.read_factory_settings(use_empty=True)
|
||||
# Import OCC GLB (already in metres, one object per STEP part)
|
||||
parts = _import_glb(glb_path)
|
||||
# Apply render position rotation (before camera/bbox calculations)
|
||||
_apply_rotation(parts, rotation_x, rotation_y, rotation_z)
|
||||
parts = import_glb(glb_path)
|
||||
apply_rotation(parts, rotation_x, rotation_y, rotation_z)
|
||||
|
||||
import time as _time
|
||||
_t_smooth_a = _time.time()
|
||||
_apply_smooth_batch(parts, smooth_angle)
|
||||
apply_smooth_batch(parts, smooth_angle)
|
||||
_occ_pairs = _mesh_attrs.get("sharp_edge_pairs") or []
|
||||
if _occ_pairs:
|
||||
_apply_sharp_edges_from_occ(parts, _occ_pairs)
|
||||
apply_sharp_edges_from_occ(parts, _occ_pairs)
|
||||
for part in parts:
|
||||
_assign_failed_material(part)
|
||||
assign_failed_material(part)
|
||||
print(f"[blender_render] smooth+fallback-material: {len(parts)} parts ({_time.time()-_t_smooth_a:.2f}s)", flush=True)
|
||||
|
||||
# Apply material library on top of palette colours (same logic as Mode B).
|
||||
# material_library_path / material_map are parsed from argv even in Mode A
|
||||
# but were previously never used here — that was the bug.
|
||||
if material_library_path and material_map:
|
||||
mat_map_lower = {}
|
||||
for k, v in material_map.items():
|
||||
kl = k.lower().strip()
|
||||
mat_map_lower[kl] = v
|
||||
_stripped = kl
|
||||
_p = None
|
||||
while _p != _stripped:
|
||||
_p = _stripped
|
||||
_stripped = _re.sub(r'_af\d+$', '', _stripped)
|
||||
if _stripped != kl:
|
||||
mat_map_lower.setdefault(_stripped, v)
|
||||
_apply_material_library(parts, material_library_path, mat_map_lower)
|
||||
# Parts not matched by the library keep their fallback material (already set above)
|
||||
apply_material_library(parts, material_library_path, build_mat_map_lower(material_map), part_names_ordered)
|
||||
|
||||
if needs_auto_camera:
|
||||
# ── Combined bounding box / bounding sphere ──────────────────────────────
|
||||
all_corners = []
|
||||
for part in parts:
|
||||
all_corners.extend(part.matrix_world @ Vector(c) for c in part.bound_box)
|
||||
|
||||
bbox_min = Vector((
|
||||
min(v.x for v in all_corners),
|
||||
min(v.y for v in all_corners),
|
||||
min(v.z for v in all_corners),
|
||||
))
|
||||
bbox_max = Vector((
|
||||
max(v.x for v in all_corners),
|
||||
max(v.y for v in all_corners),
|
||||
max(v.z for v in all_corners),
|
||||
))
|
||||
|
||||
bbox_center = (bbox_min + bbox_max) * 0.5
|
||||
bbox_dims = bbox_max - bbox_min
|
||||
bsphere_radius = max(bbox_dims.length * 0.5, 0.001)
|
||||
|
||||
print(f"[blender_render] bbox_dims={tuple(round(d,4) for d in bbox_dims)}, "
|
||||
f"bsphere_radius={bsphere_radius:.4f}, center={tuple(round(c,4) for c in bbox_center)}")
|
||||
|
||||
# ── Lighting — only in Mode A (factory settings) ─────────────────────────
|
||||
# In template mode the .blend file provides its own World/HDRI lighting.
|
||||
# Adding auto-lights would overpower the template's intended look.
|
||||
if not use_template:
|
||||
light_dist = bsphere_radius * 6.0
|
||||
|
||||
bpy.ops.object.light_add(type='SUN', location=(
|
||||
bbox_center.x + light_dist * 0.5,
|
||||
bbox_center.y - light_dist * 0.35,
|
||||
bbox_center.z + light_dist,
|
||||
))
|
||||
sun = bpy.context.active_object
|
||||
sun.data.energy = 4.0
|
||||
sun.rotation_euler = (math.radians(45), 0, math.radians(30))
|
||||
|
||||
bpy.ops.object.light_add(type='AREA', location=(
|
||||
bbox_center.x - light_dist * 0.4,
|
||||
bbox_center.y + light_dist * 0.4,
|
||||
bbox_center.z + light_dist * 0.7,
|
||||
))
|
||||
fill = bpy.context.active_object
|
||||
fill.data.energy = max(800.0, bsphere_radius ** 2 * 2000.0)
|
||||
fill.data.size = max(4.0, bsphere_radius * 4.0)
|
||||
|
||||
# ── Camera ───────────────────────────────────────────────────────────────
|
||||
ELEVATION_DEG = 28.0
|
||||
AZIMUTH_DEG = 40.0
|
||||
LENS_MM = 50.0
|
||||
SENSOR_WIDTH_MM = 36.0
|
||||
FILL_FACTOR = 0.85
|
||||
|
||||
elevation_rad = math.radians(ELEVATION_DEG)
|
||||
azimuth_rad = math.radians(AZIMUTH_DEG)
|
||||
|
||||
cam_dir = Vector((
|
||||
math.cos(elevation_rad) * math.cos(azimuth_rad),
|
||||
math.cos(elevation_rad) * math.sin(azimuth_rad),
|
||||
math.sin(elevation_rad),
|
||||
)).normalized()
|
||||
|
||||
fov_h = math.atan(SENSOR_WIDTH_MM / (2.0 * LENS_MM))
|
||||
fov_v = math.atan(SENSOR_WIDTH_MM * (height / width) / (2.0 * LENS_MM))
|
||||
fov_used = min(fov_h, fov_v)
|
||||
|
||||
dist = (bsphere_radius / math.tan(fov_used)) / FILL_FACTOR
|
||||
dist = max(dist, bsphere_radius * 1.5)
|
||||
print(f"[blender_render] camera dist={dist:.4f}, fov={math.degrees(fov_used):.2f}°")
|
||||
|
||||
cam_location = bbox_center + cam_dir * dist
|
||||
bpy.ops.object.camera_add(location=cam_location)
|
||||
cam_obj = bpy.context.active_object
|
||||
cam_obj.data.lens = LENS_MM
|
||||
bpy.context.scene.camera = cam_obj
|
||||
|
||||
look_dir = (bbox_center - cam_location).normalized()
|
||||
up_world = Vector((0.0, 0.0, 1.0))
|
||||
right = look_dir.cross(up_world)
|
||||
if right.length < 1e-6:
|
||||
right = Vector((1.0, 0.0, 0.0))
|
||||
right.normalize()
|
||||
cam_up = right.cross(look_dir).normalized()
|
||||
|
||||
rot_mat = Matrix((
|
||||
( right.x, right.y, right.z),
|
||||
( cam_up.x, cam_up.y, cam_up.z),
|
||||
(-look_dir.x, -look_dir.y, -look_dir.z),
|
||||
)).transposed()
|
||||
cam_obj.rotation_euler = rot_mat.to_euler('XYZ')
|
||||
|
||||
cam_obj.data.clip_start = max(dist * 0.001, 0.0001)
|
||||
cam_obj.data.clip_end = dist + bsphere_radius * 3.0
|
||||
print(f"[blender_render] clip {cam_obj.data.clip_start:.6f} … {cam_obj.data.clip_end:.4f}")
|
||||
|
||||
# ── World background — only in Mode A ────────────────────────────────────
|
||||
# In template mode the .blend file owns its World (HDRI, sky texture, studio
|
||||
# lighting). Overwriting it would destroy the HDR look the template was
|
||||
# designed to use (e.g. Alpha-HDR output types with Filmic tonemapping).
|
||||
bbox_center, bsphere_radius = setup_auto_camera(parts, width, height)
|
||||
if not use_template:
|
||||
setup_auto_lights(bbox_center, bsphere_radius)
|
||||
# Mode A world background
|
||||
world = bpy.data.worlds.new("World")
|
||||
bpy.context.scene.world = world
|
||||
world.use_nodes = True
|
||||
@@ -798,88 +198,16 @@ if needs_auto_camera:
|
||||
bg.inputs["Color"].default_value = (0.96, 0.96, 0.97, 1.0)
|
||||
bg.inputs["Strength"].default_value = 0.15
|
||||
|
||||
# ── Render engine ─────────────────────────────────────────────────────────────
|
||||
# ── Render engine ──────────────────────────────────────────────────────────────
|
||||
scene = bpy.context.scene
|
||||
engine = configure_engine(
|
||||
scene, engine, samples, cycles_device, _early_gpu_type,
|
||||
noise_threshold_arg, denoiser_arg,
|
||||
denoising_input_passes_arg, denoising_prefilter_arg,
|
||||
denoising_quality_arg, denoising_use_gpu_arg,
|
||||
)
|
||||
|
||||
if engine == "eevee":
|
||||
# Blender 4.x used 'BLENDER_EEVEE_NEXT'; Blender 5.x reverted to 'BLENDER_EEVEE'.
|
||||
# Try both names so the script works across versions.
|
||||
set_ok = False
|
||||
for eevee_id in ('BLENDER_EEVEE', 'BLENDER_EEVEE_NEXT'):
|
||||
try:
|
||||
scene.render.engine = eevee_id
|
||||
set_ok = True
|
||||
print(f"[blender_render] EEVEE engine id: {eevee_id}")
|
||||
break
|
||||
except TypeError:
|
||||
continue
|
||||
|
||||
if not set_ok:
|
||||
print("[blender_render] WARNING: could not set EEVEE engine – falling back to Cycles")
|
||||
engine = "cycles"
|
||||
|
||||
if engine == "eevee":
|
||||
# Sample attribute name changed across minor versions
|
||||
for attr in ('taa_render_samples', 'samples'):
|
||||
try:
|
||||
setattr(scene.eevee, attr, samples)
|
||||
print(f"[blender_render] EEVEE samples: scene.eevee.{attr}={samples}")
|
||||
break
|
||||
except AttributeError:
|
||||
continue
|
||||
|
||||
if engine != "eevee": # covers both explicit Cycles and EEVEE-fallback
|
||||
# ── GPU preferences (before engine activation) ───────────────────────
|
||||
# Set compute_device_type in preferences so Cycles can find GPU kernels.
|
||||
gpu_type_found = _activate_gpu() or _early_gpu_type
|
||||
|
||||
# ── Activate Cycles engine ───────────────────────────────────────────
|
||||
scene.render.engine = 'CYCLES'
|
||||
|
||||
# ── Device selection AFTER engine activation ─────────────────────────
|
||||
# IMPORTANT: scene.cycles.device must be set AFTER scene.render.engine
|
||||
# = 'CYCLES'. Setting it before can be overwritten when Cycles inits
|
||||
# and reads the scene's saved properties (template may have device=CPU).
|
||||
if gpu_type_found:
|
||||
scene.cycles.device = 'GPU'
|
||||
# Re-ensure preferences are set (engine activation may have reset them)
|
||||
_activate_gpu()
|
||||
print(f"[blender_render] Cycles GPU ({gpu_type_found}), samples={samples}", flush=True)
|
||||
print(f"RENDER_DEVICE_USED: engine=CYCLES device=GPU compute_type={gpu_type_found}", flush=True)
|
||||
else:
|
||||
scene.cycles.device = 'CPU'
|
||||
print(f"[blender_render] WARNING: GPU not found — falling back to CPU, samples={samples}", flush=True)
|
||||
print("RENDER_DEVICE_USED: engine=CYCLES device=CPU compute_type=NONE (fallback)", flush=True)
|
||||
import os as _os
|
||||
if _os.environ.get("CYCLES_DEVICE", "auto").lower() == "gpu":
|
||||
print("GPU_REQUIRED_BUT_CPU_USED: strict mode active (CYCLES_DEVICE=gpu)", flush=True)
|
||||
sys.exit(2)
|
||||
|
||||
scene.cycles.samples = samples
|
||||
scene.cycles.use_denoising = True
|
||||
scene.cycles.denoiser = denoiser_arg if denoiser_arg else 'OPENIMAGEDENOISE'
|
||||
if denoising_input_passes_arg:
|
||||
try: scene.cycles.denoising_input_passes = denoising_input_passes_arg
|
||||
except Exception: pass
|
||||
if denoising_prefilter_arg:
|
||||
try: scene.cycles.denoising_prefilter = denoising_prefilter_arg
|
||||
except Exception: pass
|
||||
if denoising_quality_arg:
|
||||
try: scene.cycles.denoising_quality = denoising_quality_arg
|
||||
except Exception: pass
|
||||
if denoising_use_gpu_arg:
|
||||
try: scene.cycles.denoising_use_gpu = (denoising_use_gpu_arg == "1")
|
||||
except AttributeError: pass
|
||||
if noise_threshold_arg:
|
||||
scene.cycles.use_adaptive_sampling = True
|
||||
scene.cycles.adaptive_threshold = float(noise_threshold_arg)
|
||||
|
||||
# ── Colour management ─────────────────────────────────────────────────────────
|
||||
# In template mode the .blend file owns its colour management (e.g. Filmic/
|
||||
# AgX for HDR, custom exposure for Alpha-HDR output types). Overwriting it
|
||||
# would destroy the look the template was designed for.
|
||||
# In factory-settings mode (Mode A) force Standard to avoid the grey Filmic
|
||||
# tint that Blender applies by default.
|
||||
# ── Colour management ──────────────────────────────────────────────────────────
|
||||
if not use_template:
|
||||
scene.view_settings.view_transform = 'Standard'
|
||||
scene.view_settings.exposure = 0.0
|
||||
@@ -889,7 +217,7 @@ if not use_template:
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# ── Render settings ───────────────────────────────────────────────────────────
|
||||
# ── Render settings ────────────────────────────────────────────────────────────
|
||||
scene.render.resolution_x = width
|
||||
scene.render.resolution_y = height
|
||||
scene.render.resolution_percentage = 100
|
||||
@@ -897,8 +225,7 @@ scene.render.image_settings.file_format = 'PNG'
|
||||
scene.render.filepath = output_path
|
||||
scene.render.film_transparent = transparent_bg
|
||||
|
||||
# ── Render ────────────────────────────────────────────────────────────────────
|
||||
# Final verification of render device settings
|
||||
# ── Final verification + render ────────────────────────────────────────────────
|
||||
if scene.render.engine == 'CYCLES':
|
||||
cprefs = bpy.context.preferences.addons['cycles'].preferences
|
||||
print(f"[blender_render] VERIFY: engine={scene.render.engine}, "
|
||||
@@ -906,6 +233,7 @@ if scene.render.engine == 'CYCLES':
|
||||
f"compute_device_type={cprefs.compute_device_type}, "
|
||||
f"gpu_devices={[(d.name, d.type, d.use) for d in cprefs.devices if d.type != 'CPU']}",
|
||||
flush=True)
|
||||
|
||||
_lap("pre_render_setup")
|
||||
print(f"[blender_render] Rendering → {output_path} (Blender {bpy.app.version_string})", flush=True)
|
||||
sys.stdout.flush()
|
||||
@@ -913,7 +241,7 @@ bpy.ops.render.render(write_still=True)
|
||||
print("[blender_render] render done.", flush=True)
|
||||
_lap("gpu_render")
|
||||
|
||||
# ── Final timing summary ──────────────────────────────────────────────────────
|
||||
# ── Final timing summary ───────────────────────────────────────────────────────
|
||||
_total = _time.monotonic() - _t0
|
||||
print(f"[blender_render] TIMING_SUMMARY total={_total:.2f}s | " +
|
||||
" | ".join(f"{k}={v:.2f}s" for k, v in _timings.items()), flush=True)
|
||||
|
||||
Reference in New Issue
Block a user