805 lines
34 KiB
Python
805 lines
34 KiB
Python
"""Blender Python script: single-frame still render for Flamenco.
|
|
|
|
Matches the lighting, camera, materials, and post-processing of the
|
|
Celery blender_render.py so that LQ and HQ renders look consistent.
|
|
|
|
Usage (from Blender):
|
|
blender --background --python still_render.py -- \
|
|
<stl_path> <output_path> <width> <height> <engine> <samples> \
|
|
<part_colors_json> <transparent_bg> \
|
|
[template_path] [target_collection] [material_library_path] [material_map_json]
|
|
"""
|
|
import bpy
|
|
import sys
|
|
import os
|
|
import json
|
|
import math
|
|
from mathutils import Vector, Matrix
|
|
|
|
# ── Colour palette (matches blender_render.py / Three.js renderer) ───────────
# Ten distinct sRGB hex colours, cycled by part index whenever a part has no
# explicit colour or library material assigned.
PALETTE_HEX = [
    "#4C9BE8", "#E85B4C", "#4CBE72", "#E8A84C", "#A04CE8",
    "#4CD4E8", "#E84CA8", "#7EC850", "#E86B30", "#5088C8",
]
|
|
|
|
def _srgb_to_linear(c: int) -> float:
|
|
v = c / 255.0
|
|
return v / 12.92 if v <= 0.04045 else ((v + 0.055) / 1.055) ** 2.4
|
|
|
|
def _hex_to_linear(hex_color: str) -> tuple:
    """Convert a '#RRGGBB' sRGB hex string to a linear-light RGBA tuple.

    Alpha is always 1.0; the leading '#' is optional.
    """
    digits = hex_color.lstrip('#')
    red, green, blue = (
        _srgb_to_linear(int(digits[i:i + 2], 16)) for i in (0, 2, 4)
    )
    return (red, green, blue, 1.0)
|
|
|
|
# Palette pre-converted to linear RGBA, ready for Principled BSDF inputs.
PALETTE_LINEAR = [_hex_to_linear(h) for h in PALETTE_HEX]

# Angle threshold used for smooth shading / sharp-edge marking on all parts.
SMOOTH_ANGLE = 30  # degrees
|
|
|
|
|
|
# ── Helper functions ─────────────────────────────────────────────────────────
|
|
|
|
def _ensure_collection(name: str):
    """Look up a collection by name, creating and linking it on first use.

    Newly created collections are linked under the scene's root collection.
    """
    existing = bpy.data.collections.get(name)
    if existing is not None:
        return existing
    created = bpy.data.collections.new(name)
    bpy.context.scene.collection.children.link(created)
    return created
|
|
|
|
|
|
def _assign_palette_material(part_obj, index):
    """Create a fresh Principled node material from the palette and assign it.

    The palette is cycled with modulo, so any part count works.  Any
    materials already present on the mesh are replaced.
    """
    rgba = PALETTE_LINEAR[index % len(PALETTE_LINEAR)]
    material = bpy.data.materials.new(name=f"Part_{index}")
    material.use_nodes = True
    principled = material.node_tree.nodes.get("Principled BSDF")
    if principled:
        principled.inputs["Base Color"].default_value = rgba
        principled.inputs["Metallic"].default_value = 0.35
        principled.inputs["Roughness"].default_value = 0.40
        # The "Specular IOR Level" socket name varies across Blender
        # versions; silently skip it when this build doesn't have it.
        try:
            principled.inputs["Specular IOR Level"].default_value = 0.5
        except KeyError:
            pass
    part_obj.data.materials.clear()
    part_obj.data.materials.append(material)
|
|
|
|
|
|
def _apply_smooth(part_obj, angle_deg):
    """Apply smooth or flat shading to a mesh object.

    angle_deg > 0 → angle-limited smooth shading; angle_deg <= 0 → flat.
    The object is made active/selected first because the shading operators
    act on the current selection.
    """
    bpy.context.view_layer.objects.active = part_obj
    part_obj.select_set(True)
    if angle_deg > 0:
        try:
            # Newer Blender builds expose an angle-aware operator directly.
            bpy.ops.object.shade_smooth_by_angle(angle=math.radians(angle_deg))
        except AttributeError:
            # Older builds: plain smooth shading plus mesh auto-smooth —
            # NOTE(review): assumes use_auto_smooth exists on this build's
            # Mesh type whenever shade_smooth_by_angle is missing.
            bpy.ops.object.shade_smooth()
            part_obj.data.use_auto_smooth = True
            part_obj.data.auto_smooth_angle = math.radians(angle_deg)
    else:
        bpy.ops.object.shade_flat()
|
|
|
|
|
|
import re as _re
|
|
|
|
|
|
# _scale_mm_to_m removed: OCC GLB export produces coordinates in metres already.
|
|
|
|
|
|
def _apply_rotation(parts, rx, ry, rz):
    """Apply Euler rotation (degrees, XYZ order) to all parts around world origin."""
    if not parts or (rx == 0.0 and ry == 0.0 and rz == 0.0):
        return
    import math
    from mathutils import Euler

    # World-space rotation; pre-multiplying keeps the pivot at the origin.
    world_rot = Euler(
        (math.radians(rx), math.radians(ry), math.radians(rz)), 'XYZ'
    ).to_matrix().to_4x4()
    for part in parts:
        part.matrix_world = world_rot @ part.matrix_world

    # Bake the rotation into the object data so subsequent bounding-box and
    # camera maths see the rotated geometry.
    bpy.ops.object.select_all(action='DESELECT')
    for part in parts:
        part.select_set(True)
    bpy.context.view_layer.objects.active = parts[0]
    bpy.ops.object.transform_apply(location=False, rotation=True, scale=False)
    print(f"[still_render] applied rotation ({rx}°, {ry}°, {rz}°) to {len(parts)} parts")
|
|
|
|
|
|
def _apply_mesh_attributes(objects: list, mesh_attributes: dict) -> None:
|
|
"""Apply topology-based shading settings from OCC analysis."""
|
|
import math
|
|
if not mesh_attributes or mesh_attributes.get("error"):
|
|
return
|
|
|
|
curved_ratio = mesh_attributes.get("curved_ratio", 0.0)
|
|
threshold_deg = mesh_attributes.get("sharp_angle_threshold_deg", 30.0)
|
|
threshold_rad = threshold_deg * math.pi / 180.0
|
|
|
|
for obj in objects:
|
|
if obj.type != 'MESH':
|
|
continue
|
|
# Enable smooth shading for predominantly curved parts (bearings etc.)
|
|
if curved_ratio > 0.3:
|
|
for poly in obj.data.polygons:
|
|
poly.use_smooth = True
|
|
# Auto-smooth at topology threshold
|
|
obj.data.use_auto_smooth = True
|
|
obj.data.auto_smooth_angle = threshold_rad
|
|
|
|
|
|
def _mark_sharp_and_seams(obj, smooth_angle_deg: float, sharp_edge_midpoints=None):
    """Mark sharp edges and UV seams based on angle threshold and optional midpoints.

    Args:
        obj: Blender mesh object; edited via EDIT-mode operators, so it is
            made active/selected first.
        smooth_angle_deg: face-angle threshold in degrees; edges sharper
            than this are marked sharp and as UV seams.
        sharp_edge_midpoints: optional list of 3D edge midpoints from OCC
            topology analysis; nearest mesh edges (within tolerance) also
            get seam/sharp flags.  This extra pass is best-effort.
    """
    import math
    import bpy

    # Ensure we're working with the right object
    bpy.context.view_layer.objects.active = obj
    obj.select_set(True)

    # Set auto-smooth angle (attribute absent on some Blender versions)
    if hasattr(obj.data, 'auto_smooth_angle'):
        obj.data.auto_smooth_angle = math.radians(smooth_angle_deg)

    # Enter edit mode to mark edges
    bpy.ops.object.mode_set(mode='EDIT')
    bpy.ops.mesh.select_all(action='DESELECT')

    # Select edges above threshold angle and mark sharp
    bpy.ops.mesh.edges_select_sharp(sharpness=math.radians(smooth_angle_deg))
    bpy.ops.mesh.mark_sharp()

    # Mark same edges as UV seams
    bpy.ops.mesh.mark_seam(clear=False)

    # If we have OCC-derived midpoints, try to mark additional edges
    if sharp_edge_midpoints and len(sharp_edge_midpoints) > 0:
        try:
            import bmesh
            # bmesh edits require OBJECT mode so mesh data is writable
            bpy.ops.object.mode_set(mode='OBJECT')
            bm = bmesh.new()
            bm.from_mesh(obj.data)
            bm.edges.ensure_lookup_table()
            bm.verts.ensure_lookup_table()

            # Build KD-tree for edge midpoints
            import mathutils
            kd = mathutils.kdtree.KDTree(len(bm.edges))
            for i, edge in enumerate(bm.edges):
                midpt = (edge.verts[0].co + edge.verts[1].co) / 2
                kd.insert(midpt, i)
            kd.balance()

            # For each OCC sharp midpoint, find nearest Blender edge
            # (capped at 200 midpoints to bound the work)
            tol = 0.5  # 0.5 mm tolerance (coordinates in mm before scale)
            for mp in sharp_edge_midpoints[:200]:
                vec = mathutils.Vector(mp)
                co, idx, dist = kd.find(vec)
                if dist < tol:
                    bm.edges[idx].seam = True
                    # Mark sharp via custom attribute
                    try:
                        bm.edges[idx].smooth = False
                    except Exception:
                        pass

            bm.to_mesh(obj.data)
            bm.free()
        except Exception:
            pass  # Non-fatal

    # Return to object mode
    bpy.ops.object.mode_set(mode='OBJECT')
|
|
|
|
|
|
def _import_glb(glb_file):
    """Import OCC-generated GLB into Blender.

    OCC exports one mesh object per STEP part, already in metres.
    Returns the list of imported mesh objects with the combined bounding
    box centred at the world origin.  Exits the process when the GLB
    yields no meshes.
    """
    bpy.ops.object.select_all(action='DESELECT')
    bpy.ops.import_scene.gltf(filepath=glb_file)
    meshes = [obj for obj in bpy.context.selected_objects if obj.type == 'MESH']

    if not meshes:
        print(f"ERROR: No mesh objects imported from {glb_file}")
        sys.exit(1)

    print(f"[still_render] imported {len(meshes)} part(s) from GLB: "
          f"{[p.name for p in meshes[:5]]}")

    # World-space corners of every mesh's bounding box, flattened.
    corners = [m.matrix_world @ Vector(c) for m in meshes for c in m.bound_box]

    if corners:
        lo = Vector((min(v.x for v in corners),
                     min(v.y for v in corners),
                     min(v.z for v in corners)))
        hi = Vector((max(v.x for v in corners),
                     max(v.y for v in corners),
                     max(v.z for v in corners)))
        midpoint = (lo + hi) * 0.5
        # Shift only parentless roots: offsetting a child's local .location
        # by a world-space vector gives wrong results when the GLB carries
        # Empty parent nodes (OCC assembly hierarchy); moving the root
        # translates the whole hierarchy correctly.
        for root in [o for o in bpy.context.selected_objects if o.parent is None]:
            root.location -= midpoint

    return meshes
|
|
|
|
|
|
def _resolve_part_name(index, part_obj, part_names_ordered):
|
|
"""Get the STEP part name for a Blender part by index.
|
|
|
|
With per-part import, part_obj.name IS the STEP name (possibly with
|
|
Blender .NNN suffix). Falls back to part_names_ordered for combined-STL mode.
|
|
"""
|
|
base_name = _re.sub(r'\.\d{3}$', '', part_obj.name)
|
|
if part_names_ordered and index < len(part_names_ordered):
|
|
return part_names_ordered[index]
|
|
return base_name
|
|
|
|
|
|
def _apply_material_library(parts, mat_lib_path, mat_map, part_names_ordered=None):
    """Append materials from library .blend and assign to parts via material_map.

    Matching priority per part:
      1. GLB object name (strip Blender .NNN suffix + OCC _AF0/_AF1 suffix)
      2. Prefix fallback (longest mat_map key that is a prefix of / contains part name)
      3. Index-based via part_names_ordered (also strips _AF suffix)

    mat_map: {part_name_lower: material_name}
    Parts without a match keep their current material.
    """
    if not mat_lib_path or not os.path.isfile(mat_lib_path):
        print(f"[still_render] material library not found: {mat_lib_path}")
        return

    # Collect unique material names needed
    needed = set(mat_map.values())
    if not needed:
        return

    # Append materials from library (non-linked copies, best-effort per name)
    appended = {}
    for mat_name in needed:
        inner_path = f"{mat_lib_path}/Material/{mat_name}"
        try:
            bpy.ops.wm.append(
                filepath=inner_path,
                directory=f"{mat_lib_path}/Material/",
                filename=mat_name,
                link=False,
            )
            if mat_name in bpy.data.materials:
                appended[mat_name] = bpy.data.materials[mat_name]
                print(f"[still_render] appended material: {mat_name}")
            else:
                print(f"[still_render] WARNING: material '{mat_name}' not found after append")
        except Exception as exc:
            print(f"[still_render] WARNING: failed to append material '{mat_name}': {exc}")

    if not appended:
        return

    # Assign materials to parts — primary: name-based (per-part STL mode),
    # secondary: index-based via part_names_ordered (combined STL fallback)
    assigned_count = 0
    for i, part in enumerate(parts):
        # 1. Name-based: strip Blender .NNN suffix, then OCC _AF0/_AF1 suffix
        base_name = _re.sub(r'\.\d{3}$', '', part.name)
        # Loop until fixpoint so stacked suffixes like "_AF0_AF1" all go.
        _prev = None
        while _prev != base_name:
            _prev = base_name
            base_name = _re.sub(r'_AF\d+$', '', base_name, flags=_re.IGNORECASE)
        part_key = base_name.lower().strip()
        mat_name = mat_map.get(part_key)

        # 2. Prefix fallback: longest mat_map key that is a prefix/suffix match
        # (min length 5 on both sides to avoid spurious short-name matches)
        if not mat_name:
            for key, val in sorted(mat_map.items(), key=lambda x: len(x[0]), reverse=True):
                if len(key) >= 5 and len(part_key) >= 5 and (
                    part_key.startswith(key) or key.startswith(part_key)
                ):
                    mat_name = val
                    break

        # 3. Index-based fallback via part_names_ordered (also strips _AF suffix)
        if not mat_name and part_names_ordered and i < len(part_names_ordered):
            step_name = part_names_ordered[i]
            step_key = step_name.lower().strip()
            mat_name = mat_map.get(step_key)
            if not mat_name:
                # Same fixpoint stripping as above, on the STEP name.
                _p2 = None
                while _p2 != step_key:
                    _p2 = step_key
                    step_key = _re.sub(r'_af\d+$', '', step_key)
                mat_name = mat_map.get(step_key)

        if mat_name and mat_name in appended:
            part.data.materials.clear()
            part.data.materials.append(appended[mat_name])
            assigned_count += 1
            print(f"[still_render] assigned '{mat_name}' to part '{part.name}'")

    print(f"[still_render] material assignment: {assigned_count}/{len(parts)} parts matched")
|
|
|
|
|
|
def main():
    """Parse CLI args, build the scene (template or factory mode), render one frame.

    Invoked as ``blender --background --python still_render.py -- <args…>``;
    positional args follow the module docstring, plus optional named args
    ``--mesh-attributes``, ``--focal-length`` and ``--sensor-width``.
    """
    argv = sys.argv
    # Everything after Blender's "--" separator belongs to this script.
    args = argv[argv.index("--") + 1:]

    # ── Positional arguments (trailing ones are optional with defaults) ──────
    glb_path = args[0]
    output_path = args[1]
    width = int(args[2])
    height = int(args[3])
    engine = args[4]
    samples = int(args[5])
    part_colors_json = args[6] if len(args) > 6 else "{}"
    transparent_bg = args[7] == "1" if len(args) > 7 else False

    # Template + material library args (passed by hartomat-still.js)
    template_path = args[8] if len(args) > 8 and args[8] else ""
    target_collection = args[9] if len(args) > 9 else "Product"
    material_library_path = args[10] if len(args) > 10 and args[10] else ""
    material_map_raw = args[11] if len(args) > 11 else "{}"
    part_names_ordered_raw = args[12] if len(args) > 12 else "[]"
    lighting_only = args[13] == "1" if len(args) > 13 else False
    cycles_device = args[14].lower() if len(args) > 14 else "auto"  # "auto", "gpu", "cpu"
    shadow_catcher = args[15] == "1" if len(args) > 15 else False
    rotation_x = float(args[16]) if len(args) > 16 else 0.0
    rotation_y = float(args[17]) if len(args) > 17 else 0.0
    rotation_z = float(args[18]) if len(args) > 18 else 0.0
    noise_threshold_arg = args[19] if len(args) > 19 else ""
    denoiser_arg = args[20] if len(args) > 20 else ""
    denoising_input_passes_arg = args[21] if len(args) > 21 else ""
    denoising_prefilter_arg = args[22] if len(args) > 22 else ""
    denoising_quality_arg = args[23] if len(args) > 23 else ""
    denoising_use_gpu_arg = args[24] if len(args) > 24 else ""

    # Named argument: --mesh-attributes <json>
    _mesh_attrs: dict = {}
    if "--mesh-attributes" in argv:
        _idx = argv.index("--mesh-attributes")
        try:
            _mesh_attrs = json.loads(argv[_idx + 1])
        except Exception:
            pass  # optional input — malformed/missing JSON is ignored

    # Named argument: --focal-length <mm>
    _focal_length = None
    if "--focal-length" in argv:
        _idx = argv.index("--focal-length")
        _focal_length = float(argv[_idx + 1]) if _idx + 1 < len(argv) else None

    # Named argument: --sensor-width <mm>
    _sensor_width = None
    if "--sensor-width" in argv:
        _idx = argv.index("--sensor-width")
        _sensor_width = float(argv[_idx + 1]) if _idx + 1 < len(argv) else None

    os.makedirs(os.path.dirname(output_path), exist_ok=True)

    try:
        part_colors = json.loads(part_colors_json)
    except json.JSONDecodeError:
        part_colors = {}

    try:
        material_map = json.loads(material_map_raw) if material_map_raw else {}
    except json.JSONDecodeError:
        material_map = {}

    try:
        part_names_ordered = json.loads(part_names_ordered_raw) if part_names_ordered_raw else []
    except json.JSONDecodeError:
        part_names_ordered = []

    # Validate template path: if provided it MUST exist on disk.
    # A missing template is a configuration error — fail loudly rather than
    # silently falling back to factory-settings mode which produces renders that
    # look completely wrong.
    if template_path and not os.path.isfile(template_path):
        print(f"[still_render] ERROR: template_path was provided but file not found: {template_path}")
        print("[still_render] Ensure the blend-templates directory is accessible on this worker.")
        sys.exit(1)

    use_template = bool(template_path)

    print(f"[still_render] engine={engine}, samples={samples}, size={width}x{height}, transparent={transparent_bg}")
    print(f"[still_render] part_names_ordered: {len(part_names_ordered)} entries")
    if use_template:
        print(f"[still_render] template={template_path}, collection={target_collection}, lighting_only={lighting_only}")
    else:
        print("[still_render] no template — using factory settings (Mode A)")
    if material_library_path:
        print(f"[still_render] material_library={material_library_path}, material_map keys={list(material_map.keys())}")

    # ── SCENE SETUP ──────────────────────────────────────────────────────────

    if use_template:
        # ── MODE B: Template-based render ────────────────────────────────────
        print(f"[still_render] Opening template: {template_path}")
        bpy.ops.wm.open_mainfile(filepath=template_path)

        # Find or create target collection
        target_col = _ensure_collection(target_collection)

        # Import OCC GLB (already in metres, one object per STEP part)
        parts = _import_glb(glb_path)
        # Apply render position rotation (before camera/bbox calculations)
        _apply_rotation(parts, rotation_x, rotation_y, rotation_z)
        # Apply OCC topology-based shading overrides
        _apply_mesh_attributes(parts, _mesh_attrs)

        # Move imported parts into target collection
        for part in parts:
            for col in list(part.users_collection):
                col.objects.unlink(part)
            target_col.objects.link(part)

        # Apply smooth shading and mark sharp edges / UV seams
        for part in parts:
            _apply_smooth(part, SMOOTH_ANGLE)
            _mark_sharp_and_seams(
                part, SMOOTH_ANGLE,
                sharp_edge_midpoints=_mesh_attrs.get('sharp_edge_midpoints'),
            )

        # Material assignment: library materials if available, otherwise palette
        if material_library_path and material_map:
            mat_map_lower = {k.lower(): v for k, v in material_map.items()}
            _apply_material_library(parts, material_library_path, mat_map_lower, part_names_ordered)
            # Parts not matched by library get palette fallback
            for i, part in enumerate(parts):
                if not part.data.materials or len(part.data.materials) == 0:
                    _assign_palette_material(part, i)
        else:
            # Per-part colour from part_colors (by STEP name), else palette.
            for i, part in enumerate(parts):
                step_name = _resolve_part_name(i, part, part_names_ordered)
                color_hex = part_colors.get(step_name)
                if color_hex:
                    color = _hex_to_linear(color_hex)
                    mat = bpy.data.materials.new(name=f"Part_{i}")
                    mat.use_nodes = True
                    bsdf = mat.node_tree.nodes.get("Principled BSDF")
                    if bsdf:
                        bsdf.inputs["Base Color"].default_value = color
                        bsdf.inputs["Metallic"].default_value = 0.35
                        bsdf.inputs["Roughness"].default_value = 0.40
                        try:
                            bsdf.inputs["Specular IOR Level"].default_value = 0.5
                        except KeyError:
                            pass
                    part.data.materials.clear()
                    part.data.materials.append(mat)
                else:
                    _assign_palette_material(part, i)

        # ── Shadow catcher (Cycles only, template mode only) ─────────────────
        if shadow_catcher:
            sc_col_name = "Shadowcatcher"
            sc_obj_name = "Shadowcatcher"
            for vl in bpy.context.scene.view_layers:
                # Walk the layer-collection tree and re-enable the shadow
                # catcher collection wherever the template left it disabled.
                def _enable_col_recursive(layer_col):
                    if layer_col.collection.name == sc_col_name:
                        layer_col.exclude = False
                        layer_col.collection.hide_render = False
                        layer_col.collection.hide_viewport = False
                        return True
                    for child in layer_col.children:
                        if _enable_col_recursive(child):
                            return True
                    return False
                _enable_col_recursive(vl.layer_collection)

            sc_obj = bpy.data.objects.get(sc_obj_name)
            if sc_obj:
                # Drop the catcher plane to the product's lowest point.
                all_world_z = []
                for part in parts:
                    for corner in part.bound_box:
                        all_world_z.append((part.matrix_world @ Vector(corner)).z)
                if all_world_z:
                    sc_obj.location.z = min(all_world_z)
                print(f"[still_render] shadow catcher enabled, plane Z={sc_obj.location.z:.4f}")
            else:
                print(f"[still_render] WARNING: shadow catcher object '{sc_obj_name}' not found in template")

        # lighting_only: use template World/HDRI but force auto-camera UNLESS the shadow
        # catcher is enabled — in that case the template camera is already positioned to
        # show both the product and its shadow on the ground plane.
        needs_auto_camera = (lighting_only and not shadow_catcher) or not bpy.context.scene.camera
        if lighting_only and not shadow_catcher:
            print("[still_render] lighting_only mode: using template World/HDRI, forcing auto-camera")
        elif needs_auto_camera:
            print("[still_render] WARNING: template has no camera — will create auto-camera")

        # Set very close near clip on template camera for mm-scale parts (now in metres)
        if not needs_auto_camera and bpy.context.scene.camera:
            bpy.context.scene.camera.data.clip_start = 0.001

        print(f"[still_render] template mode: {len(parts)} parts imported into collection '{target_collection}'")

    else:
        # ── MODE A: Factory settings (original behavior) ─────────────────────
        needs_auto_camera = True
        bpy.ops.wm.read_factory_settings(use_empty=True)

        parts = _import_glb(glb_path)
        # Apply render position rotation (before camera/bbox calculations)
        _apply_rotation(parts, rotation_x, rotation_y, rotation_z)
        # Apply OCC topology-based shading overrides
        _apply_mesh_attributes(parts, _mesh_attrs)

        for i, part in enumerate(parts):
            _apply_smooth(part, SMOOTH_ANGLE)
            _mark_sharp_and_seams(
                part, SMOOTH_ANGLE,
                sharp_edge_midpoints=_mesh_attrs.get('sharp_edge_midpoints'),
            )

        # Material assignment: library materials if available, else part_colors/palette
        if material_library_path and material_map:
            mat_map_lower = {k.lower(): v for k, v in material_map.items()}
            _apply_material_library(parts, material_library_path, mat_map_lower, part_names_ordered)
            # Palette fallback for unmatched parts
            for i, part in enumerate(parts):
                if not part.data.materials or len(part.data.materials) == 0:
                    _assign_palette_material(part, i)
        else:
            # part_colors or palette — use index-based lookup via part_names_ordered
            for i, part in enumerate(parts):
                step_name = _resolve_part_name(i, part, part_names_ordered)
                color_hex = part_colors.get(step_name)
                if color_hex:
                    color = _hex_to_linear(color_hex)
                else:
                    color = PALETTE_LINEAR[i % len(PALETTE_LINEAR)]

                mat = bpy.data.materials.new(name=f"Part_{i}")
                mat.use_nodes = True
                bsdf = mat.node_tree.nodes.get("Principled BSDF")
                if bsdf:
                    bsdf.inputs["Base Color"].default_value = color
                    bsdf.inputs["Metallic"].default_value = 0.35
                    bsdf.inputs["Roughness"].default_value = 0.40
                    try:
                        bsdf.inputs["Specular IOR Level"].default_value = 0.5
                    except KeyError:
                        pass
                part.data.materials.clear()
                part.data.materials.append(mat)

    if needs_auto_camera:
        # ── Combined bounding box / bounding sphere ──────────────────────────
        all_corners = []
        for part in parts:
            all_corners.extend(part.matrix_world @ Vector(c) for c in part.bound_box)

        bbox_min = Vector((
            min(v.x for v in all_corners),
            min(v.y for v in all_corners),
            min(v.z for v in all_corners),
        ))
        bbox_max = Vector((
            max(v.x for v in all_corners),
            max(v.y for v in all_corners),
            max(v.z for v in all_corners),
        ))

        bbox_center = (bbox_min + bbox_max) * 0.5
        bbox_dims = bbox_max - bbox_min
        # Floor the radius so a degenerate (zero-size) scene still frames.
        bsphere_radius = max(bbox_dims.length * 0.5, 0.001)

        print(f"[still_render] bbox_dims={tuple(round(d, 4) for d in bbox_dims)}, "
              f"bsphere_radius={bsphere_radius:.4f}")

        # ── Lighting — only in Mode A (factory settings) ─────────────────────
        # In template mode the .blend file provides its own World/HDRI lighting.
        # Adding auto-lights would overpower the template's intended look.
        if not use_template:
            light_dist = bsphere_radius * 6.0

            bpy.ops.object.light_add(type='SUN', location=(
                bbox_center.x + light_dist * 0.5,
                bbox_center.y - light_dist * 0.35,
                bbox_center.z + light_dist,
            ))
            sun = bpy.context.active_object
            sun.data.energy = 4.0
            sun.rotation_euler = (math.radians(45), 0, math.radians(30))

            bpy.ops.object.light_add(type='AREA', location=(
                bbox_center.x - light_dist * 0.4,
                bbox_center.y + light_dist * 0.4,
                bbox_center.z + light_dist * 0.7,
            ))
            fill = bpy.context.active_object
            # Fill power scales with the square of scene size, floored.
            fill.data.energy = max(800.0, bsphere_radius ** 2 * 2000.0)
            fill.data.size = max(4.0, bsphere_radius * 4.0)

        # ── Camera (isometric-style, matches blender_render.py) ──────────────
        ELEVATION_DEG = 28.0
        AZIMUTH_DEG = 40.0
        LENS_MM = _focal_length if _focal_length is not None else 50.0
        SENSOR_WIDTH_MM = _sensor_width if _sensor_width is not None else 36.0
        FILL_FACTOR = 0.85  # fraction of the frame the bounding sphere fills

        elevation_rad = math.radians(ELEVATION_DEG)
        azimuth_rad = math.radians(AZIMUTH_DEG)

        # Unit vector from bbox centre towards the camera position.
        cam_dir = Vector((
            math.cos(elevation_rad) * math.cos(azimuth_rad),
            math.cos(elevation_rad) * math.sin(azimuth_rad),
            math.sin(elevation_rad),
        )).normalized()

        # Half-FOV per axis; frame against the tighter one.
        fov_h = math.atan(SENSOR_WIDTH_MM / (2.0 * LENS_MM))
        fov_v = math.atan(SENSOR_WIDTH_MM * (height / width) / (2.0 * LENS_MM))
        fov_used = min(fov_h, fov_v)

        dist = (bsphere_radius / math.tan(fov_used)) / FILL_FACTOR
        dist = max(dist, bsphere_radius * 1.05)  # never inside the sphere

        cam_location = bbox_center + cam_dir * dist
        bpy.ops.object.camera_add(location=cam_location)
        cam_obj = bpy.context.active_object
        cam_obj.data.lens = LENS_MM
        bpy.context.scene.camera = cam_obj

        # Look-at rotation
        look_dir = (bbox_center - cam_location).normalized()
        up_world = Vector((0.0, 0.0, 1.0))
        right = look_dir.cross(up_world)
        if right.length < 1e-6:
            # Looking straight up/down — pick an arbitrary horizontal right.
            right = Vector((1.0, 0.0, 0.0))
        right.normalize()
        cam_up = right.cross(look_dir).normalized()

        # Camera space basis: +X right, +Y up, -Z forward.
        rot_mat = Matrix((
            (right.x, right.y, right.z),
            (cam_up.x, cam_up.y, cam_up.z),
            (-look_dir.x, -look_dir.y, -look_dir.z),
        )).transposed()
        cam_obj.rotation_euler = rot_mat.to_euler('XYZ')

        cam_obj.data.clip_start = max(dist * 0.001, 0.0001)
        cam_obj.data.clip_end = dist + bsphere_radius * 3.0

    # ── World background — only in Mode A ───────────────────────────────
    # In template mode the .blend file owns its World (HDRI, sky texture,
    # studio lighting). Overwriting it would destroy the HDR look the
    # template was designed to use (e.g. Alpha-HDR output types).
    if not use_template:
        world = bpy.data.worlds.new("World")
        bpy.context.scene.world = world
        world.use_nodes = True
        bg = world.node_tree.nodes["Background"]
        bg.inputs["Color"].default_value = (0.96, 0.96, 0.97, 1.0)
        bg.inputs["Strength"].default_value = 0.15

    # ── Colour management ────────────────────────────────────────────────────
    # In template mode the .blend file owns its colour management settings
    # (e.g. Filmic/AgX for HDR, custom exposure for Alpha-HDR output types).
    # Overwriting them would destroy the look the template was designed for.
    # In factory-settings mode (Mode A) we force Standard to avoid the grey
    # Filmic tint that Blender applies by default.
    scene = bpy.context.scene
    if not use_template:
        scene.view_settings.view_transform = 'Standard'
        scene.view_settings.exposure = 0.0
        scene.view_settings.gamma = 1.0
        try:
            scene.view_settings.look = 'None'
        except Exception:
            pass  # 'look' enum values vary across Blender versions

    # ── Render engine ────────────────────────────────────────────────────────
    if engine == "eevee":
        # The EEVEE engine id differs across Blender versions — try both.
        eevee_ok = False
        for eevee_id in ('BLENDER_EEVEE', 'BLENDER_EEVEE_NEXT'):
            try:
                scene.render.engine = eevee_id
                eevee_ok = True
                print(f"[still_render] EEVEE engine id: {eevee_id}")
                break
            except TypeError:
                continue
        if eevee_ok:
            # The sample-count property name also varies — first one wins.
            for attr in ('taa_render_samples', 'samples'):
                try:
                    setattr(scene.eevee, attr, samples)
                    break
                except AttributeError:
                    continue
        else:
            print("[still_render] WARNING: EEVEE unavailable, falling back to Cycles")
            engine = "cycles"

    if engine != "eevee":
        scene.render.engine = 'CYCLES'
        scene.cycles.samples = samples
        scene.cycles.use_denoising = True
        scene.cycles.denoiser = denoiser_arg if denoiser_arg else 'OPENIMAGEDENOISE'
        # Optional denoiser tuning — each property only exists on some versions.
        if denoising_input_passes_arg:
            try: scene.cycles.denoising_input_passes = denoising_input_passes_arg
            except Exception: pass
        if denoising_prefilter_arg:
            try: scene.cycles.denoising_prefilter = denoising_prefilter_arg
            except Exception: pass
        if denoising_quality_arg:
            try: scene.cycles.denoising_quality = denoising_quality_arg
            except Exception: pass
        if denoising_use_gpu_arg:
            try: scene.cycles.denoising_use_gpu = (denoising_use_gpu_arg == "1")
            except AttributeError: pass
        if noise_threshold_arg:
            scene.cycles.use_adaptive_sampling = True
            scene.cycles.adaptive_threshold = float(noise_threshold_arg)
        # Device selection: "cpu" forces CPU, "gpu" forces GPU (warns if unavailable),
        # "auto" (default) tries GPU first and falls back to CPU.
        print(f"[still_render] cycles_device={cycles_device}")
        gpu_found = False
        if cycles_device != "cpu":
            try:
                cycles_prefs = bpy.context.preferences.addons['cycles'].preferences
                for device_type in ('OPTIX', 'CUDA', 'HIP', 'ONEAPI'):
                    try:
                        cycles_prefs.compute_device_type = device_type
                        cycles_prefs.get_devices()
                        gpu_devs = [d for d in cycles_prefs.devices if d.type != 'CPU']
                        if gpu_devs:
                            for d in gpu_devs:
                                d.use = True
                            scene.cycles.device = 'GPU'
                            gpu_found = True
                            print(f"[still_render] Cycles GPU ({device_type})")
                            break
                    except Exception:
                        continue
            except Exception:
                pass  # cycles addon prefs unavailable → CPU fallback below
        if gpu_found:
            print(f"RENDER_DEVICE_USED: engine=CYCLES device=GPU compute_type={device_type}", flush=True)
        else:
            scene.cycles.device = 'CPU'
            print("[still_render] WARNING: GPU not found — falling back to CPU")
            print("RENDER_DEVICE_USED: engine=CYCLES device=CPU compute_type=NONE (fallback)", flush=True)
            import os as _os
            # Strict mode: the orchestrator can demand GPU via environment.
            if _os.environ.get("CYCLES_DEVICE", "auto").lower() == "gpu":
                print("GPU_REQUIRED_BUT_CPU_USED: strict mode active (CYCLES_DEVICE=gpu)", flush=True)
                sys.exit(2)

    # ── Render settings ──────────────────────────────────────────────────────
    scene.render.resolution_x = width
    scene.render.resolution_y = height
    scene.render.resolution_percentage = 100
    scene.render.film_transparent = transparent_bg

    # Output format is inferred from the output file extension.
    ext = os.path.splitext(output_path)[1].lower()
    if ext in ('.jpg', '.jpeg'):
        scene.render.image_settings.file_format = 'JPEG'
        scene.render.image_settings.quality = 92
    else:
        scene.render.image_settings.file_format = 'PNG'

    scene.render.filepath = output_path

    # ── Render ───────────────────────────────────────────────────────────────
    print(f"[still_render] Rendering -> {output_path} (Blender {bpy.app.version_string})")
    bpy.ops.render.render(write_still=True)
    print("[still_render] render done.")

    print("[still_render] Done.")
|
|
|
|
|
|
if __name__ == "__main__":
|
|
main()
|