6c5873d51f
- @timed_step decorator with wall-clock + RSS tracking (pipeline_logger) - Blender timing laps for sharp edges and material assignment - MeshRegistry pattern: eliminate 13 scene.traverse() calls across viewers - Lazy material cloning (clone-on-first-write in both viewers) - _pipeline_session context manager: 7 create_engine() → 2 in render_thumbnail - KD-tree spatial pre-filter for sharp edge marking (bbox-based pruning) - Batch material library append: N bpy.ops.wm.append → single bpy.data.libraries.load - GMSH single-session batching: compound all solids into one tessellation call - Validate part-materials save endpoints against parsed_objects (prevents bogus keys) - ROADMAP updated with completion status Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
286 lines · 11 KiB · Python
"""Material assignment helpers for Blender headless renders."""
|
|
from __future__ import annotations
|
|
|
|
import os
|
|
import re as _re
|
|
import time as _time
|
|
|
|
# Sentinel material name assigned to parts whose library material could not be
# resolved; built as a magenta Principled BSDF so failures are visually obvious.
FAILED_MATERIAL_NAME = "SCHAEFFLER_059999_FailedMaterial"
|
|
|
|
|
|
def _batch_append_materials(mat_lib_path: str, names: set[str]) -> dict:
    """Append multiple materials from a .blend file in a single open.

    Uses bpy.data.libraries.load() to open the .blend once instead of
    N separate bpy.ops.wm.append() calls (each reopens the file).
    Falls back to individual append for any materials that fail to load.

    Args:
        mat_lib_path: Filesystem path to the material library .blend file.
        names: Canonical material names to load.

    Returns:
        {material_name: bpy material datablock} for every material that
        could be loaded; names missing from the library are omitted.
    """
    import bpy  # type: ignore[import]

    result: dict = {}
    if not names:
        return result

    try:
        with bpy.data.libraries.load(mat_lib_path, link=False) as (data_from, data_to):
            # data_from.materials lists all material names in the .blend
            available = set(data_from.materials)
            to_load = [n for n in names if n in available]
            not_found = names - available
            data_to.materials = to_load
        # After the context manager closes, materials are loaded into bpy.data
        for mat_name in to_load:
            mat = bpy.data.materials.get(mat_name)
            if mat:
                result[mat_name] = mat
                print(f"[blender_render] batch-appended material: {mat_name}")
            else:
                print(f"[blender_render] WARNING: material '{mat_name}' not found after batch append")
        if not_found:
            print(f"[blender_render] WARNING: materials not in library: {sorted(not_found)[:10]}")
    except Exception as exc:
        print(f"[blender_render] WARNING: batch append failed ({exc}), falling back to individual append")
        # Fallback: individual append for each material
        for mat_name in names:
            if mat_name in result:
                # Already loaded before the batch path failed part-way.
                continue
            try:
                bpy.ops.wm.append(
                    filepath=f"{mat_lib_path}/Material/{mat_name}",
                    directory=f"{mat_lib_path}/Material/",
                    filename=mat_name,
                    link=False,
                )
                mat = bpy.data.materials.get(mat_name)
                if mat:
                    result[mat_name] = mat
            except Exception as fallback_exc:
                # Best-effort: one bad material must not abort the rest, but
                # log it (was a silent `pass`) so missing assignments are
                # diagnosable from the render log.
                print(f"[blender_render] WARNING: individual append failed for '{mat_name}': {fallback_exc}")

    return result
|
|
|
|
|
|
def assign_failed_material(part_obj) -> None:
    """Assign the standard fallback material (magenta) when no library material matches.

    Reuses SCHAEFFLER_059999_FailedMaterial if already loaded; otherwise
    creates a simple magenta Principled BSDF node tree.
    """
    import bpy  # type: ignore[import]

    fallback = bpy.data.materials.get(FAILED_MATERIAL_NAME)
    if fallback is None:
        # First use this session: build the sentinel material once.
        fallback = bpy.data.materials.new(name=FAILED_MATERIAL_NAME)
        fallback.use_nodes = True
        principled = fallback.node_tree.nodes.get("Principled BSDF")
        if principled:
            principled.inputs["Base Color"].default_value = (1.0, 0.0, 1.0, 1.0)  # magenta
            principled.inputs["Roughness"].default_value = 0.6

    mesh_data = part_obj.data
    mesh_data.materials.clear()
    mesh_data.materials.append(fallback)
|
|
|
|
|
|
def build_mat_map_lower(material_map: dict) -> dict:
    """Return a lowercased version of material_map with _AF\\d+ suffix variants added.

    Both the original key and the AF-stripped key are inserted so that GLB
    object names (which may lack _AF suffixes that OCC adds to mat_map keys)
    can match in either direction.
    """
    def _slugify(text: str) -> str:
        # USD path: part_key slugs replace ALL non-alphanumeric chars with '_'
        # (same regex as generate_part_key in export_step_to_usd.py).
        # E.g. "F-802007_TR4-D1" -> "f_802007_tr4_d1".
        return _re.sub(r'[^a-z0-9]+', '_', text).strip('_')

    lowered: dict = {}
    for raw_key, mat_name in material_map.items():
        key = raw_key.lower().strip()
        lowered[key] = mat_name

        # Slug variant so hyphenated OCC names match USD-imported Blender objects.
        slugged = _slugify(key)
        if slugged and slugged != key:
            lowered.setdefault(slugged, mat_name)

        # Strip OCC assembly-frame suffixes: _AF0, _AF0_1, _AF0_1_AF0, etc.
        # Pattern matches one or more groups of _AF<n> optionally followed by
        # an instance number _<n>, anchored at end of string.
        af_stripped = _re.sub(r'(_af\d+(_\d+)?)+$', '', key)
        if af_stripped != key:
            lowered.setdefault(af_stripped, mat_name)

        # Also slug the AF-stripped key for the USD path where part_key is
        # both AF-stripped AND slugified (e.g. "ge360-hf_..." -> "ge360_hf_...").
        slug_af_stripped = _slugify(af_stripped)
        if slug_af_stripped and slug_af_stripped != af_stripped:
            lowered.setdefault(slug_af_stripped, mat_name)

    return lowered
|
|
|
|
|
|
def apply_material_library_direct(
    parts: list,
    mat_lib_path: str,
    material_lookup: dict[str, str],
) -> None:
    """Assign materials from library using a direct object_name → material_name mapping.

    This bypasses all name-matching heuristics — the mapping comes from USD
    customData (schaeffler:canonicalMaterialName) read via pxr after Blender import.
    Parts not present in material_lookup receive FAILED_MATERIAL_NAME.

    material_lookup: {blender_object_name: canonical_material_name}
    """
    if not mat_lib_path or not os.path.isfile(mat_lib_path):
        print(f"[blender_render] material library not found: {mat_lib_path}")
        return

    import bpy  # type: ignore[import]

    _t0 = _time.monotonic()

    # Collect unique material names needed
    needed = set(material_lookup.values())
    if not needed:
        return

    # Batch-append materials from library (single file open)
    appended: dict = {}
    _t_append = _time.monotonic()
    # Check already-loaded materials first to avoid reopening the .blend
    still_needed = set()
    for mat_name in needed:
        if mat_name in bpy.data.materials:
            appended[mat_name] = bpy.data.materials[mat_name]
        else:
            still_needed.add(mat_name)
    # Load remaining from .blend in one pass
    if still_needed:
        appended.update(_batch_append_materials(mat_lib_path, still_needed))
    _append_dur = _time.monotonic() - _t_append
    print(f"[blender_render] TIMING material_append_direct={_append_dur:.2f}s ({len(appended)}/{len(needed)} materials)", flush=True)

    if not appended:
        return

    assigned_count = 0
    unmatched_names = []
    for part in parts:
        mat_name = material_lookup.get(part.name)
        if mat_name and mat_name in appended:
            # Clone-on-write: a mesh shared by several objects must be copied
            # before its material slots are mutated, or all sharers change.
            if part.data.users > 1:
                part.data = part.data.copy()
            part.data.materials.clear()
            part.data.materials.append(appended[mat_name])
            assigned_count += 1
        else:
            unmatched_names.append(part.name)

    # Fix: removed the dead, garbled `_assign_dur` expression (it subtracted
    # _append_dur twice across two monotonic() reads and was never used);
    # assign time is reported as total - append in the line below.
    _total = _time.monotonic() - _t0
    print(f"[blender_render] TIMING material_assign_direct={_total:.2f}s "
          f"(append={_append_dur:.2f}s, assign={_total - _append_dur:.2f}s, "
          f"{assigned_count}/{len(parts)} matched)", flush=True)
    if unmatched_names:
        print(f"[blender_render] unmatched (no primvar): {unmatched_names[:10]}", flush=True)
        # Hoisted: the set was previously rebuilt for every part (O(n²)),
        # inconsistent with apply_material_library which hoists it.
        unmatched_set = set(unmatched_names)
        for part in parts:
            if part.name in unmatched_set:
                if part.data.users > 1:
                    part.data = part.data.copy()
                assign_failed_material(part)
|
|
|
|
|
|
def apply_material_library(
    parts: list,
    mat_lib_path: str,
    mat_map: dict,
    part_names_ordered: list | None = None,
) -> None:
    """Append materials from library .blend and assign to parts via material_map.

    GLB-imported objects are named after STEP parts, so matching is by name
    (stripping Blender .NNN suffix for duplicates). Falls back to
    part_names_ordered index-based matching.

    mat_map: {part_name_lower: material_name}
    Parts without a match receive the FAILED_MATERIAL_NAME sentinel.
    """
    if not mat_lib_path or not os.path.isfile(mat_lib_path):
        print(f"[blender_render] material library not found: {mat_lib_path}")
        return

    import bpy  # type: ignore[import]

    _t0 = _time.monotonic()

    if part_names_ordered is None:
        part_names_ordered = []

    # Collect unique material names needed
    needed = set(mat_map.values())
    if not needed:
        return

    # Batch-append materials from library (single file open)
    appended: dict = {}
    _t_append = _time.monotonic()
    appended.update(_batch_append_materials(mat_lib_path, needed))
    _append_dur = _time.monotonic() - _t_append
    print(f"[blender_render] TIMING material_append={_append_dur:.2f}s ({len(appended)}/{len(needed)} materials)", flush=True)

    if not appended:
        return

    # Hoisted loop invariant: longest-key-first candidates for the prefix
    # fallback (previously re-sorted inside the loop for every unmatched part).
    prefix_candidates = sorted(mat_map.items(), key=lambda x: len(x[0]), reverse=True)

    # Assign materials to parts — primary: name-based (GLB object names),
    # secondary: index-based via part_names_ordered
    assigned_count = 0
    unmatched_names = []
    for i, part in enumerate(parts):
        # Try name-based matching first (strip Blender .NNN suffix)
        base_name = _re.sub(r'\.\d{3}$', '', part.name)
        # Strip OCC assembly-instance suffix (_AF0, _AF1, …) — GLB object
        # names may or may not have them while mat_map keys might.
        _prev = None
        while _prev != base_name:
            _prev = base_name
            base_name = _re.sub(r'_AF\d+$', '', base_name, flags=_re.IGNORECASE)
        part_key = base_name.lower().strip()
        mat_name = mat_map.get(part_key)

        # Prefix fallback: if a mat_map key starts with our base name or
        # vice-versa, use the longest matching key (most-specific wins).
        if not mat_name:
            for key, val in prefix_candidates:
                if len(key) >= 5 and len(part_key) >= 5 and (
                    part_key.startswith(key) or key.startswith(part_key)
                ):
                    mat_name = val
                    break

        # Fall back to index-based matching via part_names_ordered
        if not mat_name and part_names_ordered and i < len(part_names_ordered):
            step_name = part_names_ordered[i]
            step_key = step_name.lower().strip()
            mat_name = mat_map.get(step_key)
            # Also try stripping AF from part_names_ordered entry
            if not mat_name:
                _p2 = None
                while _p2 != step_key:
                    _p2 = step_key
                    step_key = _re.sub(r'_af\d+$', '', step_key)
                mat_name = mat_map.get(step_key)

        if mat_name and mat_name in appended:
            # Fix: clone-on-write for shared mesh data, consistent with
            # apply_material_library_direct and the failed-material path —
            # without the copy, assigning here clobbers materials on every
            # other object sharing this mesh datablock.
            if part.data.users > 1:
                part.data = part.data.copy()
            part.data.materials.clear()
            part.data.materials.append(appended[mat_name])
            assigned_count += 1
        else:
            unmatched_names.append(part.name)

    _total = _time.monotonic() - _t0
    print(f"[blender_render] TIMING material_assign={_total:.2f}s "
          f"(append={_append_dur:.2f}s, match={_total - _append_dur:.2f}s, "
          f"{assigned_count}/{len(parts)} matched)", flush=True)
    if unmatched_names:
        print(f"[blender_render] unmatched parts → assigning {FAILED_MATERIAL_NAME}: {unmatched_names[:10]}", flush=True)
        unmatched_set = set(unmatched_names)
        for part in parts:
            if part.name in unmatched_set:
                if part.data.users > 1:
                    part.data = part.data.copy()
                assign_failed_material(part)
|