chore: snapshot workflow migration progress

This commit is contained in:
2026-04-12 11:49:04 +02:00
parent 0cd02513d5
commit 3e810c74a3
163 changed files with 31774 additions and 2753 deletions
+163 -29
View File
@@ -37,6 +37,29 @@ def _find_material_with_nodes(base_name: str):
return None
def _iter_object_name_variants(raw_name: str):
"""Yield conservative object-name variants for direct material lookup."""
if not raw_name:
return
seen: set[str] = set()
def _emit(value: str):
value = (value or "").strip()
if value and value not in seen:
seen.add(value)
return value
return None
exact = _emit(raw_name)
if exact:
yield exact
no_blender_suffix = _emit(_re.sub(r'\.\d{3}$', '', raw_name))
if no_blender_suffix:
yield no_blender_suffix
def _batch_append_materials(mat_lib_path: str, names: set[str]) -> dict:
"""Append multiple materials from a .blend file in a single open.
@@ -46,7 +69,9 @@ def _batch_append_materials(mat_lib_path: str, names: set[str]) -> dict:
Handles empty material stubs left by Blender's USD importer: when a
stub exists with the target name, the library material gets renamed
with a .NNN suffix. We find it via _find_material_with_nodes().
with a .NNN suffix. Blender returns the actual loaded datablocks in
data_to.materials, so we can use those directly instead of re-scanning
bpy.data.materials after the library load.
"""
import bpy # type: ignore[import]
@@ -60,19 +85,22 @@ def _batch_append_materials(mat_lib_path: str, names: set[str]) -> dict:
available = set(data_from.materials)
to_load = [n for n in names if n in available]
not_found = names - available
data_to.materials = to_load
# After the context manager closes, materials are loaded into bpy.data.
# If a USD stub occupied the name, the real material gets a .NNN suffix.
for mat_name in to_load:
mat = _find_material_with_nodes(mat_name)
requested_names = [str(n) for n in to_load]
data_to.materials = list(requested_names)
loaded_materials = list(data_to.materials)
# After the context manager closes, data_to.materials contains the actual
# appended datablocks in the same order as to_load, including any .NNN
# renames Blender introduced to avoid collisions with USD stubs.
for mat_name, mat in zip(requested_names, loaded_materials):
if mat:
result[mat_name] = mat
if mat.name != mat_name:
print(f"[blender_render] batch-appended material: {mat_name} (as '{mat.name}', stub collision)")
else:
print(f"[blender_render] batch-appended material: {mat_name}")
else:
print(f"[blender_render] WARNING: material '{mat_name}' not found after batch append")
continue
print(f"[blender_render] WARNING: material '{mat_name}' not returned after batch append")
if not_found:
print(f"[blender_render] WARNING: materials not in library: {sorted(not_found)[:10]}")
except Exception as exc:
@@ -149,6 +177,126 @@ def build_mat_map_lower(material_map: dict) -> dict:
return mat_map_lower
def _common_prefix_len(left: str, right: str) -> int:
limit = min(len(left), len(right))
idx = 0
while idx < limit and left[idx] == right[idx]:
idx += 1
return idx
def _lookup_by_common_prefix(query: str, mat_map: dict) -> str | None:
    """Resolve near-matches when USD/source names omit trailing serial suffixes.

    Intentionally conservative: a material is returned only when every
    strongest common-prefix match points at the same material.
    """
    if not query or not mat_map:
        return None
    query_len = len(query)
    candidates: list[tuple[float, int, int, str]] = []
    for key, material in mat_map.items():
        shared = _common_prefix_len(query, key)
        if shared < 12:
            # Too little overlap to be trusted as a rename variant.
            continue
        overlap = shared / max(query_len, len(key))
        if overlap < 0.68:
            continue
        candidates.append((overlap, shared, len(key), material))
    if not candidates:
        return None
    candidates.sort(reverse=True)
    best_ratio, best_prefix, _, best_material = candidates[0]
    # Everything within a small tolerance of the best score competes;
    # ambiguity (more than one distinct material) means no answer.
    near_best = {
        material
        for ratio, prefix_len, _, material in candidates
        if ratio >= best_ratio - 0.02 and prefix_len >= best_prefix - 2
    }
    return best_material if len(near_best) == 1 else None
def _lookup_by_prefix(query: str, mat_map: dict) -> str | None:
"""Resolve prefix-compatible matches when all contenders share one material."""
if not query or not mat_map:
return None
contenders: list[tuple[int, str]] = []
for key, material in mat_map.items():
if len(key) >= 5 and len(query) >= 5 and (
query.startswith(key) or key.startswith(query)
):
contenders.append((len(key), material))
if not contenders:
return None
contenders.sort(reverse=True)
top_len = contenders[0][0]
close_materials = {
material for key_len, material in contenders if key_len >= top_len - 2
}
if len(close_materials) == 1:
return contenders[0][1]
return None
def lookup_material_name(raw_name: str, mat_map: dict, *fallback_names: str) -> str | None:
    """Resolve a material name against normalized mat_map keys.

    Lookup order per candidate name:
      1. exact normalized key
      2. prefix-compatible key
      3. conservative common-prefix fuzzy match
    """
    seen: set[str] = set()
    for candidate in (raw_name, *fallback_names):
        if not candidate:
            continue
        base = candidate.lower().strip()
        variants = [base]
        # Drop one or more trailing "_afNN" / "_afNN_NN" serial suffixes.
        without_af = _re.sub(r'(_af\d+(_\d+)?)+$', '', base, flags=_re.IGNORECASE)
        if without_af != base:
            variants.append(without_af)
        # Drop a trailing "_NNN" instance counter as well.
        without_serial = _re.sub(r'_\d+$', '', without_af)
        if without_serial and without_serial not in variants:
            variants.append(without_serial)
        # Slugify each variant (snapshot the list so new slugs aren't re-slugged).
        for variant in tuple(variants):
            slug = _re.sub(r'[^a-z0-9]+', '_', variant).strip('_')
            if slug and slug not in variants:
                variants.append(slug)
        # Keep only non-empty variants not already tried for an earlier candidate.
        fresh: list[str] = []
        for variant in variants:
            if variant and variant not in seen:
                seen.add(variant)
                fresh.append(variant)
        for variant in fresh:
            hit = mat_map.get(variant)
            if hit:
                return hit
        # Fall back to the looser resolvers, cheapest first.
        for resolver in (_lookup_by_prefix, _lookup_by_common_prefix):
            for variant in fresh:
                hit = resolver(variant, mat_map)
                if hit:
                    return hit
    return None
def apply_material_library_direct(
parts: list,
mat_lib_path: str,
@@ -201,7 +349,11 @@ def apply_material_library_direct(
assigned_count = 0
unmatched_names = []
for part in parts:
mat_name = material_lookup.get(part.name)
mat_name = None
for candidate in _iter_object_name_variants(part.name):
mat_name = material_lookup.get(candidate)
if mat_name:
break
if mat_name and mat_name in appended:
if part.data.users > 1:
part.data = part.data.copy()
@@ -280,30 +432,12 @@ def apply_material_library(
_prev = base_name
base_name = _re.sub(r'_AF\d+$', '', base_name, flags=_re.IGNORECASE)
part_key = base_name.lower().strip()
mat_name = mat_map.get(part_key)
# Prefix fallback: if a mat_map key starts with our base name or
# vice-versa, use the longest matching key (most-specific wins).
if not mat_name:
for key, val in sorted(mat_map.items(), key=lambda x: len(x[0]), reverse=True):
if len(key) >= 5 and len(part_key) >= 5 and (
part_key.startswith(key) or key.startswith(part_key)
):
mat_name = val
break
mat_name = lookup_material_name(part_key, mat_map)
# Fall back to index-based matching via part_names_ordered
if not mat_name and part_names_ordered and i < len(part_names_ordered):
step_name = part_names_ordered[i]
step_key = step_name.lower().strip()
mat_name = mat_map.get(step_key)
# Also try stripping AF from part_names_ordered entry
if not mat_name:
_p2 = None
while _p2 != step_key:
_p2 = step_key
step_key = _re.sub(r'_af\d+$', '', step_key)
mat_name = mat_map.get(step_key)
mat_name = lookup_material_name(step_name, mat_map, part_key)
if mat_name and mat_name in appended:
part.data.materials.clear()