feat: initial commit
This commit is contained in:
@@ -0,0 +1,216 @@
|
||||
"""STEP to STL converter for Flamenco tasks.
|
||||
|
||||
Usage: python convert_step.py <step_path> <stl_path> <quality>
|
||||
quality: 'low' or 'high'
|
||||
|
||||
Produces:
|
||||
- Combined STL at <stl_path> (for fallback)
|
||||
- Per-part STLs in <stl_path_without_ext>_parts/ with manifest.json
|
||||
"""
|
||||
import sys
|
||||
import os
|
||||
import json
|
||||
import time
|
||||
|
||||
|
||||
def _export_per_part_stls(step_path, parts_dir, quality):
    """Export one STL per named STEP leaf shape using OCP XCAF.

    Creates parts_dir with individual STL files and a manifest.json:
      {"parts": [{"index": 0, "name": "PartName", "file": "00_PartName.stl"}, ...]}

    Returns the manifest list, or empty list on failure.
    """
    # Mesh tessellation tolerances: tighter values at 'high' quality produce a
    # finer mesh (and larger STL files).
    tol = 0.01 if quality == "high" else 0.3
    angular_tol = 0.05 if quality == "high" else 0.3

    try:
        # OCP (OpenCascade bindings) ships alongside cadquery; the XCAF layer
        # exposes the STEP assembly tree and part names, which a plain STEP
        # reader would discard.
        from OCP.STEPCAFControl import STEPCAFControl_Reader
        from OCP.XCAFDoc import XCAFDoc_DocumentTool, XCAFDoc_ShapeTool
        from OCP.TDataStd import TDataStd_Name
        from OCP.TDF import TDF_Label as TDF_Label_cls, TDF_LabelSequence
        from OCP.XCAFApp import XCAFApp_Application
        from OCP.TDocStd import TDocStd_Document
        from OCP.TCollection import TCollection_ExtendedString
        from OCP.IFSelect import IFSelect_RetDone
        import cadquery as cq
    except ImportError as e:
        # Per-part export is best-effort — the caller falls back to the
        # combined STL when this returns [].
        print(f"[convert_step] per-part export skipped (import error): {e}")
        return []

    # Read STEP with XCAF
    app = XCAFApp_Application.GetApplication_s()
    doc = TDocStd_Document(TCollection_ExtendedString("XmlOcaf"))
    app.InitDocument(doc)

    reader = STEPCAFControl_Reader()
    reader.SetNameMode(True)  # preserve part names from the STEP file
    status = reader.ReadFile(str(step_path))
    if status != IFSelect_RetDone:
        print(f"[convert_step] XCAF reader failed with status {status}")
        return []

    if not reader.Transfer(doc):
        print("[convert_step] XCAF transfer failed")
        return []

    shape_tool = XCAFDoc_DocumentTool.ShapeTool_s(doc.Main())
    name_id = TDataStd_Name.GetID_s()

    # Recursively collect leaf shapes with their names
    leaves = []  # list of (name, TopoDS_Shape)

    def _get_label_name(label):
        """Extract name string from a TDF_Label."""
        name_attr = TDataStd_Name()
        if label.FindAttribute(name_id, name_attr):
            return name_attr.Get().ToExtString()
        return ""

    def _collect_leaves(label):
        """Recursively collect leaf (simple shape) labels."""
        if XCAFDoc_ShapeTool.IsAssembly_s(label):
            # Get components of this assembly
            components = TDF_LabelSequence()
            XCAFDoc_ShapeTool.GetComponents_s(label, components)
            # NOTE: OCCT sequences are 1-based, hence range(1, Length()+1).
            for i in range(1, components.Length() + 1):
                comp_label = components.Value(i)
                if XCAFDoc_ShapeTool.IsReference_s(comp_label):
                    ref_label = TDF_Label_cls()
                    XCAFDoc_ShapeTool.GetReferredShape_s(comp_label, ref_label)
                    # Use the component name (instance name), fall back to referred shape name
                    comp_name = _get_label_name(comp_label)
                    ref_name = _get_label_name(ref_label)
                    # Prefer referred shape name — matches material_map keys
                    name = ref_name or comp_name
                    if XCAFDoc_ShapeTool.IsAssembly_s(ref_label):
                        _collect_leaves(ref_label)
                    elif XCAFDoc_ShapeTool.IsSimpleShape_s(ref_label):
                        # Use comp_label shape — includes instance transform (position)
                        shape = XCAFDoc_ShapeTool.GetShape_s(comp_label)
                        leaves.append((name or f"unnamed_{len(leaves)}", shape))
                else:
                    _collect_leaves(comp_label)
        elif XCAFDoc_ShapeTool.IsSimpleShape_s(label):
            name = _get_label_name(label)
            shape = XCAFDoc_ShapeTool.GetShape_s(label)
            leaves.append((name or f"unnamed_{len(leaves)}", shape))

    # Get top-level free shapes
    top_labels = TDF_LabelSequence()
    shape_tool.GetFreeShapes(top_labels)
    for i in range(1, top_labels.Length() + 1):
        _collect_leaves(top_labels.Value(i))

    if not leaves:
        print("[convert_step] no leaf shapes found via XCAF")
        return []

    # Export each leaf shape as individual STL
    os.makedirs(parts_dir, exist_ok=True)
    manifest = []

    for idx, (name, shape) in enumerate(leaves):
        # Sanitize filename: replace problematic chars
        safe_name = name.replace("/", "_").replace("\\", "_").replace(" ", "_")
        filename = f"{idx:02d}_{safe_name}.stl"
        filepath = os.path.join(parts_dir, filename)

        try:
            cq_shape = cq.Shape(shape)
            cq_shape.exportStl(filepath, tolerance=tol, angularTolerance=angular_tol)
            manifest.append({"index": idx, "name": name, "file": filename})
        except Exception as e:
            # One broken part should not abort the rest of the export.
            print(f"[convert_step] WARNING: failed to export part '{name}': {e}")

    # Write manifest
    manifest_path = os.path.join(parts_dir, "manifest.json")
    with open(manifest_path, "w") as f:
        json.dump({"parts": manifest}, f, indent=2)

    # Sum only files that actually exist (a part may have failed above).
    total_size = sum(
        os.path.getsize(os.path.join(parts_dir, p["file"]))
        for p in manifest
        if os.path.exists(os.path.join(parts_dir, p["file"]))
    )
    print(f"[convert_step] exported {len(manifest)} per-part STLs "
          f"({total_size / 1024:.0f} KB total) to {parts_dir}")

    return manifest
|
||||
|
||||
|
||||
def main():
    """CLI entry point: convert argv STEP file to STL for Flamenco tasks.

    Argv: <step_path> <stl_path> <quality>  (quality: 'low' or 'high').

    Skips the conversion when a non-empty STL already exists (cache hit),
    but still backfills the per-part STL export if its manifest is missing.
    Exits with status 1 on bad usage or a missing STEP file.
    """
    if len(sys.argv) < 4:
        print("Usage: convert_step.py <step_path> <stl_path> <quality>")
        sys.exit(1)

    step_path = sys.argv[1]
    stl_path = sys.argv[2]
    quality = sys.argv[3]

    if not os.path.isfile(step_path):
        print(f"ERROR: STEP file not found: {step_path}")
        sys.exit(1)

    # Fix: os.makedirs("") raises FileNotFoundError when stl_path is a bare
    # filename with no directory component — only create a directory when
    # there is one.
    out_dir = os.path.dirname(stl_path)
    if out_dir:
        os.makedirs(out_dir, exist_ok=True)

    # Cache hit: skip re-conversion if STL already exists and is non-empty
    if os.path.isfile(stl_path) and os.path.getsize(stl_path) > 0:
        size_kb = os.path.getsize(stl_path) / 1024
        print(f"[convert_step] Cache hit: {stl_path} ({size_kb:.0f} KB) — skipping STEP conversion")
        stl_stem = os.path.splitext(stl_path)[0]
        parts_dir = stl_stem + "_parts"
        manifest_path = os.path.join(parts_dir, "manifest.json")
        if not os.path.isfile(manifest_path):
            # Cache entries created before per-part export existed — backfill.
            print("[convert_step] Per-part STLs missing — exporting from STEP")
            t1 = time.time()
            try:
                manifest = _export_per_part_stls(step_path, parts_dir, quality)
                if manifest:
                    print(f"[convert_step] per-part export took {time.time() - t1:.1f}s")
                else:
                    print("[convert_step] per-part export empty — combined STL only")
            except Exception as e:
                # Non-fatal: renderer falls back to the combined STL.
                print(f"[convert_step] per-part export failed (non-fatal): {e}")
        else:
            print(f"[convert_step] Per-part STLs exist: {parts_dir}")
        return

    print(f"Converting STEP -> STL: {step_path}")
    print(f"Quality: {quality}")
    t0 = time.time()

    # Imported lazily: cadquery is heavy and unnecessary on a cache hit.
    import cadquery as cq

    # Mesh tessellation tolerances: tighter at 'high' quality -> finer mesh.
    tol = 0.01 if quality == "high" else 0.3
    angular_tol = 0.05 if quality == "high" else 0.3

    result = cq.importers.importStep(step_path)
    cq.exporters.export(
        result,
        stl_path,
        exportType="STL",
        tolerance=tol,
        angularTolerance=angular_tol,
    )

    elapsed = time.time() - t0
    size_kb = os.path.getsize(stl_path) / 1024
    print(f"STL written: {stl_path} ({size_kb:.0f} KB, {elapsed:.1f}s)")

    # Export per-part STLs alongside the combined STL (non-fatal)
    stl_stem = os.path.splitext(stl_path)[0]
    parts_dir = stl_stem + "_parts"
    t1 = time.time()
    try:
        manifest = _export_per_part_stls(step_path, parts_dir, quality)
        if manifest:
            print(f"[convert_step] per-part export took {time.time() - t1:.1f}s")
        else:
            print("[convert_step] per-part export failed or empty — combined STL only")
    except Exception as e:
        print(f"[convert_step] per-part export failed (non-fatal): {e}")


if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,121 @@
|
||||
// Schaeffler Still Render job type for Flamenco 3.x
|
||||
// Pipeline: STEP -> STL (cadquery) -> Blender single-frame render
|
||||
|
||||
const JOB_TYPE = {
    label: "Schaeffler Still",
    settings: [
        // ── Input / output ───────────────────────────────────────────────
        { key: "step_path", type: "string", required: true,
          description: "Absolute path to STEP file" },
        { key: "output_path", type: "string", required: true,
          description: "Full path for output image (e.g. /shared/render.png)" },
        // ── Render settings ──────────────────────────────────────────────
        { key: "width", type: "int32", default: 1024,
          description: "Output width in pixels" },
        { key: "height", type: "int32", default: 1024,
          description: "Output height in pixels" },
        { key: "engine", type: "string", default: "cycles",
          description: "Blender render engine: cycles or eevee" },
        { key: "samples", type: "int32", default: 256,
          description: "Render samples" },
        { key: "stl_quality", type: "string", default: "low",
          description: "STL mesh quality: low or high" },
        // ── Appearance: colours, background, materials, template ─────────
        { key: "part_colors_json", type: "string", default: "{}",
          description: "JSON dict mapping part names to hex colors" },
        { key: "transparent_bg", type: "bool", default: false,
          description: "Render with transparent background (PNG alpha)" },
        { key: "template_path", type: "string", default: "",
          description: "Path to .blend template file (empty = factory settings)" },
        { key: "target_collection", type: "string", default: "Product",
          description: "Blender collection name to import geometry into" },
        { key: "material_library_path", type: "string", default: "",
          description: "Path to material library .blend file" },
        { key: "material_map_json", type: "string", default: "{}",
          description: "JSON dict mapping part names to material names" },
        { key: "part_names_ordered_json", type: "string", default: "[]",
          description: "JSON array of STEP part names in solid order (for index-based matching)" },
        { key: "lighting_only", type: "bool", default: false,
          description: "Use template only for World/HDRI lighting; always auto-frame with computed camera" },
        { key: "cycles_device", type: "string", default: "auto",
          description: "Cycles compute device: auto (try GPU, fall back to CPU), gpu (force GPU), cpu (force CPU)" },
        { key: "shadow_catcher", type: "bool", default: false,
          description: "Enable Shadowcatcher collection from template and position plane under product (Cycles only)" },
        // ── Product orientation (degrees, applied before framing) ────────
        { key: "rotation_x", type: "float", default: 0.0,
          description: "Product rotation around X axis in degrees (render position)" },
        { key: "rotation_y", type: "float", default: 0.0,
          description: "Product rotation around Y axis in degrees (render position)" },
        { key: "rotation_z", type: "float", default: 0.0,
          description: "Product rotation around Z axis in degrees (render position)" },
        // ── Cycles sampling / denoising overrides (empty string = default)
        { key: "noise_threshold", type: "string", default: "",
          description: "Adaptive sampling noise threshold (empty = Blender default 0.01)" },
        { key: "denoiser", type: "string", default: "",
          description: "Cycles denoiser: OPTIX, OPENIMAGEDENOISE, or empty for auto" },
        { key: "denoising_input_passes", type: "string", default: "",
          description: "Denoising input passes: RGB, RGB_ALBEDO, RGB_ALBEDO_NORMAL, or empty for default" },
        { key: "denoising_prefilter", type: "string", default: "",
          description: "Denoising prefilter: NONE, FAST, ACCURATE, or empty for default" },
        { key: "denoising_quality", type: "string", default: "",
          description: "Denoising quality: HIGH, BALANCED, FAST, or empty for default (Blender 4.2+)" },
        { key: "denoising_use_gpu", type: "string", default: "",
          description: "Route OIDN denoising through GPU: 1, 0, or empty for auto" },
    ],
};
|
||||
|
||||
// Compile a "Schaeffler Still" job into two Flamenco tasks:
// 1) convert-step (python/cadquery)  2) render-image (blender),
// where the render depends on the conversion having finished.
function compileJob(job) {
    const settings = job.settings;

    // Cache STL next to STEP file: {step_dir}/{step_stem}_{quality}.stl
    // so re-renders of the same STEP/quality skip the conversion step.
    const srcPath = settings.step_path;
    const srcDir = srcPath.replace(/\/[^/]+$/, "");
    const srcStem = srcPath.replace(/.*\//, "").replace(/\.[^.]+$/, "");
    const stlPath = srcDir + "/" + srcStem + "_" + settings.stl_quality + ".stl";

    // Task 1: STEP -> STL conversion via the shared cadquery script.
    const convertArgs = [
        "/opt/flamenco/scripts/convert_step.py",
        srcPath,
        stlPath,
        settings.stl_quality,
    ];
    const convertTask = author.Task("convert-step", "misc");
    convertTask.addCommand(author.Command("exec", {
        exe: "{python}",
        args: convertArgs,
    }));
    job.addTask(convertTask);

    // Task 2: single-frame Blender render. The positional arguments after
    // "--" are consumed in this exact order by still_render.py.
    const renderArgs = [
        "--background", "--python",
        "/opt/flamenco/scripts/still_render.py",
        "--",
        stlPath,
        settings.output_path,
        String(settings.width),
        String(settings.height),
        settings.engine,
        String(settings.samples),
        settings.part_colors_json,
        settings.transparent_bg ? "1" : "0",
        settings.template_path || "",
        settings.target_collection || "Product",
        settings.material_library_path || "",
        settings.material_map_json || "{}",
        settings.part_names_ordered_json || "[]",
        settings.lighting_only ? "1" : "0",
        settings.cycles_device || "auto",
        settings.shadow_catcher ? "1" : "0",
        String(settings.rotation_x || 0),
        String(settings.rotation_y || 0),
        String(settings.rotation_z || 0),
        settings.noise_threshold || "",
        settings.denoiser || "",
        settings.denoising_input_passes || "",
        settings.denoising_prefilter || "",
        settings.denoising_quality || "",
        settings.denoising_use_gpu || "",
    ];
    const renderTask = author.Task("render-image", "blender");
    renderTask.addCommand(author.Command("exec", {
        exe: "{blender}",
        args: renderArgs,
    }));
    renderTask.addDependency(convertTask);
    job.addTask(renderTask);
}
|
||||
@@ -0,0 +1,211 @@
|
||||
// Schaeffler Turntable Animation job type for Flamenco 3.x
|
||||
// Pipeline: STEP -> STL (cadquery) -> Blender scene setup -> Blender -a render -> FFmpeg video
|
||||
//
|
||||
// Task flow:
|
||||
// 1. convert-step : STEP → STL via cadquery
|
||||
// 2. setup-scene : turntable_setup.py imports STL, applies materials/camera/animation,
|
||||
// saves a ready-to-render .blend to output_dir/scene.blend
|
||||
// 3. render-frames : blender --background scene.blend --python turntable_gpu_setup.py -a
|
||||
// Blender's native -a keeps GPU scene (BVH, textures) loaded for ALL
|
||||
// frames — no per-frame re-upload overhead.
|
||||
// 4. compose-video : FFmpeg encodes frame PNGs → MP4
|
||||
|
||||
const JOB_TYPE = {
    label: "Schaeffler Turntable",
    settings: [
        // ── Input / output ───────────────────────────────────────────────
        { key: "step_path", type: "string", required: true,
          description: "Absolute path to STEP file" },
        { key: "output_dir", type: "string", required: true,
          description: "Directory for rendered frames and final video" },
        { key: "output_name", type: "string", required: true, default: "turntable",
          description: "Base name for output files" },
        // ── Animation / render settings ──────────────────────────────────
        { key: "frame_count", type: "int32", default: 120,
          description: "Number of frames to render" },
        { key: "fps", type: "int32", default: 30,
          description: "Frames per second for output video" },
        { key: "turntable_degrees", type: "int32", default: 360,
          description: "Total rotation in degrees" },
        { key: "width", type: "int32", default: 1920,
          description: "Output width in pixels" },
        { key: "height", type: "int32", default: 1080,
          description: "Output height in pixels" },
        { key: "engine", type: "string", default: "cycles",
          description: "Blender render engine: cycles or eevee" },
        { key: "samples", type: "int32", default: 128,
          description: "Render samples" },
        { key: "stl_quality", type: "string", default: "low",
          description: "STL mesh quality: low or high" },
        // ── Appearance: colours, background, materials, template ─────────
        { key: "part_colors_json", type: "string", default: "{}",
          description: "JSON dict mapping part names to hex colors" },
        // FIX: compileJob passes settings.transparent_bg to turntable_setup.py,
        // but this setting was never declared — declare it so it is exposed in
        // the job-submission UI/API. default:false keeps existing behaviour.
        { key: "transparent_bg", type: "bool", default: false,
          description: "Render with transparent background (PNG alpha)" },
        { key: "template_path", type: "string", default: "",
          description: "Path to .blend template file (empty = factory settings)" },
        { key: "target_collection", type: "string", default: "Product",
          description: "Blender collection name to import geometry into" },
        { key: "material_library_path", type: "string", default: "",
          description: "Path to material library .blend file" },
        { key: "material_map_json", type: "string", default: "{}",
          description: "JSON dict mapping part names to material names" },
        { key: "part_names_ordered_json", type: "string", default: "[]",
          description: "JSON array of STEP part names in solid order (for index-based matching)" },
        { key: "lighting_only", type: "bool", default: false,
          description: "Use template only for World/HDRI lighting; always auto-frame with computed camera" },
        { key: "cycles_device", type: "string", default: "auto",
          description: "Cycles compute device: auto (try GPU, fall back to CPU), gpu (force GPU), cpu (force CPU)" },
        { key: "shadow_catcher", type: "bool", default: false,
          description: "Enable Shadowcatcher collection from template and position plane under product (Cycles only)" },
        // ── Product orientation and turntable motion ─────────────────────
        { key: "rotation_x", type: "float", default: 0.0,
          description: "Product rotation around X axis in degrees (render position)" },
        { key: "rotation_y", type: "float", default: 0.0,
          description: "Product rotation around Y axis in degrees (render position)" },
        { key: "rotation_z", type: "float", default: 0.0,
          description: "Product rotation around Z axis in degrees (render position)" },
        { key: "turntable_axis", type: "string", default: "world_z",
          description: "Turntable rotation axis: world_z (default), world_x, or world_y" },
        { key: "bg_color", type: "string", default: "",
          description: "Solid background hex color for compositing (e.g. #1a1a2e); empty = HDR visible as background" },
        { key: "camera_orbit", type: "bool", default: true,
          description: "Rotate camera around product instead of rotating product (true = better GPU performance, BVH cached)" },
        // ── Cycles sampling / denoising overrides (empty string = default)
        { key: "noise_threshold", type: "string", default: "",
          description: "Adaptive sampling noise threshold (empty = Blender default 0.01)" },
        { key: "denoiser", type: "string", default: "",
          description: "Cycles denoiser: OPTIX, OPENIMAGEDENOISE, or empty for auto" },
        { key: "denoising_input_passes", type: "string", default: "",
          description: "Denoising input passes: RGB, RGB_ALBEDO, RGB_ALBEDO_NORMAL, or empty for default" },
        { key: "denoising_prefilter", type: "string", default: "",
          description: "Denoising prefilter: NONE, FAST, ACCURATE, or empty for default" },
        { key: "denoising_quality", type: "string", default: "",
          description: "Denoising quality: HIGH, BALANCED, FAST, or empty for default (Blender 4.2+)" },
        { key: "denoising_use_gpu", type: "string", default: "",
          description: "Route OIDN denoising through GPU: 1, 0, or empty for auto" },
    ],
};
|
||||
|
||||
// Compile a "Schaeffler Turntable" job into four chained Flamenco tasks:
// convert-step -> setup-scene -> render-frames -> compose-video.
function compileJob(job) {
    const settings = job.settings;

    // Cache STL next to STEP file: {step_dir}/{step_stem}_{quality}.stl
    const stepDir = settings.step_path.replace(/\/[^/]+$/, "");
    const stepBasename = settings.step_path.replace(/.*\//, "");
    const stepStem = stepBasename.replace(/\.[^.]+$/, "");
    const stlPath = stepDir + "/" + stepStem + "_" + settings.stl_quality + ".stl";
    const framesDir = settings.output_dir + "/frames";
    const scenePath = settings.output_dir + "/scene.blend";
    const videoPath = settings.output_dir + "/" + settings.output_name + ".mp4";

    // Task 1: Convert STEP to STL
    const convertTask = author.Task("convert-step", "misc");
    convertTask.addCommand(author.Command("exec", {
        exe: "{python}",
        args: [
            "/opt/flamenco/scripts/convert_step.py",
            settings.step_path,
            stlPath,
            settings.stl_quality,
        ],
    }));
    job.addTask(convertTask);

    // Task 2: Setup Blender scene and save to scene.blend
    // turntable_setup.py imports the STL, assigns materials, sets up the
    // camera rig and pivot animation, configures the compositor (bg_color),
    // and saves the complete scene — ready for native -a rendering.
    // The positional args after "--" are consumed in this exact order.
    const setupTask = author.Task("setup-scene", "blender");
    setupTask.addCommand(author.Command("exec", {
        exe: "{blender}",
        args: [
            "--background", "--python",
            "/opt/flamenco/scripts/turntable_setup.py",
            "--",
            stlPath,
            framesDir,
            String(settings.frame_count),
            String(settings.turntable_degrees),
            String(settings.width),
            String(settings.height),
            settings.engine,
            String(settings.samples),
            settings.part_colors_json,
            settings.template_path || "",
            settings.target_collection || "Product",
            settings.material_library_path || "",
            settings.material_map_json || "{}",
            settings.part_names_ordered_json || "[]",
            settings.lighting_only ? "1" : "0",
            // FIX: fallback was "gpu", inconsistent with the declared default
            // ("auto") and with the still job's fallback — use "auto" so an
            // explicitly empty setting degrades gracefully on CPU-only workers.
            settings.cycles_device || "auto",
            settings.shadow_catcher ? "1" : "0",
            String(settings.rotation_x || 0),
            String(settings.rotation_y || 0),
            String(settings.rotation_z || 0),
            settings.turntable_axis || "world_z",
            settings.bg_color || "",
            settings.transparent_bg ? "1" : "0",
            scenePath,
            settings.camera_orbit !== false ? "1" : "0",
            settings.noise_threshold || "",
            settings.denoiser || "",
            settings.denoising_input_passes || "",
            settings.denoising_prefilter || "",
            settings.denoising_quality || "",
            settings.denoising_use_gpu || "",
        ],
    }));
    setupTask.addDependency(convertTask);
    job.addTask(setupTask);

    // Task 3: Render all frames using Blender's native -a (--render-anim)
    // turntable_gpu_setup.py re-applies GPU preferences (user-level, not stored
    // in .blend), then -a renders all frames in one process — GPU scene stays
    // loaded between frames, no per-frame BVH re-upload.
    const renderTask = author.Task("render-frames", "blender");
    renderTask.addCommand(author.Command("exec", {
        exe: "{blender}",
        args: [
            "--background",
            scenePath,
            "--python",
            "/opt/flamenco/scripts/turntable_gpu_setup.py",
            "-a",
        ],
    }));
    renderTask.addDependency(setupTask);
    job.addTask(renderTask);

    // Task 4: Compose video with FFmpeg
    // Blender writes transparent PNG frames (film_transparent=True) when bg_color is set.
    // FFmpeg composites them over a solid colour background using the lavfi color source.
    // Without bg_color, frames are opaque and encoded directly.
    const composeTask = author.Task("compose-video", "misc");
    const bgHex = (settings.bg_color || "").replace(/^#/, "");
    const ffmpegArgs = bgHex
        ? [
            "-y",
            // Background: solid colour at video resolution and frame rate
            "-f", "lavfi",
            "-i", "color=c=0x" + bgHex + ":size=" + String(settings.width) + "x" + String(settings.height) + ":rate=" + String(settings.fps),
            // Foreground: transparent PNG frame sequence
            "-framerate", String(settings.fps),
            "-i", framesDir + "/frame_%04d.png",
            // Composite foreground over background
            "-filter_complex", "[0:v][1:v]overlay=0:0:shortest=1",
            "-c:v", "libx264",
            "-pix_fmt", "yuv420p",
            "-preset", "medium",
            "-crf", "18",
            videoPath,
        ]
        : [
            "-y",
            "-framerate", String(settings.fps),
            "-i", framesDir + "/frame_%04d.png",
            "-c:v", "libx264",
            "-pix_fmt", "yuv420p",
            "-preset", "medium",
            "-crf", "18",
            videoPath,
        ];
    composeTask.addCommand(author.Command("exec", {
        exe: "ffmpeg",
        args: ffmpegArgs,
    }));
    composeTask.addDependency(renderTask);
    job.addTask(composeTask);
}
|
||||
@@ -0,0 +1,781 @@
|
||||
"""Blender Python script: single-frame still render for Flamenco.
|
||||
|
||||
Matches the lighting, camera, materials, and post-processing of the
|
||||
Celery blender_render.py so that LQ and HQ renders look consistent.
|
||||
|
||||
Usage (from Blender):
|
||||
blender --background --python still_render.py -- \
|
||||
<stl_path> <output_path> <width> <height> <engine> <samples> \
|
||||
<part_colors_json> <transparent_bg> \
|
||||
[template_path] [target_collection] [material_library_path] [material_map_json]
|
||||
"""
|
||||
import bpy
|
||||
import sys
|
||||
import os
|
||||
import json
|
||||
import math
|
||||
from mathutils import Vector, Matrix
|
||||
|
||||
# ── Colour palette (matches blender_render.py / Three.js renderer) ───────────
# Hex sRGB colours cycled by part index when no explicit colour is supplied.
PALETTE_HEX = [
    "#4C9BE8", "#E85B4C", "#4CBE72", "#E8A84C", "#A04CE8",
    "#4CD4E8", "#E84CA8", "#7EC850", "#E86B30", "#5088C8",
]
|
||||
|
||||
def _srgb_to_linear(c: int) -> float:
|
||||
v = c / 255.0
|
||||
return v / 12.92 if v <= 0.04045 else ((v + 0.055) / 1.055) ** 2.4
|
||||
|
||||
def _hex_to_linear(hex_color: str) -> tuple:
    """Parse '#RRGGBB' into a linear-space RGBA tuple (alpha fixed at 1.0)."""
    digits = hex_color.lstrip('#')
    channels = tuple(
        _srgb_to_linear(int(digits[offset:offset + 2], 16))
        for offset in (0, 2, 4)
    )
    return channels + (1.0,)
|
||||
|
||||
# Palette pre-converted to linear-space RGBA for direct use as shader inputs.
PALETTE_LINEAR = [_hex_to_linear(h) for h in PALETTE_HEX]

# Angle threshold for smooth shading of imported meshes.
SMOOTH_ANGLE = 30  # degrees
|
||||
|
||||
|
||||
# ── Helper functions ─────────────────────────────────────────────────────────
|
||||
|
||||
def _ensure_collection(name: str):
    """Return the collection called *name*, creating and linking it if absent."""
    existing = bpy.data.collections.get(name)
    if existing is not None:
        return existing
    # Not found: create it and link it under the scene's root collection.
    created = bpy.data.collections.new(name)
    bpy.context.scene.collection.children.link(created)
    return created
|
||||
|
||||
|
||||
def _assign_palette_material(part_obj, index):
    """Create a Principled-BSDF material coloured from the palette (cycled by
    *index*) and make it the part's sole material."""
    mat = bpy.data.materials.new(name=f"Part_{index}")
    mat.use_nodes = True
    node = mat.node_tree.nodes.get("Principled BSDF")
    if node:
        node.inputs["Base Color"].default_value = PALETTE_LINEAR[index % len(PALETTE_LINEAR)]
        node.inputs["Metallic"].default_value = 0.35
        node.inputs["Roughness"].default_value = 0.40
        try:
            # Input exists under this name on newer Blender builds only.
            node.inputs["Specular IOR Level"].default_value = 0.5
        except KeyError:
            pass
    slots = part_obj.data.materials
    slots.clear()
    slots.append(mat)
|
||||
|
||||
|
||||
def _apply_smooth(part_obj, angle_deg):
    """Shade *part_obj* smooth with the given angle threshold, or flat when
    the angle is zero or negative. Handles both old and new Blender APIs."""
    bpy.context.view_layer.objects.active = part_obj
    part_obj.select_set(True)
    if angle_deg <= 0:
        bpy.ops.object.shade_flat()
        return
    try:
        # Newer Blender: angle threshold handled by the operator itself.
        bpy.ops.object.shade_smooth_by_angle(angle=math.radians(angle_deg))
    except AttributeError:
        # Older Blender: plain smooth plus mesh-level auto-smooth settings.
        bpy.ops.object.shade_smooth()
        part_obj.data.use_auto_smooth = True
        part_obj.data.auto_smooth_angle = math.radians(angle_deg)
|
||||
|
||||
|
||||
import re as _re
|
||||
|
||||
|
||||
def _scale_mm_to_m(parts):
    """Scale imported STL objects from millimetres to metres (×0.001).

    STEP/STL coordinates are in mm while Blender's default unit is metres;
    without scaling, a 50 mm part would tower 50 m over any template
    environment designed in metric units.
    """
    if not parts:
        return
    factor = 0.001
    bpy.ops.object.select_all(action='DESELECT')
    for obj in parts:
        obj.scale = (factor, factor, factor)
        obj.location *= factor
        obj.select_set(True)
    # Bake the scale into the mesh data so later bbox math sees true sizes.
    bpy.context.view_layer.objects.active = parts[0]
    bpy.ops.object.transform_apply(scale=True, location=False, rotation=False)
    print(f"[still_render] scaled {len(parts)} parts mm→m (×0.001)")
|
||||
|
||||
|
||||
def _apply_rotation(parts, rx, ry, rz):
    """Rotate all parts about the world origin by XYZ Euler angles (degrees),
    then bake the rotation into the mesh data. No-op for zero rotation."""
    if not parts or (rx == 0.0 and ry == 0.0 and rz == 0.0):
        return
    import math
    from mathutils import Euler
    angles = (math.radians(rx), math.radians(ry), math.radians(rz))
    world_rot = Euler(angles, 'XYZ').to_matrix().to_4x4()
    for obj in parts:
        # Pre-multiplying rotates about the world origin, not object origins.
        obj.matrix_world = world_rot @ obj.matrix_world
    # Bake the rotation so downstream transforms start from identity.
    bpy.ops.object.select_all(action='DESELECT')
    for obj in parts:
        obj.select_set(True)
    bpy.context.view_layer.objects.active = parts[0]
    bpy.ops.object.transform_apply(location=False, rotation=True, scale=False)
    print(f"[still_render] applied rotation ({rx}°, {ry}°, {rz}°) to {len(parts)} parts")
|
||||
|
||||
|
||||
def _import_stl(stl_file):
    """Import STL into Blender, using per-part STLs if available.

    Checks for {stl_stem}_parts/manifest.json next to the STL file.
    - Per-part mode: imports each part STL, names Blender object after STEP part name.
    - Fallback: imports combined STL and splits by loose geometry.

    Returns list of Blender mesh objects, centred at origin.
    """
    stl_dir = os.path.dirname(stl_file)
    stl_stem = os.path.splitext(os.path.basename(stl_file))[0]
    parts_dir = os.path.join(stl_dir, stl_stem + "_parts")
    manifest_path = os.path.join(parts_dir, "manifest.json")

    parts = []

    if os.path.isfile(manifest_path):
        # ── Per-part mode ────────────────────────────────────────────────
        try:
            with open(manifest_path, "r") as f:
                manifest = json.loads(f.read())
            part_entries = manifest.get("parts", [])
        except Exception as e:
            # Unreadable manifest degrades to combined-STL fallback below.
            print(f"[still_render] WARNING: failed to read manifest: {e}")
            part_entries = []

        if part_entries:
            for entry in part_entries:
                part_file = os.path.join(parts_dir, entry["file"])
                part_name = entry["name"]
                if not os.path.isfile(part_file):
                    print(f"[still_render] WARNING: part STL missing: {part_file}")
                    continue

                # Deselect first so selected_objects contains only this import.
                bpy.ops.object.select_all(action='DESELECT')
                bpy.ops.wm.stl_import(filepath=part_file)
                imported = bpy.context.selected_objects
                if imported:
                    obj = imported[0]
                    # Name object (and its mesh) after the STEP part so
                    # material matching by name works downstream.
                    obj.name = part_name
                    if obj.data:
                        obj.data.name = part_name
                    parts.append(obj)

        if parts:
            print(f"[still_render] imported {len(parts)} named parts from per-part STLs")

    # ── Fallback: combined STL + separate by loose ───────────────────────
    if not parts:
        bpy.ops.wm.stl_import(filepath=stl_file)
        obj = bpy.context.selected_objects[0] if bpy.context.selected_objects else None
        if obj is None:
            print(f"ERROR: No objects imported from {stl_file}")
            sys.exit(1)

        # Centre the combined object at the world origin before splitting.
        bpy.context.view_layer.objects.active = obj
        bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY', center='BOUNDS')
        obj.location = (0.0, 0.0, 0.0)

        # Split disconnected geometry islands into separate objects.
        bpy.ops.object.mode_set(mode='EDIT')
        bpy.ops.mesh.separate(type='LOOSE')
        bpy.ops.object.mode_set(mode='OBJECT')

        parts = list(bpy.context.selected_objects)
        print(f"[still_render] fallback: separated into {len(parts)} part(s)")
        return parts

    # ── Centre per-part imports at origin (combined bbox) ────────────────
    all_corners = []
    for p in parts:
        # World-space bounding-box corners across all parts.
        all_corners.extend(p.matrix_world @ Vector(c) for c in p.bound_box)

    if all_corners:
        mins = Vector((min(v.x for v in all_corners),
                       min(v.y for v in all_corners),
                       min(v.z for v in all_corners)))
        maxs = Vector((max(v.x for v in all_corners),
                       max(v.y for v in all_corners),
                       max(v.z for v in all_corners)))
        center = (mins + maxs) * 0.5
        # Shift every part by the same offset to preserve relative layout.
        for p in parts:
            p.location -= center

    return parts
|
||||
|
||||
|
||||
def _resolve_part_name(index, part_obj, part_names_ordered):
|
||||
"""Get the STEP part name for a Blender part by index.
|
||||
|
||||
With per-part import, part_obj.name IS the STEP name (possibly with
|
||||
Blender .NNN suffix). Falls back to part_names_ordered for combined-STL mode.
|
||||
"""
|
||||
base_name = _re.sub(r'\.\d{3}$', '', part_obj.name)
|
||||
if part_names_ordered and index < len(part_names_ordered):
|
||||
return part_names_ordered[index]
|
||||
return base_name
|
||||
|
||||
|
||||
def _apply_material_library(parts, mat_lib_path, mat_map, part_names_ordered=None):
    """Append materials from library .blend and assign to parts via material_map.

    With per-part STL import, Blender objects are named after STEP parts,
    so matching is by name (stripping Blender .NNN suffix for duplicates).
    Falls back to part_names_ordered index-based matching for combined-STL mode.

    mat_map: {part_name_lower: material_name}
    Parts without a match keep their current material.
    """
    # Missing/invalid library path is non-fatal: parts keep current materials.
    if not mat_lib_path or not os.path.isfile(mat_lib_path):
        print(f"[still_render] material library not found: {mat_lib_path}")
        return

    # Collect unique material names needed
    needed = set(mat_map.values())
    if not needed:
        return

    # Append materials from library
    appended = {}
    for mat_name in needed:
        # Blend-file inner path syntax: <lib.blend>/Material/<name>
        inner_path = f"{mat_lib_path}/Material/{mat_name}"
        try:
            bpy.ops.wm.append(
                filepath=inner_path,
                directory=f"{mat_lib_path}/Material/",
                filename=mat_name,
                link=False,
            )
            # The append op does not raise on a missing datablock, so verify
            # the material actually arrived before recording it.
            if mat_name in bpy.data.materials:
                appended[mat_name] = bpy.data.materials[mat_name]
                print(f"[still_render] appended material: {mat_name}")
            else:
                print(f"[still_render] WARNING: material '{mat_name}' not found after append")
        except Exception as exc:
            # Best-effort: a single failed append must not abort the render.
            print(f"[still_render] WARNING: failed to append material '{mat_name}': {exc}")

    if not appended:
        return

    # Assign materials to parts — primary: name-based (per-part STL mode),
    # secondary: index-based via part_names_ordered (combined STL fallback)
    assigned_count = 0
    for i, part in enumerate(parts):
        # Try name-based matching first (strip Blender .NNN suffix)
        base_name = _re.sub(r'\.\d{3}$', '', part.name)
        part_key = base_name.lower().strip()
        mat_name = mat_map.get(part_key)

        # Fall back to index-based matching via part_names_ordered
        if not mat_name and part_names_ordered and i < len(part_names_ordered):
            step_name = part_names_ordered[i]
            part_key = step_name.lower().strip()
            mat_name = mat_map.get(part_key)

        if mat_name and mat_name in appended:
            # Replace whatever materials the part already has with the library one.
            part.data.materials.clear()
            part.data.materials.append(appended[mat_name])
            assigned_count += 1
            print(f"[still_render] assigned '{mat_name}' to part '{part.name}'")

    print(f"[still_render] material assignment: {assigned_count}/{len(parts)} parts matched")
|
||||
|
||||
|
||||
def main():
    """Render a single still image of an STL assembly with Blender.

    Reads positional arguments after the ``--`` separator in ``sys.argv``:
    stl_path, output_path, width, height, engine, samples, followed by a long
    optional tail (part colours, template / material-library settings, device
    and denoiser overrides) — see the assignments below for the exact layout.
    Renders to ``output_path`` and, for non-transparent output, applies a
    Pillow overlay (green top bar + model-name label).
    """
    argv = sys.argv
    # Blender forwards script arguments after a literal "--" separator.
    args = argv[argv.index("--") + 1:]

    # Required positional arguments.
    stl_path = args[0]
    output_path = args[1]
    width = int(args[2])
    height = int(args[3])
    engine = args[4]
    samples = int(args[5])
    # Optional arguments — each falls back to a safe default when absent.
    part_colors_json = args[6] if len(args) > 6 else "{}"
    transparent_bg = args[7] == "1" if len(args) > 7 else False

    # Template + material library args (passed by schaeffler-still.js)
    template_path = args[8] if len(args) > 8 and args[8] else ""
    target_collection = args[9] if len(args) > 9 else "Product"
    material_library_path = args[10] if len(args) > 10 and args[10] else ""
    material_map_raw = args[11] if len(args) > 11 else "{}"
    part_names_ordered_raw = args[12] if len(args) > 12 else "[]"
    lighting_only = args[13] == "1" if len(args) > 13 else False
    cycles_device = args[14].lower() if len(args) > 14 else "auto"  # "auto", "gpu", "cpu"
    shadow_catcher = args[15] == "1" if len(args) > 15 else False
    rotation_x = float(args[16]) if len(args) > 16 else 0.0
    rotation_y = float(args[17]) if len(args) > 17 else 0.0
    rotation_z = float(args[18]) if len(args) > 18 else 0.0
    noise_threshold_arg = args[19] if len(args) > 19 else ""
    denoiser_arg = args[20] if len(args) > 20 else ""
    denoising_input_passes_arg = args[21] if len(args) > 21 else ""
    denoising_prefilter_arg = args[22] if len(args) > 22 else ""
    denoising_quality_arg = args[23] if len(args) > 23 else ""
    denoising_use_gpu_arg = args[24] if len(args) > 24 else ""

    os.makedirs(os.path.dirname(output_path), exist_ok=True)

    # Decode JSON arguments defensively — malformed input degrades to defaults.
    try:
        part_colors = json.loads(part_colors_json)
    except json.JSONDecodeError:
        part_colors = {}

    try:
        material_map = json.loads(material_map_raw) if material_map_raw else {}
    except json.JSONDecodeError:
        material_map = {}

    try:
        part_names_ordered = json.loads(part_names_ordered_raw) if part_names_ordered_raw else []
    except json.JSONDecodeError:
        part_names_ordered = []

    # Validate template path: if provided it MUST exist on disk.
    # A missing template is a configuration error — fail loudly rather than
    # silently falling back to factory-settings mode which produces renders that
    # look completely wrong.
    if template_path and not os.path.isfile(template_path):
        print(f"[still_render] ERROR: template_path was provided but file not found: {template_path}")
        print("[still_render] Ensure the blend-templates directory is accessible on this worker.")
        sys.exit(1)

    use_template = bool(template_path)

    print(f"[still_render] engine={engine}, samples={samples}, size={width}x{height}, transparent={transparent_bg}")
    print(f"[still_render] part_names_ordered: {len(part_names_ordered)} entries")
    if use_template:
        print(f"[still_render] template={template_path}, collection={target_collection}, lighting_only={lighting_only}")
    else:
        print("[still_render] no template — using factory settings (Mode A)")
    if material_library_path:
        print(f"[still_render] material_library={material_library_path}, material_map keys={list(material_map.keys())}")

    # ── SCENE SETUP ──────────────────────────────────────────────────────────

    if use_template:
        # ── MODE B: Template-based render ────────────────────────────────────
        print(f"[still_render] Opening template: {template_path}")
        bpy.ops.wm.open_mainfile(filepath=template_path)

        # Find or create target collection
        target_col = _ensure_collection(target_collection)

        # Import and split STL
        parts = _import_stl(stl_path)
        # Scale mm→m: STEP coords are mm, Blender default unit is metres
        _scale_mm_to_m(parts)
        # Apply render position rotation (before camera/bbox calculations)
        _apply_rotation(parts, rotation_x, rotation_y, rotation_z)

        # Move imported parts into target collection
        for part in parts:
            for col in list(part.users_collection):
                col.objects.unlink(part)
            target_col.objects.link(part)

        # Apply smooth shading
        for part in parts:
            _apply_smooth(part, SMOOTH_ANGLE)

        # Material assignment: library materials if available, otherwise palette
        if material_library_path and material_map:
            mat_map_lower = {k.lower(): v for k, v in material_map.items()}
            _apply_material_library(parts, material_library_path, mat_map_lower, part_names_ordered)
            # Parts not matched by library get palette fallback
            for i, part in enumerate(parts):
                if not part.data.materials or len(part.data.materials) == 0:
                    _assign_palette_material(part, i)
        else:
            for i, part in enumerate(parts):
                step_name = _resolve_part_name(i, part, part_names_ordered)
                color_hex = part_colors.get(step_name)
                if color_hex:
                    # Explicit per-part colour: build a simple Principled material.
                    color = _hex_to_linear(color_hex)
                    mat = bpy.data.materials.new(name=f"Part_{i}")
                    mat.use_nodes = True
                    bsdf = mat.node_tree.nodes.get("Principled BSDF")
                    if bsdf:
                        bsdf.inputs["Base Color"].default_value = color
                        bsdf.inputs["Metallic"].default_value = 0.35
                        bsdf.inputs["Roughness"].default_value = 0.40
                        try:
                            # Socket only present on some Blender node layouts.
                            bsdf.inputs["Specular IOR Level"].default_value = 0.5
                        except KeyError:
                            pass
                    part.data.materials.clear()
                    part.data.materials.append(mat)
                else:
                    _assign_palette_material(part, i)

        # ── Shadow catcher (Cycles only, template mode only) ─────────────────
        if shadow_catcher:
            sc_col_name = "Shadowcatcher"
            sc_obj_name = "Shadowcatcher"
            # Re-enable the shadow-catcher collection in every view layer.
            for vl in bpy.context.scene.view_layers:
                def _enable_col_recursive(layer_col):
                    # Depth-first search for the shadow-catcher collection.
                    if layer_col.collection.name == sc_col_name:
                        layer_col.exclude = False
                        layer_col.collection.hide_render = False
                        layer_col.collection.hide_viewport = False
                        return True
                    for child in layer_col.children:
                        if _enable_col_recursive(child):
                            return True
                    return False
                _enable_col_recursive(vl.layer_collection)

            sc_obj = bpy.data.objects.get(sc_obj_name)
            if sc_obj:
                # Drop the catcher plane to the lowest world-space point of the parts.
                all_world_z = []
                for part in parts:
                    for corner in part.bound_box:
                        all_world_z.append((part.matrix_world @ Vector(corner)).z)
                if all_world_z:
                    sc_obj.location.z = min(all_world_z)
                    print(f"[still_render] shadow catcher enabled, plane Z={sc_obj.location.z:.4f}")
            else:
                print(f"[still_render] WARNING: shadow catcher object '{sc_obj_name}' not found in template")

        # lighting_only: use template World/HDRI but force auto-camera UNLESS the shadow
        # catcher is enabled — in that case the template camera is already positioned to
        # show both the product and its shadow on the ground plane.
        needs_auto_camera = (lighting_only and not shadow_catcher) or not bpy.context.scene.camera
        if lighting_only and not shadow_catcher:
            print("[still_render] lighting_only mode: using template World/HDRI, forcing auto-camera")
        elif needs_auto_camera:
            print("[still_render] WARNING: template has no camera — will create auto-camera")

        # Set very close near clip on template camera for mm-scale parts (now in metres)
        if not needs_auto_camera and bpy.context.scene.camera:
            bpy.context.scene.camera.data.clip_start = 0.001

        print(f"[still_render] template mode: {len(parts)} parts imported into collection '{target_collection}'")

    else:
        # ── MODE A: Factory settings (original behavior) ─────────────────────
        needs_auto_camera = True
        bpy.ops.wm.read_factory_settings(use_empty=True)

        parts = _import_stl(stl_path)
        # Scale mm→m: STEP coords are mm, Blender default unit is metres
        _scale_mm_to_m(parts)
        # Apply render position rotation (before camera/bbox calculations)
        _apply_rotation(parts, rotation_x, rotation_y, rotation_z)

        for i, part in enumerate(parts):
            _apply_smooth(part, SMOOTH_ANGLE)

        # Material assignment: library materials if available, else part_colors/palette
        if material_library_path and material_map:
            mat_map_lower = {k.lower(): v for k, v in material_map.items()}
            _apply_material_library(parts, material_library_path, mat_map_lower, part_names_ordered)
            # Palette fallback for unmatched parts
            for i, part in enumerate(parts):
                if not part.data.materials or len(part.data.materials) == 0:
                    _assign_palette_material(part, i)
        else:
            # part_colors or palette — use index-based lookup via part_names_ordered
            for i, part in enumerate(parts):
                step_name = _resolve_part_name(i, part, part_names_ordered)
                color_hex = part_colors.get(step_name)
                if color_hex:
                    color = _hex_to_linear(color_hex)
                else:
                    color = PALETTE_LINEAR[i % len(PALETTE_LINEAR)]

                mat = bpy.data.materials.new(name=f"Part_{i}")
                mat.use_nodes = True
                bsdf = mat.node_tree.nodes.get("Principled BSDF")
                if bsdf:
                    bsdf.inputs["Base Color"].default_value = color
                    bsdf.inputs["Metallic"].default_value = 0.35
                    bsdf.inputs["Roughness"].default_value = 0.40
                    try:
                        # Socket only present on some Blender node layouts.
                        bsdf.inputs["Specular IOR Level"].default_value = 0.5
                    except KeyError:
                        pass
                part.data.materials.clear()
                part.data.materials.append(mat)

    if needs_auto_camera:
        # ── Combined bounding box / bounding sphere ──────────────────────────
        all_corners = []
        for part in parts:
            all_corners.extend(part.matrix_world @ Vector(c) for c in part.bound_box)

        bbox_min = Vector((
            min(v.x for v in all_corners),
            min(v.y for v in all_corners),
            min(v.z for v in all_corners),
        ))
        bbox_max = Vector((
            max(v.x for v in all_corners),
            max(v.y for v in all_corners),
            max(v.z for v in all_corners),
        ))

        bbox_center = (bbox_min + bbox_max) * 0.5
        bbox_dims = bbox_max - bbox_min
        # Small floor avoids a degenerate zero-radius sphere for empty/flat input.
        bsphere_radius = max(bbox_dims.length * 0.5, 0.001)

        print(f"[still_render] bbox_dims={tuple(round(d, 4) for d in bbox_dims)}, "
              f"bsphere_radius={bsphere_radius:.4f}")

        # ── Lighting — only in Mode A (factory settings) ─────────────────────
        # In template mode the .blend file provides its own World/HDRI lighting.
        # Adding auto-lights would overpower the template's intended look.
        if not use_template:
            light_dist = bsphere_radius * 6.0

            # Key light: sun placed above and offset from the assembly.
            bpy.ops.object.light_add(type='SUN', location=(
                bbox_center.x + light_dist * 0.5,
                bbox_center.y - light_dist * 0.35,
                bbox_center.z + light_dist,
            ))
            sun = bpy.context.active_object
            sun.data.energy = 4.0
            sun.rotation_euler = (math.radians(45), 0, math.radians(30))

            # Fill light: large area lamp on the opposite side, scaled to model size.
            bpy.ops.object.light_add(type='AREA', location=(
                bbox_center.x - light_dist * 0.4,
                bbox_center.y + light_dist * 0.4,
                bbox_center.z + light_dist * 0.7,
            ))
            fill = bpy.context.active_object
            fill.data.energy = max(800.0, bsphere_radius ** 2 * 2000.0)
            fill.data.size = max(4.0, bsphere_radius * 4.0)

        # ── Camera (isometric-style, matches blender_render.py) ──────────────
        ELEVATION_DEG = 28.0
        AZIMUTH_DEG = 40.0
        LENS_MM = 50.0
        SENSOR_WIDTH_MM = 36.0
        FILL_FACTOR = 0.85  # fraction of the frame the bounding sphere should fill

        elevation_rad = math.radians(ELEVATION_DEG)
        azimuth_rad = math.radians(AZIMUTH_DEG)

        # Unit vector from bbox centre towards the camera position.
        cam_dir = Vector((
            math.cos(elevation_rad) * math.cos(azimuth_rad),
            math.cos(elevation_rad) * math.sin(azimuth_rad),
            math.sin(elevation_rad),
        )).normalized()

        # Fit distance using the tighter of the horizontal/vertical half-FOVs.
        fov_h = math.atan(SENSOR_WIDTH_MM / (2.0 * LENS_MM))
        fov_v = math.atan(SENSOR_WIDTH_MM * (height / width) / (2.0 * LENS_MM))
        fov_used = min(fov_h, fov_v)

        dist = (bsphere_radius / math.tan(fov_used)) / FILL_FACTOR
        dist = max(dist, bsphere_radius * 1.5)

        cam_location = bbox_center + cam_dir * dist
        bpy.ops.object.camera_add(location=cam_location)
        cam_obj = bpy.context.active_object
        cam_obj.data.lens = LENS_MM
        bpy.context.scene.camera = cam_obj

        # Look-at rotation
        look_dir = (bbox_center - cam_location).normalized()
        up_world = Vector((0.0, 0.0, 1.0))
        right = look_dir.cross(up_world)
        if right.length < 1e-6:
            # View direction (anti)parallel to world up: pick an arbitrary right axis.
            right = Vector((1.0, 0.0, 0.0))
        right.normalize()
        cam_up = right.cross(look_dir).normalized()

        # Rows are camera basis vectors; transposed to get world-from-camera rotation.
        rot_mat = Matrix((
            (right.x, right.y, right.z),
            (cam_up.x, cam_up.y, cam_up.z),
            (-look_dir.x, -look_dir.y, -look_dir.z),
        )).transposed()
        cam_obj.rotation_euler = rot_mat.to_euler('XYZ')

        cam_obj.data.clip_start = max(dist * 0.001, 0.0001)
        cam_obj.data.clip_end = dist + bsphere_radius * 3.0

    # ── World background — only in Mode A ───────────────────────────────
    # In template mode the .blend file owns its World (HDRI, sky texture,
    # studio lighting). Overwriting it would destroy the HDR look the
    # template was designed to use (e.g. Alpha-HDR output types).
    if not use_template:
        world = bpy.data.worlds.new("World")
        bpy.context.scene.world = world
        world.use_nodes = True
        bg = world.node_tree.nodes["Background"]
        bg.inputs["Color"].default_value = (0.96, 0.96, 0.97, 1.0)
        bg.inputs["Strength"].default_value = 0.15

    # ── Colour management ────────────────────────────────────────────────────
    # In template mode the .blend file owns its colour management settings
    # (e.g. Filmic/AgX for HDR, custom exposure for Alpha-HDR output types).
    # Overwriting them would destroy the look the template was designed for.
    # In factory-settings mode (Mode A) we force Standard to avoid the grey
    # Filmic tint that Blender applies by default.
    scene = bpy.context.scene
    if not use_template:
        scene.view_settings.view_transform = 'Standard'
        scene.view_settings.exposure = 0.0
        scene.view_settings.gamma = 1.0
        try:
            scene.view_settings.look = 'None'
        except Exception:
            pass

    # ── Render engine ────────────────────────────────────────────────────────
    if engine == "eevee":
        eevee_ok = False
        # The EEVEE engine identifier differs across Blender versions; try both.
        for eevee_id in ('BLENDER_EEVEE', 'BLENDER_EEVEE_NEXT'):
            try:
                scene.render.engine = eevee_id
                eevee_ok = True
                print(f"[still_render] EEVEE engine id: {eevee_id}")
                break
            except TypeError:
                continue
        if eevee_ok:
            # The sample-count attribute name also varies; first match wins.
            for attr in ('taa_render_samples', 'samples'):
                try:
                    setattr(scene.eevee, attr, samples)
                    break
                except AttributeError:
                    continue
        else:
            print("[still_render] WARNING: EEVEE unavailable, falling back to Cycles")
            engine = "cycles"

    if engine != "eevee":
        scene.render.engine = 'CYCLES'
        scene.cycles.samples = samples
        scene.cycles.use_denoising = True
        scene.cycles.denoiser = denoiser_arg if denoiser_arg else 'OPENIMAGEDENOISE'
        # Optional denoiser tuning — each property may not exist on this Blender.
        if denoising_input_passes_arg:
            try: scene.cycles.denoising_input_passes = denoising_input_passes_arg
            except Exception: pass
        if denoising_prefilter_arg:
            try: scene.cycles.denoising_prefilter = denoising_prefilter_arg
            except Exception: pass
        if denoising_quality_arg:
            try: scene.cycles.denoising_quality = denoising_quality_arg
            except Exception: pass
        if denoising_use_gpu_arg:
            try: scene.cycles.denoising_use_gpu = (denoising_use_gpu_arg == "1")
            except AttributeError: pass
        if noise_threshold_arg:
            scene.cycles.use_adaptive_sampling = True
            scene.cycles.adaptive_threshold = float(noise_threshold_arg)
        # Device selection: "cpu" forces CPU, "gpu" forces GPU (warns if unavailable),
        # "auto" (default) tries GPU first and falls back to CPU.
        print(f"[still_render] cycles_device={cycles_device}")
        gpu_found = False
        if cycles_device != "cpu":
            try:
                cycles_prefs = bpy.context.preferences.addons['cycles'].preferences
                # Probe backends in preference order; first one exposing a GPU wins.
                for device_type in ('OPTIX', 'CUDA', 'HIP', 'ONEAPI'):
                    try:
                        cycles_prefs.compute_device_type = device_type
                        cycles_prefs.get_devices()
                        gpu_devs = [d for d in cycles_prefs.devices if d.type != 'CPU']
                        if gpu_devs:
                            for d in gpu_devs:
                                d.use = True
                            scene.cycles.device = 'GPU'
                            gpu_found = True
                            print(f"[still_render] Cycles GPU ({device_type})")
                            break
                    except Exception:
                        continue
            except Exception:
                pass
        if not gpu_found:
            scene.cycles.device = 'CPU'
            print("[still_render] WARNING: GPU not found — falling back to CPU")

    # ── Render settings ──────────────────────────────────────────────────────
    scene.render.resolution_x = width
    scene.render.resolution_y = height
    scene.render.resolution_percentage = 100
    scene.render.film_transparent = transparent_bg

    ext = os.path.splitext(output_path)[1].lower()
    if ext in ('.jpg', '.jpeg'):
        scene.render.image_settings.file_format = 'JPEG'
        scene.render.image_settings.quality = 92
    else:
        scene.render.image_settings.file_format = 'PNG'

    scene.render.filepath = output_path

    # ── Render ───────────────────────────────────────────────────────────────
    print(f"[still_render] Rendering -> {output_path} (Blender {bpy.app.version_string})")
    bpy.ops.render.render(write_still=True)
    print("[still_render] render done.")

    # ── Pillow post-processing: green bar + model name label ─────────────────
    # Skip overlay for transparent renders to keep clean alpha channel
    if transparent_bg:
        print("[still_render] Transparent mode — skipping Pillow overlay.")
    else:
        try:
            from PIL import Image, ImageDraw, ImageFont

            img = Image.open(output_path).convert("RGBA")
            draw = ImageDraw.Draw(img)
            W, H = img.size

            # Schaeffler green top bar
            bar_h = max(8, H // 32)
            draw.rectangle([0, 0, W - 1, bar_h - 1], fill=(0, 137, 61, 255))

            # Model name strip at bottom
            model_name = os.path.splitext(os.path.basename(stl_path))[0]
            label_h = max(20, H // 20)
            img.alpha_composite(
                Image.new("RGBA", (W, label_h), (30, 30, 30, 180)),
                dest=(0, H - label_h),
            )

            font_size = max(10, label_h - 6)
            font = None
            # Try common Linux font locations; fall back to PIL's builtin font.
            for fp in [
                "/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf",
                "/usr/share/fonts/truetype/liberation/LiberationSans-Bold.ttf",
                "/usr/share/fonts/truetype/freefont/FreeSansBold.ttf",
            ]:
                if os.path.exists(fp):
                    try:
                        font = ImageFont.truetype(fp, font_size)
                        break
                    except Exception:
                        pass
            if font is None:
                font = ImageFont.load_default()

            # Centre the label text horizontally and vertically within the strip.
            tb = draw.textbbox((0, 0), model_name, font=font)
            text_w = tb[2] - tb[0]
            draw.text(
                ((W - text_w) // 2, H - label_h + (label_h - (tb[3] - tb[1])) // 2),
                model_name, font=font, fill=(255, 255, 255, 255),
            )

            # Save in original format
            if ext in ('.jpg', '.jpeg'):
                img.convert("RGB").save(output_path, format="JPEG", quality=92)
            else:
                img.convert("RGB").save(output_path, format="PNG")
            print("[still_render] Pillow overlay applied.")

        except ImportError:
            print("[still_render] Pillow not available - skipping overlay.")
        except Exception as exc:
            # Overlay is cosmetic; the render itself already succeeded.
            print(f"[still_render] Pillow overlay failed (non-fatal): {exc}")

    print("[still_render] Done.")
|
||||
|
||||
|
||||
# Run only when executed as a script (e.g. via blender --python), not on import.
if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,74 @@
|
||||
"""Blender GPU preferences setup for native animation render (-a).
|
||||
|
||||
Called as:
|
||||
blender --background scene.blend --python turntable_gpu_setup.py -a
|
||||
|
||||
Reads the intended cycles_device from the scene custom property set by
|
||||
turntable_setup.py, then applies the matching GPU compute device preferences.
|
||||
GPU preferences are user-level and not stored in .blend, so they must be
|
||||
re-applied at render time.
|
||||
|
||||
After this script runs, Blender processes -a and renders all animation frames
|
||||
natively — keeping the GPU scene (BVH, textures) loaded across all frames.
|
||||
"""
|
||||
import bpy
|
||||
|
||||
scene = bpy.context.scene
|
||||
cycles_device = scene.get("_cycles_device", "gpu")
|
||||
denoiser_override = scene.get("_denoiser_override", "")
|
||||
|
||||
if scene.render.engine != 'CYCLES':
|
||||
# EEVEE or other engine — no Cycles GPU preferences needed
|
||||
print(f"[turntable_gpu] engine={scene.render.engine} — no Cycles GPU setup needed")
|
||||
elif cycles_device == "cpu":
|
||||
scene.cycles.device = 'CPU'
|
||||
print("[turntable_gpu] Using CPU (explicit override)")
|
||||
else:
|
||||
gpu_found = False
|
||||
try:
|
||||
cycles_prefs = bpy.context.preferences.addons['cycles'].preferences
|
||||
for device_type in ('OPTIX', 'CUDA', 'HIP', 'ONEAPI'):
|
||||
try:
|
||||
cycles_prefs.compute_device_type = device_type
|
||||
cycles_prefs.get_devices()
|
||||
gpu_devs = [d for d in cycles_prefs.devices if d.type != 'CPU']
|
||||
if gpu_devs:
|
||||
for d in gpu_devs:
|
||||
d.use = True
|
||||
scene.cycles.device = 'GPU'
|
||||
gpu_found = True
|
||||
|
||||
# OptiX denoiser is fully GPU-native and faster than OIDN on NVIDIA.
|
||||
# Fall back to OIDN (also GPU-accelerated) on CUDA/HIP.
|
||||
if not denoiser_override:
|
||||
if device_type == 'OPTIX':
|
||||
try:
|
||||
scene.cycles.denoiser = 'OPTIX'
|
||||
print("[turntable_gpu] OptiX denoiser active (GPU-native)")
|
||||
except Exception:
|
||||
pass # Keep OIDN
|
||||
else:
|
||||
try:
|
||||
scene.cycles.denoiser = denoiser_override
|
||||
print(f"[turntable_gpu] Denoiser override: {denoiser_override}")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Blender 4.x+: explicitly route OIDN through GPU path
|
||||
try:
|
||||
scene.cycles.denoising_use_gpu = True
|
||||
except AttributeError:
|
||||
pass # Older Blender — OIDN uses GPU automatically when device=GPU
|
||||
|
||||
print(f"[turntable_gpu] Cycles GPU ({device_type}) — rendering {scene.frame_end - scene.frame_start + 1} frames")
|
||||
break
|
||||
except Exception:
|
||||
continue
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if not gpu_found:
|
||||
scene.cycles.device = 'CPU'
|
||||
print("[turntable_gpu] WARNING: GPU not found — falling back to CPU")
|
||||
|
||||
print(f"[turntable_gpu] Output: {scene.render.filepath}#### (frames {scene.frame_start}–{scene.frame_end})")
|
||||
@@ -0,0 +1,762 @@
|
||||
"""Blender Python script: turntable animation render for Flamenco.
|
||||
|
||||
Usage (from Blender):
|
||||
blender --background --python turntable_render.py -- \
|
||||
<stl_path> <frames_dir> <frame_count> <degrees> <width> <height> \
|
||||
<engine> <samples> <part_colors_json> \
|
||||
[template_path] [target_collection] [material_library_path] [material_map_json]
|
||||
"""
|
||||
import bpy
|
||||
import sys
|
||||
import os
|
||||
import json
|
||||
import math
|
||||
from mathutils import Vector, Matrix
|
||||
|
||||
# ── Colour palette (matches blender_render.py / Three.js renderer) ───────────
|
||||
# Ten visually distinct sRGB hex colours, cycled by part index when no
# explicit colour or library material is provided.
PALETTE_HEX = [
    "#4C9BE8", "#E85B4C", "#4CBE72", "#E8A84C", "#A04CE8",
    "#4CD4E8", "#E84CA8", "#7EC850", "#E86B30", "#5088C8",
]
|
||||
|
||||
def _srgb_to_linear(c: int) -> float:
|
||||
v = c / 255.0
|
||||
return v / 12.92 if v <= 0.04045 else ((v + 0.055) / 1.055) ** 2.4
|
||||
|
||||
def _hex_to_linear(hex_color: str) -> tuple:
|
||||
h = hex_color.lstrip('#')
|
||||
return (
|
||||
_srgb_to_linear(int(h[0:2], 16)),
|
||||
_srgb_to_linear(int(h[2:4], 16)),
|
||||
_srgb_to_linear(int(h[4:6], 16)),
|
||||
1.0,
|
||||
)
|
||||
|
||||
# Palette pre-converted to linear RGBA tuples for direct use as shader inputs.
PALETTE_LINEAR = [_hex_to_linear(h) for h in PALETTE_HEX]

# Angle threshold passed to _apply_smooth; values <= 0 mean flat shading there.
SMOOTH_ANGLE = 30  # degrees
|
||||
|
||||
|
||||
# ── Helper functions ─────────────────────────────────────────────────────────
|
||||
|
||||
def _ensure_collection(name: str):
    """Fetch the collection called *name*, creating and linking it on first use."""
    collections = bpy.data.collections
    if name not in collections:
        fresh = bpy.data.collections.new(name)
        # New collections must be linked to the scene root to become visible.
        bpy.context.scene.collection.children.link(fresh)
        return fresh
    return collections[name]
|
||||
|
||||
|
||||
def _assign_palette_material(part_obj, index):
    """Give *part_obj* a fresh Principled-BSDF material coloured from the palette.

    The colour is chosen by cycling *index* through PALETTE_LINEAR; any
    materials already on the mesh are replaced by the new one.
    """
    palette_color = PALETTE_LINEAR[index % len(PALETTE_LINEAR)]
    material = bpy.data.materials.new(name=f"Part_{index}")
    material.use_nodes = True
    shader = material.node_tree.nodes.get("Principled BSDF")
    if shader:
        shader.inputs["Base Color"].default_value = palette_color
        shader.inputs["Metallic"].default_value = 0.35
        shader.inputs["Roughness"].default_value = 0.40
        try:
            # Socket only present on some Blender node layouts.
            shader.inputs["Specular IOR Level"].default_value = 0.5
        except KeyError:
            pass
    part_obj.data.materials.clear()
    part_obj.data.materials.append(material)
|
||||
|
||||
|
||||
def _apply_smooth(part_obj, angle_deg):
    """Apply smooth or flat shading to a mesh object.

    angle_deg > 0 requests angle-limited smoothing; angle_deg <= 0 flat shading.
    """
    # Operators act on the active/selected object, so select it first.
    bpy.context.view_layer.objects.active = part_obj
    part_obj.select_set(True)
    if angle_deg > 0:
        try:
            # Prefer the angle-aware operator where available (newer Blender).
            bpy.ops.object.shade_smooth_by_angle(angle=math.radians(angle_deg))
        except AttributeError:
            # Fallback path: smooth everything, then limit via auto-smooth angle.
            bpy.ops.object.shade_smooth()
            part_obj.data.use_auto_smooth = True
            part_obj.data.auto_smooth_angle = math.radians(angle_deg)
    else:
        bpy.ops.object.shade_flat()
|
||||
|
||||
|
||||
import re as _re
|
||||
|
||||
|
||||
def _apply_rotation(parts, rx, ry, rz):
    """Apply Euler XYZ rotation (degrees) to all parts by modifying matrix_world.

    Rotates around world origin, which equals the assembly centre because
    _import_stl already centres parts there. Applied before material assignment
    and camera/bbox calculations so everything downstream sees the final pose.
    """
    # No-op when there is nothing to rotate or all angles are zero.
    if not parts or (rx == 0.0 and ry == 0.0 and rz == 0.0):
        return
    from mathutils import Euler
    # One shared 4x4 rotation matrix, pre-multiplied onto every part's transform.
    rot_mat = Euler((math.radians(rx), math.radians(ry), math.radians(rz)), 'XYZ').to_matrix().to_4x4()
    for p in parts:
        p.matrix_world = rot_mat @ p.matrix_world
    # Bake the rotation into the mesh data so later bbox/camera maths sees
    # identity object rotations.
    bpy.ops.object.select_all(action='DESELECT')
    for p in parts:
        p.select_set(True)
    bpy.context.view_layer.objects.active = parts[0]
    bpy.ops.object.transform_apply(location=False, rotation=True, scale=False)
    print(f"[turntable_render] applied rotation ({rx}°, {ry}°, {rz}°) to {len(parts)} parts")
|
||||
|
||||
|
||||
def _axis_rotation(axis: str, degrees: float) -> tuple:
|
||||
"""Map turntable axis name to Euler (x, y, z) rotation in radians."""
|
||||
rad = math.radians(degrees)
|
||||
if axis == "world_x":
|
||||
return (rad, 0.0, 0.0)
|
||||
elif axis == "world_y":
|
||||
return (0.0, rad, 0.0)
|
||||
else: # "world_z" default
|
||||
return (0.0, 0.0, rad)
|
||||
|
||||
|
||||
def _set_fcurves_linear(action):
|
||||
"""Set LINEAR interpolation on all fcurves.
|
||||
|
||||
Handles both the legacy Blender < 4.4 API (action.fcurves) and the new
|
||||
Baklava layered-action API introduced in Blender 4.4 / 5.x
|
||||
(action.layers[*].strips[*].channelbags[*].fcurves).
|
||||
"""
|
||||
try:
|
||||
# New layered-action API (Blender 4.4+ / 5.x)
|
||||
for layer in action.layers:
|
||||
for strip in layer.strips:
|
||||
for channelbag in strip.channelbags:
|
||||
for fc in channelbag.fcurves:
|
||||
for kp in fc.keyframe_points:
|
||||
kp.interpolation = 'LINEAR'
|
||||
except AttributeError:
|
||||
# Legacy API (Blender < 4.4)
|
||||
for fc in action.fcurves:
|
||||
for kp in fc.keyframe_points:
|
||||
kp.interpolation = 'LINEAR'
|
||||
|
||||
|
||||
def _scale_mm_to_m(parts):
    """Convert imported STL objects from millimetres to Blender metres.

    STEP/STL coordinates are expressed in mm while Blender's default unit is
    the metre, so without this ×0.001 scale a 50 mm part would show up as a
    50 m object — far too large for any metric template environment.
    """
    if not parts:
        return

    factor = 0.001
    bpy.ops.object.select_all(action='DESELECT')
    for obj in parts:
        obj.scale = (factor, factor, factor)
        obj.location *= factor
        obj.select_set(True)

    # transform_apply bakes the scale into mesh data for all selected objects.
    bpy.context.view_layer.objects.active = parts[0]
    bpy.ops.object.transform_apply(scale=True, location=False, rotation=False)
    print(f"[turntable_render] scaled {len(parts)} parts mm→m (×0.001)")
|
||||
|
||||
|
||||
def _import_stl(stl_file):
    """Import STL into Blender, using per-part STLs if available.

    Checks for {stl_stem}_parts/manifest.json next to the STL file.
    - Per-part mode: imports each part STL, names Blender object after STEP part name.
    - Fallback: imports combined STL and splits by loose geometry.

    Returns list of Blender mesh objects, centred at origin.
    """
    stl_dir = os.path.dirname(stl_file)
    stl_stem = os.path.splitext(os.path.basename(stl_file))[0]
    # Sidecar layout produced by the STEP converter: <stem>_parts/manifest.json
    parts_dir = os.path.join(stl_dir, stl_stem + "_parts")
    manifest_path = os.path.join(parts_dir, "manifest.json")

    parts = []

    if os.path.isfile(manifest_path):
        # ── Per-part mode ────────────────────────────────────────────────
        try:
            with open(manifest_path, "r") as f:
                manifest = json.loads(f.read())
            part_entries = manifest.get("parts", [])
        except Exception as e:
            # Best-effort: a corrupt manifest falls through to combined-STL mode.
            print(f"[turntable_render] WARNING: failed to read manifest: {e}")
            part_entries = []

        if part_entries:
            for entry in part_entries:
                part_file = os.path.join(parts_dir, entry["file"])
                part_name = entry["name"]
                if not os.path.isfile(part_file):
                    print(f"[turntable_render] WARNING: part STL missing: {part_file}")
                    continue

                # Deselect first so selected_objects contains only the new import.
                bpy.ops.object.select_all(action='DESELECT')
                bpy.ops.wm.stl_import(filepath=part_file)
                imported = bpy.context.selected_objects
                if imported:
                    obj = imported[0]
                    # Carry the STEP part name onto both object and mesh datablock.
                    obj.name = part_name
                    if obj.data:
                        obj.data.name = part_name
                    parts.append(obj)

        if parts:
            print(f"[turntable_render] imported {len(parts)} named parts from per-part STLs")

    # ── Fallback: combined STL + separate by loose ───────────────────────
    if not parts:
        bpy.ops.wm.stl_import(filepath=stl_file)
        obj = bpy.context.selected_objects[0] if bpy.context.selected_objects else None
        if obj is None:
            print(f"ERROR: No objects imported from {stl_file}")
            sys.exit(1)

        # Centre the combined mesh at the world origin before splitting.
        bpy.context.view_layer.objects.active = obj
        bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY', center='BOUNDS')
        obj.location = (0.0, 0.0, 0.0)

        # Split disconnected shells into separate objects (one per "part").
        bpy.ops.object.mode_set(mode='EDIT')
        bpy.ops.mesh.separate(type='LOOSE')
        bpy.ops.object.mode_set(mode='OBJECT')

        parts = list(bpy.context.selected_objects)
        print(f"[turntable_render] fallback: separated into {len(parts)} part(s)")
        return parts

    # ── Centre per-part imports at origin (combined bbox) ────────────────
    all_corners = []
    for p in parts:
        all_corners.extend(p.matrix_world @ Vector(c) for c in p.bound_box)

    if all_corners:
        mins = Vector((min(v.x for v in all_corners),
                       min(v.y for v in all_corners),
                       min(v.z for v in all_corners)))
        maxs = Vector((max(v.x for v in all_corners),
                       max(v.y for v in all_corners),
                       max(v.z for v in all_corners)))
        center = (mins + maxs) * 0.5
        # Shift every part so the assembly's bounding-box centre sits at origin.
        for p in parts:
            p.location -= center

    return parts
|
||||
|
||||
|
||||
def _resolve_part_name(index, part_obj, part_names_ordered):
|
||||
"""Get the STEP part name for a Blender part by index.
|
||||
|
||||
With per-part import, part_obj.name IS the STEP name (possibly with
|
||||
Blender .NNN suffix). Falls back to part_names_ordered for combined-STL mode.
|
||||
"""
|
||||
base_name = _re.sub(r'\.\d{3}$', '', part_obj.name)
|
||||
if part_names_ordered and index < len(part_names_ordered):
|
||||
return part_names_ordered[index]
|
||||
return base_name
|
||||
|
||||
|
||||
def _apply_material_library(parts, mat_lib_path, mat_map, part_names_ordered=None):
    """Append materials from library .blend and assign to parts via material_map.

    With per-part STL import, Blender objects are named after STEP parts,
    so matching is by name (stripping Blender .NNN suffix for duplicates).
    Falls back to part_names_ordered index-based matching for combined-STL mode.

    mat_map: {part_name_lower: material_name}
    Parts without a match keep their current material.
    """
    if not mat_lib_path or not os.path.isfile(mat_lib_path):
        print(f"[turntable_render] material library not found: {mat_lib_path}")
        return

    # Collect unique material names needed
    needed = set(mat_map.values())
    if not needed:
        return

    # Append materials from library
    appended = {}
    for mat_name in needed:
        # Blender's append API addresses datablocks as <blend>/<Type>/<name>.
        inner_path = f"{mat_lib_path}/Material/{mat_name}"
        try:
            bpy.ops.wm.append(
                filepath=inner_path,
                directory=f"{mat_lib_path}/Material/",
                filename=mat_name,
                link=False,
            )
            if mat_name in bpy.data.materials:
                appended[mat_name] = bpy.data.materials[mat_name]
                print(f"[turntable_render] appended material: {mat_name}")
            else:
                print(f"[turntable_render] WARNING: material '{mat_name}' not found after append")
        except Exception as exc:
            # Best-effort: a missing material must not abort the whole render.
            print(f"[turntable_render] WARNING: failed to append material '{mat_name}': {exc}")

    if not appended:
        return

    # Assign materials to parts — primary: name-based (per-part STL mode),
    # secondary: index-based via part_names_ordered (combined STL fallback)
    assigned_count = 0
    for i, part in enumerate(parts):
        # Try name-based matching first (strip Blender .NNN suffix)
        base_name = _re.sub(r'\.\d{3}$', '', part.name)
        part_key = base_name.lower().strip()
        mat_name = mat_map.get(part_key)

        # Fall back to index-based matching via part_names_ordered
        if not mat_name and part_names_ordered and i < len(part_names_ordered):
            step_name = part_names_ordered[i]
            part_key = step_name.lower().strip()
            mat_name = mat_map.get(part_key)

        if mat_name and mat_name in appended:
            # Replace any placeholder material with the library one.
            part.data.materials.clear()
            part.data.materials.append(appended[mat_name])
            assigned_count += 1
            print(f"[turntable_render] assigned '{mat_name}' to part '{part.name}'")

    print(f"[turntable_render] material assignment: {assigned_count}/{len(parts)} parts matched")
|
||||
|
||||
|
||||
def main():
    """Entry point: parse CLI args after "--", build the scene, render frames.

    Positional args (after "--"): stl_path, frames_dir, frame_count, degrees,
    width, height, engine, samples, part_colors_json, then optional
    template / material-library / device / rotation / background settings —
    see the parsing block below for the exact order and defaults.

    Two modes: Mode B opens a template .blend (camera, world, lighting come
    from the template); Mode A starts from empty factory settings and builds
    camera, lights and world itself. Writes PNG frames named frame_NNNN.png
    into frames_dir.
    """
    argv = sys.argv
    # Everything after "--" is our args
    args = argv[argv.index("--") + 1:]

    # Required positional arguments.
    stl_path = args[0]
    frames_dir = args[1]
    frame_count = int(args[2])
    degrees = int(args[3])
    width = int(args[4])
    height = int(args[5])
    engine = args[6]
    samples = int(args[7])
    part_colors_json = args[8] if len(args) > 8 else "{}"

    # Template + material library args (passed by schaeffler-turntable.js)
    template_path = args[9] if len(args) > 9 and args[9] else ""
    target_collection = args[10] if len(args) > 10 else "Product"
    material_library_path = args[11] if len(args) > 11 and args[11] else ""
    material_map_raw = args[12] if len(args) > 12 else "{}"
    part_names_ordered_raw = args[13] if len(args) > 13 else "[]"
    lighting_only = args[14] == "1" if len(args) > 14 else False
    cycles_device = args[15].lower() if len(args) > 15 else "auto"  # "auto", "gpu", "cpu"
    shadow_catcher = args[16] == "1" if len(args) > 16 else False
    rotation_x = float(args[17]) if len(args) > 17 else 0.0
    rotation_y = float(args[18]) if len(args) > 18 else 0.0
    rotation_z = float(args[19]) if len(args) > 19 else 0.0
    turntable_axis = args[20] if len(args) > 20 else "world_z"
    bg_color = args[21] if len(args) > 21 else ""
    transparent_bg = args[22] == "1" if len(args) > 22 else False

    os.makedirs(frames_dir, exist_ok=True)

    # JSON args are best-effort: malformed input degrades to empty defaults.
    try:
        part_colors = json.loads(part_colors_json)
    except json.JSONDecodeError:
        part_colors = {}

    try:
        material_map = json.loads(material_map_raw) if material_map_raw else {}
    except json.JSONDecodeError:
        material_map = {}

    try:
        part_names_ordered = json.loads(part_names_ordered_raw) if part_names_ordered_raw else []
    except json.JSONDecodeError:
        part_names_ordered = []

    # Validate template path: if provided it MUST exist on disk.
    if template_path and not os.path.isfile(template_path):
        print(f"[turntable_render] ERROR: template_path was provided but file not found: {template_path}")
        print("[turntable_render] Ensure the blend-templates directory is accessible on this worker.")
        sys.exit(1)

    use_template = bool(template_path)

    print(f"[turntable_render] engine={engine}, samples={samples}, size={width}x{height}, "
          f"frames={frame_count}, degrees={degrees}")
    print(f"[turntable_render] part_names_ordered: {len(part_names_ordered)} entries")
    if use_template:
        print(f"[turntable_render] template={template_path}, collection={target_collection}, lighting_only={lighting_only}")
    else:
        print("[turntable_render] no template — using factory settings (Mode A)")
    if material_library_path:
        print(f"[turntable_render] material_library={material_library_path}, material_map keys={list(material_map.keys())}")

    # ── SCENE SETUP ──────────────────────────────────────────────────────────

    if use_template:
        # ── MODE B: Template-based render ────────────────────────────────────
        print(f"[turntable_render] Opening template: {template_path}")
        bpy.ops.wm.open_mainfile(filepath=template_path)

        # Find or create target collection
        target_col = _ensure_collection(target_collection)

        # Import and split STL
        parts = _import_stl(stl_path)
        # Scale mm→m: STEP coords are mm, Blender default unit is metres
        _scale_mm_to_m(parts)
        # Apply render position rotation before material/camera setup
        _apply_rotation(parts, rotation_x, rotation_y, rotation_z)

        # Move imported parts into target collection
        for part in parts:
            for col in list(part.users_collection):
                col.objects.unlink(part)
            target_col.objects.link(part)

        # Apply smooth shading
        for part in parts:
            _apply_smooth(part, SMOOTH_ANGLE)

        # Material assignment: library materials if available, otherwise palette
        if material_library_path and material_map:
            mat_map_lower = {k.lower(): v for k, v in material_map.items()}
            _apply_material_library(parts, material_library_path, mat_map_lower, part_names_ordered)
            # Parts not matched by library get palette fallback
            for i, part in enumerate(parts):
                if not part.data.materials or len(part.data.materials) == 0:
                    _assign_palette_material(part, i)
        else:
            # NOTE(review): in template mode a part WITH a colour entry keeps
            # its imported material — only colourless parts get the palette.
            # Mode A below instead builds a material from the colour; confirm
            # this asymmetry is intentional.
            for i, part in enumerate(parts):
                step_name = _resolve_part_name(i, part, part_names_ordered)
                color_hex = part_colors.get(step_name)
                if not color_hex:
                    _assign_palette_material(part, i)

        # ── Shadow catcher (Cycles only, template mode only) ─────────────────
        if shadow_catcher:
            sc_col_name = "Shadowcatcher"
            sc_obj_name = "Shadowcatcher"
            for vl in bpy.context.scene.view_layers:
                # Recursively un-hide the shadow-catcher collection in this view layer.
                def _enable_col_recursive(layer_col):
                    if layer_col.collection.name == sc_col_name:
                        layer_col.exclude = False
                        layer_col.collection.hide_render = False
                        layer_col.collection.hide_viewport = False
                        return True
                    for child in layer_col.children:
                        if _enable_col_recursive(child):
                            return True
                    return False
                _enable_col_recursive(vl.layer_collection)

            sc_obj = bpy.data.objects.get(sc_obj_name)
            if sc_obj:
                # Drop the catcher plane to the model's lowest world-space point.
                all_world_z = []
                for part in parts:
                    for corner in part.bound_box:
                        all_world_z.append((part.matrix_world @ Vector(corner)).z)
                if all_world_z:
                    sc_obj.location.z = min(all_world_z)
                print(f"[turntable_render] shadow catcher enabled, plane Z={sc_obj.location.z:.4f}")
            else:
                print(f"[turntable_render] WARNING: shadow catcher object '{sc_obj_name}' not found in template")

        # lighting_only: always use auto-framing; normal template: use camera if present
        needs_auto_camera = (lighting_only and not shadow_catcher) or not bpy.context.scene.camera
        if lighting_only and not shadow_catcher:
            print("[turntable_render] lighting_only mode: using template World/HDRI, forcing auto-camera")
        elif needs_auto_camera:
            print("[turntable_render] WARNING: template has no camera — will create auto-camera")

        # Set very close near clip on template camera for mm-scale parts (now in metres)
        if not needs_auto_camera and bpy.context.scene.camera:
            bpy.context.scene.camera.data.clip_start = 0.001

        print(f"[turntable_render] template mode: {len(parts)} parts imported into collection '{target_collection}'")

    else:
        # ── MODE A: Factory settings ─────────────────────────────────────────
        needs_auto_camera = True
        bpy.ops.wm.read_factory_settings(use_empty=True)

        parts = _import_stl(stl_path)
        # Scale mm→m: STEP coords are mm, Blender default unit is metres
        _scale_mm_to_m(parts)
        # Apply render position rotation before material/camera setup
        _apply_rotation(parts, rotation_x, rotation_y, rotation_z)

        for i, part in enumerate(parts):
            _apply_smooth(part, SMOOTH_ANGLE)

        # Material assignment: library materials if available, else part_colors/palette
        if material_library_path and material_map:
            mat_map_lower = {k.lower(): v for k, v in material_map.items()}
            _apply_material_library(parts, material_library_path, mat_map_lower, part_names_ordered)
            # Palette fallback for unmatched parts
            for i, part in enumerate(parts):
                if not part.data.materials or len(part.data.materials) == 0:
                    _assign_palette_material(part, i)
        else:
            # part_colors or palette — use index-based lookup via part_names_ordered
            for i, part in enumerate(parts):
                step_name = _resolve_part_name(i, part, part_names_ordered)
                color_hex = part_colors.get(step_name)
                if color_hex:
                    mat = bpy.data.materials.new(name=f"mat_{part.name}")
                    mat.use_nodes = True
                    bsdf = mat.node_tree.nodes.get("Principled BSDF")
                    if bsdf:
                        color = _hex_to_linear(color_hex)
                        bsdf.inputs["Base Color"].default_value = color
                        bsdf.inputs["Metallic"].default_value = 0.35
                        bsdf.inputs["Roughness"].default_value = 0.40
                        try:
                            # Input name varies across Blender versions; absent on older builds.
                            bsdf.inputs["Specular IOR Level"].default_value = 0.5
                        except KeyError:
                            pass
                    part.data.materials.clear()
                    part.data.materials.append(mat)
                else:
                    _assign_palette_material(part, i)

    if needs_auto_camera:
        # ── Combined bounding box / bounding sphere ──────────────────────────
        all_corners = []
        for part in parts:
            all_corners.extend(part.matrix_world @ Vector(c) for c in part.bound_box)

        bbox_min = Vector((
            min(v.x for v in all_corners),
            min(v.y for v in all_corners),
            min(v.z for v in all_corners),
        ))
        bbox_max = Vector((
            max(v.x for v in all_corners),
            max(v.y for v in all_corners),
            max(v.z for v in all_corners),
        ))

        bbox_center = (bbox_min + bbox_max) * 0.5
        bbox_dims = bbox_max - bbox_min
        # Floor the radius so a degenerate (flat) model can't zero it out.
        bsphere_radius = max(bbox_dims.length * 0.5, 0.001)

        print(f"[turntable_render] bbox_dims={tuple(round(d, 4) for d in bbox_dims)}, "
              f"bsphere_radius={bsphere_radius:.4f}")

        # ── Lighting — only in Mode A (factory settings) ─────────────────────
        # In template mode the .blend file provides its own World/HDRI lighting.
        # Adding auto-lights would overpower the template's intended look.
        if not use_template:
            light_dist = bsphere_radius * 6.0

            bpy.ops.object.light_add(type='SUN', location=(
                bbox_center.x + light_dist * 0.5,
                bbox_center.y - light_dist * 0.35,
                bbox_center.z + light_dist,
            ))
            sun = bpy.context.active_object
            sun.data.energy = 4.0
            sun.rotation_euler = (math.radians(45), 0, math.radians(30))

            bpy.ops.object.light_add(type='AREA', location=(
                bbox_center.x - light_dist * 0.4,
                bbox_center.y + light_dist * 0.4,
                bbox_center.z + light_dist * 0.7,
            ))
            fill = bpy.context.active_object
            # Fill energy/size scale with model radius so exposure stays stable.
            fill.data.energy = max(800.0, bsphere_radius ** 2 * 2000.0)
            fill.data.size = max(4.0, bsphere_radius * 4.0)

        # ── Camera ───────────────────────────────────────────────────────────
        cam_dist = bsphere_radius * 2.5
        cam_location = Vector((
            bbox_center.x + cam_dist,
            bbox_center.y,
            bbox_center.z + bsphere_radius * 0.5,
        ))
        bpy.ops.object.camera_add(location=cam_location)
        camera = bpy.context.active_object
        bpy.context.scene.camera = camera
        camera.data.clip_start = max(cam_dist * 0.001, 0.0001)
        camera.data.clip_end = cam_dist * 10.0

        # Track-to constraint for look-at
        empty = bpy.data.objects.new("target", None)
        bpy.context.collection.objects.link(empty)
        empty.location = bbox_center

        track = camera.constraints.new(type='TRACK_TO')
        track.target = empty
        track.track_axis = 'TRACK_NEGATIVE_Z'
        track.up_axis = 'UP_Y'

        # ── World background — only in Mode A ───────────────────────────────
        # In template mode the .blend file owns its World (HDRI, sky texture,
        # studio lighting). Overwriting it would destroy the HDR look.
        if not use_template:
            world = bpy.data.worlds.new("World")
            bpy.context.scene.world = world
            world.use_nodes = True
            bg = world.node_tree.nodes["Background"]
            bg.inputs["Color"].default_value = (0.96, 0.96, 0.97, 1.0)
            bg.inputs["Strength"].default_value = 0.15

        # ── Turntable pivot ──────────────────────────────────────────────────
        pivot = bpy.data.objects.new("pivot", None)
        bpy.context.collection.objects.link(pivot)
        pivot.location = bbox_center

        # Parent camera to pivot
        camera.parent = pivot
        camera.location = (cam_dist, 0, bsphere_radius * 0.5)

        # Keyframe pivot rotation
        scene = bpy.context.scene
        scene.frame_start = 1
        scene.frame_end = frame_count

        pivot.rotation_euler = (0, 0, 0)
        pivot.keyframe_insert(data_path="rotation_euler", frame=1)
        pivot.rotation_euler = _axis_rotation(turntable_axis, degrees)
        pivot.keyframe_insert(data_path="rotation_euler", frame=frame_count + 1)

        # Linear interpolation — frame N+1 is never rendered, giving N uniform steps
        _set_fcurves_linear(pivot.animation_data.action)

    else:
        # Template has camera — set up turntable on the model parts instead
        scene = bpy.context.scene
        scene.frame_start = 1
        scene.frame_end = frame_count

        # Calculate model center for pivot
        all_corners = []
        for part in parts:
            all_corners.extend(part.matrix_world @ Vector(c) for c in part.bound_box)

        bbox_center = Vector((
            (min(v.x for v in all_corners) + max(v.x for v in all_corners)) * 0.5,
            (min(v.y for v in all_corners) + max(v.y for v in all_corners)) * 0.5,
            (min(v.z for v in all_corners) + max(v.z for v in all_corners)) * 0.5,
        ))

        # Create a pivot empty and parent all parts to it
        pivot = bpy.data.objects.new("turntable_pivot", None)
        bpy.context.collection.objects.link(pivot)
        pivot.location = bbox_center

        for part in parts:
            part.parent = pivot

        # Keyframe pivot rotation
        pivot.rotation_euler = (0, 0, 0)
        pivot.keyframe_insert(data_path="rotation_euler", frame=1)
        pivot.rotation_euler = _axis_rotation(turntable_axis, degrees)
        pivot.keyframe_insert(data_path="rotation_euler", frame=frame_count + 1)

        # Linear interpolation — frame N+1 is never rendered, giving N uniform steps
        _set_fcurves_linear(pivot.animation_data.action)

    # ── Colour management ────────────────────────────────────────────────────
    # In template mode the .blend file owns its colour management settings.
    # Overwriting them would destroy the intended HDR/tonemapping look.
    # In factory-settings mode force Standard to avoid the grey Filmic tint.
    scene = bpy.context.scene
    if not use_template:
        scene.view_settings.view_transform = 'Standard'
        scene.view_settings.exposure = 0.0
        scene.view_settings.gamma = 1.0
        try:
            scene.view_settings.look = 'None'
        except Exception:
            pass

    # ── Render engine ────────────────────────────────────────────────────────
    if engine == "eevee":
        eevee_ok = False
        # The EEVEE engine id differs between Blender versions; try both.
        for eevee_id in ('BLENDER_EEVEE', 'BLENDER_EEVEE_NEXT'):
            try:
                scene.render.engine = eevee_id
                eevee_ok = True
                print(f"[turntable_render] EEVEE engine id: {eevee_id}")
                break
            except TypeError:
                continue
        if eevee_ok:
            # Sample-count attribute name also varies across versions.
            for attr in ('taa_render_samples', 'samples'):
                try:
                    setattr(scene.eevee, attr, samples)
                    break
                except AttributeError:
                    continue
        else:
            print("[turntable_render] WARNING: EEVEE not available, falling back to Cycles")
            engine = "cycles"

    if engine != "eevee":
        scene.render.engine = 'CYCLES'
        scene.cycles.samples = samples
        scene.cycles.use_denoising = True
        scene.cycles.denoiser = 'OPENIMAGEDENOISE'  # GPU-accelerated when CUDA/OptiX active
        # Device selection: "cpu" forces CPU, "gpu" forces GPU (warns if unavailable),
        # "auto" (default) tries GPU first and falls back to CPU.
        print(f"[turntable_render] cycles_device={cycles_device}")
        gpu_found = False
        if cycles_device != "cpu":
            try:
                cycles_prefs = bpy.context.preferences.addons['cycles'].preferences
                # Probe backends in preference order; first one with a GPU wins.
                for device_type in ('OPTIX', 'CUDA', 'HIP', 'ONEAPI'):
                    try:
                        cycles_prefs.compute_device_type = device_type
                        cycles_prefs.get_devices()
                        gpu_devs = [d for d in cycles_prefs.devices if d.type != 'CPU']
                        if gpu_devs:
                            for d in gpu_devs:
                                d.use = True
                            scene.cycles.device = 'GPU'
                            gpu_found = True
                            print(f"[turntable_render] Cycles GPU ({device_type})")
                            break
                    except Exception:
                        continue
            except Exception:
                pass
        # NOTE(review): when cycles_device == "cpu" was explicitly requested,
        # this still prints the "GPU not found" warning — misleading but harmless.
        if not gpu_found:
            scene.cycles.device = 'CPU'
            print("[turntable_render] WARNING: GPU not found — falling back to CPU")

    # ── Render settings ──────────────────────────────────────────────────────
    scene.render.resolution_x = width
    scene.render.resolution_y = height
    scene.render.resolution_percentage = 100
    scene.render.image_settings.file_format = 'PNG'

    # ── Transparent background ────────────────────────────────────────────────
    # bg_color compositing is handled by FFmpeg in the compose-video task.
    # Blender renders transparent PNG frames when bg_color is set.
    if bg_color or transparent_bg:
        scene.render.film_transparent = True
        if bg_color:
            print(f"[turntable_render] film_transparent=True for FFmpeg bg_color compositing ({bg_color})")
        else:
            print("[turntable_render] transparent_bg enabled (alpha PNG frames)")

    # ── Render all frames ────────────────────────────────────────────────────
    # Per-frame loop with write_still=True. In a single Blender session,
    # Cycles keeps the GPU scene (BVH, textures, material graph) loaded
    # between frames — only the animated pivot transform is updated each step.
    # bpy.ops.render.render(animation=True) does NOT work reliably in
    # background mode after wm.open_mainfile() in Blender 5.x (silently
    # writes no files), so we use the explicit per-frame approach.
    import time as _time
    _render_start = _time.time()
    for frame in range(1, frame_count + 1):
        scene.frame_set(frame)
        scene.render.filepath = os.path.join(frames_dir, f"frame_{frame:04d}")
        bpy.ops.render.render(write_still=True)
        elapsed = _time.time() - _render_start
        fps_so_far = frame / elapsed
        print(f"[turntable_render] Frame {frame}/{frame_count} — {elapsed:.1f}s elapsed ({fps_so_far:.2f} fps)")

    total = _time.time() - _render_start
    print(f"[turntable_render] Turntable render complete: {frame_count} frames in {total:.1f}s ({frame_count/total:.2f} fps avg)")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -0,0 +1,688 @@
|
||||
"""Blender Python script: scene setup for turntable animation (Flamenco).
|
||||
|
||||
Performs all scene preparation — STL import, materials, camera, pivot animation,
|
||||
compositor — then SAVES the resulting .blend file to <scene_path>.
|
||||
|
||||
The saved .blend is then rendered by a separate Flamenco task:
|
||||
blender --background <scene_path> --python turntable_gpu_setup.py -a
|
||||
|
||||
Using Blender's native -a (--render-anim) keeps the GPU scene (BVH, textures)
|
||||
loaded for ALL frames in one process, avoiding per-frame GPU re-upload overhead.
|
||||
|
||||
Usage (from Blender):
|
||||
blender --background --python turntable_setup.py -- \\
|
||||
<stl_path> <frames_dir> <frame_count> <degrees> <width> <height> \\
|
||||
<engine> <samples> <part_colors_json> \\
|
||||
[template_path] [target_collection] [material_library_path] \\
|
||||
[material_map_json] [part_names_ordered_json] [lighting_only] \\
|
||||
[cycles_device] [shadow_catcher] [rotation_x] [rotation_y] [rotation_z] \\
|
||||
[turntable_axis] [bg_color] [transparent_bg] [scene_path] [camera_orbit]
|
||||
"""
|
||||
import bpy
|
||||
import sys
|
||||
import os
|
||||
import json
|
||||
import math
|
||||
from mathutils import Vector, Matrix
|
||||
|
||||
# ── Colour palette ────────────────────────────────────────────────────────────
|
||||
PALETTE_HEX = [
|
||||
"#4C9BE8", "#E85B4C", "#4CBE72", "#E8A84C", "#A04CE8",
|
||||
"#4CD4E8", "#E84CA8", "#7EC850", "#E86B30", "#5088C8",
|
||||
]
|
||||
|
||||
def _srgb_to_linear(c: int) -> float:
|
||||
v = c / 255.0
|
||||
return v / 12.92 if v <= 0.04045 else ((v + 0.055) / 1.055) ** 2.4
|
||||
|
||||
def _hex_to_linear(hex_color: str) -> tuple:
    """Convert "#RRGGBB" (leading '#' optional) to a linear-light RGBA tuple.

    Alpha is always 1.0; channels pass through _srgb_to_linear.
    """
    digits = hex_color.lstrip('#')
    channels = [int(digits[i:i + 2], 16) for i in (0, 2, 4)]
    return tuple(_srgb_to_linear(ch) for ch in channels) + (1.0,)
|
||||
|
||||
PALETTE_LINEAR = [_hex_to_linear(h) for h in PALETTE_HEX]
|
||||
SMOOTH_ANGLE = 30
|
||||
|
||||
|
||||
# ── Helpers (kept in sync with turntable_render.py) ──────────────────────────
|
||||
|
||||
def _ensure_collection(name: str):
    """Return the collection called *name*, creating and linking it if absent."""
    existing = bpy.data.collections.get(name)
    if existing is not None:
        return existing
    created = bpy.data.collections.new(name)
    # A freshly created collection is an orphan until linked under the scene root.
    bpy.context.scene.collection.children.link(created)
    return created
|
||||
|
||||
|
||||
def _assign_palette_material(part_obj, index):
    """Give part *index* a fresh palette material and make it the only one.

    The palette colour cycles with the index, so any number of parts gets a
    deterministic colour.
    """
    mat = bpy.data.materials.new(name=f"Part_{index}")
    mat.use_nodes = True
    shader = mat.node_tree.nodes.get("Principled BSDF")
    if shader:
        shader.inputs["Base Color"].default_value = PALETTE_LINEAR[index % len(PALETTE_LINEAR)]
        shader.inputs["Metallic"].default_value = 0.35
        shader.inputs["Roughness"].default_value = 0.40
        try:
            # Input name varies across Blender versions; absent on older builds.
            shader.inputs["Specular IOR Level"].default_value = 0.5
        except KeyError:
            pass
    part_obj.data.materials.clear()
    part_obj.data.materials.append(mat)
|
||||
|
||||
|
||||
def _apply_smooth(part_obj, angle_deg):
    """Shade *part_obj* smooth below *angle_deg* degrees (flat when <= 0).

    Tries the modern shade_smooth_by_angle operator first and falls back to
    classic shade_smooth plus auto-smooth settings on Blender versions that
    lack it.
    """
    bpy.context.view_layer.objects.active = part_obj
    part_obj.select_set(True)
    if angle_deg <= 0:
        bpy.ops.object.shade_flat()
        return
    threshold = math.radians(angle_deg)
    try:
        bpy.ops.object.shade_smooth_by_angle(angle=threshold)
    except AttributeError:
        # Older API: plain smooth shading + mesh auto-smooth angle.
        bpy.ops.object.shade_smooth()
        part_obj.data.use_auto_smooth = True
        part_obj.data.auto_smooth_angle = threshold
|
||||
|
||||
|
||||
import re as _re
|
||||
|
||||
|
||||
def _apply_rotation(parts, rx, ry, rz):
    """Pre-rotate all parts around the world origin by (rx, ry, rz) degrees.

    No-op when there are no parts or every angle is zero. The rotation is
    baked into the meshes via transform_apply so downstream camera and
    material logic sees the final pose.
    """
    if not parts:
        return
    if rx == 0.0 and ry == 0.0 and rz == 0.0:
        return

    from mathutils import Euler
    angles = (math.radians(rx), math.radians(ry), math.radians(rz))
    world_rot = Euler(angles, 'XYZ').to_matrix().to_4x4()

    bpy.ops.object.select_all(action='DESELECT')
    for obj in parts:
        obj.matrix_world = world_rot @ obj.matrix_world
        obj.select_set(True)
    bpy.context.view_layer.objects.active = parts[0]
    bpy.ops.object.transform_apply(location=False, rotation=True, scale=False)
    print(f"[turntable_setup] applied rotation ({rx}°, {ry}°, {rz}°) to {len(parts)} parts")
|
||||
|
||||
|
||||
def _axis_rotation(axis: str, degrees: float) -> tuple:
|
||||
rad = math.radians(degrees)
|
||||
if axis == "world_x":
|
||||
return (rad, 0.0, 0.0)
|
||||
elif axis == "world_y":
|
||||
return (0.0, rad, 0.0)
|
||||
else:
|
||||
return (0.0, 0.0, rad)
|
||||
|
||||
|
||||
def _set_fcurves_linear(action):
|
||||
try:
|
||||
for layer in action.layers:
|
||||
for strip in layer.strips:
|
||||
for channelbag in strip.channelbags:
|
||||
for fc in channelbag.fcurves:
|
||||
for kp in fc.keyframe_points:
|
||||
kp.interpolation = 'LINEAR'
|
||||
except AttributeError:
|
||||
for fc in action.fcurves:
|
||||
for kp in fc.keyframe_points:
|
||||
kp.interpolation = 'LINEAR'
|
||||
|
||||
|
||||
def _scale_mm_to_m(parts):
    """Scale imported objects from millimetres to metres (factor 0.001).

    STL/STEP geometry is authored in mm while Blender scenes use metres; the
    scale is baked into the mesh data so later bounding-box maths operates
    in metres.
    """
    if not parts:
        return

    bpy.ops.object.select_all(action='DESELECT')
    for obj in parts:
        obj.scale = (0.001, 0.001, 0.001)
        obj.location *= 0.001
        obj.select_set(True)

    # Bake the scale into mesh data for every selected object at once.
    bpy.context.view_layer.objects.active = parts[0]
    bpy.ops.object.transform_apply(scale=True, location=False, rotation=False)
    print(f"[turntable_setup] scaled {len(parts)} parts mm→m (×0.001)")
|
||||
|
||||
|
||||
def _import_stl(stl_file):
    """Import the product geometry and return the list of mesh objects.

    Preferred path: a sibling directory ``<stem>_parts/`` next to *stl_file*
    containing per-part STLs plus a ``manifest.json`` of the form
    ``{"parts": [{"file": ..., "name": ...}, ...]}`` — each part is imported
    as its own object carrying its original STEP name, and the assembly is
    recentred on the world origin afterwards.

    Fallback path: import the combined STL, centre it, and split it into
    loose (non-connected) islands so parts can still be styled individually;
    those parts keep auto-generated names.

    Exits the process (``sys.exit(1)``) if the combined STL yields no objects.
    """
    stl_dir = os.path.dirname(stl_file)
    stl_stem = os.path.splitext(os.path.basename(stl_file))[0]
    # Per-part STLs live next to the combined STL: <stem>_parts/manifest.json
    parts_dir = os.path.join(stl_dir, stl_stem + "_parts")
    manifest_path = os.path.join(parts_dir, "manifest.json")

    parts = []

    if os.path.isfile(manifest_path):
        try:
            with open(manifest_path, "r") as f:
                manifest = json.loads(f.read())
            part_entries = manifest.get("parts", [])
        except Exception as e:
            # Best-effort: a corrupt manifest falls through to the combined-STL path.
            print(f"[turntable_setup] WARNING: failed to read manifest: {e}")
            part_entries = []

        if part_entries:
            for entry in part_entries:
                part_file = os.path.join(parts_dir, entry["file"])
                part_name = entry["name"]
                if not os.path.isfile(part_file):
                    print(f"[turntable_setup] WARNING: part STL missing: {part_file}")
                    continue
                bpy.ops.object.select_all(action='DESELECT')
                bpy.ops.wm.stl_import(filepath=part_file)
                # The importer leaves its new object(s) selected; take the first.
                imported = bpy.context.selected_objects
                if imported:
                    obj = imported[0]
                    # Carry the STEP part name onto both object and mesh datablock.
                    obj.name = part_name
                    if obj.data:
                        obj.data.name = part_name
                    parts.append(obj)

        if parts:
            print(f"[turntable_setup] imported {len(parts)} named parts from per-part STLs")

    if not parts:
        # Fallback: combined STL, centred at the origin, then split into
        # loose islands. Returns early — the recentring below is only needed
        # for per-part imports, which keep their original STEP placement.
        bpy.ops.wm.stl_import(filepath=stl_file)
        obj = bpy.context.selected_objects[0] if bpy.context.selected_objects else None
        if obj is None:
            print(f"ERROR: No objects imported from {stl_file}")
            sys.exit(1)

        bpy.context.view_layer.objects.active = obj
        bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY', center='BOUNDS')
        obj.location = (0.0, 0.0, 0.0)
        bpy.ops.object.mode_set(mode='EDIT')
        bpy.ops.mesh.separate(type='LOOSE')
        bpy.ops.object.mode_set(mode='OBJECT')
        # separate() keeps all resulting pieces selected.
        parts = list(bpy.context.selected_objects)
        print(f"[turntable_setup] fallback: separated into {len(parts)} part(s)")
        return parts

    # Recentre the per-part assembly: shift every part so the combined
    # world-space bounding box is centred on the world origin.
    all_corners = []
    for p in parts:
        all_corners.extend(p.matrix_world @ Vector(c) for c in p.bound_box)

    if all_corners:
        mins = Vector((min(v.x for v in all_corners),
                       min(v.y for v in all_corners),
                       min(v.z for v in all_corners)))
        maxs = Vector((max(v.x for v in all_corners),
                       max(v.y for v in all_corners),
                       max(v.z for v in all_corners)))
        center = (mins + maxs) * 0.5
        for p in parts:
            p.location -= center

    return parts
|
||||
|
||||
|
||||
def _resolve_part_name(index, part_obj, part_names_ordered):
|
||||
base_name = _re.sub(r'\.\d{3}$', '', part_obj.name)
|
||||
if part_names_ordered and index < len(part_names_ordered):
|
||||
return part_names_ordered[index]
|
||||
return base_name
|
||||
|
||||
|
||||
def _apply_material_library(parts, mat_lib_path, mat_map, part_names_ordered=None):
    """Append materials from a .blend library and assign them to matching parts.

    mat_lib_path: path to a .blend file used as a material library.
    mat_map: mapping of lowercased part name → material name (callers pass
        lowercased keys; see the ``mat_map_lower`` construction in main()).
    part_names_ordered: optional list of original STEP names in import order,
        used as a secondary lookup key when the Blender object name does not
        match.

    Best-effort by design: a missing library, a failed append, or an
    unmatched part only logs a warning — nothing raises.
    """
    if not mat_lib_path or not os.path.isfile(mat_lib_path):
        print(f"[turntable_setup] material library not found: {mat_lib_path}")
        return

    # Only append materials that the map actually references.
    needed = set(mat_map.values())
    if not needed:
        return

    appended = {}  # material name -> appended bpy material datablock
    for mat_name in needed:
        # bpy.ops.wm.append addresses datablocks as <blend path>/Material/<name>.
        inner_path = f"{mat_lib_path}/Material/{mat_name}"
        try:
            bpy.ops.wm.append(
                filepath=inner_path,
                directory=f"{mat_lib_path}/Material/",
                filename=mat_name,
                link=False,
            )
            # The operator reports success loosely — verify the datablock landed.
            if mat_name in bpy.data.materials:
                appended[mat_name] = bpy.data.materials[mat_name]
                print(f"[turntable_setup] appended material: {mat_name}")
            else:
                print(f"[turntable_setup] WARNING: material '{mat_name}' not found after append")
        except Exception as exc:
            print(f"[turntable_setup] WARNING: failed to append material '{mat_name}': {exc}")

    if not appended:
        return

    assigned_count = 0
    for i, part in enumerate(parts):
        # Primary key: Blender object name minus any duplicate suffix (".001" etc.).
        base_name = _re.sub(r'\.\d{3}$', '', part.name)
        part_key = base_name.lower().strip()
        mat_name = mat_map.get(part_key)

        # Secondary key: the original STEP part name at the same import index.
        if not mat_name and part_names_ordered and i < len(part_names_ordered):
            step_name = part_names_ordered[i]
            part_key = step_name.lower().strip()
            mat_name = mat_map.get(part_key)

        if mat_name and mat_name in appended:
            # Replace all existing material slots with the library material.
            part.data.materials.clear()
            part.data.materials.append(appended[mat_name])
            assigned_count += 1
            print(f"[turntable_setup] assigned '{mat_name}' to part '{part.name}'")

    print(f"[turntable_setup] material assignment: {assigned_count}/{len(parts)} parts matched")
|
||||
|
||||
|
||||
def main():
    """Build and save the turntable scene from CLI args passed after ``--``.

    Positional args (everything after the ``--`` separator; 0-7 required,
    the rest optional with defaults):
        0 stl_path           1 frames_dir        2 frame_count   3 degrees
        4 width              5 height            6 engine        7 samples
        8 part_colors JSON   9 template .blend   10 collection   11 material lib
        12 material map JSON 13 ordered names    14 lighting_only
        15 cycles device     16 shadow_catcher   17-19 rotation XYZ (deg)
        20 turntable axis    21 bg_color         22 transparent_bg
        23 scene.blend path  24 camera_orbit     25 noise threshold
        26-30 denoiser / denoising settings

    Side effects: creates frames_dir and the scene directory, configures the
    Blender scene (geometry, materials, camera, animation, render settings),
    saves it to scene_path, and exits non-zero on fatal errors.
    """
    # Blender owns argv before "--"; everything after it belongs to this script.
    argv = sys.argv
    args = argv[argv.index("--") + 1:]

    stl_path = args[0]
    frames_dir = args[1]
    frame_count = int(args[2])
    degrees = int(args[3])
    width = int(args[4])
    height = int(args[5])
    engine = args[6]
    samples = int(args[7])
    part_colors_json = args[8] if len(args) > 8 else "{}"
    template_path = args[9] if len(args) > 9 and args[9] else ""
    target_collection = args[10] if len(args) > 10 else "Product"
    material_library_path = args[11] if len(args) > 11 and args[11] else ""
    material_map_raw = args[12] if len(args) > 12 else "{}"
    part_names_ordered_raw = args[13] if len(args) > 13 else "[]"
    lighting_only = args[14] == "1" if len(args) > 14 else False
    cycles_device = args[15].lower() if len(args) > 15 else "auto"
    shadow_catcher = args[16] == "1" if len(args) > 16 else False
    rotation_x = float(args[17]) if len(args) > 17 else 0.0
    rotation_y = float(args[18]) if len(args) > 18 else 0.0
    rotation_z = float(args[19]) if len(args) > 19 else 0.0
    turntable_axis = args[20] if len(args) > 20 else "world_z"
    bg_color = args[21] if len(args) > 21 else ""
    transparent_bg = args[22] == "1" if len(args) > 22 else False
    scene_path = args[23] if len(args) > 23 else os.path.join(os.path.dirname(frames_dir), "scene.blend")
    camera_orbit = args[24] != "0" if len(args) > 24 else True
    noise_threshold_arg = args[25] if len(args) > 25 else ""
    denoiser_arg = args[26] if len(args) > 26 else ""
    denoising_input_passes_arg = args[27] if len(args) > 27 else ""
    denoising_prefilter_arg = args[28] if len(args) > 28 else ""
    denoising_quality_arg = args[29] if len(args) > 29 else ""
    denoising_use_gpu_arg = args[30] if len(args) > 30 else ""

    os.makedirs(frames_dir, exist_ok=True)
    os.makedirs(os.path.dirname(scene_path), exist_ok=True)

    # Malformed JSON payloads degrade to empty defaults rather than aborting.
    try:
        part_colors = json.loads(part_colors_json)
    except json.JSONDecodeError:
        part_colors = {}

    try:
        material_map = json.loads(material_map_raw) if material_map_raw else {}
    except json.JSONDecodeError:
        material_map = {}

    try:
        part_names_ordered = json.loads(part_names_ordered_raw) if part_names_ordered_raw else []
    except json.JSONDecodeError:
        part_names_ordered = []

    if template_path and not os.path.isfile(template_path):
        print(f"[turntable_setup] ERROR: template_path not found: {template_path}")
        sys.exit(1)

    use_template = bool(template_path)

    print(f"[turntable_setup] engine={engine}, samples={samples}, size={width}x{height}, "
          f"frames={frame_count}, degrees={degrees}")
    print(f"[turntable_setup] part_names_ordered: {len(part_names_ordered)} entries")
    if use_template:
        print(f"[turntable_setup] template={template_path}, collection={target_collection}, lighting_only={lighting_only}")
    else:
        print("[turntable_setup] no template — using factory settings (Mode A)")
    if material_library_path:
        print(f"[turntable_setup] material_library={material_library_path}, material_map keys={list(material_map.keys())}")

    # ── SCENE SETUP ──────────────────────────────────────────────────────────

    if use_template:
        # Template mode: open the studio .blend and drop the parts into it.
        print(f"[turntable_setup] Opening template: {template_path}")
        bpy.ops.wm.open_mainfile(filepath=template_path)

        target_col = _ensure_collection(target_collection)
        parts = _import_stl(stl_path)
        _scale_mm_to_m(parts)
        _apply_rotation(parts, rotation_x, rotation_y, rotation_z)

        # Move parts out of whatever collections the importer put them in.
        for part in parts:
            for col in list(part.users_collection):
                col.objects.unlink(part)
            target_col.objects.link(part)

        for part in parts:
            _apply_smooth(part, SMOOTH_ANGLE)

        if material_library_path and material_map:
            mat_map_lower = {k.lower(): v for k, v in material_map.items()}
            _apply_material_library(parts, material_library_path, mat_map_lower, part_names_ordered)
            # Any part the library did not cover gets a palette material.
            for i, part in enumerate(parts):
                if not part.data.materials or len(part.data.materials) == 0:
                    _assign_palette_material(part, i)
        else:
            # NOTE(review): in template mode a part WITH an explicit colour is
            # left untouched here — the hex colour itself is never applied;
            # confirm that is intended (factory mode below does apply it).
            for i, part in enumerate(parts):
                step_name = _resolve_part_name(i, part, part_names_ordered)
                color_hex = part_colors.get(step_name)
                if not color_hex:
                    _assign_palette_material(part, i)

        if shadow_catcher:
            # Re-enable the template's "Shadowcatcher" collection in every view
            # layer and drop its plane to the assembly's lowest world-space Z.
            sc_col_name = "Shadowcatcher"
            sc_obj_name = "Shadowcatcher"
            for vl in bpy.context.scene.view_layers:
                def _enable_col_recursive(layer_col):
                    # Depth-first search for the shadow-catcher collection.
                    if layer_col.collection.name == sc_col_name:
                        layer_col.exclude = False
                        layer_col.collection.hide_render = False
                        layer_col.collection.hide_viewport = False
                        return True
                    for child in layer_col.children:
                        if _enable_col_recursive(child):
                            return True
                    return False
                _enable_col_recursive(vl.layer_collection)

            sc_obj = bpy.data.objects.get(sc_obj_name)
            if sc_obj:
                all_world_z = []
                for part in parts:
                    for corner in part.bound_box:
                        all_world_z.append((part.matrix_world @ Vector(corner)).z)
                if all_world_z:
                    sc_obj.location.z = min(all_world_z)
                print(f"[turntable_setup] shadow catcher enabled, plane Z={sc_obj.location.z:.4f}")
            else:
                print(f"[turntable_setup] WARNING: shadow catcher object '{sc_obj_name}' not found")

        # Build an auto camera when the template has none, or when only its
        # lighting should be reused (unless the shadow catcher needs the
        # template camera's framing).
        needs_auto_camera = (lighting_only and not shadow_catcher) or not bpy.context.scene.camera
        if not needs_auto_camera and bpy.context.scene.camera:
            bpy.context.scene.camera.data.clip_start = 0.001

        print(f"[turntable_setup] template mode: {len(parts)} parts imported into '{target_collection}'")

    else:
        # Factory mode (Mode A): empty scene, everything built from scratch.
        needs_auto_camera = True
        bpy.ops.wm.read_factory_settings(use_empty=True)

        parts = _import_stl(stl_path)
        _scale_mm_to_m(parts)
        _apply_rotation(parts, rotation_x, rotation_y, rotation_z)

        for i, part in enumerate(parts):
            _apply_smooth(part, SMOOTH_ANGLE)

        if material_library_path and material_map:
            mat_map_lower = {k.lower(): v for k, v in material_map.items()}
            _apply_material_library(parts, material_library_path, mat_map_lower, part_names_ordered)
            # Any part the library did not cover gets a palette material.
            for i, part in enumerate(parts):
                if not part.data.materials or len(part.data.materials) == 0:
                    _assign_palette_material(part, i)
        else:
            for i, part in enumerate(parts):
                step_name = _resolve_part_name(i, part, part_names_ordered)
                color_hex = part_colors.get(step_name)
                if color_hex:
                    # Build a simple Principled material from the hex colour.
                    mat = bpy.data.materials.new(name=f"mat_{part.name}")
                    mat.use_nodes = True
                    bsdf = mat.node_tree.nodes.get("Principled BSDF")
                    if bsdf:
                        color = _hex_to_linear(color_hex)
                        bsdf.inputs["Base Color"].default_value = color
                        bsdf.inputs["Metallic"].default_value = 0.35
                        bsdf.inputs["Roughness"].default_value = 0.40
                        # Input renamed across Blender versions; ignore if absent.
                        try:
                            bsdf.inputs["Specular IOR Level"].default_value = 0.5
                        except KeyError:
                            pass
                    part.data.materials.clear()
                    part.data.materials.append(mat)
                else:
                    _assign_palette_material(part, i)

    if needs_auto_camera:
        # Auto camera rig: frame the assembly from its world bounding sphere.
        all_corners = []
        for part in parts:
            all_corners.extend(part.matrix_world @ Vector(c) for c in part.bound_box)

        bbox_min = Vector((min(v.x for v in all_corners), min(v.y for v in all_corners), min(v.z for v in all_corners)))
        bbox_max = Vector((max(v.x for v in all_corners), max(v.y for v in all_corners), max(v.z for v in all_corners)))
        bbox_center = (bbox_min + bbox_max) * 0.5
        bbox_dims = bbox_max - bbox_min
        # Floor at 1mm so degenerate geometry can't zero out distances below.
        bsphere_radius = max(bbox_dims.length * 0.5, 0.001)

        print(f"[turntable_setup] bbox_dims={tuple(round(d, 4) for d in bbox_dims)}, bsphere_radius={bsphere_radius:.4f}")

        if not use_template:
            # Factory mode needs its own lights: one sun key + one area fill,
            # both placed relative to the bounding sphere.
            light_dist = bsphere_radius * 6.0
            bpy.ops.object.light_add(type='SUN', location=(
                bbox_center.x + light_dist * 0.5,
                bbox_center.y - light_dist * 0.35,
                bbox_center.z + light_dist,
            ))
            sun = bpy.context.active_object
            sun.data.energy = 4.0
            sun.rotation_euler = (math.radians(45), 0, math.radians(30))

            bpy.ops.object.light_add(type='AREA', location=(
                bbox_center.x - light_dist * 0.4,
                bbox_center.y + light_dist * 0.4,
                bbox_center.z + light_dist * 0.7,
            ))
            fill = bpy.context.active_object
            fill.data.energy = max(800.0, bsphere_radius ** 2 * 2000.0)
            fill.data.size = max(4.0, bsphere_radius * 4.0)

        cam_dist = bsphere_radius * 2.5
        cam_location = Vector((bbox_center.x + cam_dist, bbox_center.y, bbox_center.z + bsphere_radius * 0.5))
        bpy.ops.object.camera_add(location=cam_location)
        camera = bpy.context.active_object
        bpy.context.scene.camera = camera
        # Scale clip planes with the subject so tiny parts aren't clipped away.
        camera.data.clip_start = max(cam_dist * 0.001, 0.0001)
        camera.data.clip_end = cam_dist * 10.0

        # Aim the camera at the assembly centre via a TRACK_TO constraint.
        empty = bpy.data.objects.new("target", None)
        bpy.context.collection.objects.link(empty)
        empty.location = bbox_center

        track = camera.constraints.new(type='TRACK_TO')
        track.target = empty
        track.track_axis = 'TRACK_NEGATIVE_Z'
        track.up_axis = 'UP_Y'

        if not use_template:
            # Neutral light-grey world background for factory mode.
            world = bpy.data.worlds.new("World")
            bpy.context.scene.world = world
            world.use_nodes = True
            bg = world.node_tree.nodes["Background"]
            bg.inputs["Color"].default_value = (0.96, 0.96, 0.97, 1.0)
            bg.inputs["Strength"].default_value = 0.15

        # Orbit the camera: parent it to an empty at the centre and animate
        # the empty's rotation (camera position is local to the pivot).
        pivot = bpy.data.objects.new("pivot", None)
        bpy.context.collection.objects.link(pivot)
        pivot.location = bbox_center
        camera.parent = pivot
        camera.location = (cam_dist, 0, bsphere_radius * 0.5)

        scene = bpy.context.scene
        scene.frame_start = 1
        scene.frame_end = frame_count

        # Keyframe frame 1 → frame_count+1 so frame_count frames cover exactly
        # `degrees` of rotation without duplicating the first frame.
        pivot.rotation_euler = (0, 0, 0)
        pivot.keyframe_insert(data_path="rotation_euler", frame=1)
        pivot.rotation_euler = _axis_rotation(turntable_axis, degrees)
        pivot.keyframe_insert(data_path="rotation_euler", frame=frame_count + 1)
        _set_fcurves_linear(pivot.animation_data.action)

    else:
        # Template camera kept — animate around the template's framing.
        scene = bpy.context.scene
        scene.frame_start = 1
        scene.frame_end = frame_count

        all_corners = []
        for part in parts:
            all_corners.extend(part.matrix_world @ Vector(c) for c in part.bound_box)

        bbox_center = Vector((
            (min(v.x for v in all_corners) + max(v.x for v in all_corners)) * 0.5,
            (min(v.y for v in all_corners) + max(v.y for v in all_corners)) * 0.5,
            (min(v.z for v in all_corners) + max(v.z for v in all_corners)) * 0.5,
        ))

        if camera_orbit and bpy.context.scene.camera:
            # Camera-orbit mode: rotate camera around static product.
            # Parts stay stationary → Cycles BVH cached across all frames → ~40% speedup.
            camera = bpy.context.scene.camera
            cam_world = camera.matrix_world.copy()

            cam_pivot = bpy.data.objects.new("cam_pivot", None)
            bpy.context.collection.objects.link(cam_pivot)
            cam_pivot.location = bbox_center

            camera.parent = cam_pivot
            # Restore world-space transform after parenting (Blender recomputes local matrix)
            camera.matrix_world = cam_world

            cam_pivot.rotation_euler = (0, 0, 0)
            cam_pivot.keyframe_insert(data_path="rotation_euler", frame=1)
            cam_pivot.rotation_euler = _axis_rotation(turntable_axis, degrees)
            cam_pivot.keyframe_insert(data_path="rotation_euler", frame=frame_count + 1)
            _set_fcurves_linear(cam_pivot.animation_data.action)
            print(f"[turntable_setup] camera-orbit mode: cam_pivot at {tuple(round(c, 4) for c in bbox_center)}")
        else:
            # Product-rotation mode: parts parent to pivot (default fallback when no camera)
            pivot = bpy.data.objects.new("turntable_pivot", None)
            bpy.context.collection.objects.link(pivot)
            pivot.location = bbox_center

            for part in parts:
                part.parent = pivot

            pivot.rotation_euler = (0, 0, 0)
            pivot.keyframe_insert(data_path="rotation_euler", frame=1)
            pivot.rotation_euler = _axis_rotation(turntable_axis, degrees)
            pivot.keyframe_insert(data_path="rotation_euler", frame=frame_count + 1)
            _set_fcurves_linear(pivot.animation_data.action)
            print(f"[turntable_setup] product-rotation mode: {len(parts)} parts parented to turntable_pivot")

    # ── Colour management ────────────────────────────────────────────────────
    scene = bpy.context.scene
    if not use_template:
        # Neutral colour pipeline so part colours come out as specified.
        scene.view_settings.view_transform = 'Standard'
        scene.view_settings.exposure = 0.0
        scene.view_settings.gamma = 1.0
        try:
            scene.view_settings.look = 'None'
        except Exception:
            pass

    # ── Render engine ────────────────────────────────────────────────────────
    if engine == "eevee":
        # The EEVEE engine identifier differs across Blender versions; probe both.
        eevee_ok = False
        for eevee_id in ('BLENDER_EEVEE', 'BLENDER_EEVEE_NEXT'):
            try:
                scene.render.engine = eevee_id
                eevee_ok = True
                print(f"[turntable_setup] EEVEE engine id: {eevee_id}")
                break
            except TypeError:
                continue
        if eevee_ok:
            # Samples attribute name also differs across versions.
            for attr in ('taa_render_samples', 'samples'):
                try:
                    setattr(scene.eevee, attr, samples)
                    break
                except AttributeError:
                    continue
        else:
            print("[turntable_setup] WARNING: EEVEE not available, falling back to Cycles")
            engine = "cycles"

    if engine != "eevee":
        scene.render.engine = 'CYCLES'
        scene.cycles.samples = samples
        scene.cycles.use_denoising = True
        scene.cycles.denoiser = denoiser_arg if denoiser_arg else 'OPENIMAGEDENOISE'
        # Optional denoising knobs — each guarded because availability varies
        # by Blender version.
        if denoising_input_passes_arg:
            try: scene.cycles.denoising_input_passes = denoising_input_passes_arg
            except Exception: pass
        if denoising_prefilter_arg:
            try: scene.cycles.denoising_prefilter = denoising_prefilter_arg
            except Exception: pass
        if denoising_quality_arg:
            try: scene.cycles.denoising_quality = denoising_quality_arg
            except Exception: pass
        if denoising_use_gpu_arg:
            try: scene.cycles.denoising_use_gpu = (denoising_use_gpu_arg == "1")
            except AttributeError: pass
        if noise_threshold_arg:
            scene.cycles.use_adaptive_sampling = True
            scene.cycles.adaptive_threshold = float(noise_threshold_arg)
        if denoiser_arg:
            scene["_denoiser_override"] = denoiser_arg
        # scene.cycles.device is set by turntable_gpu_setup.py at render time
        # (GPU preferences are user-level and not stored in .blend)
        # We set the intended device here so gpu_setup can read it.
        scene["_cycles_device"] = cycles_device
        # Keep BVH, textures, and scene data resident on GPU between frames.
        # Critical for -a mode: prevents Cycles from re-uploading data each frame.
        scene.render.use_persistent_data = True
        # No motion blur needed for static mechanical parts — eliminates per-frame
        # CPU deformation calculations.
        scene.render.use_motion_blur = False
        print(f"[turntable_setup] cycles_device preference saved: {cycles_device}")
        print("[turntable_setup] use_persistent_data=True, use_motion_blur=False")

    # ── Render output settings ───────────────────────────────────────────────
    scene.render.resolution_x = width
    scene.render.resolution_y = height
    scene.render.resolution_percentage = 100
    scene.render.image_settings.file_format = 'PNG'
    # Blender -a appends 4-digit frame number: "frame_" → "frame_0001.png"
    scene.render.filepath = os.path.join(frames_dir, "frame_")

    # ── Transparent background ────────────────────────────────────────────────
    # bg_color compositing is done by FFmpeg in the compose-video task.
    # Blender renders transparent PNG frames (film_transparent=True) when
    # bg_color is set; FFmpeg then overlays them over a solid colour background.
    if bg_color or transparent_bg:
        scene.render.film_transparent = True
        if bg_color:
            print(f"[turntable_setup] film_transparent=True for FFmpeg bg_color compositing ({bg_color})")
        else:
            print("[turntable_setup] transparent_bg enabled (alpha PNG frames)")

    # ── Save scene ───────────────────────────────────────────────────────────
    # save_as_mainfile saves to an explicit new path (like File > Save As).
    # save_mainfile would save back to the originally-opened template path.
    print(f"[turntable_setup] Saving scene to {scene_path} …")
    result = bpy.ops.wm.save_as_mainfile(filepath=scene_path)
    if 'FINISHED' not in result:
        print(f"[turntable_setup] ERROR: save_as_mainfile returned {result} — aborting")
        sys.exit(1)
    if not os.path.isfile(scene_path):
        print(f"[turntable_setup] ERROR: scene file not found after save: {scene_path}")
        sys.exit(1)
    size_mb = os.path.getsize(scene_path) / 1024 / 1024
    print(f"[turntable_setup] Scene saved → {scene_path} ({size_mb:.1f} MB)")
    print(f"[turntable_setup] Ready for: blender --background {scene_path} --python turntable_gpu_setup.py -a")
|
||||
|
||||
|
||||
# Entry point — intended to run inside Blender:
#   blender --background --python <this script> -- <args>
if __name__ == "__main__":
    try:
        main()
    except SystemExit:
        # Preserve deliberate exit codes (sys.exit calls inside main()).
        raise
    except Exception as _exc:
        # Surface the full traceback in the task log, then fail the task.
        import traceback
        traceback.print_exc()
        print(f"[turntable_setup] FATAL: unhandled exception — {_exc}")
        sys.exit(1)
|
||||
Reference in New Issue
Block a user