refactor(A2): replace blender-renderer HTTP service with render-worker Celery container

- Create render-worker/ with Dockerfile (Ubuntu + cadquery + Blender via host mount)
- Add render-worker/check_version.py: verifies Blender >= 5.0.1 at startup, exits 1 on failure
- Add render-worker/scripts/: blender_render.py, still_render.py, turntable_render.py
- Create backend/app/services/render_blender.py: direct subprocess rendering
  - convert_step_to_stl() and export_per_part_stls() using cadquery
  - render_still(): STEP → STL → PNG via Blender subprocess
  - is_blender_available(): detects BLENDER_BIN env for render-worker context
- Create backend/app/domains/rendering/tasks.py: render_still_task + render_turntable_task
- Update step_processor.py: use subprocess path when BLENDER_BIN env is set (render-worker)
- Update step_tasks.py: generate_stl_cache uses direct cadquery instead of HTTP
- Remove blender-renderer and threejs-renderer from docker-compose.yml
- Replace worker-thumbnail with render-worker (Ubuntu + cadquery + Blender mount)
- Remove Docker SDK from backend Dockerfile (was only for flamenco scaling)
- Update .env.example: BLENDER_VERSION=5.0.1 documented
- Update celery_app.py: include domains.rendering.tasks in autodiscover

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-06 15:48:46 +01:00
parent 1d6864fb64
commit 9d1a820295
16 changed files with 3118 additions and 108 deletions
+7
View File
@@ -26,3 +26,10 @@ MAX_UPLOAD_SIZE_MB=500
# Celery worker concurrency (default: 8 parallel CAD jobs per worker container) # Celery worker concurrency (default: 8 parallel CAD jobs per worker container)
# Scale horizontally with: docker compose up --scale worker=N # Scale horizontally with: docker compose up --scale worker=N
CELERY_WORKER_CONCURRENCY=8 CELERY_WORKER_CONCURRENCY=8
# Blender (render-worker)
# Blender >= 5.0.1 must be installed on the host at /opt/blender
# The render-worker container mounts it read-only via volumes: - /opt/blender:/opt/blender:ro
BLENDER_VERSION=5.0.1
# Set to host path if Blender is not at /opt/blender:
# BLENDER_BIN=/usr/local/blender/blender
-3
View File
@@ -8,9 +8,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
gcc \ gcc \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
# Docker SDK (for dynamic flamenco-worker scaling via /var/run/docker.sock)
RUN pip install --no-cache-dir "docker>=6.1.0"
# Install Python dependencies # Install Python dependencies
COPY pyproject.toml . COPY pyproject.toml .
RUN pip install --no-cache-dir -e . RUN pip install --no-cache-dir -e .
+14 -18
View File
@@ -359,24 +359,20 @@ async def generate_missing_stls(
async def renderer_status( async def renderer_status(
admin: User = Depends(require_admin), admin: User = Depends(require_admin),
): ):
"""Check health of external renderer services.""" """Check health of renderer services."""
import httpx from app.services.render_blender import find_blender, is_blender_available
services = { blender_available = is_blender_available()
"pillow": {"url": None, "available": True, "note": "Built-in (always available)"}, blender_bin = find_blender()
"blender": {"url": "http://blender-renderer:8100/health", "available": False, "note": ""}, return {
"pillow": {"available": True, "note": "Built-in (always available)"},
"blender": {
"available": blender_available,
"note": (
f"render-worker subprocess ({blender_bin})"
if blender_available
else "Blender not found — check render-worker container and BLENDER_BIN"
),
},
} }
async with httpx.AsyncClient(timeout=3.0) as client:
for name, info in services.items():
if info["url"] is None:
continue
try:
resp = await client.get(info["url"])
if resp.status_code == 200:
data = resp.json()
services[name]["available"] = True
services[name]["note"] = data.get("renderer", name)
except Exception as e:
services[name]["note"] = str(e)[:100]
return services
View File
+251
View File
@@ -0,0 +1,251 @@
"""Rendering domain tasks — Celery tasks for Blender-based rendering.
These tasks run on the `thumbnail_rendering` queue in the render-worker
container, which has Blender and cadquery available.
Phase A2: Initial implementation replacing the blender-renderer HTTP service.
Phase B: This module will be expanded as part of the Domain-Driven restructure.
"""
import logging
from pathlib import Path
from app.tasks.celery_app import celery_app
logger = logging.getLogger(__name__)
@celery_app.task(
    bind=True,
    name="app.domains.rendering.tasks.render_still_task",
    queue="thumbnail_rendering",
    max_retries=2,
)
def render_still_task(
    self,
    step_path: str,
    output_path: str,
    engine: str = "cycles",
    samples: int = 256,
    stl_quality: str = "low",
    smooth_angle: int = 30,
    cycles_device: str = "auto",
    width: int = 512,
    height: int = 512,
    transparent_bg: bool = False,
    template_path: str | None = None,
    target_collection: str = "Product",
    material_library_path: str | None = None,
    material_map: dict | None = None,
    part_names_ordered: list | None = None,
    lighting_only: bool = False,
    shadow_catcher: bool = False,
    rotation_x: float = 0.0,
    rotation_y: float = 0.0,
    rotation_z: float = 0.0,
    noise_threshold: str = "",
    denoiser: str = "",
    denoising_input_passes: str = "",
    denoising_prefilter: str = "",
    denoising_quality: str = "",
    denoising_use_gpu: str = "",
) -> dict:
    """Render a STEP file to a still PNG via Blender subprocess.

    Thin Celery wrapper around ``app.services.render_blender.render_still``;
    every keyword argument is forwarded unchanged. Paths are received as
    strings (Celery arguments must be JSON-serializable) and converted to
    ``Path`` here.

    Returns:
        Render metadata dict from ``render_still`` (timings, sizes,
        engine_used, log_lines).

    Raises:
        celery.exceptions.Retry: on any failure; retried up to 2 times with
        a 30s countdown before the final exception propagates.
    """
    try:
        # Imported lazily so containers without Blender/cadquery can still
        # import this module for task registration.
        from app.services.render_blender import render_still

        result = render_still(
            step_path=Path(step_path),
            output_path=Path(output_path),
            engine=engine,
            samples=samples,
            stl_quality=stl_quality,
            smooth_angle=smooth_angle,
            cycles_device=cycles_device,
            width=width,
            height=height,
            transparent_bg=transparent_bg,
            template_path=template_path,
            target_collection=target_collection,
            material_library_path=material_library_path,
            material_map=material_map,
            part_names_ordered=part_names_ordered,
            lighting_only=lighting_only,
            shadow_catcher=shadow_catcher,
            rotation_x=rotation_x,
            rotation_y=rotation_y,
            rotation_z=rotation_z,
            noise_threshold=noise_threshold,
            denoiser=denoiser,
            denoising_input_passes=denoising_input_passes,
            denoising_prefilter=denoising_prefilter,
            denoising_quality=denoising_quality,
            denoising_use_gpu=denoising_use_gpu,
        )
        # Fix: the original format string was "%s%s", which fused the input
        # and output file names together in the log line.
        logger.info(
            "render_still_task completed: %s -> %s in %.1fs",
            Path(step_path).name,
            Path(output_path).name,
            result.get("total_duration_s", 0),
        )
        return result
    except Exception as exc:
        logger.error("render_still_task failed for %s: %s", step_path, exc)
        raise self.retry(exc=exc, countdown=30)
@celery_app.task(
    bind=True,
    name="app.domains.rendering.tasks.render_turntable_task",
    queue="thumbnail_rendering",
    max_retries=2,
)
def render_turntable_task(
    self,
    step_path: str,
    output_dir: str,
    output_name: str = "turntable",
    engine: str = "cycles",
    samples: int = 64,
    stl_quality: str = "low",
    smooth_angle: int = 30,
    cycles_device: str = "auto",
    width: int = 1920,
    height: int = 1080,
    frame_count: int = 120,
    fps: int = 30,
    turntable_degrees: float = 360.0,
    turntable_axis: str = "world_z",
    bg_color: str = "",
    template_path: str | None = None,
    target_collection: str = "Product",
    material_library_path: str | None = None,
    material_map: dict | None = None,
    part_names_ordered: list | None = None,
    lighting_only: bool = False,
    shadow_catcher: bool = False,
    camera_orbit: bool = True,
    rotation_x: float = 0.0,
    rotation_y: float = 0.0,
    rotation_z: float = 0.0,
) -> dict:
    """Render a STEP file as a turntable animation (frames + FFmpeg composite).

    Pipeline: STEP -> cached STL (cadquery) -> per-frame PNGs (Blender
    subprocess running turntable_render.py) -> MP4 (FFmpeg).

    Returns:
        dict with output_mp4 path, frame_count, and fps.

    Raises:
        RuntimeError: when Blender is missing or FFmpeg compositing fails.
        celery.exceptions.Retry: Blender failures are retried up to 2 times
        with a 60s countdown.
    """
    import json
    import os
    import subprocess

    # Note: the original also imported shutil here; it was never used.
    from app.services.render_blender import (
        find_blender, convert_step_to_stl, export_per_part_stls
    )

    blender_bin = find_blender()
    if not blender_bin:
        raise RuntimeError("Blender binary not found in render-worker container")

    step = Path(step_path)
    out_dir = Path(output_dir)
    out_dir.mkdir(parents=True, exist_ok=True)
    scripts_dir = Path(os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts"))
    turntable_script = scripts_dir / "turntable_render.py"

    # STL conversion — reuse the cached STL when present and non-empty.
    stl_path = step.parent / f"{step.stem}_{stl_quality}.stl"
    if not stl_path.exists() or stl_path.stat().st_size == 0:
        convert_step_to_stl(step, stl_path, stl_quality)

    # Per-part STLs enable per-part material assignment; failure is non-fatal.
    parts_dir = step.parent / f"{step.stem}_{stl_quality}_parts"
    if not (parts_dir / "manifest.json").exists():
        try:
            export_per_part_stls(step, parts_dir, stl_quality)
        except Exception as exc:
            logger.warning("per-part export non-fatal: %s", exc)

    # Build turntable render arguments — positional protocol consumed by
    # turntable_render.py after the "--" separator.
    frames_dir = out_dir / "frames"
    frames_dir.mkdir(exist_ok=True)
    cmd = [
        blender_bin, "--background",
        "--python", str(turntable_script),
        "--",
        str(stl_path),
        str(frames_dir),
        output_name,
        str(width), str(height),
        engine, str(samples), str(smooth_angle), cycles_device,
        str(frame_count), str(fps), str(turntable_degrees), turntable_axis,
        template_path or "",
        target_collection,
        material_library_path or "",
        json.dumps(material_map) if material_map else "{}",
        json.dumps(part_names_ordered) if part_names_ordered else "[]",
        "1" if lighting_only else "0",
        "1" if shadow_catcher else "0",
        "1" if camera_orbit else "0",
        str(rotation_x), str(rotation_y), str(rotation_z),
    ]
    try:
        result = subprocess.run(
            cmd, capture_output=True, text=True, timeout=3600
        )
        if result.returncode != 0:
            # Include stderr as well — Blender frequently reports the actual
            # failure there rather than on stdout.
            raise RuntimeError(
                f"Blender turntable exited {result.returncode}:\n"
                f"{result.stdout[-2000:]}\n{result.stderr[-500:]}"
            )
    except Exception as exc:
        logger.error("render_turntable_task failed: %s", exc)
        raise self.retry(exc=exc, countdown=60)

    # FFmpeg composite: frames -> MP4 with optional solid background.
    output_mp4 = out_dir / f"{output_name}.mp4"
    ffmpeg_cmd = _build_ffmpeg_cmd(
        frames_dir, output_mp4, fps=fps, bg_color=bg_color
    )
    try:
        subprocess.run(ffmpeg_cmd, check=True, capture_output=True, text=True, timeout=300)
    except subprocess.CalledProcessError as exc:
        # Chain the CalledProcessError so the full context survives.
        raise RuntimeError(f"FFmpeg composite failed: {exc.stderr[-500:]}") from exc
    return {
        "output_mp4": str(output_mp4),
        "frame_count": frame_count,
        "fps": fps,
    }
def _build_ffmpeg_cmd(
frames_dir: Path, output_mp4: Path, fps: int = 30, bg_color: str = ""
) -> list:
"""Build FFmpeg command for compositing turntable frames to MP4."""
import shutil as _shutil
ffmpeg = _shutil.which("ffmpeg") or "ffmpeg"
frame_pattern = str(frames_dir / "%04d.png")
if bg_color:
# Overlay transparent frames onto solid color background
r = int(bg_color[1:3], 16) if bg_color.startswith("#") else 255
g = int(bg_color[3:5], 16) if bg_color.startswith("#") else 255
b = int(bg_color[5:7], 16) if bg_color.startswith("#") else 255
color_str = f"color=c=0x{r:02x}{g:02x}{b:02x}:s=1920x1080:r={fps}"
return [
ffmpeg, "-y",
"-f", "lavfi", "-i", color_str,
"-framerate", str(fps), "-i", frame_pattern,
"-filter_complex", "[0:v][1:v]overlay=0:0",
"-c:v", "libx264", "-pix_fmt", "yuv420p",
"-movflags", "+faststart",
str(output_mp4),
]
else:
return [
ffmpeg, "-y",
"-framerate", str(fps), "-i", frame_pattern,
"-c:v", "libx264", "-pix_fmt", "yuv420p",
"-movflags", "+faststart",
str(output_mp4),
]
+330
View File
@@ -0,0 +1,330 @@
"""Direct Blender rendering service — runs Blender as a subprocess.
Used by the render-worker Celery container (which has BLENDER_BIN set and
cadquery installed). The backend and standard workers fall back to the Pillow
placeholder when this service is unavailable.
"""
import json
import logging
import os
import shutil
import signal
import subprocess
from pathlib import Path
logger = logging.getLogger(__name__)
MIN_BLENDER_VERSION = (5, 0, 1)
def find_blender() -> str:
    """Locate the Blender binary.

    Checks the ``BLENDER_BIN`` environment variable first (must point at an
    existing path), then falls back to searching ``PATH``. Returns "" when
    no binary can be found.
    """
    candidate = os.environ.get("BLENDER_BIN", "")
    if candidate and Path(candidate).exists():
        return candidate
    return shutil.which("blender") or ""
def is_blender_available() -> bool:
    """Report whether a Blender binary is reachable from this process."""
    return find_blender() != ""
def convert_step_to_stl(step_path: Path, stl_path: Path, quality: str = "low") -> None:
    """Convert a STEP file to STL using cadquery.

    Quality presets: "high" uses tight tessellation tolerances (0.01 linear,
    0.02 angular); anything else uses coarse ones (0.3 / 0.3).

    Raises ImportError if cadquery is not installed (not available in the
    backend container — only in the render-worker container), and
    RuntimeError if the exported STL is missing or empty.
    """
    import cadquery as cq  # only available in render-worker

    linear_tol, angular_tol = (0.01, 0.02) if quality == "high" else (0.3, 0.3)
    solid = cq.importers.importStep(str(step_path))
    cq.exporters.export(solid, str(stl_path), tolerance=linear_tol, angularTolerance=angular_tol)
    if not stl_path.exists() or stl_path.stat().st_size == 0:
        raise RuntimeError("cadquery produced empty STL")
def export_per_part_stls(step_path: Path, parts_dir: Path, quality: str = "low") -> list:
    """Export one STL per named STEP leaf shape using OCP XCAF.

    Walks the STEP assembly tree via the OCAF shape tool, collecting every
    simple (leaf) shape together with its label name, then exports each as
    an individual STL and writes a manifest.json listing them in traversal
    order.

    Args:
        step_path: source STEP file.
        parts_dir: output directory for per-part STLs (created if needed).
        quality: "high" for tight tessellation tolerances; anything else
            uses coarse tolerances.

    Returns the manifest list (may be empty on failure — non-fatal).
    """
    # Tessellation tolerances chosen by quality preset.
    tol = 0.01 if quality == "high" else 0.3
    angular_tol = 0.05 if quality == "high" else 0.3
    try:
        from OCP.STEPCAFControl import STEPCAFControl_Reader
        from OCP.XCAFDoc import XCAFDoc_DocumentTool, XCAFDoc_ShapeTool
        from OCP.TDataStd import TDataStd_Name
        from OCP.TDF import TDF_Label as TDF_Label_cls, TDF_LabelSequence
        from OCP.XCAFApp import XCAFApp_Application
        from OCP.TDocStd import TDocStd_Document
        from OCP.TCollection import TCollection_ExtendedString
        from OCP.IFSelect import IFSelect_RetDone
        import cadquery as cq
    except ImportError as e:
        # OCP/cadquery only exist in the render-worker image; elsewhere this
        # degrades to a logged no-op by design.
        logger.warning("per-part export skipped (import error): %s", e)
        return []
    # Read the STEP file into an XCAF document so label names survive import.
    app = XCAFApp_Application.GetApplication_s()
    doc = TDocStd_Document(TCollection_ExtendedString("XmlOcaf"))
    app.InitDocument(doc)
    reader = STEPCAFControl_Reader()
    reader.SetNameMode(True)  # preserve part names from the STEP file
    status = reader.ReadFile(str(step_path))
    if status != IFSelect_RetDone:
        logger.warning("XCAF reader failed with status %s", status)
        return []
    if not reader.Transfer(doc):
        logger.warning("XCAF transfer failed")
        return []
    shape_tool = XCAFDoc_DocumentTool.ShapeTool_s(doc.Main())
    name_id = TDataStd_Name.GetID_s()
    leaves = []  # (name, shape) pairs accumulated in traversal order
    def _get_label_name(label):
        # Read the TDataStd_Name attribute from a label, if present.
        name_attr = TDataStd_Name()
        if label.FindAttribute(name_id, name_attr):
            return name_attr.Get().ToExtString()
        return ""
    def _collect_leaves(label):
        # Depth-first walk: assemblies recurse, simple shapes become leaves.
        if XCAFDoc_ShapeTool.IsAssembly_s(label):
            components = TDF_LabelSequence()
            XCAFDoc_ShapeTool.GetComponents_s(label, components)
            # OCP sequences are 1-based.
            for i in range(1, components.Length() + 1):
                comp_label = components.Value(i)
                if XCAFDoc_ShapeTool.IsReference_s(comp_label):
                    # Instance label refers to a prototype; prefer the
                    # prototype's name, fall back to the instance's.
                    ref_label = TDF_Label_cls()
                    XCAFDoc_ShapeTool.GetReferredShape_s(comp_label, ref_label)
                    comp_name = _get_label_name(comp_label)
                    ref_name = _get_label_name(ref_label)
                    name = ref_name or comp_name
                    if XCAFDoc_ShapeTool.IsAssembly_s(ref_label):
                        _collect_leaves(ref_label)
                    elif XCAFDoc_ShapeTool.IsSimpleShape_s(ref_label):
                        # Take the shape from the instance label so placement
                        # transforms are applied.
                        shape = XCAFDoc_ShapeTool.GetShape_s(comp_label)
                        leaves.append((name or f"unnamed_{len(leaves)}", shape))
                else:
                    _collect_leaves(comp_label)
        elif XCAFDoc_ShapeTool.IsSimpleShape_s(label):
            name = _get_label_name(label)
            shape = XCAFDoc_ShapeTool.GetShape_s(label)
            leaves.append((name or f"unnamed_{len(leaves)}", shape))
    top_labels = TDF_LabelSequence()
    shape_tool.GetFreeShapes(top_labels)
    for i in range(1, top_labels.Length() + 1):
        _collect_leaves(top_labels.Value(i))
    if not leaves:
        logger.warning("no leaf shapes found via XCAF")
        return []
    parts_dir.mkdir(parents=True, exist_ok=True)
    manifest = []
    for idx, (name, shape) in enumerate(leaves):
        # Sanitize names for filesystem use; index prefix preserves order.
        safe_name = name.replace("/", "_").replace("\\", "_").replace(" ", "_")
        filename = f"{idx:02d}_{safe_name}.stl"
        filepath = str(parts_dir / filename)
        try:
            cq_shape = cq.Shape(shape)
            cq_shape.exportStl(filepath, tolerance=tol, angularTolerance=angular_tol)
            manifest.append({"index": idx, "name": name, "file": filename})
        except Exception as e:
            # A single failed part must not abort the whole export.
            logger.warning("failed to export part '%s': %s", name, e)
    manifest_path = parts_dir / "manifest.json"
    with open(manifest_path, "w") as f:
        json.dump({"parts": manifest}, f, indent=2)
    return manifest
def render_still(
    step_path: Path,
    output_path: Path,
    width: int = 512,
    height: int = 512,
    engine: str = "cycles",
    samples: int = 256,
    stl_quality: str = "low",
    smooth_angle: int = 30,
    cycles_device: str = "auto",
    transparent_bg: bool = False,
    part_colors: dict | None = None,
    template_path: str | None = None,
    target_collection: str = "Product",
    material_library_path: str | None = None,
    material_map: dict | None = None,
    part_names_ordered: list | None = None,
    lighting_only: bool = False,
    shadow_catcher: bool = False,
    rotation_x: float = 0.0,
    rotation_y: float = 0.0,
    rotation_z: float = 0.0,
    noise_threshold: str = "",
    denoiser: str = "",
    denoising_input_passes: str = "",
    denoising_prefilter: str = "",
    denoising_quality: str = "",
    denoising_use_gpu: str = "",
) -> dict:
    """Convert STEP → STL (cadquery) → PNG (Blender subprocess).

    Pipeline: (1) reuse or create the cached STL next to the STEP file,
    (2) best-effort per-part STL export, (3) invoke blender_render.py in a
    background Blender process with a positional argument protocol.
    EEVEE renders that fail with a positive exit code are retried once
    with Cycles.

    NOTE(review): ``part_colors`` is accepted but never forwarded to the
    Blender command below — confirm whether this is intentional or a gap
    in the argument protocol.

    Returns a dict with timing, sizes, engine_used, and log_lines.
    Raises RuntimeError on failure.
    """
    import time
    blender_bin = find_blender()
    if not blender_bin:
        raise RuntimeError("Blender binary not found — check BLENDER_BIN env or PATH")
    # Locate the render script: container path first, then the in-repo copy
    # for development outside Docker.
    script_path = Path(os.environ.get("RENDER_SCRIPTS_DIR", "/render-scripts")) / "blender_render.py"
    if not script_path.exists():
        # Fallback: look next to this file (development mode)
        alt = Path(__file__).parent.parent.parent.parent / "render-worker" / "scripts" / "blender_render.py"
        if alt.exists():
            script_path = alt
        else:
            raise RuntimeError(f"blender_render.py not found at {script_path}")
    t0 = time.monotonic()
    # 1. STL conversion (cadquery) — cached beside the STEP file, keyed by quality.
    stl_path = step_path.parent / f"{step_path.stem}_{stl_quality}.stl"
    parts_dir = step_path.parent / f"{step_path.stem}_{stl_quality}_parts"
    t_stl = time.monotonic()
    if not stl_path.exists() or stl_path.stat().st_size == 0:
        logger.info("STL cache miss — converting: %s", step_path.name)
        convert_step_to_stl(step_path, stl_path, stl_quality)
    else:
        logger.info("STL cache hit: %s (%d KB)", stl_path.name, stl_path.stat().st_size // 1024)
    stl_size_bytes = stl_path.stat().st_size if stl_path.exists() else 0
    # Per-part STLs are best-effort; rendering works without them.
    if not (parts_dir / "manifest.json").exists():
        try:
            export_per_part_stls(step_path, parts_dir, stl_quality)
        except Exception as exc:
            logger.warning("per-part STL export failed (non-fatal): %s", exc)
    stl_duration_s = round(time.monotonic() - t_stl, 2)
    # 2. Blender render
    output_path.parent.mkdir(parents=True, exist_ok=True)
    env = dict(os.environ)
    if engine == "eevee":
        # EEVEE needs a GPU-ish context even headless: force Mesa software
        # Vulkan/GL so it can run without a display or physical GPU.
        env.update({
            "VK_ICD_FILENAMES": "/usr/share/vulkan/icd.d/lvp_icd.x86_64.json",
            "LIBGL_ALWAYS_SOFTWARE": "1",
            "MESA_GL_VERSION_OVERRIDE": "4.5",
            "EGL_PLATFORM": "surfaceless",
        })
    else:
        env["EGL_PLATFORM"] = "surfaceless"
    def _build_cmd(eng: str) -> list:
        # Positional argument protocol consumed by blender_render.py after "--".
        return [
            blender_bin,
            "--background",
            "--python", str(script_path),
            "--",
            str(stl_path),
            str(output_path),
            str(width), str(height),
            eng, str(samples), str(smooth_angle),
            cycles_device,
            "1" if transparent_bg else "0",
            template_path or "",
            target_collection,
            material_library_path or "",
            json.dumps(material_map) if material_map else "{}",
            json.dumps(part_names_ordered) if part_names_ordered else "[]",
            "1" if lighting_only else "0",
            "1" if shadow_catcher else "0",
            str(rotation_x), str(rotation_y), str(rotation_z),
            noise_threshold or "", denoiser or "",
            denoising_input_passes or "", denoising_prefilter or "",
            denoising_quality or "", denoising_use_gpu or "",
        ]
    def _run(eng: str) -> subprocess.CompletedProcess:
        # start_new_session puts Blender in its own process group so the
        # timeout path can SIGTERM the whole group (Blender spawns children).
        proc = subprocess.Popen(
            _build_cmd(eng),
            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
            text=True, env=env, start_new_session=True,
        )
        try:
            stdout, stderr = proc.communicate(timeout=600)
        except subprocess.TimeoutExpired:
            try:
                os.killpg(os.getpgid(proc.pid), signal.SIGTERM)
            except (ProcessLookupError, OSError):
                pass
            # Second communicate() drains pipes after the group was signalled.
            stdout, stderr = proc.communicate()
        return subprocess.CompletedProcess(_build_cmd(eng), proc.returncode, stdout, stderr)
    t_render = time.monotonic()
    result = _run(engine)
    engine_used = engine
    log_lines = []
    # Mirror Blender output into our logs; keep only tagged lines for the caller.
    for line in (result.stdout or "").splitlines():
        logger.info("[blender] %s", line)
        if "[blender_render]" in line:
            log_lines.append(line)
    for line in (result.stderr or "").splitlines():
        logger.warning("[blender stderr] %s", line)
    # EEVEE fallback to Cycles on non-signal error (returncode > 0 excludes
    # negative codes, which indicate death by signal, e.g. the timeout kill).
    if result.returncode > 0 and engine == "eevee":
        logger.warning("EEVEE failed (exit %d) — retrying with Cycles", result.returncode)
        result = _run("cycles")
        engine_used = "cycles (eevee fallback)"
        for line in (result.stdout or "").splitlines():
            logger.info("[blender-fallback] %s", line)
            if "[blender_render]" in line:
                log_lines.append(line)
    if result.returncode != 0:
        raise RuntimeError(
            f"Blender exited with code {result.returncode}.\n"
            f"stdout: {(result.stdout or '')[-2000:]}\n"
            f"stderr: {(result.stderr or '')[-500:]}"
        )
    render_duration_s = round(time.monotonic() - t_render, 2)
    # Count exported parts for the metadata payload (best-effort).
    parts_count = 0
    manifest_file = parts_dir / "manifest.json"
    if manifest_file.exists():
        try:
            data = json.loads(manifest_file.read_text())
            parts_count = len(data.get("parts", []))
        except Exception:
            pass
    return {
        "total_duration_s": round(time.monotonic() - t0, 2),
        "stl_duration_s": stl_duration_s,
        "render_duration_s": render_duration_s,
        "stl_size_bytes": stl_size_bytes,
        "output_size_bytes": output_path.stat().st_size if output_path.exists() else 0,
        "parts_count": parts_count,
        "engine_used": engine_used,
        "log_lines": log_lines,
    }
+57 -37
View File
@@ -329,8 +329,9 @@ def _generate_thumbnail(
"height": 512, "height": 512,
}) })
elif renderer == "threejs": elif renderer == "threejs":
size = int(settings["threejs_render_size"]) # Three.js renderer removed in v2; treat as pillow fallback
render_log.update({"width": size, "height": size}) renderer = "pillow"
render_log.update({"renderer": "pillow", "threejs_removed": True})
logger.info(f"Thumbnail renderer={renderer}, format={fmt}") logger.info(f"Thumbnail renderer={renderer}, format={fmt}")
@@ -340,29 +341,25 @@ def _generate_thumbnail(
if renderer == "blender": if renderer == "blender":
engine = settings["blender_engine"] engine = settings["blender_engine"]
samples = int(settings[f"blender_{engine}_samples"]) samples = int(settings[f"blender_{engine}_samples"])
extra = {
"engine": engine,
"samples": samples,
"stl_quality": settings["stl_quality"],
"smooth_angle": int(settings["blender_smooth_angle"]),
"cycles_device": settings["cycles_device"],
}
rendered_png, service_data = _render_via_service(
"http://blender-renderer:8100/render", step_path, tmp_png, extra
)
if not rendered_png:
logger.warning("Blender renderer failed; falling back to Pillow placeholder")
elif renderer == "threejs": from app.services.render_blender import is_blender_available, render_still
size = int(settings["threejs_render_size"]) if is_blender_available():
extra2: dict = {"width": size, "height": size} try:
if part_colors is not None: service_data = render_still(
extra2["part_colors"] = part_colors step_path=step_path,
rendered_png, service_data = _render_via_service( output_path=tmp_png,
"http://threejs-renderer:8101/render", step_path, tmp_png, extra2 engine=engine,
) samples=samples,
if not rendered_png: stl_quality=settings["stl_quality"],
logger.warning("Three.js renderer failed; falling back to Pillow placeholder") smooth_angle=int(settings["blender_smooth_angle"]),
cycles_device=settings["cycles_device"],
)
rendered_png = tmp_png if tmp_png.exists() else None
except Exception as exc:
logger.warning("Blender subprocess render failed: %s", exc)
rendered_png = None
else:
logger.warning("Blender not available in this container — falling back to Pillow placeholder")
# Merge rich service response data into render_log # Merge rich service response data into render_log
if service_data: if service_data:
@@ -669,20 +666,43 @@ def render_to_file(
extra["denoising_quality"] = denoising_quality extra["denoising_quality"] = denoising_quality
if denoising_use_gpu: if denoising_use_gpu:
extra["denoising_use_gpu"] = denoising_use_gpu extra["denoising_use_gpu"] = denoising_use_gpu
rendered_png, service_data = _render_via_service( from app.services.render_blender import is_blender_available, render_still
"http://blender-renderer:8100/render", step, tmp_png, extra, job_id=job_id if is_blender_available():
) try:
service_data = render_still(
step_path=step,
output_path=tmp_png,
engine=actual_engine,
samples=actual_samples,
stl_quality=settings["stl_quality"],
smooth_angle=int(settings["blender_smooth_angle"]),
cycles_device=actual_cycles_device,
width=w, height=h,
transparent_bg=transparent_bg,
part_colors=part_colors,
template_path=template_path,
target_collection=target_collection,
material_library_path=material_library_path,
material_map=material_map,
part_names_ordered=part_names_ordered,
lighting_only=lighting_only,
shadow_catcher=shadow_catcher,
rotation_x=rotation_x, rotation_y=rotation_y, rotation_z=rotation_z,
noise_threshold=noise_threshold, denoiser=denoiser,
denoising_input_passes=denoising_input_passes,
denoising_prefilter=denoising_prefilter,
denoising_quality=denoising_quality,
denoising_use_gpu=denoising_use_gpu,
)
rendered_png = tmp_png if tmp_png.exists() else None
except Exception as exc:
logger.warning("Blender subprocess render failed: %s", exc)
rendered_png = None
else:
logger.warning("Blender not available in this container — using Pillow fallback")
elif renderer == "threejs": elif renderer == "threejs":
default_size = int(settings["threejs_render_size"]) # Three.js renderer removed in v2 — fall through to Pillow placeholder
w = width or default_size logger.warning("Three.js renderer removed; using Pillow fallback")
h = height or default_size
render_log.update({"width": w, "height": h})
extra2: dict = {"width": w, "height": h}
if part_colors is not None:
extra2["part_colors"] = part_colors
rendered_png, service_data = _render_via_service(
"http://threejs-renderer:8101/render", step, tmp_png, extra2
)
if service_data: if service_data:
for key in ("total_duration_s", "stl_duration_s", "render_duration_s", for key in ("total_duration_s", "stl_duration_s", "render_duration_s",
+6 -1
View File
@@ -5,7 +5,11 @@ celery_app = Celery(
"schaefflerautomat", "schaefflerautomat",
broker=settings.redis_url, broker=settings.redis_url,
backend=settings.redis_url, backend=settings.redis_url,
include=["app.tasks.step_tasks", "app.tasks.ai_tasks"], include=[
"app.tasks.step_tasks",
"app.tasks.ai_tasks",
"app.domains.rendering.tasks",
],
) )
celery_app.conf.update( celery_app.conf.update(
@@ -17,6 +21,7 @@ celery_app.conf.update(
task_routes={ task_routes={
"app.tasks.step_tasks.*": {"queue": "step_processing"}, "app.tasks.step_tasks.*": {"queue": "step_processing"},
"app.tasks.ai_tasks.*": {"queue": "ai_validation"}, "app.tasks.ai_tasks.*": {"queue": "ai_validation"},
"app.domains.rendering.tasks.*": {"queue": "thumbnail_rendering"},
}, },
beat_schedule={}, beat_schedule={},
) )
+14 -11
View File
@@ -157,7 +157,6 @@ def generate_stl_cache(self, cad_file_id: str, quality: str):
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
from app.config import settings as app_settings from app.config import settings as app_settings
from app.models.cad_file import CadFile from app.models.cad_file import CadFile
import httpx
logger.info(f"Generating {quality}-quality STL for CAD file: {cad_file_id}") logger.info(f"Generating {quality}-quality STL for CAD file: {cad_file_id}")
@@ -172,16 +171,20 @@ def generate_stl_cache(self, cad_file_id: str, quality: str):
eng.dispose() eng.dispose()
try: try:
resp = httpx.post( from app.services.render_blender import convert_step_to_stl, export_per_part_stls
"http://blender-renderer:8100/convert-stl", from pathlib import Path as _Path
json={"step_path": step_path, "quality": quality}, step = _Path(step_path)
timeout=600.0, stl_out = step.parent / f"{step.stem}_{quality}.stl"
) parts_dir = step.parent / f"{step.stem}_{quality}_parts"
if resp.status_code == 200:
data = resp.json() if not stl_out.exists() or stl_out.stat().st_size == 0:
logger.info(f"STL cached: {data['stl_path']} ({data['size_bytes']} bytes) in {data['duration_s']}s") convert_step_to_stl(step, stl_out, quality)
else: if not (parts_dir / "manifest.json").exists():
raise RuntimeError(f"blender-renderer returned {resp.status_code}: {resp.text[:300]}") try:
export_per_part_stls(step, parts_dir, quality)
except Exception as pe:
logger.warning(f"Per-part STL export non-fatal: {pe}")
logger.info(f"STL cached: {stl_out} ({stl_out.stat().st_size // 1024} KB)")
except Exception as exc: except Exception as exc:
logger.error(f"STL generation failed for {cad_file_id} quality={quality}: {exc}") logger.error(f"STL generation failed for {cad_file_id} quality={quality}: {exc}")
raise self.retry(exc=exc, countdown=30, max_retries=2) raise self.retry(exc=exc, countdown=30, max_retries=2)
+14 -38
View File
@@ -49,7 +49,6 @@ services:
volumes: volumes:
- ./backend:/app - ./backend:/app
- uploads:/app/uploads - uploads:/app/uploads
- /var/run/docker.sock:/var/run/docker.sock
ports: ports:
- "8888:8888" - "8888:8888"
depends_on: depends_on:
@@ -86,11 +85,12 @@ services:
redis: redis:
condition: service_healthy condition: service_healthy
worker-thumbnail: render-worker:
build: build:
context: ./backend context: ./render-worker
dockerfile: Dockerfile dockerfile: Dockerfile
command: celery -A app.tasks.celery_app worker --loglevel=info -Q thumbnail_rendering --concurrency=1 args:
- BLENDER_VERSION=${BLENDER_VERSION:-5.0.1}
environment: environment:
- POSTGRES_DB=${POSTGRES_DB:-schaeffler} - POSTGRES_DB=${POSTGRES_DB:-schaeffler}
- POSTGRES_USER=${POSTGRES_USER:-schaeffler} - POSTGRES_USER=${POSTGRES_USER:-schaeffler}
@@ -99,19 +99,25 @@ services:
- POSTGRES_PORT=5432 - POSTGRES_PORT=5432
- REDIS_URL=${REDIS_URL:-redis://redis:6379/0} - REDIS_URL=${REDIS_URL:-redis://redis:6379/0}
- JWT_SECRET_KEY=${JWT_SECRET_KEY:-changeme-in-production} - JWT_SECRET_KEY=${JWT_SECRET_KEY:-changeme-in-production}
- AZURE_OPENAI_API_KEY=${AZURE_OPENAI_API_KEY:-}
- AZURE_OPENAI_ENDPOINT=${AZURE_OPENAI_ENDPOINT:-}
- AZURE_OPENAI_DEPLOYMENT=${AZURE_OPENAI_DEPLOYMENT:-gpt-4o}
- AZURE_OPENAI_API_VERSION=${AZURE_OPENAI_API_VERSION:-2024-02-01}
- UPLOAD_DIR=/app/uploads - UPLOAD_DIR=/app/uploads
- BLENDER_BIN=/opt/blender/blender
- RENDER_SCRIPTS_DIR=/render-scripts
volumes: volumes:
- ./backend:/app - ./backend:/app
- uploads:/app/uploads - uploads:/app/uploads
- /opt/blender:/opt/blender:ro
depends_on: depends_on:
postgres: postgres:
condition: service_healthy condition: service_healthy
redis: redis:
condition: service_healthy condition: service_healthy
deploy:
resources:
reservations:
devices:
- driver: nvidia
count: 1
capabilities: [gpu, compute, utility, graphics]
beat: beat:
build: build:
@@ -140,36 +146,6 @@ services:
redis: redis:
condition: service_healthy condition: service_healthy
blender-renderer:
build:
context: ./blender-renderer
dockerfile: Dockerfile
ports:
- "8100:8100"
volumes:
- uploads:/app/uploads
- ./blender-renderer:/app
- /opt/blender:/opt/blender:ro
restart: unless-stopped
deploy:
resources:
reservations:
devices:
- driver: nvidia
count: 1
capabilities: [gpu, compute, utility, graphics]
threejs-renderer:
build:
context: ./threejs-renderer
dockerfile: Dockerfile
ports:
- "8101:8101"
volumes:
- uploads:/app/uploads
- ./threejs-renderer:/app
restart: unless-stopped
frontend: frontend:
build: build:
context: ./frontend context: ./frontend
+61
View File
@@ -0,0 +1,61 @@
# render-worker image: Celery worker with cadquery installed and Blender
# provided by a read-only host mount at /opt/blender (see docker-compose.yml).
FROM ubuntu:22.04
ARG BLENDER_VERSION=5.0.1
ENV DEBIAN_FRONTEND=noninteractive
ENV PYTHONUNBUFFERED=1
ENV BLENDER_VERSION=${BLENDER_VERSION}
# Blender 5.x is mounted from the host at /opt/blender (see docker-compose.yml)
ENV BLENDER_BIN=/opt/blender/blender
# OSMesa for headless cadquery/VTK (no display needed)
ENV PYOPENGL_PLATFORM=osmesa
ENV VTK_DEFAULT_EGL=0
# Runtime libraries for cadquery/OCC + Blender 5.x headless
# (X11/Wayland client libs, software GL/Vulkan, and libgomp for OCC).
RUN apt-get update && apt-get install -y --no-install-recommends \
    python3-pip \
    python3-dev \
    libpq-dev \
    gcc \
    libxrender1 \
    libxi6 \
    libxkbcommon-x11-0 \
    libsm6 \
    libglib2.0-0 \
    libgl1-mesa-glx \
    libosmesa6 \
    libgomp1 \
    libxfixes3 \
    libxrandr2 \
    libxcursor1 \
    libxinerama1 \
    libwayland-client0 \
    libwayland-cursor0 \
    libwayland-egl1 \
    libvulkan1 \
    mesa-vulkan-drivers \
    libegl1 \
    libegl-mesa0 \
    libgbm1 \
    && rm -rf /var/lib/apt/lists/*
WORKDIR /app
# Install backend Python dependencies (includes celery, sqlalchemy, fastapi, etc.)
COPY pyproject.toml .
RUN pip3 install --no-cache-dir -e .
# Install cadquery (heavy — installed after backend deps for better layer caching)
RUN pip3 install --no-cache-dir "cadquery>=2.4.0"
# Copy render scripts
COPY scripts/ /render-scripts/
# Version check script — fails fast if Blender < 5.0.1
COPY check_version.py /check_version.py
# Copy app code (overridden by volume mount in docker-compose)
COPY . .
# Verify Blender version at build time if binary is available
# (skipped during build since /opt/blender is a host mount)
# check_version.py runs at container start, before the Celery worker, and
# exits non-zero if the mounted Blender is missing or too old.
CMD ["bash", "-c", "python3 /check_version.py && celery -A app.tasks.celery_app worker --loglevel=info -Q thumbnail_rendering --concurrency=1"]
+68
View File
@@ -0,0 +1,68 @@
"""Startup check: verify Blender >= 5.0.1 is available.
Run before starting the Celery worker. Exits with code 1 if Blender is
missing or below the minimum required version.
"""
import os
import re
import subprocess
import sys
from pathlib import Path
# Minimum Blender version the render-worker supports, and its display form.
MIN_VERSION = (5, 0, 1)
MIN_VERSION_STR = ".".join(str(v) for v in MIN_VERSION)


def find_blender() -> str:
    """Locate the Blender binary.

    Resolution order: the BLENDER_BIN environment variable (when it points at
    an existing path), then `blender` on PATH, then the bare name "blender"
    as a last resort so the caller still gets a printable value.
    """
    import shutil

    candidate = os.environ.get("BLENDER_BIN", "")
    if candidate:
        if Path(candidate).exists():
            return candidate
    on_path = shutil.which("blender")
    if on_path:
        return on_path
    return "blender"
def check_version():
    """Exit with code 1 unless a runnable Blender >= MIN_VERSION is found.

    Checks, in order: the binary exists on disk, it can be executed with
    `--version`, the version string parses, and it meets MIN_VERSION.
    Prints an OK line on success.
    """
    blender_bin = find_blender()

    # The binary is normally a read-only host mount; verify it is present.
    if not Path(blender_bin).exists():
        print(f"ERROR: Blender not found at {blender_bin}", file=sys.stderr)
        print(
            "Mount Blender >= 5.0.1 from the host via:\n"
            " volumes:\n"
            " - /opt/blender:/opt/blender:ro",
            file=sys.stderr,
        )
        sys.exit(1)

    # Ask Blender for its version; any failure to launch is fatal.
    try:
        proc = subprocess.run(
            [blender_bin, "--version"],
            capture_output=True,
            text=True,
            timeout=15,
        )
    except Exception as exc:
        print(f"ERROR: Could not run Blender: {exc}", file=sys.stderr)
        sys.exit(1)
    output = proc.stdout or proc.stderr or ""

    match = re.search(r"Blender\s+(\d+)\.(\d+)\.(\d+)", output)
    if match is None:
        print(f"ERROR: Could not parse Blender version from output:\n{output[:200]}", file=sys.stderr)
        sys.exit(1)

    version = tuple(int(part) for part in match.groups())
    version_str = ".".join(str(v) for v in version)
    if version < MIN_VERSION:
        print(
            f"ERROR: Blender {version_str} < required {MIN_VERSION_STR}.\n"
            f"Update your host Blender installation.",
            file=sys.stderr,
        )
        sys.exit(1)

    print(f"Blender {version_str} OK (>= {MIN_VERSION_STR})")
# Script entrypoint: run the check; exits non-zero if Blender is missing or too old.
if __name__ == "__main__":
    check_version()
+753
View File
@@ -0,0 +1,753 @@
"""
Blender Python script for rendering an STL file to PNG.
Targets Blender 5.0+ (EEVEE / Cycles).
Called by Blender:
blender --background --python blender_render.py -- \
<stl_path> <output_path> <width> <height> [engine] [samples]
engine: "cycles" (default) | "eevee"
Features:
- Disconnected mesh islands split into separate objects and painted with
palette colours (same 10-colour palette as the Three.js renderer).
- Bounding-box-aware camera: object fills ~85 % of the frame.
- Isometric-style angle (elevation 28°, azimuth 40°).
- Dynamic clip planes.
- Standard (non-Filmic) colour management → no grey tint.
- Schaeffler green top bar + model name label via Pillow post-processing.
"""
import sys
import os
import math
import bpy
from mathutils import Vector, Matrix
# ── Colour palette (matches Three.js renderer) ───────────────────────────────
PALETTE_HEX = [
"#4C9BE8", "#E85B4C", "#4CBE72", "#E8A84C", "#A04CE8",
"#4CD4E8", "#E84CA8", "#7EC850", "#E86B30", "#5088C8",
]
def _srgb_to_linear(c: int) -> float:
"""Convert 0-255 sRGB integer to linear float."""
v = c / 255.0
return v / 12.92 if v <= 0.04045 else ((v + 0.055) / 1.055) ** 2.4
def _hex_to_linear(hex_color: str) -> tuple:
"""Return (r, g, b, 1.0) in Blender linear colour space."""
h = hex_color.lstrip('#')
return (
_srgb_to_linear(int(h[0:2], 16)),
_srgb_to_linear(int(h[2:4], 16)),
_srgb_to_linear(int(h[4:6], 16)),
1.0,
)
PALETTE_LINEAR = [_hex_to_linear(h) for h in PALETTE_HEX]
# ── Parse arguments ───────────────────────────────────────────────────────────
# Blender forwards everything after "--" to the script untouched.
argv = sys.argv
if "--" in argv:
    argv = argv[argv.index("--") + 1:]
else:
    argv = []
# The first four positionals are mandatory; everything after is optional.
if len(argv) < 4:
    print("Usage: blender --background --python blender_render.py -- "
          "<stl_path> <output_path> <width> <height> [engine] [samples] [smooth_angle] [cycles_device] [transparent_bg]")
    sys.exit(1)
import json as _json
# argv[0..3]: input STL, output PNG, render resolution.
stl_path = argv[0]
output_path = argv[1]
width = int(argv[2])
height = int(argv[3])
# argv[4..8]: render quality / appearance knobs.
engine = argv[4].lower() if len(argv) > 4 else "cycles"
samples = int(argv[5]) if len(argv) > 5 else (64 if engine == "eevee" else 256)
smooth_angle = int(argv[6]) if len(argv) > 6 else 30  # degrees; 0 = flat shading
cycles_device = argv[7].lower() if len(argv) > 7 else "auto"  # "auto", "gpu", "cpu"
transparent_bg = argv[8] == "1" if len(argv) > 8 else False
# argv[9..13]: template-based rendering (Mode B) and material-library wiring.
template_path = argv[9] if len(argv) > 9 and argv[9] else ""
target_collection = argv[10] if len(argv) > 10 else "Product"
material_library_path = argv[11] if len(argv) > 11 and argv[11] else ""
material_map_raw = argv[12] if len(argv) > 12 else "{}"
try:
    material_map = _json.loads(material_map_raw) if material_map_raw else {}
except _json.JSONDecodeError:
    # Malformed JSON from the caller degrades to "no material mapping".
    material_map = {}
part_names_ordered_raw = argv[13] if len(argv) > 13 else "[]"
try:
    part_names_ordered = _json.loads(part_names_ordered_raw) if part_names_ordered_raw else []
except _json.JSONDecodeError:
    part_names_ordered = []
# argv[14..18]: template lighting/shadow options and pre-render rotation (degrees).
lighting_only = argv[14] == "1" if len(argv) > 14 else False
shadow_catcher = argv[15] == "1" if len(argv) > 15 else False
rotation_x = float(argv[16]) if len(argv) > 16 else 0.0
rotation_y = float(argv[17]) if len(argv) > 17 else 0.0
rotation_z = float(argv[18]) if len(argv) > 18 else 0.0
# argv[19..24]: Cycles denoiser overrides; empty string = keep Blender's default.
noise_threshold_arg = argv[19] if len(argv) > 19 else ""
denoiser_arg = argv[20] if len(argv) > 20 else ""
denoising_input_passes_arg = argv[21] if len(argv) > 21 else ""
denoising_prefilter_arg = argv[22] if len(argv) > 22 else ""
denoising_quality_arg = argv[23] if len(argv) > 23 else ""
denoising_use_gpu_arg = argv[24] if len(argv) > 24 else ""
# Validate template path: if provided it MUST exist on disk.
# Fail loudly rather than silently rendering with factory settings.
if template_path and not os.path.isfile(template_path):
    print(f"[blender_render] ERROR: template_path was provided but file not found: {template_path}")
    print("[blender_render] Check that the blend-templates directory is on the shared volume.")
    sys.exit(1)
use_template = bool(template_path)
# Startup banner: echo the effective configuration for log forensics.
print(f"[blender_render] engine={engine}, samples={samples}, size={width}x{height}, smooth_angle={smooth_angle}°, device={cycles_device}, transparent={transparent_bg}")
print(f"[blender_render] part_names_ordered: {len(part_names_ordered)} entries")
if use_template:
    print(f"[blender_render] template={template_path}, collection={target_collection}, lighting_only={lighting_only}")
else:
    print("[blender_render] no template — using factory settings (Mode A)")
if material_library_path:
    print(f"[blender_render] material_library={material_library_path}, material_map keys={list(material_map.keys())}")
# ── Helper: find or create collection by name ────────────────────────────────
def _ensure_collection(name: str):
    """Look up a collection by name; create and link it to the scene root if absent."""
    existing = bpy.data.collections.get(name)
    if existing is not None:
        return existing
    created = bpy.data.collections.new(name)
    bpy.context.scene.collection.children.link(created)
    return created
def _apply_smooth(part_obj, angle_deg):
    """Shade a mesh smooth with an angle threshold, or flat when angle_deg == 0."""
    bpy.context.view_layer.objects.active = part_obj
    part_obj.select_set(True)
    if angle_deg <= 0:
        bpy.ops.object.shade_flat()
        return
    try:
        # Newer Blender: operator-based smooth-by-angle.
        bpy.ops.object.shade_smooth_by_angle(angle=math.radians(angle_deg))
    except AttributeError:
        # Older Blender: classic smooth plus mesh auto-smooth flags.
        bpy.ops.object.shade_smooth()
        part_obj.data.use_auto_smooth = True
        part_obj.data.auto_smooth_angle = math.radians(angle_deg)
def _assign_palette_material(part_obj, index):
    """Create a palette-coloured Principled material and make it the part's only material."""
    mat = bpy.data.materials.new(name=f"Part_{index}")
    mat.use_nodes = True
    shader = mat.node_tree.nodes.get("Principled BSDF")
    if shader is not None:
        shader.inputs["Base Color"].default_value = PALETTE_LINEAR[index % len(PALETTE_LINEAR)]
        shader.inputs["Metallic"].default_value = 0.35
        shader.inputs["Roughness"].default_value = 0.40
        try:
            # Input name varies across Blender versions; skip when absent.
            shader.inputs["Specular IOR Level"].default_value = 0.5
        except KeyError:
            pass
    part_obj.data.materials.clear()
    part_obj.data.materials.append(mat)
import re as _re
def _scale_mm_to_m(parts):
    """Scale imported STL objects from millimetres to Blender metres (×0.001).

    STEP/STL coordinates are in mm while Blender's default unit is metres;
    without this a 50 mm part would tower 50 m over any template environment
    authored in metric units. The scale is baked via transform_apply so later
    bbox maths sees real dimensions.
    """
    if not parts:
        return
    bpy.ops.object.select_all(action='DESELECT')
    factor = 0.001
    for obj in parts:
        obj.scale = (factor, factor, factor)
        obj.location *= factor
        obj.select_set(True)
    bpy.context.view_layer.objects.active = parts[0]
    bpy.ops.object.transform_apply(scale=True, location=False, rotation=False)
    print(f"[blender_render] scaled {len(parts)} parts mm→m (×0.001)")
def _apply_rotation(parts, rx, ry, rz):
    """Rotate all parts (Euler degrees, XYZ order) about the world origin.

    After _import_stl + _scale_mm_to_m the combined bbox centre sits at the
    origin, so rotating about the origin equals rotating about the assembly
    centre. The rotation is baked into mesh data so camera bbox calculations
    see the rotated geometry.
    """
    if not parts:
        return
    if rx == 0.0 and ry == 0.0 and rz == 0.0:
        return
    from mathutils import Euler
    transform = Euler(
        (math.radians(rx), math.radians(ry), math.radians(rz)), 'XYZ'
    ).to_matrix().to_4x4()
    for obj in parts:
        obj.matrix_world = transform @ obj.matrix_world
    # Bake the rotation so downstream bbox queries reflect the new orientation.
    bpy.ops.object.select_all(action='DESELECT')
    for obj in parts:
        obj.select_set(True)
    bpy.context.view_layer.objects.active = parts[0]
    bpy.ops.object.transform_apply(location=False, rotation=True, scale=False)
    print(f"[blender_render] applied rotation ({rx}°, {ry}°, {rz}°) to {len(parts)} parts")
def _import_stl(stl_file):
    """Import STL into Blender, using per-part STLs if available.
    Checks for {stl_stem}_parts/manifest.json next to the STL file.
    - Per-part mode: imports each part STL, names Blender object after STEP part name.
    - Fallback: imports combined STL and splits by loose geometry.
    Returns list of Blender mesh objects, centred at origin.
    """
    stl_dir = os.path.dirname(stl_file)
    stl_stem = os.path.splitext(os.path.basename(stl_file))[0]
    parts_dir = os.path.join(stl_dir, stl_stem + "_parts")
    manifest_path = os.path.join(parts_dir, "manifest.json")
    parts = []
    if os.path.isfile(manifest_path):
        # ── Per-part mode ────────────────────────────────────────────────
        try:
            with open(manifest_path, "r") as f:
                manifest = _json.loads(f.read())
            part_entries = manifest.get("parts", [])
        except Exception as e:
            # Corrupt/unreadable manifest: fall through to combined-STL import.
            print(f"[blender_render] WARNING: failed to read manifest: {e}")
            part_entries = []
        if part_entries:
            for entry in part_entries:
                part_file = os.path.join(parts_dir, entry["file"])
                part_name = entry["name"]
                if not os.path.isfile(part_file):
                    print(f"[blender_render] WARNING: part STL missing: {part_file}")
                    continue
                # Deselect first so selected_objects reflects only this import.
                bpy.ops.object.select_all(action='DESELECT')
                bpy.ops.wm.stl_import(filepath=part_file)
                imported = bpy.context.selected_objects
                if imported:
                    obj = imported[0]
                    # Name both the object and its mesh datablock after the STEP part.
                    obj.name = part_name
                    if obj.data:
                        obj.data.name = part_name
                    parts.append(obj)
            if parts:
                print(f"[blender_render] imported {len(parts)} named parts from per-part STLs")
    # ── Fallback: combined STL + separate by loose ───────────────────────
    if not parts:
        bpy.ops.wm.stl_import(filepath=stl_file)
        obj = bpy.context.selected_objects[0] if bpy.context.selected_objects else None
        if obj is None:
            print(f"ERROR: No objects imported from {stl_file}")
            sys.exit(1)
        bpy.context.view_layer.objects.active = obj
        # Centre the combined mesh at the world origin before splitting.
        bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY', center='BOUNDS')
        obj.location = (0.0, 0.0, 0.0)
        # Split disconnected mesh islands into separate objects.
        bpy.ops.object.mode_set(mode='EDIT')
        bpy.ops.mesh.separate(type='LOOSE')
        bpy.ops.object.mode_set(mode='OBJECT')
        parts = list(bpy.context.selected_objects)
        print(f"[blender_render] fallback: separated into {len(parts)} part(s)")
        return parts
    # ── Centre per-part imports at origin (combined bbox) ────────────────
    all_corners = []
    for p in parts:
        all_corners.extend(p.matrix_world @ Vector(c) for c in p.bound_box)
    if all_corners:
        mins = Vector((min(v.x for v in all_corners),
                       min(v.y for v in all_corners),
                       min(v.z for v in all_corners)))
        maxs = Vector((max(v.x for v in all_corners),
                       max(v.y for v in all_corners),
                       max(v.z for v in all_corners)))
        center = (mins + maxs) * 0.5
        # Shift every part so the combined bbox centre lands on the origin.
        for p in parts:
            p.location -= center
    return parts
def _resolve_part_name(index, part_obj):
    """Return the STEP part name for a Blender part by index.

    Prefers the index mapping from part_names_ordered (combined-STL mode).
    Otherwise falls back to the object's own name — with per-part import
    that IS the STEP name, possibly carrying a Blender duplicate suffix
    (.001, .002, …) which is stripped before returning.
    """
    if part_names_ordered and index < len(part_names_ordered):
        return part_names_ordered[index]
    # Strip Blender auto-suffix (.001, .002, etc.) from the object name.
    return _re.sub(r'\.\d{3}$', '', part_obj.name)
def _apply_material_library(parts, mat_lib_path, mat_map):
    """Append materials from library .blend and assign to parts via material_map.
    With per-part STL import, Blender objects are named after STEP parts,
    so matching is by name (stripping Blender .NNN suffix for duplicates).
    Falls back to part_names_ordered index-based matching for combined-STL mode.
    mat_map: {part_name_lower: material_name}
    Parts without a match keep their current material.
    """
    if not mat_lib_path or not os.path.isfile(mat_lib_path):
        print(f"[blender_render] material library not found: {mat_lib_path}")
        return
    # Collect unique material names needed
    needed = set(mat_map.values())
    if not needed:
        return
    # Append materials from library
    appended = {}
    for mat_name in needed:
        # Blender datablock path syntax: <library.blend>/Material/<name>
        inner_path = f"{mat_lib_path}/Material/{mat_name}"
        try:
            bpy.ops.wm.append(
                filepath=inner_path,
                directory=f"{mat_lib_path}/Material/",
                filename=mat_name,
                link=False,
            )
            # Append does not error on a missing name; verify it actually arrived.
            if mat_name in bpy.data.materials:
                appended[mat_name] = bpy.data.materials[mat_name]
                print(f"[blender_render] appended material: {mat_name}")
            else:
                print(f"[blender_render] WARNING: material '{mat_name}' not found after append")
        except Exception as exc:
            print(f"[blender_render] WARNING: failed to append material '{mat_name}': {exc}")
    if not appended:
        return
    # Assign materials to parts — primary: name-based (per-part STL mode),
    # secondary: index-based via part_names_ordered (combined STL fallback)
    assigned_count = 0
    for i, part in enumerate(parts):
        # Try name-based matching first (strip Blender .NNN suffix)
        base_name = _re.sub(r'\.\d{3}$', '', part.name)
        part_key = base_name.lower().strip()
        mat_name = mat_map.get(part_key)
        # Fall back to index-based matching via part_names_ordered
        if not mat_name and part_names_ordered and i < len(part_names_ordered):
            step_name = part_names_ordered[i]
            part_key = step_name.lower().strip()
            mat_name = mat_map.get(part_key)
        if mat_name and mat_name in appended:
            # Replace whatever material the part currently has.
            part.data.materials.clear()
            part.data.materials.append(appended[mat_name])
            assigned_count += 1
            print(f"[blender_render] assigned '{mat_name}' to part '{part.name}'")
    print(f"[blender_render] material assignment: {assigned_count}/{len(parts)} parts matched")
# ── SCENE SETUP ──────────────────────────────────────────────────────────────
if use_template:
    # ── MODE B: Template-based render ────────────────────────────────────────
    print(f"[blender_render] Opening template: {template_path}")
    bpy.ops.wm.open_mainfile(filepath=template_path)
    # Find or create target collection
    target_col = _ensure_collection(target_collection)
    # Import and split STL
    parts = _import_stl(stl_path)
    # Scale mm→m: STEP coords are mm, Blender default unit is metres
    _scale_mm_to_m(parts)
    # Apply render position rotation (before camera/bbox calculations)
    _apply_rotation(parts, rotation_x, rotation_y, rotation_z)
    # Move imported parts into target collection
    for part in parts:
        # Remove from all existing collections
        for col in list(part.users_collection):
            col.objects.unlink(part)
        target_col.objects.link(part)
    # Apply smooth shading
    for part in parts:
        _apply_smooth(part, smooth_angle)
    # Material assignment: library materials if available, otherwise palette
    if material_library_path and material_map:
        # Build lowercased material_map for matching
        mat_map_lower = {k.lower(): v for k, v in material_map.items()}
        _apply_material_library(parts, material_library_path, mat_map_lower)
        # Parts not matched by library get palette fallback
        for i, part in enumerate(parts):
            if not part.data.materials or len(part.data.materials) == 0:
                _assign_palette_material(part, i)
    else:
        for i, part in enumerate(parts):
            _assign_palette_material(part, i)
    # ── Shadow catcher (Cycles only, template mode only) ─────────────────────
    if shadow_catcher:
        sc_col_name = "Shadowcatcher"
        sc_obj_name = "Shadowcatcher"
        # Enable the Shadowcatcher collection in all view layers
        for vl in bpy.context.scene.view_layers:
            def _enable_col_recursive(layer_col):
                # Depth-first search for the shadow-catcher collection; once
                # found, include it in the view layer and un-hide it.
                if layer_col.collection.name == sc_col_name:
                    layer_col.exclude = False
                    layer_col.collection.hide_render = False
                    layer_col.collection.hide_viewport = False
                    return True
                for child in layer_col.children:
                    if _enable_col_recursive(child):
                        return True
                return False
            _enable_col_recursive(vl.layer_collection)
        sc_obj = bpy.data.objects.get(sc_obj_name)
        if sc_obj:
            # Calculate product bbox min Z (world space)
            all_world_corners = []
            for part in parts:
                for corner in part.bound_box:
                    all_world_corners.append((part.matrix_world @ Vector(corner)).z)
            if all_world_corners:
                # Drop the catcher plane to the lowest point of the product.
                sc_obj.location.z = min(all_world_corners)
            print(f"[blender_render] shadow catcher enabled, plane Z={sc_obj.location.z:.4f}")
        else:
            print(f"[blender_render] WARNING: shadow catcher object '{sc_obj_name}' not found in template")
    # lighting_only: use template World/HDRI but force auto-camera UNLESS the shadow
    # catcher is enabled — in that case the template camera is already positioned to
    # show both the product and its shadow on the ground plane.
    needs_auto_camera = (lighting_only and not shadow_catcher) or not bpy.context.scene.camera
    if lighting_only and not shadow_catcher:
        print("[blender_render] lighting_only mode: using template World/HDRI, forcing auto-camera")
    elif needs_auto_camera:
        print("[blender_render] WARNING: template has no camera — will create auto-camera")
    # Set very close near clip on template camera for mm-scale parts (now in metres)
    if not needs_auto_camera and bpy.context.scene.camera:
        bpy.context.scene.camera.data.clip_start = 0.001
    print(f"[blender_render] template mode: {len(parts)} parts imported into collection '{target_collection}'")
else:
    # ── MODE A: Factory settings (original behavior) ─────────────────────────
    # No template: build the whole scene (lights, camera, world) from scratch.
    needs_auto_camera = True
    bpy.ops.wm.read_factory_settings(use_empty=True)
    parts = _import_stl(stl_path)
    # Scale mm→m: STEP coords are mm, Blender default unit is metres
    _scale_mm_to_m(parts)
    # Apply render position rotation (before camera/bbox calculations)
    _apply_rotation(parts, rotation_x, rotation_y, rotation_z)
    for i, part in enumerate(parts):
        _apply_smooth(part, smooth_angle)
        _assign_palette_material(part, i)
    # Apply material library on top of palette colours (same logic as Mode B).
    # material_library_path / material_map are parsed from argv even in Mode A
    # but were previously never used here — that was the bug.
    if material_library_path and material_map:
        mat_map_lower = {k.lower(): v for k, v in material_map.items()}
        _apply_material_library(parts, material_library_path, mat_map_lower)
    # Parts not matched by the library keep their palette material (already set above)
# NOTE(review): everything below must stay guarded by needs_auto_camera —
# bsphere_radius/bbox_center/cam_obj are undefined in template-with-camera
# mode. Confirm the indentation of the lighting and camera sections.
if needs_auto_camera:
    # ── Combined bounding box / bounding sphere ──────────────────────────────
    all_corners = []
    for part in parts:
        all_corners.extend(part.matrix_world @ Vector(c) for c in part.bound_box)
    bbox_min = Vector((
        min(v.x for v in all_corners),
        min(v.y for v in all_corners),
        min(v.z for v in all_corners),
    ))
    bbox_max = Vector((
        max(v.x for v in all_corners),
        max(v.y for v in all_corners),
        max(v.z for v in all_corners),
    ))
    bbox_center = (bbox_min + bbox_max) * 0.5
    bbox_dims = bbox_max - bbox_min
    # Bounding-sphere radius = half the bbox diagonal; floor avoids div-by-zero.
    bsphere_radius = max(bbox_dims.length * 0.5, 0.001)
    print(f"[blender_render] bbox_dims={tuple(round(d,4) for d in bbox_dims)}, "
          f"bsphere_radius={bsphere_radius:.4f}, center={tuple(round(c,4) for c in bbox_center)}")
    # ── Lighting — only in Mode A (factory settings) ─────────────────────────
    # In template mode the .blend file provides its own World/HDRI lighting.
    # Adding auto-lights would overpower the template's intended look.
    if not use_template:
        light_dist = bsphere_radius * 6.0
        # Key light: sun above and to the side of the model.
        bpy.ops.object.light_add(type='SUN', location=(
            bbox_center.x + light_dist * 0.5,
            bbox_center.y - light_dist * 0.35,
            bbox_center.z + light_dist,
        ))
        sun = bpy.context.active_object
        sun.data.energy = 4.0
        sun.rotation_euler = (math.radians(45), 0, math.radians(30))
        # Fill light: large area lamp opposite the sun to soften shadows.
        bpy.ops.object.light_add(type='AREA', location=(
            bbox_center.x - light_dist * 0.4,
            bbox_center.y + light_dist * 0.4,
            bbox_center.z + light_dist * 0.7,
        ))
        fill = bpy.context.active_object
        fill.data.energy = max(800.0, bsphere_radius ** 2 * 2000.0)
        fill.data.size = max(4.0, bsphere_radius * 4.0)
    # ── Camera ───────────────────────────────────────────────────────────────
    ELEVATION_DEG = 28.0
    AZIMUTH_DEG = 40.0
    LENS_MM = 50.0
    SENSOR_WIDTH_MM = 36.0
    FILL_FACTOR = 0.85
    elevation_rad = math.radians(ELEVATION_DEG)
    azimuth_rad = math.radians(AZIMUTH_DEG)
    # Unit vector from bbox centre toward the camera (isometric-style angle).
    cam_dir = Vector((
        math.cos(elevation_rad) * math.cos(azimuth_rad),
        math.cos(elevation_rad) * math.sin(azimuth_rad),
        math.sin(elevation_rad),
    )).normalized()
    # Half-FOVs for both axes; use the tighter one so the object fits either way.
    fov_h = math.atan(SENSOR_WIDTH_MM / (2.0 * LENS_MM))
    fov_v = math.atan(SENSOR_WIDTH_MM * (height / width) / (2.0 * LENS_MM))
    fov_used = min(fov_h, fov_v)
    # Distance so the bounding sphere fills FILL_FACTOR of the frame.
    dist = (bsphere_radius / math.tan(fov_used)) / FILL_FACTOR
    dist = max(dist, bsphere_radius * 1.5)
    print(f"[blender_render] camera dist={dist:.4f}, fov={math.degrees(fov_used):.2f}°")
    cam_location = bbox_center + cam_dir * dist
    bpy.ops.object.camera_add(location=cam_location)
    cam_obj = bpy.context.active_object
    cam_obj.data.lens = LENS_MM
    bpy.context.scene.camera = cam_obj
    # Build a look-at rotation: camera -Z looks along look_dir, +Y is cam_up.
    look_dir = (bbox_center - cam_location).normalized()
    up_world = Vector((0.0, 0.0, 1.0))
    right = look_dir.cross(up_world)
    if right.length < 1e-6:
        # Looking straight up/down: pick an arbitrary horizontal right vector.
        right = Vector((1.0, 0.0, 0.0))
    right.normalize()
    cam_up = right.cross(look_dir).normalized()
    rot_mat = Matrix((
        ( right.x, right.y, right.z),
        ( cam_up.x, cam_up.y, cam_up.z),
        (-look_dir.x, -look_dir.y, -look_dir.z),
    )).transposed()
    cam_obj.rotation_euler = rot_mat.to_euler('XYZ')
    # Dynamic clip planes sized to camera distance and object extent.
    cam_obj.data.clip_start = max(dist * 0.001, 0.0001)
    cam_obj.data.clip_end = dist + bsphere_radius * 3.0
    print(f"[blender_render] clip {cam_obj.data.clip_start:.6f}{cam_obj.data.clip_end:.4f}")
# ── World background — only in Mode A ────────────────────────────────────
# In template mode the .blend file owns its World (HDRI, sky texture, studio
# lighting). Overwriting it would destroy the HDR look the template was
# designed to use (e.g. Alpha-HDR output types with Filmic tonemapping).
if not use_template:
    world = bpy.data.worlds.new("World")
    bpy.context.scene.world = world
    world.use_nodes = True
    bg = world.node_tree.nodes["Background"]
    # Near-white, low-strength backdrop for a clean product-shot look.
    bg.inputs["Color"].default_value = (0.96, 0.96, 0.97, 1.0)
    bg.inputs["Strength"].default_value = 0.15
# ── Render engine ─────────────────────────────────────────────────────────────
scene = bpy.context.scene
if engine == "eevee":
    # Blender 4.x used 'BLENDER_EEVEE_NEXT'; Blender 5.x reverted to 'BLENDER_EEVEE'.
    # Try both names so the script works across versions.
    set_ok = False
    for eevee_id in ('BLENDER_EEVEE', 'BLENDER_EEVEE_NEXT'):
        try:
            scene.render.engine = eevee_id
            set_ok = True
            print(f"[blender_render] EEVEE engine id: {eevee_id}")
            break
        except TypeError:
            # Enum value not recognised by this Blender build — try the next id.
            continue
    if not set_ok:
        print("[blender_render] WARNING: could not set EEVEE engine falling back to Cycles")
        engine = "cycles"
if engine == "eevee":
    # Sample attribute name changed across minor versions
    for attr in ('taa_render_samples', 'samples'):
        try:
            setattr(scene.eevee, attr, samples)
            print(f"[blender_render] EEVEE samples: scene.eevee.{attr}={samples}")
            break
        except AttributeError:
            continue
if engine != "eevee":  # covers both explicit Cycles and EEVEE-fallback
    scene.render.engine = 'CYCLES'
    scene.cycles.samples = samples
    scene.cycles.use_denoising = True
    scene.cycles.denoiser = denoiser_arg if denoiser_arg else 'OPENIMAGEDENOISE'
    # Optional denoiser overrides — ignored on Blender builds that lack the property.
    if denoising_input_passes_arg:
        try: scene.cycles.denoising_input_passes = denoising_input_passes_arg
        except Exception: pass
    if denoising_prefilter_arg:
        try: scene.cycles.denoising_prefilter = denoising_prefilter_arg
        except Exception: pass
    if denoising_quality_arg:
        try: scene.cycles.denoising_quality = denoising_quality_arg
        except Exception: pass
    if denoising_use_gpu_arg:
        try: scene.cycles.denoising_use_gpu = (denoising_use_gpu_arg == "1")
        except AttributeError: pass
    if noise_threshold_arg:
        scene.cycles.use_adaptive_sampling = True
        scene.cycles.adaptive_threshold = float(noise_threshold_arg)
    # ── Device selection: "cpu" forces CPU, "gpu" forces GPU (fail if unavailable),
    # "auto" tries GPU first and falls back to CPU.
    gpu_type_found = None
    if cycles_device != "cpu":
        try:
            cycles_prefs = bpy.context.preferences.addons['cycles'].preferences
            # Probe backends in preference order; first with a non-CPU device wins.
            for device_type in ('OPTIX', 'CUDA', 'HIP', 'ONEAPI'):
                try:
                    cycles_prefs.compute_device_type = device_type
                    cycles_prefs.get_devices()
                    gpu_devs = [d for d in cycles_prefs.devices if d.type != 'CPU']
                    if gpu_devs:
                        for d in gpu_devs:
                            d.use = True
                        gpu_type_found = device_type
                        break
                except Exception as e:
                    print(f"[blender_render] {device_type} not available: {e}")
        except Exception as e:
            print(f"[blender_render] GPU probe failed: {e}")
    if gpu_type_found:
        scene.cycles.device = 'GPU'
        print(f"[blender_render] Cycles GPU ({gpu_type_found}), samples={samples}")
    else:
        scene.cycles.device = 'CPU'
        print(f"[blender_render] WARNING: GPU not found — falling back to CPU, samples={samples}")
# ── Colour management ─────────────────────────────────────────────────────────
# In template mode the .blend file owns its colour management (e.g. Filmic/
# AgX for HDR, custom exposure for Alpha-HDR output types). Overwriting it
# would destroy the look the template was designed for.
# In factory-settings mode (Mode A) force Standard to avoid the grey Filmic
# tint that Blender applies by default.
if not use_template:
    scene.view_settings.view_transform = 'Standard'
    scene.view_settings.exposure = 0.0
    scene.view_settings.gamma = 1.0
    try:
        # 'look' may not accept 'None' on all builds — best-effort reset.
        scene.view_settings.look = 'None'
    except Exception:
        pass
# ── Render settings ───────────────────────────────────────────────────────────
scene.render.resolution_x = width
scene.render.resolution_y = height
scene.render.resolution_percentage = 100
scene.render.image_settings.file_format = 'PNG'
scene.render.filepath = output_path
# Transparent film lets callers composite the product onto any background.
scene.render.film_transparent = transparent_bg
# ── Render ────────────────────────────────────────────────────────────────────
print(f"[blender_render] Rendering → {output_path} (Blender {bpy.app.version_string})")
bpy.ops.render.render(write_still=True)
print("[blender_render] render done.")
# ── Pillow post-processing: green bar + model name label ─────────────────────
# Skip overlay for transparent renders to keep clean alpha channel
if transparent_bg:
    print("[blender_render] Transparent mode — skipping Pillow overlay.")
else:
    try:
        from PIL import Image, ImageDraw, ImageFont
        img = Image.open(output_path).convert("RGBA")
        draw = ImageDraw.Draw(img)
        W, H = img.size
        # Schaeffler green top bar
        bar_h = max(8, H // 32)
        draw.rectangle([0, 0, W - 1, bar_h - 1], fill=(0, 137, 61, 255))
        # Model name strip at bottom
        model_name = os.path.splitext(os.path.basename(stl_path))[0]
        label_h = max(20, H // 20)
        # Semi-transparent dark strip behind the label text.
        img.alpha_composite(
            Image.new("RGBA", (W, label_h), (30, 30, 30, 180)),
            dest=(0, H - label_h),
        )
        font_size = max(10, label_h - 6)
        font = None
        # Probe common system font paths; fall back to PIL's bitmap font below.
        for fp in [
            "/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf",
            "/usr/share/fonts/truetype/liberation/LiberationSans-Bold.ttf",
            "/usr/share/fonts/truetype/freefont/FreeSansBold.ttf",
        ]:
            if os.path.exists(fp):
                try:
                    font = ImageFont.truetype(fp, font_size)
                    break
                except Exception:
                    pass
        if font is None:
            font = ImageFont.load_default()
        # Centre the model name horizontally within the bottom strip.
        tb = draw.textbbox((0, 0), model_name, font=font)
        text_w = tb[2] - tb[0]
        draw.text(
            ((W - text_w) // 2, H - label_h + (label_h - (tb[3] - tb[1])) // 2),
            model_name, font=font, fill=(255, 255, 255, 255),
        )
        img.convert("RGB").save(output_path, format="PNG")
        print(f"[blender_render] Pillow overlay applied.")
    except ImportError:
        # Blender's bundled Python may not ship Pillow — overlay is optional.
        print("[blender_render] Pillow not in Blender Python skipping overlay.")
    except Exception as exc:
        print(f"[blender_render] Pillow overlay failed (non-fatal): {exc}")
print("[blender_render] Done.")
+781
View File
@@ -0,0 +1,781 @@
"""Blender Python script: single-frame still render for Flamenco.
Matches the lighting, camera, materials, and post-processing of the
Celery blender_render.py so that LQ and HQ renders look consistent.
Usage (from Blender):
blender --background --python still_render.py -- \
<stl_path> <output_path> <width> <height> <engine> <samples> \
<part_colors_json> <transparent_bg> \
[template_path] [target_collection] [material_library_path] [material_map_json]
"""
import bpy
import sys
import os
import json
import math
from mathutils import Vector, Matrix
# ── Colour palette (matches blender_render.py / Three.js renderer) ───────────
PALETTE_HEX = [
"#4C9BE8", "#E85B4C", "#4CBE72", "#E8A84C", "#A04CE8",
"#4CD4E8", "#E84CA8", "#7EC850", "#E86B30", "#5088C8",
]
def _srgb_to_linear(c: int) -> float:
v = c / 255.0
return v / 12.92 if v <= 0.04045 else ((v + 0.055) / 1.055) ** 2.4
def _hex_to_linear(hex_color: str) -> tuple:
h = hex_color.lstrip('#')
return (
_srgb_to_linear(int(h[0:2], 16)),
_srgb_to_linear(int(h[2:4], 16)),
_srgb_to_linear(int(h[4:6], 16)),
1.0,
)
PALETTE_LINEAR = [_hex_to_linear(h) for h in PALETTE_HEX]
SMOOTH_ANGLE = 30 # degrees
# ── Helper functions ─────────────────────────────────────────────────────────
def _ensure_collection(name: str):
    """Look up a collection by name; create and link it to the scene root if absent."""
    existing = bpy.data.collections.get(name)
    if existing is not None:
        return existing
    created = bpy.data.collections.new(name)
    bpy.context.scene.collection.children.link(created)
    return created
def _assign_palette_material(part_obj, index):
    """Create a palette-coloured Principled material and make it the part's only material."""
    mat = bpy.data.materials.new(name=f"Part_{index}")
    mat.use_nodes = True
    shader = mat.node_tree.nodes.get("Principled BSDF")
    if shader is not None:
        shader.inputs["Base Color"].default_value = PALETTE_LINEAR[index % len(PALETTE_LINEAR)]
        shader.inputs["Metallic"].default_value = 0.35
        shader.inputs["Roughness"].default_value = 0.40
        try:
            # Input name varies across Blender versions; skip when absent.
            shader.inputs["Specular IOR Level"].default_value = 0.5
        except KeyError:
            pass
    part_obj.data.materials.clear()
    part_obj.data.materials.append(mat)
def _apply_smooth(part_obj, angle_deg):
    """Shade a mesh smooth with an angle threshold, or flat when angle_deg == 0."""
    bpy.context.view_layer.objects.active = part_obj
    part_obj.select_set(True)
    if angle_deg <= 0:
        bpy.ops.object.shade_flat()
        return
    try:
        # Newer Blender: operator-based smooth-by-angle.
        bpy.ops.object.shade_smooth_by_angle(angle=math.radians(angle_deg))
    except AttributeError:
        # Older Blender: classic smooth plus mesh auto-smooth flags.
        bpy.ops.object.shade_smooth()
        part_obj.data.use_auto_smooth = True
        part_obj.data.auto_smooth_angle = math.radians(angle_deg)
import re as _re
def _scale_mm_to_m(parts):
    """Scale imported STL objects from millimetres to Blender metres (×0.001).

    STEP/STL coordinates are in mm while Blender's default unit is metres;
    without this a 50 mm part would tower 50 m over any template environment
    authored in metric units. The scale is baked via transform_apply.
    """
    if not parts:
        return
    bpy.ops.object.select_all(action='DESELECT')
    factor = 0.001
    for obj in parts:
        obj.scale = (factor, factor, factor)
        obj.location *= factor
        obj.select_set(True)
    bpy.context.view_layer.objects.active = parts[0]
    bpy.ops.object.transform_apply(scale=True, location=False, rotation=False)
    print(f"[still_render] scaled {len(parts)} parts mm→m (×0.001)")
def _apply_rotation(parts, rx, ry, rz):
    """Rotate all parts (Euler degrees, XYZ order) about the world origin.

    The rotation is baked into mesh data via transform_apply so downstream
    bbox-based camera maths sees the rotated geometry.
    """
    if not parts:
        return
    if rx == 0.0 and ry == 0.0 and rz == 0.0:
        return
    from mathutils import Euler
    transform = Euler(
        (math.radians(rx), math.radians(ry), math.radians(rz)), 'XYZ'
    ).to_matrix().to_4x4()
    for obj in parts:
        obj.matrix_world = transform @ obj.matrix_world
    bpy.ops.object.select_all(action='DESELECT')
    for obj in parts:
        obj.select_set(True)
    bpy.context.view_layer.objects.active = parts[0]
    bpy.ops.object.transform_apply(location=False, rotation=True, scale=False)
    print(f"[still_render] applied rotation ({rx}°, {ry}°, {rz}°) to {len(parts)} parts")
def _import_stl(stl_file):
    """Import STL into Blender, using per-part STLs if available.

    Checks for {stl_stem}_parts/manifest.json next to the STL file.
    - Per-part mode: imports each part STL, names Blender object after STEP part name.
    - Fallback: imports combined STL and splits by loose geometry.
    Returns list of Blender mesh objects, centred at origin.

    Exits the process with status 1 if the combined-STL fallback imports nothing.
    """
    stl_dir = os.path.dirname(stl_file)
    stl_stem = os.path.splitext(os.path.basename(stl_file))[0]
    parts_dir = os.path.join(stl_dir, stl_stem + "_parts")
    manifest_path = os.path.join(parts_dir, "manifest.json")
    parts = []
    if os.path.isfile(manifest_path):
        # ── Per-part mode ────────────────────────────────────────────────
        try:
            with open(manifest_path, "r") as f:
                manifest = json.loads(f.read())
            part_entries = manifest.get("parts", [])
        except Exception as e:
            # Corrupt/unreadable manifest → fall through to combined-STL mode.
            print(f"[still_render] WARNING: failed to read manifest: {e}")
            part_entries = []
        if part_entries:
            for entry in part_entries:
                part_file = os.path.join(parts_dir, entry["file"])
                part_name = entry["name"]
                if not os.path.isfile(part_file):
                    # Missing part files are skipped, not fatal.
                    print(f"[still_render] WARNING: part STL missing: {part_file}")
                    continue
                # Deselect first so the fresh import is the only selection.
                bpy.ops.object.select_all(action='DESELECT')
                bpy.ops.wm.stl_import(filepath=part_file)
                imported = bpy.context.selected_objects
                if imported:
                    obj = imported[0]
                    # Name both object and mesh datablock after the STEP part.
                    obj.name = part_name
                    if obj.data:
                        obj.data.name = part_name
                    parts.append(obj)
            if parts:
                print(f"[still_render] imported {len(parts)} named parts from per-part STLs")
    # ── Fallback: combined STL + separate by loose ───────────────────────
    if not parts:
        bpy.ops.wm.stl_import(filepath=stl_file)
        obj = bpy.context.selected_objects[0] if bpy.context.selected_objects else None
        if obj is None:
            print(f"ERROR: No objects imported from {stl_file}")
            sys.exit(1)
        bpy.context.view_layer.objects.active = obj
        # Centre the combined mesh at the world origin before splitting.
        bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY', center='BOUNDS')
        obj.location = (0.0, 0.0, 0.0)
        # Split disconnected shells into separate (anonymous) objects.
        bpy.ops.object.mode_set(mode='EDIT')
        bpy.ops.mesh.separate(type='LOOSE')
        bpy.ops.object.mode_set(mode='OBJECT')
        parts = list(bpy.context.selected_objects)
        print(f"[still_render] fallback: separated into {len(parts)} part(s)")
        return parts
    # ── Centre per-part imports at origin (combined bbox) ────────────────
    all_corners = []
    for p in parts:
        all_corners.extend(p.matrix_world @ Vector(c) for c in p.bound_box)
    if all_corners:
        mins = Vector((min(v.x for v in all_corners),
                       min(v.y for v in all_corners),
                       min(v.z for v in all_corners)))
        maxs = Vector((max(v.x for v in all_corners),
                       max(v.y for v in all_corners),
                       max(v.z for v in all_corners)))
        center = (mins + maxs) * 0.5
        # Shift every part so the combined bounding-box centre sits at origin.
        for p in parts:
            p.location -= center
    return parts
def _resolve_part_name(index, part_obj, part_names_ordered):
"""Get the STEP part name for a Blender part by index.
With per-part import, part_obj.name IS the STEP name (possibly with
Blender .NNN suffix). Falls back to part_names_ordered for combined-STL mode.
"""
base_name = _re.sub(r'\.\d{3}$', '', part_obj.name)
if part_names_ordered and index < len(part_names_ordered):
return part_names_ordered[index]
return base_name
def _apply_material_library(parts, mat_lib_path, mat_map, part_names_ordered=None):
    """Append materials from library .blend and assign to parts via material_map.

    With per-part STL import, Blender objects are named after STEP parts,
    so matching is by name (stripping Blender .NNN suffix for duplicates).
    Falls back to part_names_ordered index-based matching for combined-STL mode.
    mat_map: {part_name_lower: material_name}
    Parts without a match keep their current material.
    """
    if not mat_lib_path or not os.path.isfile(mat_lib_path):
        # Missing library is non-fatal: parts keep whatever material they have.
        print(f"[still_render] material library not found: {mat_lib_path}")
        return
    # Collect unique material names needed
    needed = set(mat_map.values())
    if not needed:
        return
    # Append materials from library
    appended = {}
    for mat_name in needed:
        # bpy.ops.wm.append addresses datablocks *inside* a .blend file via the
        # pseudo-path "<file>/Material/<name>".
        inner_path = f"{mat_lib_path}/Material/{mat_name}"
        try:
            bpy.ops.wm.append(
                filepath=inner_path,
                directory=f"{mat_lib_path}/Material/",
                filename=mat_name,
                link=False,
            )
            if mat_name in bpy.data.materials:
                appended[mat_name] = bpy.data.materials[mat_name]
                print(f"[still_render] appended material: {mat_name}")
            else:
                print(f"[still_render] WARNING: material '{mat_name}' not found after append")
        except Exception as exc:
            # A single failed append should not abort the rest of the render.
            print(f"[still_render] WARNING: failed to append material '{mat_name}': {exc}")
    if not appended:
        return
    # Assign materials to parts — primary: name-based (per-part STL mode),
    # secondary: index-based via part_names_ordered (combined STL fallback)
    assigned_count = 0
    for i, part in enumerate(parts):
        # Try name-based matching first (strip Blender .NNN suffix)
        base_name = _re.sub(r'\.\d{3}$', '', part.name)
        part_key = base_name.lower().strip()
        mat_name = mat_map.get(part_key)
        # Fall back to index-based matching via part_names_ordered
        if not mat_name and part_names_ordered and i < len(part_names_ordered):
            step_name = part_names_ordered[i]
            part_key = step_name.lower().strip()
            mat_name = mat_map.get(part_key)
        if mat_name and mat_name in appended:
            # Replace any existing materials with the single library material.
            part.data.materials.clear()
            part.data.materials.append(appended[mat_name])
            assigned_count += 1
            print(f"[still_render] assigned '{mat_name}' to part '{part.name}'")
    print(f"[still_render] material assignment: {assigned_count}/{len(parts)} parts matched")
def main():
    """Entry point: parse Blender's post-'--' argv, build the scene, render one still.

    Two scene modes:
      * Mode B (template): opens a .blend template that supplies World/HDRI,
        camera and optionally a shadow-catcher setup; parts are imported into
        a named collection inside it.
      * Mode A (factory settings): empty scene plus auto lights, auto camera
        and a neutral world background.

    Positional args after '--' (trailing ones optional):
        stl_path output_path width height engine samples
        [part_colors_json] [transparent_bg] [template_path] [target_collection]
        [material_library_path] [material_map_json] [part_names_ordered_json]
        [lighting_only] [cycles_device] [shadow_catcher]
        [rotation_x] [rotation_y] [rotation_z]
        [noise_threshold] [denoiser] [denoising_input_passes]
        [denoising_prefilter] [denoising_quality] [denoising_use_gpu]

    Exits 1 when a template_path is supplied but missing on disk.
    """
    argv = sys.argv
    # Blender forwards everything after a literal "--" separator to the script.
    args = argv[argv.index("--") + 1:]
    stl_path = args[0]
    output_path = args[1]
    width = int(args[2])
    height = int(args[3])
    engine = args[4]
    samples = int(args[5])
    part_colors_json = args[6] if len(args) > 6 else "{}"
    transparent_bg = args[7] == "1" if len(args) > 7 else False
    # Template + material library args (passed by schaeffler-still.js)
    template_path = args[8] if len(args) > 8 and args[8] else ""
    target_collection = args[9] if len(args) > 9 else "Product"
    material_library_path = args[10] if len(args) > 10 and args[10] else ""
    material_map_raw = args[11] if len(args) > 11 else "{}"
    part_names_ordered_raw = args[12] if len(args) > 12 else "[]"
    lighting_only = args[13] == "1" if len(args) > 13 else False
    cycles_device = args[14].lower() if len(args) > 14 else "auto"  # "auto", "gpu", "cpu"
    shadow_catcher = args[15] == "1" if len(args) > 15 else False
    rotation_x = float(args[16]) if len(args) > 16 else 0.0
    rotation_y = float(args[17]) if len(args) > 17 else 0.0
    rotation_z = float(args[18]) if len(args) > 18 else 0.0
    # Optional Cycles denoise tuning; empty string means "keep Blender default".
    noise_threshold_arg = args[19] if len(args) > 19 else ""
    denoiser_arg = args[20] if len(args) > 20 else ""
    denoising_input_passes_arg = args[21] if len(args) > 21 else ""
    denoising_prefilter_arg = args[22] if len(args) > 22 else ""
    denoising_quality_arg = args[23] if len(args) > 23 else ""
    denoising_use_gpu_arg = args[24] if len(args) > 24 else ""
    os.makedirs(os.path.dirname(output_path), exist_ok=True)
    # Tolerate malformed JSON args by falling back to empty mappings/lists.
    try:
        part_colors = json.loads(part_colors_json)
    except json.JSONDecodeError:
        part_colors = {}
    try:
        material_map = json.loads(material_map_raw) if material_map_raw else {}
    except json.JSONDecodeError:
        material_map = {}
    try:
        part_names_ordered = json.loads(part_names_ordered_raw) if part_names_ordered_raw else []
    except json.JSONDecodeError:
        part_names_ordered = []
    # Validate template path: if provided it MUST exist on disk.
    # A missing template is a configuration error — fail loudly rather than
    # silently falling back to factory-settings mode which produces renders that
    # look completely wrong.
    if template_path and not os.path.isfile(template_path):
        print(f"[still_render] ERROR: template_path was provided but file not found: {template_path}")
        print("[still_render] Ensure the blend-templates directory is accessible on this worker.")
        sys.exit(1)
    use_template = bool(template_path)
    print(f"[still_render] engine={engine}, samples={samples}, size={width}x{height}, transparent={transparent_bg}")
    print(f"[still_render] part_names_ordered: {len(part_names_ordered)} entries")
    if use_template:
        print(f"[still_render] template={template_path}, collection={target_collection}, lighting_only={lighting_only}")
    else:
        print("[still_render] no template — using factory settings (Mode A)")
    if material_library_path:
        print(f"[still_render] material_library={material_library_path}, material_map keys={list(material_map.keys())}")
    # ── SCENE SETUP ──────────────────────────────────────────────────────────
    if use_template:
        # ── MODE B: Template-based render ────────────────────────────────────
        print(f"[still_render] Opening template: {template_path}")
        bpy.ops.wm.open_mainfile(filepath=template_path)
        # Find or create target collection
        target_col = _ensure_collection(target_collection)
        # Import and split STL
        parts = _import_stl(stl_path)
        # Scale mm→m: STEP coords are mm, Blender default unit is metres
        _scale_mm_to_m(parts)
        # Apply render position rotation (before camera/bbox calculations)
        _apply_rotation(parts, rotation_x, rotation_y, rotation_z)
        # Move imported parts into target collection
        for part in parts:
            for col in list(part.users_collection):
                col.objects.unlink(part)
            target_col.objects.link(part)
        # Apply smooth shading
        for part in parts:
            _apply_smooth(part, SMOOTH_ANGLE)
        # Material assignment: library materials if available, otherwise palette
        if material_library_path and material_map:
            mat_map_lower = {k.lower(): v for k, v in material_map.items()}
            _apply_material_library(parts, material_library_path, mat_map_lower, part_names_ordered)
            # Parts not matched by library get palette fallback
            for i, part in enumerate(parts):
                if not part.data.materials or len(part.data.materials) == 0:
                    _assign_palette_material(part, i)
        else:
            # Per-part colour overrides (hex) from part_colors; palette otherwise.
            for i, part in enumerate(parts):
                step_name = _resolve_part_name(i, part, part_names_ordered)
                color_hex = part_colors.get(step_name)
                if color_hex:
                    color = _hex_to_linear(color_hex)
                    mat = bpy.data.materials.new(name=f"Part_{i}")
                    mat.use_nodes = True
                    bsdf = mat.node_tree.nodes.get("Principled BSDF")
                    if bsdf:
                        bsdf.inputs["Base Color"].default_value = color
                        bsdf.inputs["Metallic"].default_value = 0.35
                        bsdf.inputs["Roughness"].default_value = 0.40
                        try:
                            # Input name varies across Blender versions.
                            bsdf.inputs["Specular IOR Level"].default_value = 0.5
                        except KeyError:
                            pass
                    part.data.materials.clear()
                    part.data.materials.append(mat)
                else:
                    _assign_palette_material(part, i)
        # ── Shadow catcher (Cycles only, template mode only) ─────────────────
        if shadow_catcher:
            sc_col_name = "Shadowcatcher"
            sc_obj_name = "Shadowcatcher"
            # Re-enable the template's Shadowcatcher collection in every view
            # layer (it may ship excluded/hidden by default).
            for vl in bpy.context.scene.view_layers:
                def _enable_col_recursive(layer_col):
                    if layer_col.collection.name == sc_col_name:
                        layer_col.exclude = False
                        layer_col.collection.hide_render = False
                        layer_col.collection.hide_viewport = False
                        return True
                    for child in layer_col.children:
                        if _enable_col_recursive(child):
                            return True
                    return False
                _enable_col_recursive(vl.layer_collection)
            sc_obj = bpy.data.objects.get(sc_obj_name)
            if sc_obj:
                # Drop the catcher plane to the lowest world-space Z of the assembly.
                all_world_z = []
                for part in parts:
                    for corner in part.bound_box:
                        all_world_z.append((part.matrix_world @ Vector(corner)).z)
                if all_world_z:
                    sc_obj.location.z = min(all_world_z)
                print(f"[still_render] shadow catcher enabled, plane Z={sc_obj.location.z:.4f}")
            else:
                print(f"[still_render] WARNING: shadow catcher object '{sc_obj_name}' not found in template")
        # lighting_only: use template World/HDRI but force auto-camera UNLESS the shadow
        # catcher is enabled — in that case the template camera is already positioned to
        # show both the product and its shadow on the ground plane.
        needs_auto_camera = (lighting_only and not shadow_catcher) or not bpy.context.scene.camera
        if lighting_only and not shadow_catcher:
            print("[still_render] lighting_only mode: using template World/HDRI, forcing auto-camera")
        elif needs_auto_camera:
            print("[still_render] WARNING: template has no camera — will create auto-camera")
        # Set very close near clip on template camera for mm-scale parts (now in metres)
        if not needs_auto_camera and bpy.context.scene.camera:
            bpy.context.scene.camera.data.clip_start = 0.001
        print(f"[still_render] template mode: {len(parts)} parts imported into collection '{target_collection}'")
    else:
        # ── MODE A: Factory settings (original behavior) ─────────────────────
        needs_auto_camera = True
        bpy.ops.wm.read_factory_settings(use_empty=True)
        parts = _import_stl(stl_path)
        # Scale mm→m: STEP coords are mm, Blender default unit is metres
        _scale_mm_to_m(parts)
        # Apply render position rotation (before camera/bbox calculations)
        _apply_rotation(parts, rotation_x, rotation_y, rotation_z)
        for i, part in enumerate(parts):
            _apply_smooth(part, SMOOTH_ANGLE)
        # Material assignment: library materials if available, else part_colors/palette
        if material_library_path and material_map:
            mat_map_lower = {k.lower(): v for k, v in material_map.items()}
            _apply_material_library(parts, material_library_path, mat_map_lower, part_names_ordered)
            # Palette fallback for unmatched parts
            for i, part in enumerate(parts):
                if not part.data.materials or len(part.data.materials) == 0:
                    _assign_palette_material(part, i)
        else:
            # part_colors or palette — use index-based lookup via part_names_ordered
            for i, part in enumerate(parts):
                step_name = _resolve_part_name(i, part, part_names_ordered)
                color_hex = part_colors.get(step_name)
                if color_hex:
                    color = _hex_to_linear(color_hex)
                else:
                    color = PALETTE_LINEAR[i % len(PALETTE_LINEAR)]
                mat = bpy.data.materials.new(name=f"Part_{i}")
                mat.use_nodes = True
                bsdf = mat.node_tree.nodes.get("Principled BSDF")
                if bsdf:
                    bsdf.inputs["Base Color"].default_value = color
                    bsdf.inputs["Metallic"].default_value = 0.35
                    bsdf.inputs["Roughness"].default_value = 0.40
                    try:
                        # Input name varies across Blender versions.
                        bsdf.inputs["Specular IOR Level"].default_value = 0.5
                    except KeyError:
                        pass
                part.data.materials.clear()
                part.data.materials.append(mat)
    if needs_auto_camera:
        # ── Combined bounding box / bounding sphere ──────────────────────────
        all_corners = []
        for part in parts:
            all_corners.extend(part.matrix_world @ Vector(c) for c in part.bound_box)
        bbox_min = Vector((
            min(v.x for v in all_corners),
            min(v.y for v in all_corners),
            min(v.z for v in all_corners),
        ))
        bbox_max = Vector((
            max(v.x for v in all_corners),
            max(v.y for v in all_corners),
            max(v.z for v in all_corners),
        ))
        bbox_center = (bbox_min + bbox_max) * 0.5
        bbox_dims = bbox_max - bbox_min
        # Half the bbox diagonal, floored to dodge div-by-zero on degenerate geometry.
        bsphere_radius = max(bbox_dims.length * 0.5, 0.001)
        print(f"[still_render] bbox_dims={tuple(round(d, 4) for d in bbox_dims)}, "
              f"bsphere_radius={bsphere_radius:.4f}")
        # ── Lighting — only in Mode A (factory settings) ─────────────────────
        # In template mode the .blend file provides its own World/HDRI lighting.
        # Adding auto-lights would overpower the template's intended look.
        if not use_template:
            light_dist = bsphere_radius * 6.0
            bpy.ops.object.light_add(type='SUN', location=(
                bbox_center.x + light_dist * 0.5,
                bbox_center.y - light_dist * 0.35,
                bbox_center.z + light_dist,
            ))
            sun = bpy.context.active_object
            sun.data.energy = 4.0
            sun.rotation_euler = (math.radians(45), 0, math.radians(30))
            bpy.ops.object.light_add(type='AREA', location=(
                bbox_center.x - light_dist * 0.4,
                bbox_center.y + light_dist * 0.4,
                bbox_center.z + light_dist * 0.7,
            ))
            fill = bpy.context.active_object
            # Area-light power scales with the square of the subject size.
            fill.data.energy = max(800.0, bsphere_radius ** 2 * 2000.0)
            fill.data.size = max(4.0, bsphere_radius * 4.0)
        # ── Camera (isometric-style, matches blender_render.py) ──────────────
        ELEVATION_DEG = 28.0
        AZIMUTH_DEG = 40.0
        LENS_MM = 50.0
        SENSOR_WIDTH_MM = 36.0
        FILL_FACTOR = 0.85  # fraction of the frame the bounding sphere should fill
        elevation_rad = math.radians(ELEVATION_DEG)
        azimuth_rad = math.radians(AZIMUTH_DEG)
        cam_dir = Vector((
            math.cos(elevation_rad) * math.cos(azimuth_rad),
            math.cos(elevation_rad) * math.sin(azimuth_rad),
            math.sin(elevation_rad),
        )).normalized()
        # Fit the bounding sphere inside the tighter of the two half-FOVs.
        fov_h = math.atan(SENSOR_WIDTH_MM / (2.0 * LENS_MM))
        fov_v = math.atan(SENSOR_WIDTH_MM * (height / width) / (2.0 * LENS_MM))
        fov_used = min(fov_h, fov_v)
        dist = (bsphere_radius / math.tan(fov_used)) / FILL_FACTOR
        dist = max(dist, bsphere_radius * 1.5)
        cam_location = bbox_center + cam_dir * dist
        bpy.ops.object.camera_add(location=cam_location)
        cam_obj = bpy.context.active_object
        cam_obj.data.lens = LENS_MM
        bpy.context.scene.camera = cam_obj
        # Look-at rotation
        look_dir = (bbox_center - cam_location).normalized()
        up_world = Vector((0.0, 0.0, 1.0))
        right = look_dir.cross(up_world)
        if right.length < 1e-6:
            # Looking straight up/down — pick an arbitrary horizontal right axis.
            right = Vector((1.0, 0.0, 0.0))
        right.normalize()
        cam_up = right.cross(look_dir).normalized()
        rot_mat = Matrix((
            (right.x, right.y, right.z),
            (cam_up.x, cam_up.y, cam_up.z),
            (-look_dir.x, -look_dir.y, -look_dir.z),
        )).transposed()
        cam_obj.rotation_euler = rot_mat.to_euler('XYZ')
        cam_obj.data.clip_start = max(dist * 0.001, 0.0001)
        cam_obj.data.clip_end = dist + bsphere_radius * 3.0
    # ── World background — only in Mode A ───────────────────────────────
    # In template mode the .blend file owns its World (HDRI, sky texture,
    # studio lighting). Overwriting it would destroy the HDR look the
    # template was designed to use (e.g. Alpha-HDR output types).
    if not use_template:
        world = bpy.data.worlds.new("World")
        bpy.context.scene.world = world
        world.use_nodes = True
        bg = world.node_tree.nodes["Background"]
        bg.inputs["Color"].default_value = (0.96, 0.96, 0.97, 1.0)
        bg.inputs["Strength"].default_value = 0.15
    # ── Colour management ────────────────────────────────────────────────────
    # In template mode the .blend file owns its colour management settings
    # (e.g. Filmic/AgX for HDR, custom exposure for Alpha-HDR output types).
    # Overwriting them would destroy the look the template was designed for.
    # In factory-settings mode (Mode A) we force Standard to avoid the grey
    # Filmic tint that Blender applies by default.
    scene = bpy.context.scene
    if not use_template:
        scene.view_settings.view_transform = 'Standard'
        scene.view_settings.exposure = 0.0
        scene.view_settings.gamma = 1.0
        try:
            scene.view_settings.look = 'None'
        except Exception:
            pass
    # ── Render engine ────────────────────────────────────────────────────────
    if engine == "eevee":
        eevee_ok = False
        # The EEVEE engine enum id differs across Blender versions.
        for eevee_id in ('BLENDER_EEVEE', 'BLENDER_EEVEE_NEXT'):
            try:
                scene.render.engine = eevee_id
                eevee_ok = True
                print(f"[still_render] EEVEE engine id: {eevee_id}")
                break
            except TypeError:
                continue
        if eevee_ok:
            # The sample-count attribute was renamed between versions.
            for attr in ('taa_render_samples', 'samples'):
                try:
                    setattr(scene.eevee, attr, samples)
                    break
                except AttributeError:
                    continue
        else:
            print("[still_render] WARNING: EEVEE unavailable, falling back to Cycles")
            engine = "cycles"
    if engine != "eevee":
        scene.render.engine = 'CYCLES'
        scene.cycles.samples = samples
        scene.cycles.use_denoising = True
        scene.cycles.denoiser = denoiser_arg if denoiser_arg else 'OPENIMAGEDENOISE'
        # Optional denoise overrides — each guarded because attribute
        # availability varies across Blender versions.
        if denoising_input_passes_arg:
            try: scene.cycles.denoising_input_passes = denoising_input_passes_arg
            except Exception: pass
        if denoising_prefilter_arg:
            try: scene.cycles.denoising_prefilter = denoising_prefilter_arg
            except Exception: pass
        if denoising_quality_arg:
            try: scene.cycles.denoising_quality = denoising_quality_arg
            except Exception: pass
        if denoising_use_gpu_arg:
            try: scene.cycles.denoising_use_gpu = (denoising_use_gpu_arg == "1")
            except AttributeError: pass
        if noise_threshold_arg:
            scene.cycles.use_adaptive_sampling = True
            scene.cycles.adaptive_threshold = float(noise_threshold_arg)
        # Device selection: "cpu" forces CPU, "gpu" forces GPU (warns if unavailable),
        # "auto" (default) tries GPU first and falls back to CPU.
        print(f"[still_render] cycles_device={cycles_device}")
        gpu_found = False
        if cycles_device != "cpu":
            try:
                cycles_prefs = bpy.context.preferences.addons['cycles'].preferences
                for device_type in ('OPTIX', 'CUDA', 'HIP', 'ONEAPI'):
                    try:
                        cycles_prefs.compute_device_type = device_type
                        cycles_prefs.get_devices()
                        gpu_devs = [d for d in cycles_prefs.devices if d.type != 'CPU']
                        if gpu_devs:
                            for d in gpu_devs:
                                d.use = True
                            scene.cycles.device = 'GPU'
                            gpu_found = True
                            print(f"[still_render] Cycles GPU ({device_type})")
                            break
                    except Exception:
                        continue
            except Exception:
                pass
        if not gpu_found:
            scene.cycles.device = 'CPU'
            print("[still_render] WARNING: GPU not found — falling back to CPU")
    # ── Render settings ──────────────────────────────────────────────────────
    scene.render.resolution_x = width
    scene.render.resolution_y = height
    scene.render.resolution_percentage = 100
    scene.render.film_transparent = transparent_bg
    ext = os.path.splitext(output_path)[1].lower()
    if ext in ('.jpg', '.jpeg'):
        scene.render.image_settings.file_format = 'JPEG'
        scene.render.image_settings.quality = 92
    else:
        scene.render.image_settings.file_format = 'PNG'
    scene.render.filepath = output_path
    # ── Render ───────────────────────────────────────────────────────────────
    print(f"[still_render] Rendering -> {output_path} (Blender {bpy.app.version_string})")
    bpy.ops.render.render(write_still=True)
    print("[still_render] render done.")
    # ── Pillow post-processing: green bar + model name label ─────────────────
    # Skip overlay for transparent renders to keep clean alpha channel
    if transparent_bg:
        print("[still_render] Transparent mode — skipping Pillow overlay.")
    else:
        try:
            from PIL import Image, ImageDraw, ImageFont
            img = Image.open(output_path).convert("RGBA")
            draw = ImageDraw.Draw(img)
            W, H = img.size
            # Schaeffler green top bar
            bar_h = max(8, H // 32)
            draw.rectangle([0, 0, W - 1, bar_h - 1], fill=(0, 137, 61, 255))
            # Model name strip at bottom
            model_name = os.path.splitext(os.path.basename(stl_path))[0]
            label_h = max(20, H // 20)
            img.alpha_composite(
                Image.new("RGBA", (W, label_h), (30, 30, 30, 180)),
                dest=(0, H - label_h),
            )
            font_size = max(10, label_h - 6)
            font = None
            # Probe common Linux font paths; fall back to PIL's builtin font.
            for fp in [
                "/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf",
                "/usr/share/fonts/truetype/liberation/LiberationSans-Bold.ttf",
                "/usr/share/fonts/truetype/freefont/FreeSansBold.ttf",
            ]:
                if os.path.exists(fp):
                    try:
                        font = ImageFont.truetype(fp, font_size)
                        break
                    except Exception:
                        pass
            if font is None:
                font = ImageFont.load_default()
            tb = draw.textbbox((0, 0), model_name, font=font)
            text_w = tb[2] - tb[0]
            draw.text(
                ((W - text_w) // 2, H - label_h + (label_h - (tb[3] - tb[1])) // 2),
                model_name, font=font, fill=(255, 255, 255, 255),
            )
            # Save in original format
            if ext in ('.jpg', '.jpeg'):
                img.convert("RGB").save(output_path, format="JPEG", quality=92)
            else:
                img.convert("RGB").save(output_path, format="PNG")
            print("[still_render] Pillow overlay applied.")
        except ImportError:
            # Overlay is cosmetic — a missing Pillow must not fail the render.
            print("[still_render] Pillow not available - skipping overlay.")
        except Exception as exc:
            print(f"[still_render] Pillow overlay failed (non-fatal): {exc}")
    print("[still_render] Done.")
# Script entry point: Blender invokes this file via `--python`; all render
# parameters arrive on the command line after the "--" separator.
if __name__ == "__main__":
    main()
+762
View File
@@ -0,0 +1,762 @@
"""Blender Python script: turntable animation render for Flamenco.
Usage (from Blender):
blender --background --python turntable_render.py -- \
<stl_path> <frames_dir> <frame_count> <degrees> <width> <height> \
<engine> <samples> <part_colors_json> \
[template_path] [target_collection] [material_library_path] [material_map_json]
"""
import bpy
import sys
import os
import json
import math
from mathutils import Vector, Matrix
# ── Colour palette (matches blender_render.py / Three.js renderer) ───────────
# Ten distinct sRGB hex colours; parts are coloured by index modulo this list
# (see _assign_palette_material), so colours repeat after ten parts.
PALETTE_HEX = [
    "#4C9BE8", "#E85B4C", "#4CBE72", "#E8A84C", "#A04CE8",
    "#4CD4E8", "#E84CA8", "#7EC850", "#E86B30", "#5088C8",
]
def _srgb_to_linear(c: int) -> float:
v = c / 255.0
return v / 12.92 if v <= 0.04045 else ((v + 0.055) / 1.055) ** 2.4
def _hex_to_linear(hex_color: str) -> tuple:
    """Convert "#RRGGBB" (sRGB) to a linear-light RGBA tuple with alpha 1.0."""
    digits = hex_color.lstrip('#')
    rgb = tuple(
        _srgb_to_linear(int(digits[i:i + 2], 16)) for i in (0, 2, 4)
    )
    return rgb + (1.0,)
# Palette pre-converted to linear RGBA once at import time.
PALETTE_LINEAR = [_hex_to_linear(h) for h in PALETTE_HEX]
# Smooth-shading threshold passed to _apply_smooth for every imported part.
SMOOTH_ANGLE = 30  # degrees
# ── Helper functions ─────────────────────────────────────────────────────────
def _ensure_collection(name: str):
"""Return a collection by name, creating it if needed."""
if name in bpy.data.collections:
return bpy.data.collections[name]
col = bpy.data.collections.new(name)
bpy.context.scene.collection.children.link(col)
return col
def _assign_palette_material(part_obj, index):
    """Create a Principled material in the palette colour for `index` and make
    it the part's sole material (palette cycles by index)."""
    mat = bpy.data.materials.new(name=f"Part_{index}")
    mat.use_nodes = True
    colour = PALETTE_LINEAR[index % len(PALETTE_LINEAR)]
    bsdf = mat.node_tree.nodes.get("Principled BSDF")
    if bsdf is not None:
        bsdf.inputs["Base Color"].default_value = colour
        bsdf.inputs["Metallic"].default_value = 0.35
        bsdf.inputs["Roughness"].default_value = 0.40
        try:
            # Input renamed across Blender versions; ignore when absent.
            bsdf.inputs["Specular IOR Level"].default_value = 0.5
        except KeyError:
            pass
    part_obj.data.materials.clear()
    part_obj.data.materials.append(mat)
def _apply_smooth(part_obj, angle_deg):
    """Shade a mesh smooth by angle when angle_deg > 0, otherwise flat."""
    bpy.context.view_layer.objects.active = part_obj
    part_obj.select_set(True)
    if angle_deg <= 0:
        bpy.ops.object.shade_flat()
        return
    try:
        # Blender 4.1+: explicit smooth-by-angle operator.
        bpy.ops.object.shade_smooth_by_angle(angle=math.radians(angle_deg))
    except AttributeError:
        # Older Blender: classic smooth shading plus mesh auto-smooth angle.
        bpy.ops.object.shade_smooth()
        part_obj.data.use_auto_smooth = True
        part_obj.data.auto_smooth_angle = math.radians(angle_deg)
import re as _re
def _apply_rotation(parts, rx, ry, rz):
    """Rotate all parts by Euler XYZ angles (degrees) about the world origin.

    The world origin coincides with the assembly centre because _import_stl
    centres the parts there. The rotation is baked via transform_apply before
    material assignment and camera/bbox calculations, so everything downstream
    sees the final pose. No-op when parts is empty or all angles are zero.
    """
    if not parts:
        return
    if rx == 0.0 and ry == 0.0 and rz == 0.0:
        return
    from mathutils import Euler
    transform = Euler((math.radians(rx), math.radians(ry), math.radians(rz)), 'XYZ').to_matrix().to_4x4()
    for obj in parts:
        obj.matrix_world = transform @ obj.matrix_world
    # Select everything and bake the rotation into the mesh transforms.
    bpy.ops.object.select_all(action='DESELECT')
    for obj in parts:
        obj.select_set(True)
    bpy.context.view_layer.objects.active = parts[0]
    bpy.ops.object.transform_apply(location=False, rotation=True, scale=False)
    print(f"[turntable_render] applied rotation ({rx}°, {ry}°, {rz}°) to {len(parts)} parts")
def _axis_rotation(axis: str, degrees: float) -> tuple:
"""Map turntable axis name to Euler (x, y, z) rotation in radians."""
rad = math.radians(degrees)
if axis == "world_x":
return (rad, 0.0, 0.0)
elif axis == "world_y":
return (0.0, rad, 0.0)
else: # "world_z" default
return (0.0, 0.0, rad)
def _set_fcurves_linear(action):
"""Set LINEAR interpolation on all fcurves.
Handles both the legacy Blender < 4.4 API (action.fcurves) and the new
Baklava layered-action API introduced in Blender 4.4 / 5.x
(action.layers[*].strips[*].channelbags[*].fcurves).
"""
try:
# New layered-action API (Blender 4.4+ / 5.x)
for layer in action.layers:
for strip in layer.strips:
for channelbag in strip.channelbags:
for fc in channelbag.fcurves:
for kp in fc.keyframe_points:
kp.interpolation = 'LINEAR'
except AttributeError:
# Legacy API (Blender < 4.4)
for fc in action.fcurves:
for kp in fc.keyframe_points:
kp.interpolation = 'LINEAR'
def _scale_mm_to_m(parts):
    """Scale imported STL objects from millimetres to Blender metres (×0.001).

    STEP/STL coordinates are in mm while Blender's default unit is the metre;
    without this conversion a 50 mm part would appear 50 m tall relative to a
    template environment designed in metric units.
    """
    if not parts:
        return
    factor = 0.001
    bpy.ops.object.select_all(action='DESELECT')
    for obj in parts:
        obj.scale = (factor, factor, factor)
        obj.location *= factor
        obj.select_set(True)
    bpy.context.view_layer.objects.active = parts[0]
    # Bake the scale so downstream bbox/camera maths sees unit-scale transforms.
    bpy.ops.object.transform_apply(scale=True, location=False, rotation=False)
    print(f"[turntable_render] scaled {len(parts)} parts mm→m (×0.001)")
def _import_stl(stl_file):
    """Import STL into Blender, using per-part STLs if available.

    Checks for {stl_stem}_parts/manifest.json next to the STL file.
    - Per-part mode: imports each part STL, names Blender object after STEP part name.
    - Fallback: imports combined STL and splits by loose geometry.
    Returns list of Blender mesh objects, centred at origin.

    Exits the process with status 1 if the combined-STL fallback imports nothing.
    """
    stl_dir = os.path.dirname(stl_file)
    stl_stem = os.path.splitext(os.path.basename(stl_file))[0]
    parts_dir = os.path.join(stl_dir, stl_stem + "_parts")
    manifest_path = os.path.join(parts_dir, "manifest.json")
    parts = []
    if os.path.isfile(manifest_path):
        # ── Per-part mode ────────────────────────────────────────────────
        try:
            with open(manifest_path, "r") as f:
                manifest = json.loads(f.read())
            part_entries = manifest.get("parts", [])
        except Exception as e:
            # Corrupt/unreadable manifest → fall through to combined-STL mode.
            print(f"[turntable_render] WARNING: failed to read manifest: {e}")
            part_entries = []
        if part_entries:
            for entry in part_entries:
                part_file = os.path.join(parts_dir, entry["file"])
                part_name = entry["name"]
                if not os.path.isfile(part_file):
                    # Missing part files are skipped, not fatal.
                    print(f"[turntable_render] WARNING: part STL missing: {part_file}")
                    continue
                # Deselect first so the fresh import is the only selection.
                bpy.ops.object.select_all(action='DESELECT')
                bpy.ops.wm.stl_import(filepath=part_file)
                imported = bpy.context.selected_objects
                if imported:
                    obj = imported[0]
                    # Name both object and mesh datablock after the STEP part.
                    obj.name = part_name
                    if obj.data:
                        obj.data.name = part_name
                    parts.append(obj)
            if parts:
                print(f"[turntable_render] imported {len(parts)} named parts from per-part STLs")
    # ── Fallback: combined STL + separate by loose ───────────────────────
    if not parts:
        bpy.ops.wm.stl_import(filepath=stl_file)
        obj = bpy.context.selected_objects[0] if bpy.context.selected_objects else None
        if obj is None:
            print(f"ERROR: No objects imported from {stl_file}")
            sys.exit(1)
        bpy.context.view_layer.objects.active = obj
        # Centre the combined mesh at the world origin before splitting.
        bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY', center='BOUNDS')
        obj.location = (0.0, 0.0, 0.0)
        # Split disconnected shells into separate (anonymous) objects.
        bpy.ops.object.mode_set(mode='EDIT')
        bpy.ops.mesh.separate(type='LOOSE')
        bpy.ops.object.mode_set(mode='OBJECT')
        parts = list(bpy.context.selected_objects)
        print(f"[turntable_render] fallback: separated into {len(parts)} part(s)")
        return parts
    # ── Centre per-part imports at origin (combined bbox) ────────────────
    all_corners = []
    for p in parts:
        all_corners.extend(p.matrix_world @ Vector(c) for c in p.bound_box)
    if all_corners:
        mins = Vector((min(v.x for v in all_corners),
                       min(v.y for v in all_corners),
                       min(v.z for v in all_corners)))
        maxs = Vector((max(v.x for v in all_corners),
                       max(v.y for v in all_corners),
                       max(v.z for v in all_corners)))
        center = (mins + maxs) * 0.5
        # Shift every part so the combined bounding-box centre sits at origin.
        for p in parts:
            p.location -= center
    return parts
def _resolve_part_name(index, part_obj, part_names_ordered):
"""Get the STEP part name for a Blender part by index.
With per-part import, part_obj.name IS the STEP name (possibly with
Blender .NNN suffix). Falls back to part_names_ordered for combined-STL mode.
"""
base_name = _re.sub(r'\.\d{3}$', '', part_obj.name)
if part_names_ordered and index < len(part_names_ordered):
return part_names_ordered[index]
return base_name
def _apply_material_library(parts, mat_lib_path, mat_map, part_names_ordered=None):
    """Append materials from library .blend and assign to parts via material_map.

    With per-part STL import, Blender objects are named after STEP parts,
    so matching is by name (stripping Blender .NNN suffix for duplicates).
    Falls back to part_names_ordered index-based matching for combined-STL mode.
    mat_map: {part_name_lower: material_name}
    Parts without a match keep their current material.
    """
    if not mat_lib_path or not os.path.isfile(mat_lib_path):
        # Missing library is non-fatal: parts keep whatever material they have.
        print(f"[turntable_render] material library not found: {mat_lib_path}")
        return
    # Collect unique material names needed
    needed = set(mat_map.values())
    if not needed:
        return
    # Append materials from library
    appended = {}
    for mat_name in needed:
        # bpy.ops.wm.append addresses datablocks *inside* a .blend file via the
        # pseudo-path "<file>/Material/<name>".
        inner_path = f"{mat_lib_path}/Material/{mat_name}"
        try:
            bpy.ops.wm.append(
                filepath=inner_path,
                directory=f"{mat_lib_path}/Material/",
                filename=mat_name,
                link=False,
            )
            if mat_name in bpy.data.materials:
                appended[mat_name] = bpy.data.materials[mat_name]
                print(f"[turntable_render] appended material: {mat_name}")
            else:
                print(f"[turntable_render] WARNING: material '{mat_name}' not found after append")
        except Exception as exc:
            # A single failed append should not abort the rest of the render.
            print(f"[turntable_render] WARNING: failed to append material '{mat_name}': {exc}")
    if not appended:
        return
    # Assign materials to parts — primary: name-based (per-part STL mode),
    # secondary: index-based via part_names_ordered (combined STL fallback)
    assigned_count = 0
    for i, part in enumerate(parts):
        # Try name-based matching first (strip Blender .NNN suffix)
        base_name = _re.sub(r'\.\d{3}$', '', part.name)
        part_key = base_name.lower().strip()
        mat_name = mat_map.get(part_key)
        # Fall back to index-based matching via part_names_ordered
        if not mat_name and part_names_ordered and i < len(part_names_ordered):
            step_name = part_names_ordered[i]
            part_key = step_name.lower().strip()
            mat_name = mat_map.get(part_key)
        if mat_name and mat_name in appended:
            # Replace any existing materials with the single library material.
            part.data.materials.clear()
            part.data.materials.append(appended[mat_name])
            assigned_count += 1
            print(f"[turntable_render] assigned '{mat_name}' to part '{part.name}'")
    print(f"[turntable_render] material assignment: {assigned_count}/{len(parts)} parts matched")
def main() -> None:
    """Entry point: render a turntable PNG frame sequence of an STL in headless Blender.

    Invoked as ``blender ... --python turntable_render.py -- <args>``; every
    positional argument after the ``--`` separator is parsed here:

      0  stl_path                 input STL file
      1  frames_dir               output directory for PNG frames (created if missing)
      2  frame_count              number of frames to render
      3  degrees                  total turntable rotation in degrees
      4  width / 5 height         render resolution in pixels
      6  engine                   "eevee" or anything else -> Cycles
      7  samples                  render sample count
      8  part_colors_json         JSON {step_part_name: "#rrggbb"} (optional)
      9  template_path            .blend template (Mode B); must exist if given
      10 target_collection        template collection that imported parts go into
      11 material_library_path    .blend containing named materials (optional)
      12 material_map_raw         JSON {part_name: material_name}
      13 part_names_ordered_raw   JSON list of STEP part names (index-based fallback)
      14 lighting_only            "1" -> keep template World/HDRI but force auto-camera
      15 cycles_device            "auto" | "gpu" | "cpu"
      16 shadow_catcher           "1" -> enable the template's shadow-catcher setup
      17-19 rotation_x/y/z        pre-render model rotation (floats; degrees assumed —
                                  actual unit depends on _apply_rotation, defined elsewhere)
      20 turntable_axis           axis token passed to _axis_rotation (default "world_z")
      21 bg_color                 when set, frames render transparent for FFmpeg compositing
      22 transparent_bg           "1" -> transparent (alpha) PNG frames

    Scene modes:
      Mode B (template): opens the given .blend, imports parts into
      target_collection, and keeps the template's World, camera and colour
      management. Mode A (factory): empty scene with auto lights, camera and
      a plain background.

    Exits with status 1 when template_path is provided but not found on disk.
    """
    argv = sys.argv
    # Everything after "--" is our args
    args = argv[argv.index("--") + 1:]
    stl_path = args[0]
    frames_dir = args[1]
    frame_count = int(args[2])
    degrees = int(args[3])
    width = int(args[4])
    height = int(args[5])
    engine = args[6]
    samples = int(args[7])
    part_colors_json = args[8] if len(args) > 8 else "{}"
    # Template + material library args (passed by schaeffler-turntable.js)
    template_path = args[9] if len(args) > 9 and args[9] else ""
    target_collection = args[10] if len(args) > 10 else "Product"
    material_library_path = args[11] if len(args) > 11 and args[11] else ""
    material_map_raw = args[12] if len(args) > 12 else "{}"
    part_names_ordered_raw = args[13] if len(args) > 13 else "[]"
    lighting_only = args[14] == "1" if len(args) > 14 else False
    cycles_device = args[15].lower() if len(args) > 15 else "auto"  # "auto", "gpu", "cpu"
    shadow_catcher = args[16] == "1" if len(args) > 16 else False
    rotation_x = float(args[17]) if len(args) > 17 else 0.0
    rotation_y = float(args[18]) if len(args) > 18 else 0.0
    rotation_z = float(args[19]) if len(args) > 19 else 0.0
    turntable_axis = args[20] if len(args) > 20 else "world_z"
    bg_color = args[21] if len(args) > 21 else ""
    transparent_bg = args[22] == "1" if len(args) > 22 else False
    os.makedirs(frames_dir, exist_ok=True)
    # JSON payloads are best-effort: malformed input degrades to empty defaults
    # rather than aborting the render.
    try:
        part_colors = json.loads(part_colors_json)
    except json.JSONDecodeError:
        part_colors = {}
    try:
        material_map = json.loads(material_map_raw) if material_map_raw else {}
    except json.JSONDecodeError:
        material_map = {}
    try:
        part_names_ordered = json.loads(part_names_ordered_raw) if part_names_ordered_raw else []
    except json.JSONDecodeError:
        part_names_ordered = []
    # Validate template path: if provided it MUST exist on disk.
    if template_path and not os.path.isfile(template_path):
        print(f"[turntable_render] ERROR: template_path was provided but file not found: {template_path}")
        print("[turntable_render] Ensure the blend-templates directory is accessible on this worker.")
        sys.exit(1)
    use_template = bool(template_path)
    print(f"[turntable_render] engine={engine}, samples={samples}, size={width}x{height}, "
          f"frames={frame_count}, degrees={degrees}")
    print(f"[turntable_render] part_names_ordered: {len(part_names_ordered)} entries")
    if use_template:
        print(f"[turntable_render] template={template_path}, collection={target_collection}, lighting_only={lighting_only}")
    else:
        print("[turntable_render] no template — using factory settings (Mode A)")
    if material_library_path:
        print(f"[turntable_render] material_library={material_library_path}, material_map keys={list(material_map.keys())}")
    # ── SCENE SETUP ──────────────────────────────────────────────────────────
    if use_template:
        # ── MODE B: Template-based render ────────────────────────────────────
        print(f"[turntable_render] Opening template: {template_path}")
        bpy.ops.wm.open_mainfile(filepath=template_path)
        # Find or create target collection
        target_col = _ensure_collection(target_collection)
        # Import and split STL
        parts = _import_stl(stl_path)
        # Scale mm→m: STEP coords are mm, Blender default unit is metres
        _scale_mm_to_m(parts)
        # Apply render position rotation before material/camera setup
        _apply_rotation(parts, rotation_x, rotation_y, rotation_z)
        # Move imported parts into target collection
        for part in parts:
            for col in list(part.users_collection):
                col.objects.unlink(part)
            target_col.objects.link(part)
        # Apply smooth shading
        for part in parts:
            _apply_smooth(part, SMOOTH_ANGLE)
        # Material assignment: library materials if available, otherwise palette
        if material_library_path and material_map:
            mat_map_lower = {k.lower(): v for k, v in material_map.items()}
            _apply_material_library(parts, material_library_path, mat_map_lower, part_names_ordered)
            # Parts not matched by library get palette fallback
            for i, part in enumerate(parts):
                if not part.data.materials or len(part.data.materials) == 0:
                    _assign_palette_material(part, i)
        else:
            # NOTE(review): in template mode a part WITH a colour entry keeps the
            # template/import material — only colourless parts get the palette.
            for i, part in enumerate(parts):
                step_name = _resolve_part_name(i, part, part_names_ordered)
                color_hex = part_colors.get(step_name)
                if not color_hex:
                    _assign_palette_material(part, i)
        # ── Shadow catcher (Cycles only, template mode only) ─────────────────
        if shadow_catcher:
            sc_col_name = "Shadowcatcher"
            sc_obj_name = "Shadowcatcher"
            # Templates may ship the Shadowcatcher collection excluded/hidden;
            # walk every view layer's collection tree and re-enable it.
            for vl in bpy.context.scene.view_layers:
                def _enable_col_recursive(layer_col):
                    # Depth-first search for the collection by name; returns True
                    # once found so the walk can stop early.
                    if layer_col.collection.name == sc_col_name:
                        layer_col.exclude = False
                        layer_col.collection.hide_render = False
                        layer_col.collection.hide_viewport = False
                        return True
                    for child in layer_col.children:
                        if _enable_col_recursive(child):
                            return True
                    return False
                _enable_col_recursive(vl.layer_collection)
            sc_obj = bpy.data.objects.get(sc_obj_name)
            if sc_obj:
                # Drop the catcher plane to the lowest world-space Z of the model
                # so shadows land exactly under the parts.
                all_world_z = []
                for part in parts:
                    for corner in part.bound_box:
                        all_world_z.append((part.matrix_world @ Vector(corner)).z)
                if all_world_z:
                    sc_obj.location.z = min(all_world_z)
                print(f"[turntable_render] shadow catcher enabled, plane Z={sc_obj.location.z:.4f}")
            else:
                print(f"[turntable_render] WARNING: shadow catcher object '{sc_obj_name}' not found in template")
        # lighting_only: always use auto-framing; normal template: use camera if present
        needs_auto_camera = (lighting_only and not shadow_catcher) or not bpy.context.scene.camera
        if lighting_only and not shadow_catcher:
            print("[turntable_render] lighting_only mode: using template World/HDRI, forcing auto-camera")
        elif needs_auto_camera:
            print("[turntable_render] WARNING: template has no camera — will create auto-camera")
        # Set very close near clip on template camera for mm-scale parts (now in metres)
        if not needs_auto_camera and bpy.context.scene.camera:
            bpy.context.scene.camera.data.clip_start = 0.001
        print(f"[turntable_render] template mode: {len(parts)} parts imported into collection '{target_collection}'")
    else:
        # ── MODE A: Factory settings ─────────────────────────────────────────
        needs_auto_camera = True
        bpy.ops.wm.read_factory_settings(use_empty=True)
        parts = _import_stl(stl_path)
        # Scale mm→m: STEP coords are mm, Blender default unit is metres
        _scale_mm_to_m(parts)
        # Apply render position rotation before material/camera setup
        _apply_rotation(parts, rotation_x, rotation_y, rotation_z)
        for i, part in enumerate(parts):
            _apply_smooth(part, SMOOTH_ANGLE)
        # Material assignment: library materials if available, else part_colors/palette
        if material_library_path and material_map:
            mat_map_lower = {k.lower(): v for k, v in material_map.items()}
            _apply_material_library(parts, material_library_path, mat_map_lower, part_names_ordered)
            # Palette fallback for unmatched parts
            for i, part in enumerate(parts):
                if not part.data.materials or len(part.data.materials) == 0:
                    _assign_palette_material(part, i)
        else:
            # part_colors or palette — use index-based lookup via part_names_ordered
            for i, part in enumerate(parts):
                step_name = _resolve_part_name(i, part, part_names_ordered)
                color_hex = part_colors.get(step_name)
                if color_hex:
                    # Build a fresh Principled material with the requested colour
                    # and a semi-metallic machined-part look.
                    mat = bpy.data.materials.new(name=f"mat_{part.name}")
                    mat.use_nodes = True
                    bsdf = mat.node_tree.nodes.get("Principled BSDF")
                    if bsdf:
                        color = _hex_to_linear(color_hex)
                        bsdf.inputs["Base Color"].default_value = color
                        bsdf.inputs["Metallic"].default_value = 0.35
                        bsdf.inputs["Roughness"].default_value = 0.40
                        try:
                            # Input was renamed across Blender versions; older
                            # builds without "Specular IOR Level" are tolerated.
                            bsdf.inputs["Specular IOR Level"].default_value = 0.5
                        except KeyError:
                            pass
                    part.data.materials.clear()
                    part.data.materials.append(mat)
                else:
                    _assign_palette_material(part, i)
    if needs_auto_camera:
        # ── Combined bounding box / bounding sphere ──────────────────────────
        all_corners = []
        for part in parts:
            all_corners.extend(part.matrix_world @ Vector(c) for c in part.bound_box)
        bbox_min = Vector((
            min(v.x for v in all_corners),
            min(v.y for v in all_corners),
            min(v.z for v in all_corners),
        ))
        bbox_max = Vector((
            max(v.x for v in all_corners),
            max(v.y for v in all_corners),
            max(v.z for v in all_corners),
        ))
        bbox_center = (bbox_min + bbox_max) * 0.5
        bbox_dims = bbox_max - bbox_min
        # Floor the radius so camera/light distances never collapse to zero
        # for degenerate (flat or tiny) geometry.
        bsphere_radius = max(bbox_dims.length * 0.5, 0.001)
        print(f"[turntable_render] bbox_dims={tuple(round(d, 4) for d in bbox_dims)}, "
              f"bsphere_radius={bsphere_radius:.4f}")
        # ── Lighting — only in Mode A (factory settings) ─────────────────────
        # In template mode the .blend file provides its own World/HDRI lighting.
        # Adding auto-lights would overpower the template's intended look.
        if not use_template:
            light_dist = bsphere_radius * 6.0
            bpy.ops.object.light_add(type='SUN', location=(
                bbox_center.x + light_dist * 0.5,
                bbox_center.y - light_dist * 0.35,
                bbox_center.z + light_dist,
            ))
            sun = bpy.context.active_object
            sun.data.energy = 4.0
            sun.rotation_euler = (math.radians(45), 0, math.radians(30))
            bpy.ops.object.light_add(type='AREA', location=(
                bbox_center.x - light_dist * 0.4,
                bbox_center.y + light_dist * 0.4,
                bbox_center.z + light_dist * 0.7,
            ))
            fill = bpy.context.active_object
            # Area-light energy/size scale with the model so small and large
            # assemblies get comparable fill illumination.
            fill.data.energy = max(800.0, bsphere_radius ** 2 * 2000.0)
            fill.data.size = max(4.0, bsphere_radius * 4.0)
        # ── Camera ───────────────────────────────────────────────────────────
        cam_dist = bsphere_radius * 2.5
        cam_location = Vector((
            bbox_center.x + cam_dist,
            bbox_center.y,
            bbox_center.z + bsphere_radius * 0.5,
        ))
        bpy.ops.object.camera_add(location=cam_location)
        camera = bpy.context.active_object
        bpy.context.scene.camera = camera
        # Clip planes sized relative to the camera distance so mm-scale and
        # m-scale models both render without near/far clipping artefacts.
        camera.data.clip_start = max(cam_dist * 0.001, 0.0001)
        camera.data.clip_end = cam_dist * 10.0
        # Track-to constraint for look-at
        empty = bpy.data.objects.new("target", None)
        bpy.context.collection.objects.link(empty)
        empty.location = bbox_center
        track = camera.constraints.new(type='TRACK_TO')
        track.target = empty
        track.track_axis = 'TRACK_NEGATIVE_Z'
        track.up_axis = 'UP_Y'
        # ── World background — only in Mode A ───────────────────────────────
        # In template mode the .blend file owns its World (HDRI, sky texture,
        # studio lighting). Overwriting it would destroy the HDR look.
        if not use_template:
            world = bpy.data.worlds.new("World")
            bpy.context.scene.world = world
            world.use_nodes = True
            bg = world.node_tree.nodes["Background"]
            bg.inputs["Color"].default_value = (0.96, 0.96, 0.97, 1.0)
            bg.inputs["Strength"].default_value = 0.15
        # ── Turntable pivot ──────────────────────────────────────────────────
        # Auto-camera variant: orbit the CAMERA around a static model by
        # parenting it to a rotating empty at the model centre.
        pivot = bpy.data.objects.new("pivot", None)
        bpy.context.collection.objects.link(pivot)
        pivot.location = bbox_center
        # Parent camera to pivot
        camera.parent = pivot
        camera.location = (cam_dist, 0, bsphere_radius * 0.5)
        # Keyframe pivot rotation
        scene = bpy.context.scene
        scene.frame_start = 1
        scene.frame_end = frame_count
        pivot.rotation_euler = (0, 0, 0)
        pivot.keyframe_insert(data_path="rotation_euler", frame=1)
        pivot.rotation_euler = _axis_rotation(turntable_axis, degrees)
        pivot.keyframe_insert(data_path="rotation_euler", frame=frame_count + 1)
        # Linear interpolation — frame N+1 is never rendered, giving N uniform steps
        _set_fcurves_linear(pivot.animation_data.action)
    else:
        # Template has camera — set up turntable on the model parts instead
        scene = bpy.context.scene
        scene.frame_start = 1
        scene.frame_end = frame_count
        # Calculate model center for pivot
        all_corners = []
        for part in parts:
            all_corners.extend(part.matrix_world @ Vector(c) for c in part.bound_box)
        bbox_center = Vector((
            (min(v.x for v in all_corners) + max(v.x for v in all_corners)) * 0.5,
            (min(v.y for v in all_corners) + max(v.y for v in all_corners)) * 0.5,
            (min(v.z for v in all_corners) + max(v.z for v in all_corners)) * 0.5,
        ))
        # Create a pivot empty and parent all parts to it
        pivot = bpy.data.objects.new("turntable_pivot", None)
        bpy.context.collection.objects.link(pivot)
        pivot.location = bbox_center
        for part in parts:
            part.parent = pivot
        # Keyframe pivot rotation
        pivot.rotation_euler = (0, 0, 0)
        pivot.keyframe_insert(data_path="rotation_euler", frame=1)
        pivot.rotation_euler = _axis_rotation(turntable_axis, degrees)
        pivot.keyframe_insert(data_path="rotation_euler", frame=frame_count + 1)
        # Linear interpolation — frame N+1 is never rendered, giving N uniform steps
        _set_fcurves_linear(pivot.animation_data.action)
    # ── Colour management ────────────────────────────────────────────────────
    # In template mode the .blend file owns its colour management settings.
    # Overwriting them would destroy the intended HDR/tonemapping look.
    # In factory-settings mode force Standard to avoid the grey Filmic tint.
    scene = bpy.context.scene
    if not use_template:
        scene.view_settings.view_transform = 'Standard'
        scene.view_settings.exposure = 0.0
        scene.view_settings.gamma = 1.0
        try:
            scene.view_settings.look = 'None'
        except Exception:
            pass
    # ── Render engine ────────────────────────────────────────────────────────
    if engine == "eevee":
        # EEVEE's engine id changed across Blender versions; probe both and
        # fall back to Cycles if neither id is accepted.
        eevee_ok = False
        for eevee_id in ('BLENDER_EEVEE', 'BLENDER_EEVEE_NEXT'):
            try:
                scene.render.engine = eevee_id
                eevee_ok = True
                print(f"[turntable_render] EEVEE engine id: {eevee_id}")
                break
            except TypeError:
                continue
        if eevee_ok:
            # Sample-count attribute was also renamed between versions.
            for attr in ('taa_render_samples', 'samples'):
                try:
                    setattr(scene.eevee, attr, samples)
                    break
                except AttributeError:
                    continue
        else:
            print("[turntable_render] WARNING: EEVEE not available, falling back to Cycles")
            engine = "cycles"
    if engine != "eevee":
        scene.render.engine = 'CYCLES'
        scene.cycles.samples = samples
        scene.cycles.use_denoising = True
        scene.cycles.denoiser = 'OPENIMAGEDENOISE'  # GPU-accelerated when CUDA/OptiX active
        # Device selection: "cpu" forces CPU, "gpu" forces GPU (warns if unavailable),
        # "auto" (default) tries GPU first and falls back to CPU.
        print(f"[turntable_render] cycles_device={cycles_device}")
        gpu_found = False
        if cycles_device != "cpu":
            try:
                cycles_prefs = bpy.context.preferences.addons['cycles'].preferences
                # Try backends in preference order; first one exposing a
                # non-CPU device wins.
                for device_type in ('OPTIX', 'CUDA', 'HIP', 'ONEAPI'):
                    try:
                        cycles_prefs.compute_device_type = device_type
                        cycles_prefs.get_devices()
                        gpu_devs = [d for d in cycles_prefs.devices if d.type != 'CPU']
                        if gpu_devs:
                            for d in gpu_devs:
                                d.use = True
                            scene.cycles.device = 'GPU'
                            gpu_found = True
                            print(f"[turntable_render] Cycles GPU ({device_type})")
                            break
                    except Exception:
                        continue
            except Exception:
                pass
        if not gpu_found:
            scene.cycles.device = 'CPU'
            print("[turntable_render] WARNING: GPU not found — falling back to CPU")
    # ── Render settings ──────────────────────────────────────────────────────
    scene.render.resolution_x = width
    scene.render.resolution_y = height
    scene.render.resolution_percentage = 100
    scene.render.image_settings.file_format = 'PNG'
    # ── Transparent background ────────────────────────────────────────────────
    # bg_color compositing is handled by FFmpeg in the compose-video task.
    # Blender renders transparent PNG frames when bg_color is set.
    if bg_color or transparent_bg:
        scene.render.film_transparent = True
        if bg_color:
            print(f"[turntable_render] film_transparent=True for FFmpeg bg_color compositing ({bg_color})")
        else:
            print("[turntable_render] transparent_bg enabled (alpha PNG frames)")
    # ── Render all frames ────────────────────────────────────────────────────
    # Per-frame loop with write_still=True. In a single Blender session,
    # Cycles keeps the GPU scene (BVH, textures, material graph) loaded
    # between frames — only the animated pivot transform is updated each step.
    # bpy.ops.render.render(animation=True) does NOT work reliably in
    # background mode after wm.open_mainfile() in Blender 5.x (silently
    # writes no files), so we use the explicit per-frame approach.
    import time as _time
    _render_start = _time.time()
    for frame in range(1, frame_count + 1):
        scene.frame_set(frame)
        # PNG extension is appended automatically by the PNG file format.
        scene.render.filepath = os.path.join(frames_dir, f"frame_{frame:04d}")
        bpy.ops.render.render(write_still=True)
        elapsed = _time.time() - _render_start
        fps_so_far = frame / elapsed
        print(f"[turntable_render] Frame {frame}/{frame_count}{elapsed:.1f}s elapsed ({fps_so_far:.2f} fps)")
    total = _time.time() - _render_start
    print(f"[turntable_render] Turntable render complete: {frame_count} frames in {total:.1f}s ({frame_count/total:.2f} fps avg)")
# Script entry point — Blender runs this file via `--python`, in which case
# __name__ is "__main__"; guard keeps an import of this module side-effect free.
if __name__ == "__main__":
    main()