"""
Blender renderer service — FastAPI microservice.

Accepts a STEP file path (on shared uploads volume) and renders a thumbnail PNG
using the pipeline: STEP → STL (via cadquery) → PNG (via Blender headless).
"""
import asyncio
import json as _json_mod
import logging
import os
import shutil
import signal
import subprocess
import tempfile
import threading
import time
from pathlib import Path

from fastapi import FastAPI, HTTPException
from pydantic import BaseModel

logger = logging.getLogger(__name__)

app = FastAPI(title="Blender Renderer", version="1.0.0")

# Active render subprocesses keyed by job_id for cancellation support.
# Entries are added/removed by the render worker and popped by POST /cancel;
# all access goes through _procs_lock.
_active_procs: dict[str, subprocess.Popen] = {}
_procs_lock = threading.Lock()

# Limit concurrent Blender renders to avoid memory exhaustion from parallel threads
# (each thread loads cadquery/OCC, ~300-500 MB each).
# Resizable at runtime via POST /configure without restart.
_max_concurrent: int = 3
_render_semaphore = threading.Semaphore(_max_concurrent)
# Guards swaps of _max_concurrent/_render_semaphore done by _set_max_concurrent.
_config_lock = threading.Lock()
def _set_max_concurrent(n: int) -> None:
    """Resize the render concurrency limit to *n*.

    A brand-new semaphore replaces the global one under _config_lock.
    Renders already in flight keep (and eventually release) their reference
    to the previous semaphore; renders that start afterwards see the new one.
    """
    global _max_concurrent, _render_semaphore
    replacement = threading.Semaphore(n)
    with _config_lock:
        _max_concurrent = n
        _render_semaphore = replacement
class RenderRequest(BaseModel):
    """Payload for POST /render: STEP input, PNG output, and all render knobs."""

    step_path: str  # absolute path to STEP file on the shared uploads volume
    output_path: str  # destination PNG path; parent dirs are created if missing
    width: int = 512
    height: int = 512
    engine: str = "cycles"  # "cycles" or "eevee"
    samples: int = 256
    stl_quality: str = "low"  # "low" or "high"
    smooth_angle: int = 30  # degrees; 0 = shade_flat, >0 = shade_smooth_by_angle
    cycles_device: str = "auto"  # "auto", "gpu", or "cpu"
    transparent_bg: bool = False  # render with transparent background (PNG only)
    part_colors: dict | None = None  # optional {part_name: hex_color}
    template_path: str | None = None  # Path to .blend template file
    target_collection: str = "Product"  # Collection to import geometry into
    material_library_path: str | None = None  # Path to material library .blend
    material_map: dict | None = None  # {part_name: material_name} from Excel
    part_names_ordered: list | None = None  # ordered STEP part names for index matching
    lighting_only: bool = False  # use template World/HDRI only; force auto-camera
    shadow_catcher: bool = False  # enable Shadowcatcher collection + position plane at bbox min Z
    rotation_x: float = 0.0  # Euler X rotation in degrees (applied to imported STL)
    rotation_y: float = 0.0  # Euler Y rotation in degrees
    rotation_z: float = 0.0  # Euler Z rotation in degrees
    job_id: str | None = None  # Optional ID for cancellation tracking
    noise_threshold: str = ""  # Adaptive sampling noise threshold (empty = Blender default)
    denoiser: str = ""  # "OPTIX" | "OPENIMAGEDENOISE" (empty = auto)
    denoising_input_passes: str = ""  # "RGB" | "RGB_ALBEDO" | "RGB_ALBEDO_NORMAL"
    denoising_prefilter: str = ""  # "NONE" | "FAST" | "ACCURATE"
    denoising_quality: str = ""  # "HIGH" | "BALANCED" | "FAST" (Blender 4.2+)
    denoising_use_gpu: str = ""  # "1" = GPU, "0" = CPU, "" = auto
def _find_blender() -> str:
|
||
"""Locate the Blender binary: prefer $BLENDER_BIN, then PATH."""
|
||
import os, shutil
|
||
env_bin = os.environ.get("BLENDER_BIN", "")
|
||
if env_bin and Path(env_bin).exists():
|
||
return env_bin
|
||
return shutil.which("blender") or "blender"
@app.get("/health")
async def health():
    """Liveness probe: report the resolved Blender binary and its version string."""
    blender_bin = _find_blender()
    version = "unknown"
    try:
        probe = subprocess.run(
            [blender_bin, "--version"], capture_output=True, text=True, timeout=10
        )
        # Blender prints its version banner on the first line of output.
        for line in (probe.stdout or probe.stderr or "").splitlines():
            version = line.strip()
            break
    except Exception:
        # Binary missing or hanging — report "unknown" rather than failing the probe.
        pass
    return {
        "status": "ok",
        "renderer": "blender",
        "blender_path": blender_bin,
        "blender_version": version,
    }
class ConvertStlRequest(BaseModel):
    """Payload for POST /convert-stl: pre-warm the STL cache for a STEP file."""

    step_path: str  # absolute path to STEP file on the shared uploads volume
    quality: str = "low"  # "low" or "high"
@app.post("/convert-stl")
async def convert_stl(req: ConvertStlRequest):
    """Convert a STEP file to STL and cache it — no Blender render."""
    if req.quality not in ("low", "high"):
        raise HTTPException(400, detail="quality must be 'low' or 'high'")

    src = Path(req.step_path)
    if not src.exists():
        raise HTTPException(404, detail=f"STEP file not found: {src}")

    combined_stl = src.parent / f"{src.stem}_{req.quality}.stl"
    per_part_dir = src.parent / f"{src.stem}_{req.quality}_parts"

    started = time.monotonic()
    try:
        cached = combined_stl.exists() and combined_stl.stat().st_size > 0
        if cached:
            logger.info("STL cache hit: %s (%d KB)", combined_stl.name, combined_stl.stat().st_size // 1024)
        else:
            # Conversion is CPU-bound (cadquery/OCC) — run off the event loop.
            await asyncio.to_thread(_convert_step_to_stl, src, combined_stl, req.quality)
            logger.info("STL generated: %s (%d KB)", combined_stl.name, combined_stl.stat().st_size // 1024)
    except Exception as e:
        logger.error("STEP→STL conversion failed: %s", e)
        raise HTTPException(500, detail=f"STEP conversion failed: {e}")

    # Per-part export is best-effort; a missing manifest just means the render
    # later falls back to the combined STL.
    try:
        if not (per_part_dir / "manifest.json").exists():
            await asyncio.to_thread(_export_per_part_stls, src, per_part_dir, req.quality)
    except Exception as e:
        logger.warning("per-part STL export failed (non-fatal): %s", e)

    return {
        "stl_path": str(combined_stl),
        "size_bytes": combined_stl.stat().st_size if combined_stl.exists() else 0,
        "duration_s": round(time.monotonic() - started, 2),
    }
@app.post("/cancel/{job_id}")
async def cancel_render(job_id: str):
    """Best-effort cancellation: SIGTERM the whole process group of a running job."""
    with _procs_lock:
        proc = _active_procs.pop(job_id, None)
    if proc is None:
        return {"status": "not_found", "job_id": job_id}
    try:
        group = os.getpgid(proc.pid)
        os.killpg(group, signal.SIGTERM)
        logger.info("Sent SIGTERM to process group %d for job %s", group, job_id)
    except (ProcessLookupError, OSError):
        # The process finished on its own before we could signal it.
        pass
    return {"status": "cancelled", "job_id": job_id}
@app.get("/status")
async def status():
    """Report how many renders are in flight and the configured concurrency cap."""
    with _procs_lock:
        active_count = len(_active_procs)
    with _config_lock:
        cap = _max_concurrent
    return {"active_jobs": active_count, "max_concurrent": cap}
@app.post("/configure")
async def configure(max_concurrent: int):
    """Dynamically update the maximum number of concurrent Blender renders.

    Accepts 1..16. The new limit applies to renders that start after this call;
    renders already running finish under the old limit.
    """
    if not (1 <= max_concurrent <= 16):
        # HTTPException is imported at module level; the previous redundant
        # function-local `from fastapi import HTTPException` was removed.
        raise HTTPException(400, detail="max_concurrent must be between 1 and 16")
    _set_max_concurrent(max_concurrent)
    logger.info("max_concurrent_renders updated to %d", max_concurrent)
    return {"max_concurrent": max_concurrent}
@app.post("/render")
async def render(req: RenderRequest):
    """Render a STEP file to PNG: STEP → cached STL → Blender headless render.

    Concurrency is bounded by the render semaphore; the STL cache lives next to
    the STEP file so repeated renders skip the slow cadquery conversion.
    Returns timing, mesh and log metadata alongside the output path.

    Raises HTTPException 404 if the STEP file is missing, 500 on conversion or
    render failure.
    """
    step_path = Path(req.step_path)
    output_path = Path(req.output_path)

    if not step_path.exists():
        raise HTTPException(404, detail=f"STEP file not found: {step_path}")

    output_path.parent.mkdir(parents=True, exist_ok=True)

    t_start = time.monotonic()

    # Capture the CURRENT semaphore once: POST /configure may swap the global
    # mid-render, and releasing a different semaphore than the one acquired
    # would corrupt the concurrency accounting. acquire() runs in a worker
    # thread so a full queue does not block the event loop.
    sem = _render_semaphore
    await asyncio.to_thread(sem.acquire)

    # 1. Get/create STL cache — persistent next to STEP file so re-renders skip conversion
    stl_path = step_path.parent / f"{step_path.stem}_{req.stl_quality}.stl"
    parts_dir = step_path.parent / f"{step_path.stem}_{req.stl_quality}_parts"
    stl_size_bytes = 0
    t_stl_start = time.monotonic()
    try:
        if not stl_path.exists() or stl_path.stat().st_size == 0:
            logger.info("STL cache miss — converting: %s", step_path.name)
            # CPU-bound cadquery conversion must not block the event loop.
            await asyncio.to_thread(_convert_step_to_stl, step_path, stl_path, req.stl_quality)
        else:
            logger.info("STL cache hit: %s (%d KB)", stl_path.name, stl_path.stat().st_size // 1024)
        stl_size_bytes = stl_path.stat().st_size if stl_path.exists() else 0
    except Exception as e:
        sem.release()
        logger.error("STEP→STL conversion failed: %s", e)
        raise HTTPException(500, detail=f"STEP conversion failed: {e}")

    # Per-part export (non-fatal — Blender falls back to combined STL)
    try:
        if not (parts_dir / "manifest.json").exists():
            await asyncio.to_thread(_export_per_part_stls, step_path, parts_dir, req.stl_quality)
    except Exception as e:
        logger.warning("per-part STL export failed (non-fatal): %s", e)
    stl_duration_s = round(time.monotonic() - t_stl_start, 2)

    # 2. Render STL → PNG via Blender
    render_log_lines: list[str] = []
    parts_count = 0
    engine_used = req.engine
    t_render_start = time.monotonic()
    try:
        # Blender can run for minutes — run in a worker thread so the event
        # loop (and the /cancel, /status endpoints) stays responsive.
        render_log_lines, parts_count, engine_used = await asyncio.to_thread(
            _render_stl_with_blender,
            stl_path, output_path, req.width, req.height,
            req.engine, req.samples, req.smooth_angle, req.cycles_device,
            req.transparent_bg,
            template_path=req.template_path,
            target_collection=req.target_collection,
            material_library_path=req.material_library_path,
            material_map=req.material_map,
            part_names_ordered=req.part_names_ordered,
            lighting_only=req.lighting_only,
            shadow_catcher=req.shadow_catcher,
            rotation_x=req.rotation_x,
            rotation_y=req.rotation_y,
            rotation_z=req.rotation_z,
            job_id=req.job_id,
            noise_threshold=req.noise_threshold,
            denoiser=req.denoiser,
            denoising_input_passes=req.denoising_input_passes,
            denoising_prefilter=req.denoising_prefilter,
            denoising_quality=req.denoising_quality,
            denoising_use_gpu=req.denoising_use_gpu,
        )
    except Exception as e:
        logger.error("Blender render failed: %s", e)
        raise HTTPException(500, detail=f"Blender render failed: {e}")
    finally:
        # Release the SAME semaphore object that was acquired above.
        sem.release()
        # STL cache is persistent — do NOT delete stl_path or parts_dir
        render_duration_s = round(time.monotonic() - t_render_start, 2)

    if not output_path.exists():
        raise HTTPException(500, detail="Render produced no output file")

    total_duration_s = round(time.monotonic() - t_start, 2)
    output_size_bytes = output_path.stat().st_size

    return {
        "output_path": str(output_path),
        "status": "ok",
        "renderer": "blender",
        # Timing
        "total_duration_s": total_duration_s,
        "stl_duration_s": stl_duration_s,
        "render_duration_s": render_duration_s,
        # Mesh info
        "stl_size_bytes": stl_size_bytes,
        "output_size_bytes": output_size_bytes,
        "parts_count": parts_count,
        # Effective settings (engine may differ from requested if EEVEE fell back)
        "engine_used": engine_used,
        # Blender log lines (filtered to [blender_render] prefix lines)
        "log_lines": render_log_lines,
    }
def _convert_step_to_stl(step_path: Path, stl_path: Path, quality: str = "low") -> None:
    """Tessellate a STEP file into an STL mesh via cadquery.

    quality="low"  → tolerance=0.3,  angularTolerance=0.3  (fast, coarser mesh)
    quality="high" → tolerance=0.01, angularTolerance=0.02 (slower, finer mesh)

    Raises RuntimeError if cadquery writes nothing usable.
    """
    import cadquery as cq  # deferred: heavy OCC import, only needed here

    solid = cq.importers.importStep(str(step_path))
    tol, ang = (0.01, 0.02) if quality == "high" else (0.3, 0.3)
    cq.exporters.export(solid, str(stl_path), tolerance=tol, angularTolerance=ang)
    if not stl_path.exists() or stl_path.stat().st_size == 0:
        raise RuntimeError("cadquery produced empty STL")
def _export_per_part_stls(step_path: Path, parts_dir: Path, quality: str = "low") -> list:
    """Export one STL per named STEP leaf shape using OCP XCAF.

    Creates parts_dir with individual STL files and a manifest.json
    ({"parts": [{"index", "name", "file"}, ...]}).
    Returns the manifest list, or empty list on failure.
    All failure paths log a warning and return [] — callers treat this
    export as best-effort.
    """
    # Mesh tolerances mirror _convert_step_to_stl's quality presets
    # (angular tolerance differs slightly for "high").
    tol = 0.01 if quality == "high" else 0.3
    angular_tol = 0.05 if quality == "high" else 0.3

    try:
        # OCP (OpenCascade bindings) — imported lazily; missing bindings just
        # disable per-part export instead of breaking the service.
        from OCP.STEPCAFControl import STEPCAFControl_Reader
        from OCP.XCAFDoc import XCAFDoc_DocumentTool, XCAFDoc_ShapeTool
        from OCP.TDataStd import TDataStd_Name
        from OCP.TDF import TDF_Label as TDF_Label_cls, TDF_LabelSequence
        from OCP.XCAFApp import XCAFApp_Application
        from OCP.TDocStd import TDocStd_Document
        from OCP.TCollection import TCollection_ExtendedString
        from OCP.IFSelect import IFSelect_RetDone
        import cadquery as cq
    except ImportError as e:
        logger.warning("per-part export skipped (import error): %s", e)
        return []

    # Fresh XCAF document to receive the STEP assembly structure.
    app = XCAFApp_Application.GetApplication_s()
    doc = TDocStd_Document(TCollection_ExtendedString("XmlOcaf"))
    app.InitDocument(doc)

    reader = STEPCAFControl_Reader()
    reader.SetNameMode(True)  # preserve part names from the STEP file
    status = reader.ReadFile(str(step_path))
    if status != IFSelect_RetDone:
        logger.warning("XCAF reader failed with status %s", status)
        return []

    if not reader.Transfer(doc):
        logger.warning("XCAF transfer failed")
        return []

    shape_tool = XCAFDoc_DocumentTool.ShapeTool_s(doc.Main())
    name_id = TDataStd_Name.GetID_s()

    # Accumulates (name, shape) pairs for every simple (non-assembly) shape.
    leaves = []

    def _get_label_name(label):
        # Read the TDataStd_Name attribute of a label, or "" if absent.
        name_attr = TDataStd_Name()
        if label.FindAttribute(name_id, name_attr):
            return name_attr.Get().ToExtString()
        return ""

    def _collect_leaves(label):
        # Depth-first walk of the assembly tree; OCP sequences are 1-based.
        if XCAFDoc_ShapeTool.IsAssembly_s(label):
            components = TDF_LabelSequence()
            XCAFDoc_ShapeTool.GetComponents_s(label, components)
            for i in range(1, components.Length() + 1):
                comp_label = components.Value(i)
                if XCAFDoc_ShapeTool.IsReference_s(comp_label):
                    ref_label = TDF_Label_cls()
                    XCAFDoc_ShapeTool.GetReferredShape_s(comp_label, ref_label)
                    comp_name = _get_label_name(comp_label)
                    ref_name = _get_label_name(ref_label)
                    # Prefer referred shape name — matches material_map keys
                    name = ref_name or comp_name
                    if XCAFDoc_ShapeTool.IsAssembly_s(ref_label):
                        _collect_leaves(ref_label)
                    elif XCAFDoc_ShapeTool.IsSimpleShape_s(ref_label):
                        # Use comp_label shape — includes instance transform (position)
                        shape = XCAFDoc_ShapeTool.GetShape_s(comp_label)
                        leaves.append((name or f"unnamed_{len(leaves)}", shape))
                else:
                    _collect_leaves(comp_label)
        elif XCAFDoc_ShapeTool.IsSimpleShape_s(label):
            name = _get_label_name(label)
            shape = XCAFDoc_ShapeTool.GetShape_s(label)
            leaves.append((name or f"unnamed_{len(leaves)}", shape))

    top_labels = TDF_LabelSequence()
    shape_tool.GetFreeShapes(top_labels)
    for i in range(1, top_labels.Length() + 1):
        _collect_leaves(top_labels.Value(i))

    if not leaves:
        logger.warning("no leaf shapes found via XCAF")
        return []

    parts_dir.mkdir(parents=True, exist_ok=True)
    manifest = []

    for idx, (name, shape) in enumerate(leaves):
        # Sanitize names so they are safe as filenames.
        safe_name = name.replace("/", "_").replace("\\", "_").replace(" ", "_")
        filename = f"{idx:02d}_{safe_name}.stl"
        filepath = str(parts_dir / filename)

        try:
            import cadquery as cq  # NOTE(review): redundant — already imported above
            cq_shape = cq.Shape(shape)
            cq_shape.exportStl(filepath, tolerance=tol, angularTolerance=angular_tol)
            manifest.append({"index": idx, "name": name, "file": filename})
        except Exception as e:
            # One bad part does not abort the whole export.
            logger.warning("failed to export part '%s': %s", name, e)

    manifest_path = parts_dir / "manifest.json"
    with open(manifest_path, "w") as f:
        _json_mod.dump({"parts": manifest}, f, indent=2)

    total_size = sum(
        os.path.getsize(str(parts_dir / p["file"]))
        for p in manifest
        if (parts_dir / p["file"]).exists()
    )
    logger.info("exported %d per-part STLs (%d KB) to %s", len(manifest), total_size // 1024, parts_dir)
    return manifest
def _parse_blender_log(stdout: str) -> tuple[list[str], int]:
|
||
"""Extract [blender_render] lines and parts count from Blender stdout."""
|
||
lines = []
|
||
parts_count = 0
|
||
for line in (stdout or "").splitlines():
|
||
stripped = line.strip()
|
||
if "[blender_render]" in stripped or "[blender_render" in stripped:
|
||
lines.append(stripped)
|
||
if "separated into" in stripped:
|
||
try:
|
||
parts_count = int(stripped.split("separated into")[1].split("part")[0].strip())
|
||
except Exception:
|
||
pass
|
||
elif "imported" in stripped and "named parts" in stripped:
|
||
try:
|
||
parts_count = int(stripped.split("imported")[1].split("named")[0].strip())
|
||
except Exception:
|
||
pass
|
||
elif stripped.startswith("Saved:") or stripped.startswith("Fra:"):
|
||
lines.append(stripped)
|
||
return lines, parts_count
def _render_stl_with_blender(
    stl_path: Path, output_path: Path, width: int, height: int,
    engine: str = "cycles", samples: int = 256, smooth_angle: int = 30,
    cycles_device: str = "auto", transparent_bg: bool = False,
    template_path: str | None = None, target_collection: str = "Product",
    material_library_path: str | None = None, material_map: dict | None = None,
    part_names_ordered: list | None = None, lighting_only: bool = False,
    shadow_catcher: bool = False,
    rotation_x: float = 0.0, rotation_y: float = 0.0, rotation_z: float = 0.0,
    job_id: str | None = None,
    noise_threshold: str = "",
    denoiser: str = "",
    denoising_input_passes: str = "",
    denoising_prefilter: str = "",
    denoising_quality: str = "",
    denoising_use_gpu: str = "",
) -> tuple[list[str], int, str]:
    """Render STL to PNG using Blender in background mode.

    Returns (log_lines, parts_count, engine_used); engine_used reports
    "cycles (eevee fallback)" when an EEVEE failure was retried on Cycles.
    Blender is launched in its own process group (start_new_session=True) so
    that SIGTERM from a cancel request kills the entire Blender tree.
    When job_id is set, the subprocess is registered in _active_procs so the
    /cancel endpoint can find it. Raises RuntimeError on a non-zero exit.
    """
    import json as _json  # NOTE(review): module-level _json_mod exists; local alias kept
    blender_bin = _find_blender()
    # Render script lives next to this module inside the container image.
    script_path = Path(__file__).parent / "blender_render.py"

    env = dict(os.environ)
    if engine == "eevee":
        # Software GPU stack for headless EEVEE: lavapipe Vulkan ICD + software
        # GL (assumes these Mesa files exist in the image — confirm on upgrade).
        env.update({
            "VK_ICD_FILENAMES": "/usr/share/vulkan/icd.d/lvp_icd.x86_64.json",
            "LIBGL_ALWAYS_SOFTWARE": "1",
            "MESA_GL_VERSION_OVERRIDE": "4.5",
            "EGL_PLATFORM": "surfaceless",
        })
    else:
        env.update({
            "EGL_PLATFORM": "surfaceless",
        })

    def _build_cmd(eng: str) -> list:
        # Positional argv after "--" — order must match the parsing in
        # blender_render.py (presumably reads sys.argv positionally; verify
        # there before reordering). Optional values are passed as "".
        return [
            blender_bin,
            "--background",
            "--python", str(script_path),
            "--",
            str(stl_path),
            str(output_path),
            str(width),
            str(height),
            eng,
            str(samples),
            str(smooth_angle),
            cycles_device,
            "1" if transparent_bg else "0",
            template_path or "",
            target_collection,
            material_library_path or "",
            _json.dumps(material_map) if material_map else "{}",
            _json.dumps(part_names_ordered) if part_names_ordered else "[]",
            "1" if lighting_only else "0",
            "1" if shadow_catcher else "0",
            str(rotation_x),
            str(rotation_y),
            str(rotation_z),
            noise_threshold or "",
            denoiser or "",
            denoising_input_passes or "",
            denoising_prefilter or "",
            denoising_quality or "",
            denoising_use_gpu or "",
        ]

    def _run_blender(eng: str) -> subprocess.CompletedProcess:
        """Launch Blender in an isolated process group and wait for completion."""
        cmd = _build_cmd(eng)
        proc = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
            env=env,
            start_new_session=True,  # new process group → SIGTERM kills entire tree
        )
        # Register for cancellation while running; always deregister afterwards.
        if job_id:
            with _procs_lock:
                _active_procs[job_id] = proc
        try:
            # Hard cap of 300 s per Blender invocation.
            stdout, stderr = proc.communicate(timeout=300)
        except subprocess.TimeoutExpired:
            try:
                os.killpg(os.getpgid(proc.pid), signal.SIGTERM)
            except (ProcessLookupError, OSError):
                pass  # already gone
            # Second communicate() reaps the killed process and drains pipes.
            stdout, stderr = proc.communicate()
        finally:
            if job_id:
                with _procs_lock:
                    _active_procs.pop(job_id, None)
        return subprocess.CompletedProcess(cmd, proc.returncode, stdout, stderr)

    result = _run_blender(engine)
    engine_used = engine

    # Log to uvicorn output
    if result.stdout:
        for line in result.stdout.splitlines():
            logger.info("[blender] %s", line)
    if result.stderr:
        for line in result.stderr.splitlines():
            logger.warning("[blender stderr] %s", line)

    # If EEVEE fails with a non-signal error, automatically retry with Cycles.
    # A negative returncode means the process was killed by a signal (e.g. cancel)
    # — do NOT retry in that case.
    if result.returncode > 0 and engine == "eevee":
        logger.warning(
            "EEVEE render failed (exit %d) – retrying with Cycles (CPU).",
            result.returncode,
        )
        result = _run_blender("cycles")
        engine_used = "cycles (eevee fallback)"
        if result.stdout:
            for line in result.stdout.splitlines():
                logger.info("[blender-cycles-fallback] %s", line)
        if result.stderr:
            for line in result.stderr.splitlines():
                logger.warning("[blender-cycles-fallback stderr] %s", line)

    if result.returncode != 0:
        # Include only the output tails — Blender logs can be huge.
        stdout_tail = result.stdout[-2000:] if result.stdout else ""
        stderr_tail = result.stderr[-2000:] if result.stderr else ""
        raise RuntimeError(
            f"Blender exited {result.returncode}.\n"
            f"STDOUT: {stdout_tail}\nSTDERR: {stderr_tail}"
        )

    log_lines, parts_count = _parse_blender_log(result.stdout)
    return log_lines, parts_count, engine_used