feat(A4): add MinIO service + storage abstraction (core/storage.py)

- Add MinIO service to docker-compose.yml (port 9000 API, 9001 console)
- Add minio-data volume for persistent object storage
- Create backend/app/core/storage.py: MinIOStorage + LocalStorage abstraction
  - MinIOStorage: boto3-based, auto-creates bucket, upload/download/exists/delete/presign
  - LocalStorage: fallback for dev (UPLOAD_DIR filesystem, backward compat)
  - get_storage() singleton: auto-selects based on MINIO_URL env var
- Add MINIO_URL/USER/PASSWORD/BUCKET env vars to all service definitions
- backend/pyproject.toml: docker>=6.1.0 → boto3>=1.34.0
- Add docker-compose.worker.yml: external render-worker for remote machines
- Fix .gitignore: 'core' rule was too broad, now only matches root /core dump
- Update .env.example: MinIO connection vars documented

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-06 15:51:06 +01:00
parent c8ecc29d40
commit c728358fb6
7 changed files with 263 additions and 2 deletions
View File
+171
View File
@@ -0,0 +1,171 @@
"""Storage abstraction — MinIO (production) or LocalStorage (dev fallback).
Usage:
from app.core.storage import get_storage
storage = get_storage()
key = storage.upload(local_path, "uploads/step_files/my.step")
local_path = storage.download("uploads/step_files/my.step", tmp_dir / "my.step")
Environment variables (set in docker-compose.yml):
MINIO_URL — S3 endpoint URL (e.g. http://minio:9000)
MINIO_USER — Access key (default: minioadmin)
MINIO_PASSWORD — Secret key (default: minioadmin)
MINIO_BUCKET — Bucket name (default: uploads)
If MINIO_URL is not set, falls back to LocalStorage (reads/writes to UPLOAD_DIR).
"""
import logging
import os
from pathlib import Path
logger = logging.getLogger(__name__)
class MinIOStorage:
    """S3-compatible object storage via boto3 (MinIO backend).

    The boto3 client is created eagerly in ``__init__`` and the target
    bucket is created if it does not already exist (best-effort).
    """

    def __init__(
        self,
        url: str,
        user: str,
        password: str,
        bucket: str = "uploads",
    ):
        # boto3 is imported lazily so this module can still be imported in
        # dev environments that only use LocalStorage (no boto3 installed).
        import boto3
        from botocore.config import Config

        self._bucket = bucket
        self._client = boto3.client(
            "s3",
            endpoint_url=url,
            aws_access_key_id=user,
            aws_secret_access_key=password,
            # MinIO expects SigV4 request signing.
            config=Config(signature_version="s3v4"),
        )
        self._ensure_bucket()

    def _ensure_bucket(self) -> None:
        """Create the bucket if it does not exist (best-effort, logged)."""
        try:
            self._client.head_bucket(Bucket=self._bucket)
        except Exception:
            # head_bucket raises when the bucket is missing (or on auth
            # problems); try to create it and log a warning rather than
            # crash application startup.
            try:
                self._client.create_bucket(Bucket=self._bucket)
                logger.info("Created MinIO bucket: %s", self._bucket)
            except Exception as exc:
                logger.warning("Could not create MinIO bucket %s: %s", self._bucket, exc)

    def upload(self, local_path: Path, object_key: str) -> str:
        """Upload a local file to MinIO. Returns the object_key."""
        self._client.upload_file(str(local_path), self._bucket, object_key)
        logger.debug("Uploaded %s → minio://%s/%s", local_path.name, self._bucket, object_key)
        return object_key

    def download(self, object_key: str, local_path: Path) -> Path:
        """Download object from MinIO to local_path. Returns local_path."""
        local_path.parent.mkdir(parents=True, exist_ok=True)
        self._client.download_file(self._bucket, object_key, str(local_path))
        # FIX: the original format string ("…%s/%s%s") concatenated the
        # object key and the local filename with no separator; use the same
        # " → " arrow as the upload log for readable, consistent output.
        logger.debug("Downloaded minio://%s/%s → %s", self._bucket, object_key, local_path.name)
        return local_path

    def exists(self, object_key: str) -> bool:
        """Return True if the object exists in MinIO."""
        try:
            self._client.head_object(Bucket=self._bucket, Key=object_key)
            return True
        except Exception:
            # head_object raises (404 / client error) when the key is absent.
            return False

    def delete(self, object_key: str) -> None:
        """Delete an object from MinIO (best-effort; failures are logged)."""
        try:
            self._client.delete_object(Bucket=self._bucket, Key=object_key)
        except Exception as exc:
            logger.warning("MinIO delete failed for %s: %s", object_key, exc)

    def get_url(self, object_key: str, expires_in: int = 3600) -> str:
        """Generate a presigned URL for direct download (valid for expires_in seconds)."""
        return self._client.generate_presigned_url(
            "get_object",
            Params={"Bucket": self._bucket, "Key": object_key},
            ExpiresIn=expires_in,
        )

    @property
    def backend(self) -> str:
        """Identifier for this storage backend."""
        return "minio"
class LocalStorage:
    """Fallback backend: object keys map to relative paths under UPLOAD_DIR.

    This preserves backward compatibility with the existing uploads/
    filesystem volume used in development.
    """

    def __init__(self, upload_dir: str):
        self._root = Path(upload_dir)

    def _resolve(self, object_key: str) -> Path:
        # Keys are slash-separated relative paths beneath the root dir.
        return self._root / object_key

    def upload(self, local_path: Path, object_key: str) -> str:
        """Copy local_path to the local storage path for object_key."""
        import shutil

        target = self._resolve(object_key)
        target.parent.mkdir(parents=True, exist_ok=True)
        # Skip the copy when source and destination are the same path.
        if str(target) != str(local_path):
            shutil.copy2(str(local_path), str(target))
        return object_key

    def download(self, object_key: str, local_path: Path) -> Path:
        """Materialize object_key at local_path (no-op if already there)."""
        source = self._resolve(object_key)
        if source != local_path:
            import shutil

            local_path.parent.mkdir(parents=True, exist_ok=True)
            shutil.copy2(str(source), str(local_path))
        return local_path

    def exists(self, object_key: str) -> bool:
        """Return True when a file is stored for object_key."""
        return self._resolve(object_key).exists()

    def delete(self, object_key: str) -> None:
        """Remove the stored file; a missing file is silently ignored."""
        self._resolve(object_key).unlink(missing_ok=True)

    def get_url(self, object_key: str, expires_in: int = 3600) -> str:
        """Local files are served through the API route; expires_in is unused."""
        return f"/api/files/{object_key}"

    @property
    def backend(self) -> str:
        """Identifier for this storage backend."""
        return "local"
# Module-level singleton; created lazily on the first get_storage() call.
_storage_instance: MinIOStorage | LocalStorage | None = None


def _build_storage() -> MinIOStorage | LocalStorage:
    """Construct the backend selected by environment variables."""
    env = os.environ.get
    minio_url = env("MINIO_URL", "")

    # No MINIO_URL configured → plain local-filesystem backend.
    if not minio_url:
        upload_dir = env("UPLOAD_DIR", "/app/uploads")
        instance = LocalStorage(upload_dir)
        logger.info("Storage backend: local filesystem (%s)", upload_dir)
        return instance

    user = env("MINIO_USER", "minioadmin")
    password = env("MINIO_PASSWORD", "minioadmin")
    bucket = env("MINIO_BUCKET", "uploads")
    try:
        instance = MinIOStorage(url=minio_url, user=user, password=password, bucket=bucket)
    except Exception as exc:
        # Unreachable MinIO must not take the app down — fall back to disk.
        logger.warning("MinIO init failed (%s) — falling back to LocalStorage: %s", minio_url, exc)
        return LocalStorage(env("UPLOAD_DIR", "/app/uploads"))
    logger.info("Storage backend: MinIO (%s, bucket=%s)", minio_url, bucket)
    return instance


def get_storage() -> MinIOStorage | LocalStorage:
    """Return the configured storage backend (singleton)."""
    global _storage_instance
    if _storage_instance is None:
        _storage_instance = _build_storage()
    return _storage_instance