Compare commits

...

5 Commits

Author SHA1 Message Date
da1ff62877 Merge aws-int: Add AWS integration with GraphQL, Step Functions, and Lambda
# Conflicts:
#	docs/architecture/index.html
2026-02-12 19:47:15 -03:00
9cead74fb3 updated docs 2026-02-12 19:46:12 -03:00
72e4113529 updated modelgen tool 2026-02-06 20:18:45 -03:00
8f5d407e0e fine tuning models 2026-02-06 18:46:27 -03:00
e642908abb shoehorning graphql, step functions and lambdas. aws deployment scripts 2026-02-06 18:25:42 -03:00
54 changed files with 3063 additions and 1894 deletions

251
api/graphql.py Normal file
View File

@@ -0,0 +1,251 @@
"""
GraphQL API using graphene, mounted on FastAPI/Starlette.
Provides the same data as the REST API but via GraphQL queries and mutations.
Uses Django ORM directly for data access.
Types are generated from schema/ via modelgen — see api/schema/graphql.py.
"""
import os
import graphene
from api.schema.graphql import (
CreateJobInput,
MediaAssetType,
ScanResultType,
SystemStatusType,
TranscodeJobType,
TranscodePresetType,
)
from core.storage import BUCKET_IN, list_objects
# Media extensions (same as assets route)
VIDEO_EXTS = {".mp4", ".mkv", ".avi", ".mov", ".webm", ".flv", ".wmv", ".m4v"}
AUDIO_EXTS = {".mp3", ".wav", ".flac", ".aac", ".ogg", ".m4a"}
# Union used when filtering bucket listings in scan_media_folder.
MEDIA_EXTS = VIDEO_EXTS | AUDIO_EXTS
# ---------------------------------------------------------------------------
# Queries
# ---------------------------------------------------------------------------
class Query(graphene.ObjectType):
    """Read-only GraphQL entry points mirroring the REST list/detail routes."""

    assets = graphene.List(
        MediaAssetType,
        status=graphene.String(),
        search=graphene.String(),
    )
    asset = graphene.Field(MediaAssetType, id=graphene.UUID(required=True))
    jobs = graphene.List(
        TranscodeJobType,
        status=graphene.String(),
        source_asset_id=graphene.UUID(),
    )
    job = graphene.Field(TranscodeJobType, id=graphene.UUID(required=True))
    presets = graphene.List(TranscodePresetType)
    system_status = graphene.Field(SystemStatusType)

    def resolve_assets(self, info, status=None, search=None):
        # Django models are imported lazily so this module can be imported
        # before django.setup() has run.
        from mpr.media_assets.models import MediaAsset

        queryset = MediaAsset.objects.all()
        if status:
            queryset = queryset.filter(status=status)
        if search:
            queryset = queryset.filter(filename__icontains=search)
        return queryset

    def resolve_asset(self, info, id):
        from mpr.media_assets.models import MediaAsset

        # .first() yields None when absent, same as the get/except original.
        return MediaAsset.objects.filter(id=id).first()

    def resolve_jobs(self, info, status=None, source_asset_id=None):
        from mpr.media_assets.models import TranscodeJob

        queryset = TranscodeJob.objects.all()
        if status:
            queryset = queryset.filter(status=status)
        if source_asset_id:
            queryset = queryset.filter(source_asset_id=source_asset_id)
        return queryset

    def resolve_job(self, info, id):
        from mpr.media_assets.models import TranscodeJob

        return TranscodeJob.objects.filter(id=id).first()

    def resolve_presets(self, info):
        from mpr.media_assets.models import TranscodePreset

        return TranscodePreset.objects.all()

    def resolve_system_status(self, info):
        # graphene's default resolver handles plain dicts.
        return {"status": "ok", "version": "0.1.0"}
# ---------------------------------------------------------------------------
# Mutations
# ---------------------------------------------------------------------------
class ScanMediaFolder(graphene.Mutation):
    """Scan the S3 media-in bucket and register any new media files.

    Mirrors POST /api/assets/scan: lists objects in BUCKET_IN filtered by
    MEDIA_EXTS, skips filenames that are already registered, and creates a
    MediaAsset row for each new file.
    """

    class Arguments:
        pass

    Output = ScanResultType

    def mutate(self, info):
        from mpr.media_assets.models import MediaAsset

        objects = list_objects(BUCKET_IN, extensions=MEDIA_EXTS)
        # Deduplicate by filename, matching the REST scan endpoint.
        existing = set(MediaAsset.objects.values_list("filename", flat=True))
        registered = []
        skipped = []
        for obj in objects:
            if obj["filename"] in existing:
                skipped.append(obj["filename"])
                continue
            try:
                MediaAsset.objects.create(
                    filename=obj["filename"],
                    file_path=obj["key"],
                    file_size=obj["size"],
                )
                registered.append(obj["filename"])
            except Exception as e:
                # Best-effort scan: keep going, but surface the failure
                # instead of swallowing it silently (same message as the
                # REST route in api/routes/assets.py).
                print(f"Error registering {obj['filename']}: {e}")
        return ScanResultType(
            found=len(objects),
            registered=len(registered),
            skipped=len(skipped),
            files=registered,
        )
class CreateJob(graphene.Mutation):
    """Create a transcode/trim job and dispatch it for execution.

    Mirrors POST /api/jobs/: the job must reference a preset and/or at least
    one trim point. Execution goes to AWS Step Functions/Lambda when
    MPR_EXECUTOR=lambda, otherwise to the Celery worker.
    """

    class Arguments:
        input = CreateJobInput(required=True)

    Output = TranscodeJobType

    def mutate(self, info, input):
        from pathlib import Path

        from mpr.media_assets.models import MediaAsset, TranscodeJob, TranscodePreset

        try:
            source = MediaAsset.objects.get(id=input.source_asset_id)
        except MediaAsset.DoesNotExist:
            raise Exception("Source asset not found")

        preset = None
        preset_snapshot = {}
        if input.preset_id:
            try:
                preset = TranscodePreset.objects.get(id=input.preset_id)
                # Snapshot the preset at job-creation time so later preset
                # edits do not affect queued jobs.
                preset_snapshot = {
                    "name": preset.name,
                    "container": preset.container,
                    "video_codec": preset.video_codec,
                    "audio_codec": preset.audio_codec,
                }
            except TranscodePreset.DoesNotExist:
                raise Exception("Preset not found")

        # Explicit None checks: trim_start=0.0 is a legitimate trim point and
        # must not be treated as "not specified" (the previous truthiness
        # test rejected it).
        if not preset and input.trim_start is None and input.trim_end is None:
            raise Exception("Must specify preset_id or trim_start/trim_end")

        output_filename = input.output_filename
        if not output_filename:
            stem = Path(source.filename).stem
            ext = preset_snapshot.get("container", "mp4") if preset else "mp4"
            output_filename = f"{stem}_output.{ext}"

        job = TranscodeJob.objects.create(
            source_asset_id=source.id,
            preset_id=preset.id if preset else None,
            preset_snapshot=preset_snapshot,
            trim_start=input.trim_start,
            trim_end=input.trim_end,
            output_filename=output_filename,
            output_path=output_filename,  # S3 key in the output bucket
            priority=input.priority or 0,
        )

        # Dispatch based on executor mode (same switch as the REST route).
        executor_mode = os.environ.get("MPR_EXECUTOR", "local")
        if executor_mode == "lambda":
            from task.executor import get_executor

            get_executor().run(
                job_id=str(job.id),
                source_path=source.file_path,
                output_path=output_filename,
                preset=preset_snapshot or None,
                trim_start=input.trim_start,
                trim_end=input.trim_end,
                duration=source.duration,
            )
        else:
            from task.tasks import run_transcode_job

            result = run_transcode_job.delay(
                job_id=str(job.id),
                source_key=source.file_path,
                output_key=output_filename,
                preset=preset_snapshot or None,
                trim_start=input.trim_start,
                trim_end=input.trim_end,
                duration=source.duration,
            )
            job.celery_task_id = result.id
            job.save(update_fields=["celery_task_id"])
        return job
class CancelJob(graphene.Mutation):
    """Mark a pending/processing job as cancelled."""

    class Arguments:
        id = graphene.UUID(required=True)

    Output = TranscodeJobType

    def mutate(self, info, id):
        from mpr.media_assets.models import TranscodeJob

        job = TranscodeJob.objects.filter(id=id).first()
        if job is None:
            raise Exception("Job not found")
        # Only jobs that have not reached a terminal state can be cancelled.
        if job.status not in ("pending", "processing"):
            raise Exception(f"Cannot cancel job with status: {job.status}")
        job.status = "cancelled"
        job.save(update_fields=["status"])
        return job
class Mutation(graphene.ObjectType):
    """Root mutation type: bucket scan, job creation, job cancellation."""
    scan_media_folder = ScanMediaFolder.Field()
    create_job = CreateJob.Field()
    cancel_job = CancelJob.Field()
# ---------------------------------------------------------------------------
# Schema
# ---------------------------------------------------------------------------
# Executable schema; mounted at /graphql by the FastAPI app.
schema = graphene.Schema(query=Query, mutation=Mutation)

View File

@@ -20,7 +20,9 @@ django.setup()
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from api.graphql import schema as graphql_schema
from api.routes import assets_router, jobs_router, presets_router, system_router
from starlette_graphene3 import GraphQLApp, make_graphiql_handler
app = FastAPI(
title="MPR API",
@@ -45,6 +47,9 @@ app.include_router(assets_router, prefix="/api")
app.include_router(presets_router, prefix="/api")
app.include_router(jobs_router, prefix="/api")
# GraphQL
app.mount("/graphql", GraphQLApp(schema=graphql_schema, on_get=make_graphiql_handler()))
@app.get("/")
def root():

View File

@@ -8,46 +8,27 @@ from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query
from api.deps import get_asset
from api.schemas import AssetCreate, AssetResponse, AssetUpdate
from api.schema import AssetCreate, AssetResponse, AssetUpdate
from core.storage import BUCKET_IN, list_objects
router = APIRouter(prefix="/assets", tags=["assets"])
# Supported media extensions
VIDEO_EXTS = {".mp4", ".mkv", ".avi", ".mov", ".webm", ".flv", ".wmv", ".m4v"}
AUDIO_EXTS = {".mp3", ".wav", ".flac", ".aac", ".ogg", ".m4a"}
MEDIA_EXTS = VIDEO_EXTS | AUDIO_EXTS
@router.post("/", response_model=AssetResponse, status_code=201)
def create_asset(data: AssetCreate):
"""
Register a media file as an asset.
The file must exist on disk. A probe task will be queued
to extract metadata asynchronously.
"""
from pathlib import Path
"""Register a media file as an asset."""
from mpr.media_assets.models import MediaAsset
# Validate file exists
path = Path(data.file_path)
if not path.exists():
raise HTTPException(status_code=400, detail="File not found")
# Store path relative to media root
import os
media_root = Path(os.environ.get("MEDIA_IN", "/app/media/in"))
try:
rel_path = str(path.relative_to(media_root))
except ValueError:
rel_path = path.name
# Create asset
asset = MediaAsset.objects.create(
filename=data.filename or path.name,
file_path=rel_path,
file_size=path.stat().st_size,
filename=data.filename or data.file_path.split("/")[-1],
file_path=data.file_path,
file_size=data.file_size,
)
# TODO: Queue probe task via gRPC/Celery
return asset
@@ -61,10 +42,8 @@ def list_assets(
from mpr.media_assets.models import MediaAsset
qs = MediaAsset.objects.all()
if status:
qs = qs.filter(status=status)
return list(qs[offset : offset + limit])
@@ -102,62 +81,36 @@ def delete_asset(asset_id: UUID, asset=Depends(get_asset)):
@router.post("/scan", response_model=dict)
def scan_media_folder():
"""
Scan the media folder for new video/audio files and register them as assets.
Returns a summary of files found and registered.
Scan the S3 media-in bucket for new video/audio files and register them as assets.
"""
import os
from pathlib import Path
from mpr.media_assets.models import MediaAsset
# Get media input folder from environment
media_root = os.environ.get("MEDIA_IN", "/app/media/in")
media_path = Path(media_root)
if not media_path.exists():
raise HTTPException(
status_code=500, detail=f"Media folder not found: {media_root}"
)
# Supported video/audio extensions
video_exts = {".mp4", ".mkv", ".avi", ".mov", ".webm", ".flv", ".wmv", ".m4v"}
audio_exts = {".mp3", ".wav", ".flac", ".aac", ".ogg", ".m4a"}
supported_exts = video_exts | audio_exts
# List objects from S3 bucket
objects = list_objects(BUCKET_IN, extensions=MEDIA_EXTS)
# Get existing filenames to avoid duplicates
existing_filenames = set(MediaAsset.objects.values_list("filename", flat=True))
# Scan for media files
found_files = []
registered_files = []
skipped_files = []
for file_path in media_path.rglob("*"):
if file_path.is_file() and file_path.suffix.lower() in supported_exts:
found_files.append(str(file_path))
# Skip if already registered
if file_path.name in existing_filenames:
skipped_files.append(file_path.name)
for obj in objects:
if obj["filename"] in existing_filenames:
skipped_files.append(obj["filename"])
continue
# Register new asset with path relative to media root
rel_path = str(file_path.relative_to(media_path))
try:
asset = MediaAsset.objects.create(
filename=file_path.name,
file_path=rel_path,
file_size=file_path.stat().st_size,
MediaAsset.objects.create(
filename=obj["filename"],
file_path=obj["key"],
file_size=obj["size"],
)
registered_files.append(file_path.name)
# TODO: Queue probe task to extract metadata
registered_files.append(obj["filename"])
except Exception as e:
print(f"Error registering {file_path.name}: {e}")
print(f"Error registering {obj['filename']}: {e}")
return {
"found": len(found_files),
"found": len(objects),
"registered": len(registered_files),
"skipped": len(skipped_files),
"files": registered_files,

View File

@@ -2,17 +2,20 @@
Job endpoints - transcode/trim job management.
"""
import json
import os
from pathlib import Path
from typing import Optional
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query
from fastapi import APIRouter, Depends, Header, HTTPException, Query
from api.deps import get_asset, get_job, get_preset
from api.schemas import JobCreate, JobResponse
from api.schema import JobCreate, JobResponse
router = APIRouter(prefix="/jobs", tags=["jobs"])
CALLBACK_API_KEY = os.environ.get("CALLBACK_API_KEY", "")
@router.post("/", response_model=JobResponse, status_code=201)
def create_job(data: JobCreate):
@@ -36,7 +39,6 @@ def create_job(data: JobCreate):
if data.preset_id:
try:
preset = TranscodePreset.objects.get(id=data.preset_id)
# Snapshot preset at job creation time
preset_snapshot = {
"name": preset.name,
"container": preset.container,
@@ -61,22 +63,13 @@ def create_job(data: JobCreate):
status_code=400, detail="Must specify preset_id or trim_start/trim_end"
)
# Generate output filename and path
import os
from pathlib import Path
# Generate output filename - stored as S3 key in output bucket
output_filename = data.output_filename
if not output_filename:
stem = Path(source.filename).stem
ext = preset_snapshot.get("container", "mp4") if preset else "mp4"
output_filename = f"{stem}_output.{ext}"
media_out = os.environ.get("MEDIA_OUT", "/app/media/out")
output_path = str(Path(media_out) / output_filename)
media_in = os.environ.get("MEDIA_IN", "/app/media/in")
source_path = str(Path(media_in) / source.file_path)
# Create job
job = TranscodeJob.objects.create(
source_asset_id=source.id,
@@ -85,26 +78,95 @@ def create_job(data: JobCreate):
trim_start=data.trim_start,
trim_end=data.trim_end,
output_filename=output_filename,
output_path=output_path,
output_path=output_filename, # S3 key in output bucket
priority=data.priority or 0,
)
# Dispatch to Celery
# Dispatch based on executor mode
executor_mode = os.environ.get("MPR_EXECUTOR", "local")
if executor_mode == "lambda":
_dispatch_lambda(job, source, preset_snapshot)
else:
_dispatch_celery(job, source, preset_snapshot)
return job
def _dispatch_celery(job, source, preset_snapshot):
"""Dispatch job to Celery worker."""
from task.tasks import run_transcode_job
result = run_transcode_job.delay(
job_id=str(job.id),
source_path=source_path,
output_path=output_path,
source_key=source.file_path,
output_key=job.output_filename,
preset=preset_snapshot or None,
trim_start=data.trim_start,
trim_end=data.trim_end,
trim_start=job.trim_start,
trim_end=job.trim_end,
duration=source.duration,
)
job.celery_task_id = result.id
job.save(update_fields=["celery_task_id"])
return job
def _dispatch_lambda(job, source, preset_snapshot):
"""Dispatch job to AWS Step Functions."""
from task.executor import get_executor
executor = get_executor()
executor.run(
job_id=str(job.id),
source_path=source.file_path,
output_path=job.output_filename,
preset=preset_snapshot or None,
trim_start=job.trim_start,
trim_end=job.trim_end,
duration=source.duration,
)
@router.post("/{job_id}/callback")
def job_callback(
    job_id: UUID,
    payload: dict,
    x_api_key: Optional[str] = Header(None),
):
    """
    Callback endpoint for Lambda to report job completion.

    Expects a JSON payload with an optional "status" (defaults to "failed")
    and an optional "error" message. Protected by an API key when
    CALLBACK_API_KEY is configured; the check is skipped in dev when unset.
    """
    import hmac

    # Constant-time comparison to avoid leaking the key via timing.
    if CALLBACK_API_KEY and not hmac.compare_digest(x_api_key or "", CALLBACK_API_KEY):
        raise HTTPException(status_code=403, detail="Invalid API key")
    from django.utils import timezone
    from mpr.media_assets.models import TranscodeJob

    try:
        job = TranscodeJob.objects.get(id=job_id)
    except TranscodeJob.DoesNotExist:
        raise HTTPException(status_code=404, detail="Job not found")
    status = payload.get("status", "failed")
    job.status = status
    job.progress = 100.0 if status == "completed" else job.progress
    update_fields = ["status", "progress"]
    if payload.get("error"):
        job.error_message = payload["error"]
        update_fields.append("error_message")
    # Both terminal states stamp the completion time (branches were duplicated).
    if status in ("completed", "failed"):
        job.completed_at = timezone.now()
        update_fields.append("completed_at")
    job.save(update_fields=update_fields)
    return {"ok": True}
@router.get("/", response_model=list[JobResponse])
@@ -118,12 +180,10 @@ def list_jobs(
from mpr.media_assets.models import TranscodeJob
qs = TranscodeJob.objects.all()
if status:
qs = qs.filter(status=status)
if source_asset_id:
qs = qs.filter(source_asset_id=source_asset_id)
return list(qs[offset : offset + limit])
@@ -154,11 +214,8 @@ def cancel_job(job_id: UUID, job=Depends(get_job)):
status_code=400, detail=f"Cannot cancel job with status: {job.status}"
)
# TODO: Cancel via gRPC
job.status = "cancelled"
job.save(update_fields=["status"])
return job
@@ -173,6 +230,4 @@ def retry_job(job_id: UUID, job=Depends(get_job)):
job.error_message = None
job.save(update_fields=["status", "progress", "error_message"])
# TODO: Resubmit via gRPC
return job

View File

@@ -7,7 +7,7 @@ from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException
from api.deps import get_preset
from api.schemas import PresetCreate, PresetResponse, PresetUpdate
from api.schema import PresetCreate, PresetResponse, PresetUpdate
router = APIRouter(prefix="/presets", tags=["presets"])

View File

@@ -28,7 +28,7 @@ class AssetCreate(BaseSchema):
bitrate: Optional[int] = None
properties: Dict[str, Any]
comments: str = ""
tags: List[str]
tags: List[str] = Field(default_factory=list)
class AssetUpdate(BaseSchema):
"""AssetUpdate schema."""
@@ -65,6 +65,6 @@ class AssetResponse(BaseSchema):
bitrate: Optional[int] = None
properties: Dict[str, Any]
comments: str = ""
tags: List[str]
tags: List[str] = Field(default_factory=list)
created_at: Optional[datetime] = None
updated_at: Optional[datetime] = None

129
api/schema/graphql.py Normal file
View File

@@ -0,0 +1,129 @@
"""
Graphene Types - GENERATED FILE
Do not edit directly. Regenerate using modelgen.
"""
import graphene
class AssetStatus(graphene.Enum):
    """Allowed MediaAsset status values (generated by modelgen)."""
    PENDING = "pending"
    READY = "ready"
    ERROR = "error"
class JobStatus(graphene.Enum):
    """Allowed TranscodeJob status values (generated by modelgen)."""
    PENDING = "pending"
    PROCESSING = "processing"
    COMPLETED = "completed"
    FAILED = "failed"
    CANCELLED = "cancelled"
class MediaAssetType(graphene.ObjectType):
    """A video/audio file registered in the system."""
    # Identity / location
    id = graphene.UUID()
    filename = graphene.String()
    file_path = graphene.String()
    # Processing state
    status = graphene.String()
    error_message = graphene.String()
    # Media metadata (presumably filled by a probe step; may be null — confirm)
    file_size = graphene.Int()
    duration = graphene.Float()
    video_codec = graphene.String()
    audio_codec = graphene.String()
    width = graphene.Int()
    height = graphene.Int()
    framerate = graphene.Float()
    bitrate = graphene.Int()
    # User-editable metadata
    properties = graphene.JSONString()
    comments = graphene.String()
    tags = graphene.List(graphene.String)
    # Timestamps
    created_at = graphene.DateTime()
    updated_at = graphene.DateTime()
class TranscodePresetType(graphene.ObjectType):
    """A reusable transcoding configuration (like Handbrake presets)."""
    id = graphene.UUID()
    name = graphene.String()
    description = graphene.String()
    is_builtin = graphene.Boolean()
    container = graphene.String()
    # Video settings
    video_codec = graphene.String()
    video_bitrate = graphene.String()
    video_crf = graphene.Int()
    video_preset = graphene.String()
    resolution = graphene.String()
    framerate = graphene.Float()
    # Audio settings
    audio_codec = graphene.String()
    audio_bitrate = graphene.String()
    audio_channels = graphene.Int()
    audio_samplerate = graphene.Int()
    # Extra ffmpeg-style arguments — assumed; verify against worker usage
    extra_args = graphene.List(graphene.String)
    created_at = graphene.DateTime()
    updated_at = graphene.DateTime()
class TranscodeJobType(graphene.ObjectType):
    """A transcoding or trimming job in the queue."""
    id = graphene.UUID()
    # Source/preset references
    source_asset_id = graphene.UUID()
    preset_id = graphene.UUID()
    preset_snapshot = graphene.JSONString()
    # Trim window (seconds — assumed; confirm against worker)
    trim_start = graphene.Float()
    trim_end = graphene.Float()
    # Output
    output_filename = graphene.String()
    output_path = graphene.String()
    output_asset_id = graphene.UUID()
    # Execution state / progress
    status = graphene.String()
    progress = graphene.Float()
    current_frame = graphene.Int()
    current_time = graphene.Float()
    speed = graphene.String()
    error_message = graphene.String()
    # Executor handles: Celery task id or Step Functions execution ARN
    celery_task_id = graphene.String()
    execution_arn = graphene.String()
    priority = graphene.Int()
    # Timestamps
    created_at = graphene.DateTime()
    started_at = graphene.DateTime()
    completed_at = graphene.DateTime()
class CreateJobInput(graphene.InputObjectType):
    """Request body for creating a transcode/trim job."""
    source_asset_id = graphene.UUID(required=True)
    # At least one of preset_id / trim_start / trim_end must be supplied
    # (enforced by the CreateJob mutation, not here).
    preset_id = graphene.UUID()
    trim_start = graphene.Float()
    trim_end = graphene.Float()
    output_filename = graphene.String()
    priority = graphene.Int(default_value=0)
class SystemStatusType(graphene.ObjectType):
    """System status response."""
    status = graphene.String()
    version = graphene.String()
class ScanResultType(graphene.ObjectType):
    """Result of scanning the media input bucket."""
    found = graphene.Int()
    registered = graphene.Int()
    skipped = graphene.Int()
    # Filenames that were newly registered by this scan.
    files = graphene.List(graphene.String)
class WorkerStatusType(graphene.ObjectType):
    """Worker health and capabilities."""
    available = graphene.Boolean()
    active_jobs = graphene.Int()
    supported_codecs = graphene.List(graphene.String)
    gpu_available = graphene.Boolean()

View File

@@ -17,19 +17,27 @@ class JobStatus(str, Enum):
class JobCreate(BaseSchema):
"""Client-facing job creation request."""
"""JobCreate schema."""
source_asset_id: UUID
preset_id: Optional[UUID] = None
preset_snapshot: Dict[str, Any]
trim_start: Optional[float] = None
trim_end: Optional[float] = None
output_filename: Optional[str] = None
output_filename: str = ""
output_path: Optional[str] = None
output_asset_id: Optional[UUID] = None
progress: float = 0.0
current_frame: Optional[int] = None
current_time: Optional[float] = None
speed: Optional[str] = None
celery_task_id: Optional[str] = None
execution_arn: Optional[str] = None
priority: int = 0
started_at: Optional[datetime] = None
completed_at: Optional[datetime] = None
class JobUpdate(BaseSchema):
"""JobUpdate schema."""
source_asset_id: Optional[UUID] = None
preset_id: Optional[UUID] = None
preset_snapshot: Optional[Dict[str, Any]] = None
@@ -45,14 +53,13 @@ class JobUpdate(BaseSchema):
speed: Optional[str] = None
error_message: Optional[str] = None
celery_task_id: Optional[str] = None
execution_arn: Optional[str] = None
priority: Optional[int] = None
started_at: Optional[datetime] = None
completed_at: Optional[datetime] = None
class JobResponse(BaseSchema):
"""JobResponse schema."""
id: UUID
source_asset_id: UUID
preset_id: Optional[UUID] = None
@@ -69,6 +76,7 @@ class JobResponse(BaseSchema):
speed: Optional[str] = None
error_message: Optional[str] = None
celery_task_id: Optional[str] = None
execution_arn: Optional[str] = None
priority: int = 0
created_at: Optional[datetime] = None
started_at: Optional[datetime] = None

View File

@@ -24,7 +24,7 @@ class PresetCreate(BaseSchema):
audio_bitrate: Optional[str] = None
audio_channels: Optional[int] = None
audio_samplerate: Optional[int] = None
extra_args: List[str]
extra_args: List[str] = Field(default_factory=list)
class PresetUpdate(BaseSchema):
"""PresetUpdate schema."""
@@ -61,6 +61,6 @@ class PresetResponse(BaseSchema):
audio_bitrate: Optional[str] = None
audio_channels: Optional[int] = None
audio_samplerate: Optional[int] = None
extra_args: List[str]
extra_args: List[str] = Field(default_factory=list)
created_at: Optional[datetime] = None
updated_at: Optional[datetime] = None

View File

@@ -1,89 +0,0 @@
"""
Pydantic Models - GENERATED FILE
Do not edit directly. Regenerate using modelgen.
"""
from datetime import datetime
from enum import Enum
from typing import Any, Dict, List, Optional
from uuid import UUID
from pydantic import BaseModel, Field
class AssetStatus(str, Enum):
    """Allowed MediaAsset status values (generated by modelgen)."""
    PENDING = "pending"
    READY = "ready"
    ERROR = "error"
class JobStatus(str, Enum):
    """Allowed TranscodeJob status values (generated by modelgen)."""
    PENDING = "pending"
    PROCESSING = "processing"
    COMPLETED = "completed"
    FAILED = "failed"
    CANCELLED = "cancelled"
class MediaAsset(BaseModel):
    """A video/audio file registered in the system."""
    id: UUID
    filename: str
    file_path: str
    # Fix: the default must be the enum member, not the string
    # "AssetStatus.PENDING", which is not a valid AssetStatus value.
    # NOTE(review): this is a generated file — fix the modelgen template too.
    status: AssetStatus = AssetStatus.PENDING
    error_message: Optional[str] = None
    file_size: Optional[int] = None
    duration: Optional[float] = None
    video_codec: Optional[str] = None
    audio_codec: Optional[str] = None
    width: Optional[int] = None
    height: Optional[int] = None
    framerate: Optional[float] = None
    bitrate: Optional[int] = None
    properties: Dict[str, Any]
    comments: str = ""
    tags: List[str] = Field(default_factory=list)
    created_at: Optional[datetime] = None
    updated_at: Optional[datetime] = None
class TranscodePreset(BaseModel):
    """A reusable transcoding configuration (like Handbrake presets)."""
    id: UUID
    name: str
    description: str = ""
    is_builtin: bool = False
    container: str = "mp4"
    # Video settings
    video_codec: str = "libx264"
    video_bitrate: Optional[str] = None
    video_crf: Optional[int] = None
    video_preset: Optional[str] = None
    resolution: Optional[str] = None
    framerate: Optional[float] = None
    # Audio settings
    audio_codec: str = "aac"
    audio_bitrate: Optional[str] = None
    audio_channels: Optional[int] = None
    audio_samplerate: Optional[int] = None
    extra_args: List[str] = Field(default_factory=list)
    created_at: Optional[datetime] = None
    updated_at: Optional[datetime] = None
class TranscodeJob(BaseModel):
    """A transcoding or trimming job in the queue."""
    id: UUID
    source_asset_id: UUID
    preset_id: Optional[UUID] = None
    preset_snapshot: Dict[str, Any]
    trim_start: Optional[float] = None
    trim_end: Optional[float] = None
    output_filename: str = ""
    output_path: Optional[str] = None
    output_asset_id: Optional[UUID] = None
    # Fix: the default must be the enum member, not the string
    # "JobStatus.PENDING", which is not a valid JobStatus value.
    # NOTE(review): this is a generated file — fix the modelgen template too.
    status: JobStatus = JobStatus.PENDING
    progress: float = 0.0
    current_frame: Optional[int] = None
    current_time: Optional[float] = None
    speed: Optional[str] = None
    error_message: Optional[str] = None
    celery_task_id: Optional[str] = None
    priority: int = 0
    created_at: Optional[datetime] = None
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None

90
core/storage.py Normal file
View File

@@ -0,0 +1,90 @@
"""
S3 storage layer.
Uses MinIO locally (S3-compatible) and real AWS S3 in production.
The only difference is S3_ENDPOINT_URL: set for MinIO, omit for AWS.
"""
import os
import tempfile
from pathlib import Path
from typing import Optional
import boto3
from botocore.config import Config
# Bucket names; overridable via environment for non-default deployments.
BUCKET_IN = os.environ.get("S3_BUCKET_IN", "mpr-media-in")
BUCKET_OUT = os.environ.get("S3_BUCKET_OUT", "mpr-media-out")
def get_s3_client():
    """Build a boto3 S3 client.

    Works against MinIO (when S3_ENDPOINT_URL is set, with default dev
    credentials) and against real AWS S3 (default credential chain).
    """
    client_kwargs = {
        "region_name": os.environ.get("AWS_REGION", "us-east-1"),
        "config": Config(signature_version="s3v4"),
    }
    endpoint = os.environ.get("S3_ENDPOINT_URL")
    if endpoint:
        # MinIO path: explicit endpoint plus minioadmin fallbacks.
        client_kwargs.update(
            endpoint_url=endpoint,
            aws_access_key_id=os.environ.get("AWS_ACCESS_KEY_ID", "minioadmin"),
            aws_secret_access_key=os.environ.get("AWS_SECRET_ACCESS_KEY", "minioadmin"),
        )
    return boto3.client("s3", **client_kwargs)
def list_objects(bucket: str, prefix: str = "", extensions: Optional[set] = None) -> list[dict]:
    """List objects in *bucket* under *prefix*.

    When *extensions* is given, only keys whose lowercase suffix is in the
    set are returned. Each result dict has "key", "size" and "filename".
    Pagination is followed via ListObjectsV2 continuation tokens.
    """
    client = get_s3_client()
    results: list[dict] = []
    request = {"Bucket": bucket, "Prefix": prefix}
    done = False
    while not done:
        page = client.list_objects_v2(**request)
        for entry in page.get("Contents", []):
            key = entry["Key"]
            if extensions and Path(key).suffix.lower() not in extensions:
                continue
            results.append({
                "key": key,
                "size": entry["Size"],
                "filename": Path(key).name,
            })
        if page.get("IsTruncated"):
            request["ContinuationToken"] = page["NextContinuationToken"]
        else:
            done = True
    return results
def download_file(bucket: str, key: str, local_path: str) -> str:
    """Fetch s3://bucket/key into *local_path*, creating parent dirs as needed."""
    destination = Path(local_path)
    destination.parent.mkdir(parents=True, exist_ok=True)
    get_s3_client().download_file(bucket, key, local_path)
    return local_path
def download_to_temp(bucket: str, key: str) -> str:
    """Download s3://bucket/key to a fresh temp file and return its path.

    The temp file keeps the key's extension. The caller owns the file and
    must delete it when done.
    """
    with tempfile.NamedTemporaryFile(suffix=Path(key).suffix, delete=False) as tmp:
        tmp_path = tmp.name
    download_file(bucket, key, tmp_path)
    return tmp_path
def upload_file(local_path: str, bucket: str, key: str) -> None:
    """Upload the file at *local_path* to s3://bucket/key."""
    get_s3_client().upload_file(local_path, bucket, key)
def get_presigned_url(bucket: str, key: str, expires: int = 3600) -> str:
    """Return a time-limited GET URL for s3://bucket/key (default: 1 hour)."""
    params = {"Bucket": bucket, "Key": key}
    client = get_s3_client()
    return client.generate_presigned_url(
        "get_object",
        Params=params,
        ExpiresIn=expires,
    )

View File

@@ -27,9 +27,13 @@ GRPC_HOST=grpc
GRPC_PORT=50051
GRPC_MAX_WORKERS=10
# Media
MEDIA_IN=/app/media/in
MEDIA_OUT=/app/media/out
# S3 Storage (MinIO locally, real S3 on AWS)
S3_ENDPOINT_URL=http://minio:9000
S3_BUCKET_IN=mpr-media-in
S3_BUCKET_OUT=mpr-media-out
AWS_REGION=us-east-1
AWS_ACCESS_KEY_ID=minioadmin
AWS_SECRET_ACCESS_KEY=minioadmin
# Vite
VITE_ALLOWED_HOSTS=your-domain.local

View File

@@ -1,18 +1,17 @@
#!/bin/bash
# Deploy MPR to remote server via rsync
# Uses project .gitignore for excludes
# MPR Deploy Script
#
# Usage: ./ctrl/deploy.sh [--restart] [--dry-run]
# Usage: ./ctrl/deploy.sh <command> [options]
#
# Examples:
# ./ctrl/deploy.sh # Sync files only
# ./ctrl/deploy.sh --restart # Sync and restart services
# ./ctrl/deploy.sh --dry-run # Preview sync
# Commands:
# rsync [--restart] [--dry-run] Sync to remote server via rsync
# aws Deploy AWS infrastructure (Lambda, Step Functions, S3)
set -e
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
cd "$PROJECT_ROOT"
source "$SCRIPT_DIR/.env" 2>/dev/null || true
@@ -21,56 +20,268 @@ GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'
if [ -z "$SERVER" ] || [ -z "$REMOTE_PATH" ]; then
# ─── Rsync Deploy ─────────────────────────────────────────────────────────────
deploy_rsync() {
if [ -z "${SERVER:-}" ] || [ -z "${REMOTE_PATH:-}" ]; then
echo -e "${RED}Error: SERVER and REMOTE_PATH must be set in ctrl/.env${NC}"
echo "Example:"
echo " SERVER=user@host"
echo " REMOTE_PATH=~/mpr"
exit 1
fi
fi
RESTART=false
DRY_RUN=""
RESTART=false
DRY_RUN=""
while [ $# -gt 0 ]; do
while [ $# -gt 0 ]; do
case "$1" in
--restart)
RESTART=true
shift
;;
--dry-run)
DRY_RUN="--dry-run"
shift
;;
*)
echo "Unknown option: $1"
exit 1
;;
--restart) RESTART=true; shift ;;
--dry-run) DRY_RUN="--dry-run"; shift ;;
*) echo "Unknown option: $1"; exit 1 ;;
esac
done
done
echo -e "${GREEN}=== Deploying MPR to $SERVER:$REMOTE_PATH ===${NC}"
echo -e "${GREEN}=== Deploying MPR to $SERVER:$REMOTE_PATH ===${NC}"
# Sync files using .gitignore for excludes
echo -e "${YELLOW}Syncing files...${NC}"
rsync -avz --delete $DRY_RUN \
echo -e "${YELLOW}Syncing files...${NC}"
rsync -avz --delete $DRY_RUN \
--filter=':- .gitignore' \
--exclude='.git' \
--exclude='media/*' \
--exclude='ctrl/.env' \
"$PROJECT_ROOT/" "$SERVER:$REMOTE_PATH/"
if [ -n "$DRY_RUN" ]; then
if [ -n "$DRY_RUN" ]; then
echo -e "${YELLOW}Dry run - no changes made${NC}"
exit 0
fi
fi
# Copy env template if .env doesn't exist on remote
ssh "$SERVER" "[ -f $REMOTE_PATH/ctrl/.env ] || cp $REMOTE_PATH/ctrl/.env.template $REMOTE_PATH/ctrl/.env"
ssh "$SERVER" "[ -f $REMOTE_PATH/ctrl/.env ] || cp $REMOTE_PATH/ctrl/.env.template $REMOTE_PATH/ctrl/.env"
if [ "$RESTART" = true ]; then
if [ "$RESTART" = true ]; then
echo -e "${YELLOW}Restarting services...${NC}"
ssh "$SERVER" "cd $REMOTE_PATH/ctrl && docker compose down && docker compose up -d --build"
fi
fi
echo -e "${GREEN}Done!${NC}"
echo -e "${GREEN}Done!${NC}"
}
# ─── AWS Deploy ────────────────────────────────────────────────────────────────
deploy_aws() {
    # Provision/update the AWS side of MPR: S3 buckets, IAM roles, ECR repo,
    # Lambda container image, and the Step Functions state machine.
    # Idempotent: existing resources are detected and updated, not recreated.
    REGION="${AWS_REGION:-us-east-1}"
    ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text)
    PROJECT="mpr"

    # S3
    BUCKET_IN="${S3_BUCKET_IN:-mpr-media-in}"
    BUCKET_OUT="${S3_BUCKET_OUT:-mpr-media-out}"

    # ECR
    ECR_REPO="${PROJECT}-transcode"
    ECR_URI="${ACCOUNT_ID}.dkr.ecr.${REGION}.amazonaws.com/${ECR_REPO}"

    # Lambda
    LAMBDA_NAME="${PROJECT}-transcode"
    LAMBDA_TIMEOUT=900
    LAMBDA_MEMORY=2048

    # Step Functions
    SFN_NAME="${PROJECT}-transcode"

    # IAM
    LAMBDA_ROLE_NAME="${PROJECT}-lambda-role"
    SFN_ROLE_NAME="${PROJECT}-sfn-role"

    # Callback
    CALLBACK_URL="${CALLBACK_URL:-https://mpr.mcrn.ar/api}"
    CALLBACK_API_KEY="${CALLBACK_API_KEY:-changeme}"

    echo -e "${GREEN}=== Deploying MPR to AWS ($REGION, account $ACCOUNT_ID) ===${NC}"

    # ─── S3 Buckets ───────────────────────────────────────────────────────
    echo -e "${YELLOW}Creating S3 buckets...${NC}"
    for bucket in "$BUCKET_IN" "$BUCKET_OUT"; do
        if ! aws s3api head-bucket --bucket "$bucket" 2>/dev/null; then
            # BUGFIX: the S3 API rejects an explicit LocationConstraint of
            # us-east-1 (the default region) — omit the configuration there.
            if [ "$REGION" = "us-east-1" ]; then
                aws s3api create-bucket \
                    --bucket "$bucket" \
                    --region "$REGION"
            else
                aws s3api create-bucket \
                    --bucket "$bucket" \
                    --region "$REGION" \
                    --create-bucket-configuration LocationConstraint="$REGION"
            fi
            echo "  Created $bucket"
        else
            echo "  $bucket already exists"
        fi
    done

    # ─── IAM Roles ────────────────────────────────────────────────────────
    echo -e "${YELLOW}Creating IAM roles...${NC}"
    # Lambda execution role: basic logging + scoped S3 access to both buckets.
    if ! aws iam get-role --role-name "$LAMBDA_ROLE_NAME" 2>/dev/null; then
        aws iam create-role \
            --role-name "$LAMBDA_ROLE_NAME" \
            --assume-role-policy-document '{
                "Version": "2012-10-17",
                "Statement": [{
                    "Effect": "Allow",
                    "Principal": {"Service": "lambda.amazonaws.com"},
                    "Action": "sts:AssumeRole"
                }]
            }'
        aws iam attach-role-policy \
            --role-name "$LAMBDA_ROLE_NAME" \
            --policy-arn arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole
        aws iam put-role-policy \
            --role-name "$LAMBDA_ROLE_NAME" \
            --policy-name "${PROJECT}-s3-access" \
            --policy-document '{
                "Version": "2012-10-17",
                "Statement": [{
                    "Effect": "Allow",
                    "Action": ["s3:GetObject", "s3:PutObject"],
                    "Resource": [
                        "arn:aws:s3:::'"$BUCKET_IN"'/*",
                        "arn:aws:s3:::'"$BUCKET_OUT"'/*"
                    ]
                }]
            }'
        echo "  Created $LAMBDA_ROLE_NAME"
        echo "  Waiting for role to propagate..."
        # Newly created IAM roles are eventually consistent; give them a
        # moment before referencing them from Lambda/Step Functions.
        sleep 10
    else
        echo "  $LAMBDA_ROLE_NAME already exists"
    fi
    LAMBDA_ROLE_ARN=$(aws iam get-role --role-name "$LAMBDA_ROLE_NAME" --query Role.Arn --output text)

    # Step Functions role: only needs to invoke the transcode Lambda.
    if ! aws iam get-role --role-name "$SFN_ROLE_NAME" 2>/dev/null; then
        aws iam create-role \
            --role-name "$SFN_ROLE_NAME" \
            --assume-role-policy-document '{
                "Version": "2012-10-17",
                "Statement": [{
                    "Effect": "Allow",
                    "Principal": {"Service": "states.amazonaws.com"},
                    "Action": "sts:AssumeRole"
                }]
            }'
        aws iam put-role-policy \
            --role-name "$SFN_ROLE_NAME" \
            --policy-name "${PROJECT}-sfn-invoke-lambda" \
            --policy-document '{
                "Version": "2012-10-17",
                "Statement": [{
                    "Effect": "Allow",
                    "Action": "lambda:InvokeFunction",
                    "Resource": "arn:aws:lambda:'"$REGION"':'"$ACCOUNT_ID"':function:'"$LAMBDA_NAME"'"
                }]
            }'
        echo "  Created $SFN_ROLE_NAME"
        sleep 10
    else
        echo "  $SFN_ROLE_NAME already exists"
    fi
    SFN_ROLE_ARN=$(aws iam get-role --role-name "$SFN_ROLE_NAME" --query Role.Arn --output text)

    # ─── ECR Repository ──────────────────────────────────────────────────
    echo -e "${YELLOW}Setting up ECR...${NC}"
    if ! aws ecr describe-repositories --repository-names "$ECR_REPO" --region "$REGION" 2>/dev/null; then
        aws ecr create-repository --repository-name "$ECR_REPO" --region "$REGION"
        echo "  Created ECR repo $ECR_REPO"
    else
        echo "  ECR repo $ECR_REPO already exists"
    fi

    # ─── Build & Push Lambda Image ───────────────────────────────────────
    echo -e "${YELLOW}Building Lambda container image...${NC}"
    docker build -f ctrl/lambda/Dockerfile -t "${ECR_REPO}:latest" .

    echo -e "${YELLOW}Pushing to ECR...${NC}"
    aws ecr get-login-password --region "$REGION" | \
        docker login --username AWS --password-stdin "${ACCOUNT_ID}.dkr.ecr.${REGION}.amazonaws.com"
    docker tag "${ECR_REPO}:latest" "${ECR_URI}:latest"
    docker push "${ECR_URI}:latest"

    # ─── Lambda Function ─────────────────────────────────────────────────
    echo -e "${YELLOW}Deploying Lambda function...${NC}"
    LAMBDA_ARN="arn:aws:lambda:${REGION}:${ACCOUNT_ID}:function:${LAMBDA_NAME}"
    if aws lambda get-function --function-name "$LAMBDA_NAME" --region "$REGION" 2>/dev/null; then
        aws lambda update-function-code \
            --function-name "$LAMBDA_NAME" \
            --image-uri "${ECR_URI}:latest" \
            --region "$REGION"
        echo "  Updated $LAMBDA_NAME"
    else
        # BUGFIX: AWS_REGION is a reserved Lambda environment key (set by the
        # runtime automatically) — create-function rejects it, so only the
        # bucket names are passed here.
        aws lambda create-function \
            --function-name "$LAMBDA_NAME" \
            --package-type Image \
            --code ImageUri="${ECR_URI}:latest" \
            --role "$LAMBDA_ROLE_ARN" \
            --timeout "$LAMBDA_TIMEOUT" \
            --memory-size "$LAMBDA_MEMORY" \
            --environment "Variables={S3_BUCKET_IN=${BUCKET_IN},S3_BUCKET_OUT=${BUCKET_OUT}}" \
            --region "$REGION"
        echo "  Created $LAMBDA_NAME"
    fi

    # ─── Step Functions ───────────────────────────────────────────────────
    echo -e "${YELLOW}Deploying Step Functions state machine...${NC}"
    # Substitute the concrete Lambda ARN into the state machine template.
    SFN_DEFINITION=$(sed "s|\${TranscodeLambdaArn}|${LAMBDA_ARN}|g" ctrl/state-machine.json)
    SFN_ARN="arn:aws:states:${REGION}:${ACCOUNT_ID}:stateMachine:${SFN_NAME}"
    if aws stepfunctions describe-state-machine --state-machine-arn "$SFN_ARN" --region "$REGION" 2>/dev/null; then
        aws stepfunctions update-state-machine \
            --state-machine-arn "$SFN_ARN" \
            --definition "$SFN_DEFINITION" \
            --region "$REGION"
        echo "  Updated $SFN_NAME"
    else
        aws stepfunctions create-state-machine \
            --name "$SFN_NAME" \
            --definition "$SFN_DEFINITION" \
            --role-arn "$SFN_ROLE_ARN" \
            --region "$REGION"
        echo "  Created $SFN_NAME"
    fi

    # ─── Summary ──────────────────────────────────────────────────────────
    echo ""
    echo -e "${GREEN}Deployment complete!${NC}"
    echo ""
    echo "Add these to your .env:"
    echo "  MPR_EXECUTOR=lambda"
    echo "  STEP_FUNCTION_ARN=${SFN_ARN}"
    echo "  LAMBDA_FUNCTION_ARN=${LAMBDA_ARN}"
    echo "  S3_BUCKET_IN=${BUCKET_IN}"
    echo "  S3_BUCKET_OUT=${BUCKET_OUT}"
    echo "  CALLBACK_URL=${CALLBACK_URL}"
    echo "  CALLBACK_API_KEY=${CALLBACK_API_KEY}"
}
# ─── Main ──────────────────────────────────────────────────────────────────────
# Dispatch on the first positional argument; remaining args are forwarded
# to the selected deploy function.
COMMAND="${1:-}"
shift || true  # no-op when no args were given (bare `shift` would trip `set -e`)
case "$COMMAND" in
rsync) deploy_rsync "$@" ;;
aws) deploy_aws "$@" ;;
*)
# Unknown or missing command: print usage and fail.
echo "Usage: ./ctrl/deploy.sh <command> [options]"
echo ""
echo "Commands:"
echo "  rsync [--restart] [--dry-run]   Sync to remote server"
echo "  aws                             Deploy AWS infrastructure"
exit 1
;;
esac

View File

@@ -5,8 +5,12 @@ x-common-env: &common-env
DEBUG: 1
GRPC_HOST: grpc
GRPC_PORT: 50051
MEDIA_IN: ${MEDIA_IN:-/app/media/in}
MEDIA_OUT: ${MEDIA_OUT:-/app/media/out}
S3_ENDPOINT_URL: http://minio:9000
S3_BUCKET_IN: mpr-media-in
S3_BUCKET_OUT: mpr-media-out
AWS_ACCESS_KEY_ID: minioadmin
AWS_SECRET_ACCESS_KEY: minioadmin
AWS_REGION: us-east-1
x-healthcheck-defaults: &healthcheck-defaults
interval: 5s
@@ -42,17 +46,46 @@ services:
<<: *healthcheck-defaults
test: ["CMD", "redis-cli", "ping"]
minio:
image: minio/minio
command: ["server", "/data", "--console-address", ":9001"]
ports:
- "9000:9000"
- "9001:9001"
environment:
MINIO_ROOT_USER: minioadmin
MINIO_ROOT_PASSWORD: minioadmin
volumes:
- minio-data:/data
healthcheck:
<<: *healthcheck-defaults
test: ["CMD", "mc", "ready", "local"]
minio-init:
image: minio/mc
depends_on:
minio:
condition: service_healthy
entrypoint: ["/bin/sh", "-c"]
command:
- |
mc alias set local http://minio:9000 minioadmin minioadmin
mc mb --ignore-existing local/mpr-media-in
mc mb --ignore-existing local/mpr-media-out
mc anonymous set download local/mpr-media-in
mc anonymous set download local/mpr-media-out
nginx:
image: nginx:alpine
ports:
- "80:80"
volumes:
- ./nginx.conf:/etc/nginx/nginx.conf:ro
- ../media:/app/media:ro
depends_on:
- django
- fastapi
- timeline
- minio
# =============================================================================
# Application Services
@@ -72,7 +105,6 @@ services:
<<: *common-env
volumes:
- ..:/app
- ../media:/app/media
depends_on:
postgres:
condition: service_healthy
@@ -90,7 +122,6 @@ services:
<<: *common-env
volumes:
- ..:/app
- ../media:/app/media
depends_on:
postgres:
condition: service_healthy
@@ -110,7 +141,6 @@ services:
GRPC_MAX_WORKERS: 10
volumes:
- ..:/app
- ../media:/app/media
depends_on:
postgres:
condition: service_healthy
@@ -127,7 +157,6 @@ services:
MPR_EXECUTOR: local
volumes:
- ..:/app
- ../media:/app/media
depends_on:
postgres:
condition: service_healthy
@@ -150,6 +179,7 @@ services:
volumes:
postgres-data:
redis-data:
minio-data:
networks:
default:

View File

@@ -1,40 +1,12 @@
#!/bin/bash
# Model generation script for MPR
# Generates Django, Pydantic, TypeScript, and Protobuf from schema/models
# Generates all targets from schema/modelgen.json config
set -e
cd "$(dirname "$0")/.."
echo "Generating models from schema/models..."
# Django ORM models: domain models + enums
python -m modelgen from-schema \
--schema schema/models \
--output mpr/media_assets/models.py \
--targets django \
--include dataclasses,enums
# Pydantic schemas for FastAPI: domain models + enums
python -m modelgen from-schema \
--schema schema/models \
--output api/schemas/models.py \
--targets pydantic \
--include dataclasses,enums
# TypeScript types for Timeline UI: domain models + enums + API types
python -m modelgen from-schema \
--schema schema/models \
--output ui/timeline/src/types.ts \
--targets typescript \
--include dataclasses,enums,api
# Protobuf for gRPC: gRPC messages + service
python -m modelgen from-schema \
--schema schema/models \
--output rpc/protos/worker.proto \
--targets proto \
--include grpc
python -m modelgen generate --config schema/modelgen.json
# Generate gRPC stubs from proto
echo "Generating gRPC stubs..."

21
ctrl/lambda/Dockerfile Normal file
View File

@@ -0,0 +1,21 @@
# Lambda container image for the MPR transcode worker.
FROM public.ecr.aws/lambda/python:3.11
# Install ffmpeg static binary
# NOTE(review): this fetches the *latest* static release at build time —
# pin a specific version for reproducible builds.
RUN yum install -y tar xz && \
curl -L https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz -o /tmp/ffmpeg.tar.xz && \
tar -xf /tmp/ffmpeg.tar.xz -C /tmp && \
cp /tmp/ffmpeg-*-amd64-static/ffmpeg /usr/local/bin/ffmpeg && \
cp /tmp/ffmpeg-*-amd64-static/ffprobe /usr/local/bin/ffprobe && \
rm -rf /tmp/ffmpeg* && \
yum clean all
# Install Python dependencies
COPY ctrl/lambda/requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Copy application code
# Only the handler and shared core/ package are shipped — the rest of the
# repo (Django, FastAPI, UI) stays out of the Lambda image.
COPY task/lambda_handler.py ${LAMBDA_TASK_ROOT}/task/lambda_handler.py
COPY task/__init__.py ${LAMBDA_TASK_ROOT}/task/__init__.py
COPY core/ ${LAMBDA_TASK_ROOT}/core/
# Lambda entry point: module.function resolved inside LAMBDA_TASK_ROOT.
CMD ["task.lambda_handler.handler"]

View File

@@ -0,0 +1,2 @@
ffmpeg-python>=0.2.0
requests>=2.31.0

View File

@@ -21,6 +21,10 @@ http {
server timeline:5173;
}
upstream minio {
server minio:9000;
}
server {
listen 80;
server_name mpr.local.ar;
@@ -67,16 +71,15 @@ http {
proxy_set_header Host $host;
}
# Media files - input (source)
location /media/in {
alias /app/media/in;
autoindex on;
# Media files - proxied from MinIO (local) or S3 (AWS)
location /media/in/ {
proxy_pass http://minio/mpr-media-in/;
proxy_set_header Host $http_host;
}
# Media files - output (transcoded)
location /media/out {
alias /app/media/out;
autoindex on;
location /media/out/ {
proxy_pass http://minio/mpr-media-out/;
proxy_set_header Host $http_host;
}
# Default to Timeline UI

39
ctrl/state-machine.json Normal file
View File

@@ -0,0 +1,39 @@
{
"Comment": "MPR Transcode Job - orchestrates Lambda-based media transcoding",
"StartAt": "Transcode",
"States": {
"Transcode": {
"Type": "Task",
"Resource": "${TranscodeLambdaArn}",
"TimeoutSeconds": 900,
"Retry": [
{
"ErrorEquals": ["States.TaskFailed", "Lambda.ServiceException"],
"IntervalSeconds": 10,
"MaxAttempts": 2,
"BackoffRate": 2.0
}
],
"Catch": [
{
"ErrorEquals": ["States.ALL"],
"Next": "HandleError",
"ResultPath": "$.error"
}
],
"Next": "Done"
},
"HandleError": {
"Type": "Pass",
"Parameters": {
"status": "failed",
"job_id.$": "$.job_id",
"error.$": "$.error.Cause"
},
"Next": "Done"
},
"Done": {
"Type": "Succeed"
}
}
}

View File

@@ -3,13 +3,11 @@ digraph system_overview {
node [shape=box, style=rounded, fontname="Helvetica"]
edge [fontname="Helvetica", fontsize=10]
// Title
labelloc="t"
label="MPR - System Overview"
fontsize=16
fontname="Helvetica-Bold"
// Styling
graph [splines=ortho, nodesep=0.8, ranksep=0.8]
// External
@@ -18,7 +16,7 @@ digraph system_overview {
style=dashed
color=gray
browser [label="Browser\nmpr.local.ar", shape=ellipse]
browser [label="Browser\nmpr.local.ar / mpr.mcrn.ar", shape=ellipse]
}
// Nginx reverse proxy
@@ -37,7 +35,7 @@ digraph system_overview {
fillcolor="#f0f8e8"
django [label="Django\n/admin\nport 8701"]
fastapi [label="FastAPI\n/api\nport 8702"]
fastapi [label="FastAPI\n/api + /graphql\nport 8702"]
timeline [label="Timeline UI\n/ui\nport 5173"]
}
@@ -48,8 +46,17 @@ digraph system_overview {
fillcolor="#fff8e8"
grpc_server [label="gRPC Server\nport 50051"]
celery [label="Celery Worker\n(local)"]
lambda [label="Lambda\n(cloud)", style="dashed,rounded"]
celery [label="Celery Worker\n(local mode)"]
}
// AWS layer
subgraph cluster_aws {
label="AWS (lambda mode)"
style=filled
fillcolor="#fde8d0"
step_functions [label="Step Functions\nstate machine"]
lambda [label="Lambda\nFFmpeg container"]
}
// Data layer
@@ -58,48 +65,50 @@ digraph system_overview {
style=filled
fillcolor="#f8e8f0"
postgres [label="PostgreSQL\nport 5433", shape=cylinder]
redis [label="Redis\nport 6380", shape=cylinder]
sqs [label="SQS\n(cloud)", shape=cylinder, style=dashed]
postgres [label="PostgreSQL\nport 5436", shape=cylinder]
redis [label="Redis\nport 6381", shape=cylinder]
}
// Storage
subgraph cluster_storage {
label="File Storage"
label="S3 Storage"
style=filled
fillcolor="#f0f0f0"
local_fs [label="Local FS\n/media", shape=folder]
s3 [label="S3\n(cloud)", shape=folder, style=dashed]
minio [label="MinIO (local)\nport 9000", shape=folder]
s3 [label="AWS S3 (cloud)", shape=folder, style="dashed,rounded"]
bucket_in [label="mpr-media-in", shape=note]
bucket_out [label="mpr-media-out", shape=note]
}
// Connections
browser -> nginx
nginx -> django [label="/admin"]
nginx -> fastapi [label="/api"]
nginx -> timeline [label="/ui"]
nginx -> django [xlabel="/admin"]
nginx -> fastapi [xlabel="/api, /graphql"]
nginx -> timeline [xlabel="/ui"]
nginx -> minio [xlabel="/media/*"]
// Django uses FastAPI for operations (single API gateway)
django -> fastapi [label="job operations"]
django -> postgres [label="CRUD only"]
timeline -> fastapi [xlabel="REST API"]
// Timeline UI uses FastAPI
timeline -> fastapi [label="REST API"]
// FastAPI is the single API gateway
fastapi -> postgres
fastapi -> redis [label="job status"]
fastapi -> grpc_server [label="gRPC\nprogress streaming"]
fastapi -> grpc_server [xlabel="gRPC\nprogress"]
// Worker layer
grpc_server -> celery [label="task dispatch"]
celery -> redis [label="queue"]
celery -> postgres [label="job updates"]
celery -> grpc_server [label="progress\ncallbacks", style=dotted]
celery -> local_fs [label="read/write"]
// Local mode
grpc_server -> celery [xlabel="task dispatch"]
celery -> redis [xlabel="queue"]
celery -> postgres [xlabel="job updates"]
celery -> minio [xlabel="S3 API\ndownload/upload"]
// Cloud (future)
lambda -> sqs [label="queue", style=dashed]
lambda -> s3 [label="read/write", style=dashed]
// Lambda mode
fastapi -> step_functions [xlabel="boto3\nstart_execution", style=dashed]
step_functions -> lambda [style=dashed]
lambda -> s3 [xlabel="download/upload", style=dashed]
lambda -> fastapi [xlabel="callback\nPOST /jobs/{id}/callback", style=dashed]
// Storage details
minio -> bucket_in [style=dotted, arrowhead=none]
minio -> bucket_out [style=dotted, arrowhead=none]
s3 -> bucket_in [style=dotted, arrowhead=none]
s3 -> bucket_out [style=dotted, arrowhead=none]
}

View File

@@ -1,260 +1,293 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<!-- Generated by graphviz version 14.1.1 (0)
<!-- Generated by graphviz version 14.1.2 (0)
-->
<!-- Title: system_overview Pages: 1 -->
<svg width="843pt" height="957pt"
viewBox="0.00 0.00 843.00 957.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 952.79)">
<svg width="620pt" height="903pt"
viewBox="0.00 0.00 620.00 903.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 898.54)">
<title>system_overview</title>
<polygon fill="white" stroke="none" points="-4,4 -4,-952.79 838.5,-952.79 838.5,4 -4,4"/>
<text xml:space="preserve" text-anchor="middle" x="417.25" y="-929.59" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">MPR &#45; System Overview</text>
<polygon fill="white" stroke="none" points="-4,4 -4,-898.54 616,-898.54 616,4 -4,4"/>
<text xml:space="preserve" text-anchor="middle" x="306" y="-875.34" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">MPR &#45; System Overview</text>
<g id="clust1" class="cluster">
<title>cluster_external</title>
<polygon fill="none" stroke="gray" stroke-dasharray="5,2" points="478,-809.69 478,-913.29 632,-913.29 632,-809.69 478,-809.69"/>
<text xml:space="preserve" text-anchor="middle" x="555" y="-894.09" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">External</text>
<polygon fill="none" stroke="gray" stroke-dasharray="5,2" points="246,-755.44 246,-859.04 540,-859.04 540,-755.44 246,-755.44"/>
<text xml:space="preserve" text-anchor="middle" x="393" y="-839.84" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">External</text>
</g>
<g id="clust2" class="cluster">
<title>cluster_proxy</title>
<polygon fill="#e8f4f8" stroke="black" points="482,-693.69 482,-779.69 628,-779.69 628,-693.69 482,-693.69"/>
<text xml:space="preserve" text-anchor="middle" x="555" y="-760.49" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Reverse Proxy</text>
<polygon fill="#e8f4f8" stroke="black" points="320,-654.94 320,-740.94 466,-740.94 466,-654.94 320,-654.94"/>
<text xml:space="preserve" text-anchor="middle" x="393" y="-721.74" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Reverse Proxy</text>
</g>
<g id="clust3" class="cluster">
<title>cluster_apps</title>
<polygon fill="#f0f8e8" stroke="black" points="352,-418.19 352,-651.94 606,-651.94 606,-418.19 352,-418.19"/>
<text xml:space="preserve" text-anchor="middle" x="479" y="-632.74" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Application Layer</text>
<polygon fill="#f0f8e8" stroke="black" points="278,-419.44 278,-640.44 532,-640.44 532,-419.44 278,-419.44"/>
<text xml:space="preserve" text-anchor="middle" x="405" y="-621.24" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Application Layer</text>
</g>
<g id="clust4" class="cluster">
<title>cluster_workers</title>
<polygon fill="#fff8e8" stroke="black" points="125,-151.69 125,-363.69 374,-363.69 374,-151.69 125,-151.69"/>
<text xml:space="preserve" text-anchor="middle" x="249.5" y="-344.49" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Worker Layer</text>
<polygon fill="#fff8e8" stroke="black" points="142,-218.44 142,-404.94 280,-404.94 280,-218.44 142,-218.44"/>
<text xml:space="preserve" text-anchor="middle" x="211" y="-385.74" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Worker Layer</text>
</g>
<g id="clust5" class="cluster">
<title>cluster_data</title>
<polygon fill="#f8e8f0" stroke="black" points="322,-8 322,-109.94 700,-109.94 700,-8 322,-8"/>
<text xml:space="preserve" text-anchor="middle" x="511" y="-90.74" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Data Layer</text>
<title>cluster_aws</title>
<polygon fill="#fde8d0" stroke="black" points="383,-218.44 383,-404.94 581,-404.94 581,-218.44 383,-218.44"/>
<text xml:space="preserve" text-anchor="middle" x="482" y="-385.74" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">AWS (lambda mode)</text>
</g>
<g id="clust6" class="cluster">
<title>cluster_data</title>
<polygon fill="#f8e8f0" stroke="black" points="8,-102 8,-203.94 263,-203.94 263,-102 8,-102"/>
<text xml:space="preserve" text-anchor="middle" x="135.5" y="-184.74" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Data Layer</text>
</g>
<g id="clust7" class="cluster">
<title>cluster_storage</title>
<polygon fill="#f0f0f0" stroke="black" points="8,-15.97 8,-101.97 218,-101.97 218,-15.97 8,-15.97"/>
<text xml:space="preserve" text-anchor="middle" x="113" y="-82.77" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">File Storage</text>
<polygon fill="#f0f0f0" stroke="black" points="302,-8 302,-195.97 604,-195.97 604,-8 302,-8"/>
<text xml:space="preserve" text-anchor="middle" x="453" y="-176.77" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">S3 Storage</text>
</g>
<!-- browser -->
<g id="node1" class="node">
<title>browser</title>
<ellipse fill="none" stroke="black" cx="555" cy="-847.74" rx="69.12" ry="30.05"/>
<text xml:space="preserve" text-anchor="middle" x="555" y="-851.69" font-family="Helvetica,sans-Serif" font-size="14.00">Browser</text>
<text xml:space="preserve" text-anchor="middle" x="555" y="-834.44" font-family="Helvetica,sans-Serif" font-size="14.00">mpr.local.ar</text>
<ellipse fill="none" stroke="black" cx="393" cy="-793.49" rx="139.12" ry="30.05"/>
<text xml:space="preserve" text-anchor="middle" x="393" y="-797.44" font-family="Helvetica,sans-Serif" font-size="14.00">Browser</text>
<text xml:space="preserve" text-anchor="middle" x="393" y="-780.19" font-family="Helvetica,sans-Serif" font-size="14.00">mpr.local.ar / mpr.mcrn.ar</text>
</g>
<!-- nginx -->
<g id="node2" class="node">
<title>nginx</title>
<path fill="none" stroke="black" d="M576.5,-744.19C576.5,-744.19 533.5,-744.19 533.5,-744.19 527.5,-744.19 521.5,-738.19 521.5,-732.19 521.5,-732.19 521.5,-713.69 521.5,-713.69 521.5,-707.69 527.5,-701.69 533.5,-701.69 533.5,-701.69 576.5,-701.69 576.5,-701.69 582.5,-701.69 588.5,-707.69 588.5,-713.69 588.5,-713.69 588.5,-732.19 588.5,-732.19 588.5,-738.19 582.5,-744.19 576.5,-744.19"/>
<text xml:space="preserve" text-anchor="middle" x="555" y="-726.89" font-family="Helvetica,sans-Serif" font-size="14.00">nginx</text>
<text xml:space="preserve" text-anchor="middle" x="555" y="-709.64" font-family="Helvetica,sans-Serif" font-size="14.00">port 80</text>
<path fill="none" stroke="black" d="M414.5,-705.44C414.5,-705.44 371.5,-705.44 371.5,-705.44 365.5,-705.44 359.5,-699.44 359.5,-693.44 359.5,-693.44 359.5,-674.94 359.5,-674.94 359.5,-668.94 365.5,-662.94 371.5,-662.94 371.5,-662.94 414.5,-662.94 414.5,-662.94 420.5,-662.94 426.5,-668.94 426.5,-674.94 426.5,-674.94 426.5,-693.44 426.5,-693.44 426.5,-699.44 420.5,-705.44 414.5,-705.44"/>
<text xml:space="preserve" text-anchor="middle" x="393" y="-688.14" font-family="Helvetica,sans-Serif" font-size="14.00">nginx</text>
<text xml:space="preserve" text-anchor="middle" x="393" y="-670.89" font-family="Helvetica,sans-Serif" font-size="14.00">port 80</text>
</g>
<!-- browser&#45;&gt;nginx -->
<g id="edge1" class="edge">
<title>browser&#45;&gt;nginx</title>
<path fill="none" stroke="black" d="M555,-817.21C555,-817.21 555,-756.06 555,-756.06"/>
<polygon fill="black" stroke="black" points="558.5,-756.06 555,-746.06 551.5,-756.06 558.5,-756.06"/>
<path fill="none" stroke="black" d="M393,-763.04C393,-763.04 393,-717.33 393,-717.33"/>
<polygon fill="black" stroke="black" points="396.5,-717.33 393,-707.33 389.5,-717.33 396.5,-717.33"/>
</g>
<!-- django -->
<g id="node3" class="node">
<title>django</title>
<path fill="none" stroke="black" d="M585.5,-616.44C585.5,-616.44 524.5,-616.44 524.5,-616.44 518.5,-616.44 512.5,-610.44 512.5,-604.44 512.5,-604.44 512.5,-568.69 512.5,-568.69 512.5,-562.69 518.5,-556.69 524.5,-556.69 524.5,-556.69 585.5,-556.69 585.5,-556.69 591.5,-556.69 597.5,-562.69 597.5,-568.69 597.5,-568.69 597.5,-604.44 597.5,-604.44 597.5,-610.44 591.5,-616.44 585.5,-616.44"/>
<text xml:space="preserve" text-anchor="middle" x="555" y="-599.14" font-family="Helvetica,sans-Serif" font-size="14.00">Django</text>
<text xml:space="preserve" text-anchor="middle" x="555" y="-581.89" font-family="Helvetica,sans-Serif" font-size="14.00">/admin</text>
<text xml:space="preserve" text-anchor="middle" x="555" y="-564.64" font-family="Helvetica,sans-Serif" font-size="14.00">port 8701</text>
<path fill="none" stroke="black" d="M359.5,-604.94C359.5,-604.94 298.5,-604.94 298.5,-604.94 292.5,-604.94 286.5,-598.94 286.5,-592.94 286.5,-592.94 286.5,-557.19 286.5,-557.19 286.5,-551.19 292.5,-545.19 298.5,-545.19 298.5,-545.19 359.5,-545.19 359.5,-545.19 365.5,-545.19 371.5,-551.19 371.5,-557.19 371.5,-557.19 371.5,-592.94 371.5,-592.94 371.5,-598.94 365.5,-604.94 359.5,-604.94"/>
<text xml:space="preserve" text-anchor="middle" x="329" y="-587.64" font-family="Helvetica,sans-Serif" font-size="14.00">Django</text>
<text xml:space="preserve" text-anchor="middle" x="329" y="-570.39" font-family="Helvetica,sans-Serif" font-size="14.00">/admin</text>
<text xml:space="preserve" text-anchor="middle" x="329" y="-553.14" font-family="Helvetica,sans-Serif" font-size="14.00">port 8701</text>
</g>
<!-- nginx&#45;&gt;django -->
<g id="edge2" class="edge">
<title>nginx&#45;&gt;django</title>
<path fill="none" stroke="black" d="M555,-701.33C555,-701.33 555,-628.2 555,-628.2"/>
<polygon fill="black" stroke="black" points="558.5,-628.2 555,-618.2 551.5,-628.2 558.5,-628.2"/>
<text xml:space="preserve" text-anchor="middle" x="571.88" y="-663.19" font-family="Helvetica,sans-Serif" font-size="10.00">/admin</text>
<path fill="none" stroke="black" d="M365.5,-662.63C365.5,-662.63 365.5,-616.77 365.5,-616.77"/>
<polygon fill="black" stroke="black" points="369,-616.77 365.5,-606.77 362,-616.77 369,-616.77"/>
<text xml:space="preserve" text-anchor="middle" x="348.62" y="-642.95" font-family="Helvetica,sans-Serif" font-size="10.00">/admin</text>
</g>
<!-- fastapi -->
<g id="node4" class="node">
<title>fastapi</title>
<path fill="none" stroke="black" d="M554.5,-485.94C554.5,-485.94 493.5,-485.94 493.5,-485.94 487.5,-485.94 481.5,-479.94 481.5,-473.94 481.5,-473.94 481.5,-438.19 481.5,-438.19 481.5,-432.19 487.5,-426.19 493.5,-426.19 493.5,-426.19 554.5,-426.19 554.5,-426.19 560.5,-426.19 566.5,-432.19 566.5,-438.19 566.5,-438.19 566.5,-473.94 566.5,-473.94 566.5,-479.94 560.5,-485.94 554.5,-485.94"/>
<text xml:space="preserve" text-anchor="middle" x="524" y="-468.64" font-family="Helvetica,sans-Serif" font-size="14.00">FastAPI</text>
<text xml:space="preserve" text-anchor="middle" x="524" y="-451.39" font-family="Helvetica,sans-Serif" font-size="14.00">/api</text>
<text xml:space="preserve" text-anchor="middle" x="524" y="-434.14" font-family="Helvetica,sans-Serif" font-size="14.00">port 8702</text>
<path fill="none" stroke="black" d="M395.5,-487.19C395.5,-487.19 298.5,-487.19 298.5,-487.19 292.5,-487.19 286.5,-481.19 286.5,-475.19 286.5,-475.19 286.5,-439.44 286.5,-439.44 286.5,-433.44 292.5,-427.44 298.5,-427.44 298.5,-427.44 395.5,-427.44 395.5,-427.44 401.5,-427.44 407.5,-433.44 407.5,-439.44 407.5,-439.44 407.5,-475.19 407.5,-475.19 407.5,-481.19 401.5,-487.19 395.5,-487.19"/>
<text xml:space="preserve" text-anchor="middle" x="347" y="-469.89" font-family="Helvetica,sans-Serif" font-size="14.00">FastAPI</text>
<text xml:space="preserve" text-anchor="middle" x="347" y="-452.64" font-family="Helvetica,sans-Serif" font-size="14.00">/api + /graphql</text>
<text xml:space="preserve" text-anchor="middle" x="347" y="-435.39" font-family="Helvetica,sans-Serif" font-size="14.00">port 8702</text>
</g>
<!-- nginx&#45;&gt;fastapi -->
<g id="edge3" class="edge">
<title>nginx&#45;&gt;fastapi</title>
<path fill="none" stroke="black" d="M521.02,-716C511.47,-716 503.63,-716 503.63,-716 503.63,-716 503.63,-497.9 503.63,-497.9"/>
<polygon fill="black" stroke="black" points="507.13,-497.9 503.63,-487.9 500.13,-497.9 507.13,-497.9"/>
<text xml:space="preserve" text-anchor="middle" x="723" y="-583.44" font-family="Helvetica,sans-Serif" font-size="10.00">/api</text>
<path fill="none" stroke="black" d="M383.5,-662.84C383.5,-662.84 383.5,-498.82 383.5,-498.82"/>
<polygon fill="black" stroke="black" points="387,-498.82 383.5,-488.82 380,-498.82 387,-498.82"/>
<text xml:space="preserve" text-anchor="middle" x="399.44" y="-571.33" font-family="Helvetica,sans-Serif" font-size="10.00">/api, /graphql</text>
</g>
<!-- timeline -->
<g id="node5" class="node">
<title>timeline</title>
<path fill="none" stroke="black" d="M442,-616.44C442,-616.44 372,-616.44 372,-616.44 366,-616.44 360,-610.44 360,-604.44 360,-604.44 360,-568.69 360,-568.69 360,-562.69 366,-556.69 372,-556.69 372,-556.69 442,-556.69 442,-556.69 448,-556.69 454,-562.69 454,-568.69 454,-568.69 454,-604.44 454,-604.44 454,-610.44 448,-616.44 442,-616.44"/>
<text xml:space="preserve" text-anchor="middle" x="407" y="-599.14" font-family="Helvetica,sans-Serif" font-size="14.00">Timeline UI</text>
<text xml:space="preserve" text-anchor="middle" x="407" y="-581.89" font-family="Helvetica,sans-Serif" font-size="14.00">/ui</text>
<text xml:space="preserve" text-anchor="middle" x="407" y="-564.64" font-family="Helvetica,sans-Serif" font-size="14.00">port 5173</text>
<path fill="none" stroke="black" d="M512,-604.94C512,-604.94 442,-604.94 442,-604.94 436,-604.94 430,-598.94 430,-592.94 430,-592.94 430,-557.19 430,-557.19 430,-551.19 436,-545.19 442,-545.19 442,-545.19 512,-545.19 512,-545.19 518,-545.19 524,-551.19 524,-557.19 524,-557.19 524,-592.94 524,-592.94 524,-598.94 518,-604.94 512,-604.94"/>
<text xml:space="preserve" text-anchor="middle" x="477" y="-587.64" font-family="Helvetica,sans-Serif" font-size="14.00">Timeline UI</text>
<text xml:space="preserve" text-anchor="middle" x="477" y="-570.39" font-family="Helvetica,sans-Serif" font-size="14.00">/ui</text>
<text xml:space="preserve" text-anchor="middle" x="477" y="-553.14" font-family="Helvetica,sans-Serif" font-size="14.00">port 5173</text>
</g>
<!-- nginx&#45;&gt;timeline -->
<g id="edge4" class="edge">
<title>nginx&#45;&gt;timeline</title>
<path fill="none" stroke="black" d="M521.05,-730C477.35,-730 407,-730 407,-730 407,-730 407,-628.15 407,-628.15"/>
<polygon fill="black" stroke="black" points="410.5,-628.15 407,-618.15 403.5,-628.15 410.5,-628.15"/>
<text xml:space="preserve" text-anchor="middle" x="450" y="-663.19" font-family="Helvetica,sans-Serif" font-size="10.00">/ui</text>
<path fill="none" stroke="black" d="M422.62,-662.67C422.62,-633.49 422.62,-585 422.62,-585 422.62,-585 423.34,-585 423.34,-585"/>
<polygon fill="black" stroke="black" points="418.22,-588.5 428.22,-585 418.22,-581.5 418.22,-588.5"/>
<text xml:space="preserve" text-anchor="middle" x="416.62" y="-613.98" font-family="Helvetica,sans-Serif" font-size="10.00">/ui</text>
</g>
<!-- django&#45;&gt;fastapi -->
<!-- minio -->
<g id="node12" class="node">
<title>minio</title>
<polygon fill="none" stroke="black" points="415.5,-160.47 412.5,-164.47 391.5,-164.47 388.5,-160.47 312.5,-160.47 312.5,-117.97 415.5,-117.97 415.5,-160.47"/>
<text xml:space="preserve" text-anchor="middle" x="364" y="-143.17" font-family="Helvetica,sans-Serif" font-size="14.00">MinIO (local)</text>
<text xml:space="preserve" text-anchor="middle" x="364" y="-125.92" font-family="Helvetica,sans-Serif" font-size="14.00">port 9000</text>
</g>
<!-- nginx&#45;&gt;minio -->
<g id="edge5" class="edge">
<title>django&#45;&gt;fastapi</title>
<path fill="none" stroke="black" d="M539.5,-556.3C539.5,-556.3 539.5,-497.68 539.5,-497.68"/>
<polygon fill="black" stroke="black" points="543,-497.68 539.5,-487.68 536,-497.68 543,-497.68"/>
<text xml:space="preserve" text-anchor="middle" x="561.88" y="-518.19" font-family="Helvetica,sans-Serif" font-size="10.00">job operations</text>
</g>
<!-- postgres -->
<g id="node9" class="node">
<title>postgres</title>
<path fill="none" stroke="black" d="M691.75,-69.12C691.75,-72.06 670.35,-74.44 644,-74.44 617.65,-74.44 596.25,-72.06 596.25,-69.12 596.25,-69.12 596.25,-21.31 596.25,-21.31 596.25,-18.38 617.65,-16 644,-16 670.35,-16 691.75,-18.38 691.75,-21.31 691.75,-21.31 691.75,-69.12 691.75,-69.12"/>
<path fill="none" stroke="black" d="M691.75,-69.12C691.75,-66.19 670.35,-63.81 644,-63.81 617.65,-63.81 596.25,-66.19 596.25,-69.12"/>
<text xml:space="preserve" text-anchor="middle" x="644" y="-49.17" font-family="Helvetica,sans-Serif" font-size="14.00">PostgreSQL</text>
<text xml:space="preserve" text-anchor="middle" x="644" y="-31.92" font-family="Helvetica,sans-Serif" font-size="14.00">port 5433</text>
</g>
<!-- django&#45;&gt;postgres -->
<g id="edge6" class="edge">
<title>django&#45;&gt;postgres</title>
<path fill="none" stroke="black" d="M597.82,-587C607.63,-587 615.25,-587 615.25,-587 615.25,-587 615.25,-85.86 615.25,-85.86"/>
<polygon fill="black" stroke="black" points="618.75,-85.86 615.25,-75.86 611.75,-85.86 618.75,-85.86"/>
<text xml:space="preserve" text-anchor="middle" x="808.25" y="-303.81" font-family="Helvetica,sans-Serif" font-size="10.00">CRUD only</text>
<title>nginx&#45;&gt;minio</title>
<path fill="none" stroke="black" d="M414.88,-662.68C414.88,-596.12 414.88,-398 414.88,-398 414.88,-398 344.17,-398 344.17,-398 344.17,-398 344.17,-172.35 344.17,-172.35"/>
<polygon fill="black" stroke="black" points="347.67,-172.35 344.17,-162.35 340.67,-172.35 347.67,-172.35"/>
<text xml:space="preserve" text-anchor="middle" x="378.03" y="-401.25" font-family="Helvetica,sans-Serif" font-size="10.00">/media/*</text>
</g>
<!-- grpc_server -->
<g id="node6" class="node">
<title>grpc_server</title>
<path fill="none" stroke="black" d="M353.5,-328.19C353.5,-328.19 274.5,-328.19 274.5,-328.19 268.5,-328.19 262.5,-322.19 262.5,-316.19 262.5,-316.19 262.5,-297.69 262.5,-297.69 262.5,-291.69 268.5,-285.69 274.5,-285.69 274.5,-285.69 353.5,-285.69 353.5,-285.69 359.5,-285.69 365.5,-291.69 365.5,-297.69 365.5,-297.69 365.5,-316.19 365.5,-316.19 365.5,-322.19 359.5,-328.19 353.5,-328.19"/>
<text xml:space="preserve" text-anchor="middle" x="314" y="-310.89" font-family="Helvetica,sans-Serif" font-size="14.00">gRPC Server</text>
<text xml:space="preserve" text-anchor="middle" x="314" y="-293.64" font-family="Helvetica,sans-Serif" font-size="14.00">port 50051</text>
<path fill="none" stroke="black" d="M246.5,-369.44C246.5,-369.44 167.5,-369.44 167.5,-369.44 161.5,-369.44 155.5,-363.44 155.5,-357.44 155.5,-357.44 155.5,-338.94 155.5,-338.94 155.5,-332.94 161.5,-326.94 167.5,-326.94 167.5,-326.94 246.5,-326.94 246.5,-326.94 252.5,-326.94 258.5,-332.94 258.5,-338.94 258.5,-338.94 258.5,-357.44 258.5,-357.44 258.5,-363.44 252.5,-369.44 246.5,-369.44"/>
<text xml:space="preserve" text-anchor="middle" x="207" y="-352.14" font-family="Helvetica,sans-Serif" font-size="14.00">gRPC Server</text>
<text xml:space="preserve" text-anchor="middle" x="207" y="-334.89" font-family="Helvetica,sans-Serif" font-size="14.00">port 50051</text>
</g>
<!-- fastapi&#45;&gt;grpc_server -->
<g id="edge10" class="edge">
<g id="edge8" class="edge">
<title>fastapi&#45;&gt;grpc_server</title>
<path fill="none" stroke="black" d="M509.75,-425.9C509.75,-382.34 509.75,-307 509.75,-307 509.75,-307 377.46,-307 377.46,-307"/>
<polygon fill="black" stroke="black" points="377.46,-303.5 367.46,-307 377.46,-310.5 377.46,-303.5"/>
<text xml:space="preserve" text-anchor="middle" x="398.25" y="-387.69" font-family="Helvetica,sans-Serif" font-size="10.00">gRPC</text>
<text xml:space="preserve" text-anchor="middle" x="398.25" y="-374.94" font-family="Helvetica,sans-Serif" font-size="10.00">progress streaming</text>
<path fill="none" stroke="black" d="M298.5,-427.06C298.5,-392.59 298.5,-341 298.5,-341 298.5,-341 270.41,-341 270.41,-341"/>
<polygon fill="black" stroke="black" points="270.41,-337.5 260.41,-341 270.41,-344.5 270.41,-337.5"/>
<text xml:space="preserve" text-anchor="middle" x="319.5" y="-385.98" font-family="Helvetica,sans-Serif" font-size="10.00">gRPC</text>
<text xml:space="preserve" text-anchor="middle" x="319.5" y="-373.23" font-family="Helvetica,sans-Serif" font-size="10.00">progress</text>
</g>
<!-- step_functions -->
<g id="node8" class="node">
<title>step_functions</title>
<path fill="none" stroke="black" d="M541.38,-369.44C541.38,-369.44 446.62,-369.44 446.62,-369.44 440.62,-369.44 434.62,-363.44 434.62,-357.44 434.62,-357.44 434.62,-338.94 434.62,-338.94 434.62,-332.94 440.62,-326.94 446.62,-326.94 446.62,-326.94 541.38,-326.94 541.38,-326.94 547.38,-326.94 553.38,-332.94 553.38,-338.94 553.38,-338.94 553.38,-357.44 553.38,-357.44 553.38,-363.44 547.38,-369.44 541.38,-369.44"/>
<text xml:space="preserve" text-anchor="middle" x="494" y="-352.14" font-family="Helvetica,sans-Serif" font-size="14.00">Step Functions</text>
<text xml:space="preserve" text-anchor="middle" x="494" y="-334.89" font-family="Helvetica,sans-Serif" font-size="14.00">state machine</text>
</g>
<!-- fastapi&#45;&gt;step_functions -->
<g id="edge13" class="edge">
<title>fastapi&#45;&gt;step_functions</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M375.83,-427.17C375.83,-396.99 375.83,-355 375.83,-355 375.83,-355 422.71,-355 422.71,-355"/>
<polygon fill="black" stroke="black" points="422.71,-358.5 432.71,-355 422.71,-351.5 422.71,-358.5"/>
<text xml:space="preserve" text-anchor="middle" x="338.33" y="-358.15" font-family="Helvetica,sans-Serif" font-size="10.00">boto3</text>
<text xml:space="preserve" text-anchor="middle" x="338.33" y="-345.4" font-family="Helvetica,sans-Serif" font-size="10.00">start_execution</text>
</g>
<!-- postgres -->
<g id="node10" class="node">
<title>postgres</title>
<path fill="none" stroke="black" d="M111.75,-163.12C111.75,-166.06 90.35,-168.44 64,-168.44 37.65,-168.44 16.25,-166.06 16.25,-163.12 16.25,-163.12 16.25,-115.31 16.25,-115.31 16.25,-112.38 37.65,-110 64,-110 90.35,-110 111.75,-112.38 111.75,-115.31 111.75,-115.31 111.75,-163.12 111.75,-163.12"/>
<path fill="none" stroke="black" d="M111.75,-163.12C111.75,-160.19 90.35,-157.81 64,-157.81 37.65,-157.81 16.25,-160.19 16.25,-163.12"/>
<text xml:space="preserve" text-anchor="middle" x="64" y="-143.17" font-family="Helvetica,sans-Serif" font-size="14.00">PostgreSQL</text>
<text xml:space="preserve" text-anchor="middle" x="64" y="-125.92" font-family="Helvetica,sans-Serif" font-size="14.00">port 5436</text>
</g>
<!-- fastapi&#45;&gt;postgres -->
<g id="edge8" class="edge">
<g id="edge7" class="edge">
<title>fastapi&#45;&gt;postgres</title>
<path fill="none" stroke="black" d="M552.25,-425.84C552.25,-330.91 552.25,-45 552.25,-45 552.25,-45 584.46,-45 584.46,-45"/>
<polygon fill="black" stroke="black" points="584.46,-48.5 594.46,-45 584.46,-41.5 584.46,-48.5"/>
</g>
<!-- redis -->
<g id="node10" class="node">
<title>redis</title>
<path fill="none" stroke="black" d="M415.5,-69.12C415.5,-72.06 396.45,-74.44 373,-74.44 349.55,-74.44 330.5,-72.06 330.5,-69.12 330.5,-69.12 330.5,-21.31 330.5,-21.31 330.5,-18.38 349.55,-16 373,-16 396.45,-16 415.5,-18.38 415.5,-21.31 415.5,-21.31 415.5,-69.12 415.5,-69.12"/>
<path fill="none" stroke="black" d="M415.5,-69.12C415.5,-66.19 396.45,-63.81 373,-63.81 349.55,-63.81 330.5,-66.19 330.5,-69.12"/>
<text xml:space="preserve" text-anchor="middle" x="373" y="-49.17" font-family="Helvetica,sans-Serif" font-size="14.00">Redis</text>
<text xml:space="preserve" text-anchor="middle" x="373" y="-31.92" font-family="Helvetica,sans-Serif" font-size="14.00">port 6380</text>
</g>
<!-- fastapi&#45;&gt;redis -->
<g id="edge9" class="edge">
<title>fastapi&#45;&gt;redis</title>
<path fill="none" stroke="black" d="M481.02,-456C442,-456 390.5,-456 390.5,-456 390.5,-456 390.5,-86.27 390.5,-86.27"/>
<polygon fill="black" stroke="black" points="394,-86.27 390.5,-76.27 387,-86.27 394,-86.27"/>
<text xml:space="preserve" text-anchor="middle" x="542" y="-240.81" font-family="Helvetica,sans-Serif" font-size="10.00">job status</text>
<path fill="none" stroke="black" d="M286.29,-457C203.13,-457 64,-457 64,-457 64,-457 64,-180.34 64,-180.34"/>
<polygon fill="black" stroke="black" points="67.5,-180.34 64,-170.34 60.5,-180.34 67.5,-180.34"/>
</g>
<!-- timeline&#45;&gt;fastapi -->
<g id="edge7" class="edge">
<g id="edge6" class="edge">
<title>timeline&#45;&gt;fastapi</title>
<path fill="none" stroke="black" d="M454.47,-587C475.15,-587 494.75,-587 494.75,-587 494.75,-587 494.75,-497.94 494.75,-497.94"/>
<polygon fill="black" stroke="black" points="498.25,-497.94 494.75,-487.94 491.25,-497.94 498.25,-497.94"/>
<text xml:space="preserve" text-anchor="middle" x="440.75" y="-518.19" font-family="Helvetica,sans-Serif" font-size="10.00">REST API</text>
<path fill="none" stroke="black" d="M429.59,-565C411.66,-565 395.5,-565 395.5,-565 395.5,-565 395.5,-499.11 395.5,-499.11"/>
<polygon fill="black" stroke="black" points="399,-499.11 395.5,-489.11 392,-499.11 399,-499.11"/>
<text xml:space="preserve" text-anchor="middle" x="406.38" y="-539.6" font-family="Helvetica,sans-Serif" font-size="10.00">REST API</text>
</g>
<!-- celery -->
<g id="node7" class="node">
<title>celery</title>
<path fill="none" stroke="black" d="M271.75,-202.19C271.75,-202.19 182.25,-202.19 182.25,-202.19 176.25,-202.19 170.25,-196.19 170.25,-190.19 170.25,-190.19 170.25,-171.69 170.25,-171.69 170.25,-165.69 176.25,-159.69 182.25,-159.69 182.25,-159.69 271.75,-159.69 271.75,-159.69 277.75,-159.69 283.75,-165.69 283.75,-171.69 283.75,-171.69 283.75,-190.19 283.75,-190.19 283.75,-196.19 277.75,-202.19 271.75,-202.19"/>
<text xml:space="preserve" text-anchor="middle" x="227" y="-184.89" font-family="Helvetica,sans-Serif" font-size="14.00">Celery Worker</text>
<text xml:space="preserve" text-anchor="middle" x="227" y="-167.64" font-family="Helvetica,sans-Serif" font-size="14.00">(local)</text>
<path fill="none" stroke="black" d="M255.75,-268.94C255.75,-268.94 166.25,-268.94 166.25,-268.94 160.25,-268.94 154.25,-262.94 154.25,-256.94 154.25,-256.94 154.25,-238.44 154.25,-238.44 154.25,-232.44 160.25,-226.44 166.25,-226.44 166.25,-226.44 255.75,-226.44 255.75,-226.44 261.75,-226.44 267.75,-232.44 267.75,-238.44 267.75,-238.44 267.75,-256.94 267.75,-256.94 267.75,-262.94 261.75,-268.94 255.75,-268.94"/>
<text xml:space="preserve" text-anchor="middle" x="211" y="-251.64" font-family="Helvetica,sans-Serif" font-size="14.00">Celery Worker</text>
<text xml:space="preserve" text-anchor="middle" x="211" y="-234.39" font-family="Helvetica,sans-Serif" font-size="14.00">(local mode)</text>
</g>
<!-- grpc_server&#45;&gt;celery -->
<g id="edge11" class="edge">
<g id="edge9" class="edge">
<title>grpc_server&#45;&gt;celery</title>
<path fill="none" stroke="black" d="M269.58,-285.28C269.58,-285.28 269.58,-213.83 269.58,-213.83"/>
<polygon fill="black" stroke="black" points="273.08,-213.83 269.58,-203.83 266.08,-213.83 273.08,-213.83"/>
<text xml:space="preserve" text-anchor="middle" x="223.62" y="-240.81" font-family="Helvetica,sans-Serif" font-size="10.00">task dispatch</text>
</g>
<!-- celery&#45;&gt;grpc_server -->
<g id="edge14" class="edge">
<title>celery&#45;&gt;grpc_server</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M276.67,-202.6C276.67,-202.6 276.67,-274.05 276.67,-274.05"/>
<polygon fill="black" stroke="black" points="273.17,-274.05 276.67,-284.05 280.17,-274.05 273.17,-274.05"/>
<text xml:space="preserve" text-anchor="middle" x="341.88" y="-247.19" font-family="Helvetica,sans-Serif" font-size="10.00">progress</text>
<text xml:space="preserve" text-anchor="middle" x="341.88" y="-234.44" font-family="Helvetica,sans-Serif" font-size="10.00">callbacks</text>
<path fill="none" stroke="black" d="M207,-326.87C207,-326.87 207,-280.83 207,-280.83"/>
<polygon fill="black" stroke="black" points="210.5,-280.83 207,-270.83 203.5,-280.83 210.5,-280.83"/>
<text xml:space="preserve" text-anchor="middle" x="174.38" y="-307.1" font-family="Helvetica,sans-Serif" font-size="10.00">task dispatch</text>
</g>
<!-- celery&#45;&gt;postgres -->
<g id="edge13" class="edge">
<g id="edge11" class="edge">
<title>celery&#45;&gt;postgres</title>
<path fill="none" stroke="black" d="M284.21,-188C390.19,-188 606.37,-188 606.37,-188 606.37,-188 606.37,-84.94 606.37,-84.94"/>
<polygon fill="black" stroke="black" points="609.87,-84.94 606.37,-74.94 602.87,-84.94 609.87,-84.94"/>
<text xml:space="preserve" text-anchor="middle" x="392.5" y="-121.19" font-family="Helvetica,sans-Serif" font-size="10.00">job updates</text>
<path fill="none" stroke="black" d="M161.88,-225.95C161.88,-194.24 161.88,-139 161.88,-139 161.88,-139 123.59,-139 123.59,-139"/>
<polygon fill="black" stroke="black" points="123.59,-135.5 113.59,-139 123.59,-142.5 123.59,-135.5"/>
<text xml:space="preserve" text-anchor="middle" x="133.38" y="-166.59" font-family="Helvetica,sans-Serif" font-size="10.00">job updates</text>
</g>
<!-- redis -->
<g id="node11" class="node">
<title>redis</title>
<path fill="none" stroke="black" d="M254.5,-163.12C254.5,-166.06 235.45,-168.44 212,-168.44 188.55,-168.44 169.5,-166.06 169.5,-163.12 169.5,-163.12 169.5,-115.31 169.5,-115.31 169.5,-112.38 188.55,-110 212,-110 235.45,-110 254.5,-112.38 254.5,-115.31 254.5,-115.31 254.5,-163.12 254.5,-163.12"/>
<path fill="none" stroke="black" d="M254.5,-163.12C254.5,-160.19 235.45,-157.81 212,-157.81 188.55,-157.81 169.5,-160.19 169.5,-163.12"/>
<text xml:space="preserve" text-anchor="middle" x="212" y="-143.17" font-family="Helvetica,sans-Serif" font-size="14.00">Redis</text>
<text xml:space="preserve" text-anchor="middle" x="212" y="-125.92" font-family="Helvetica,sans-Serif" font-size="14.00">port 6381</text>
</g>
<!-- celery&#45;&gt;redis -->
<g id="edge12" class="edge">
<g id="edge10" class="edge">
<title>celery&#45;&gt;redis</title>
<path fill="none" stroke="black" d="M283.96,-174C315.34,-174 348,-174 348,-174 348,-174 348,-85.95 348,-85.95"/>
<polygon fill="black" stroke="black" points="351.5,-85.95 348,-75.95 344.5,-85.95 351.5,-85.95"/>
<text xml:space="preserve" text-anchor="middle" x="286" y="-121.19" font-family="Helvetica,sans-Serif" font-size="10.00">queue</text>
<path fill="none" stroke="black" d="M212,-226C212,-226 212,-180.19 212,-180.19"/>
<polygon fill="black" stroke="black" points="215.5,-180.19 212,-170.19 208.5,-180.19 215.5,-180.19"/>
<text xml:space="preserve" text-anchor="middle" x="197" y="-206.34" font-family="Helvetica,sans-Serif" font-size="10.00">queue</text>
</g>
<!-- local_fs -->
<g id="node12" class="node">
<title>local_fs</title>
<polygon fill="none" stroke="black" points="210.12,-66.47 207.12,-70.47 186.12,-70.47 183.12,-66.47 137.88,-66.47 137.88,-23.97 210.12,-23.97 210.12,-66.47"/>
<text xml:space="preserve" text-anchor="middle" x="174" y="-49.17" font-family="Helvetica,sans-Serif" font-size="14.00">Local FS</text>
<text xml:space="preserve" text-anchor="middle" x="174" y="-31.92" font-family="Helvetica,sans-Serif" font-size="14.00">/media</text>
</g>
<!-- celery&#45;&gt;local_fs -->
<g id="edge15" class="edge">
<title>celery&#45;&gt;local_fs</title>
<path fill="none" stroke="black" d="M190.19,-159.43C190.19,-159.43 190.19,-78.14 190.19,-78.14"/>
<polygon fill="black" stroke="black" points="193.69,-78.14 190.19,-68.14 186.69,-78.14 193.69,-78.14"/>
<text xml:space="preserve" text-anchor="middle" x="182.75" y="-121.19" font-family="Helvetica,sans-Serif" font-size="10.00">read/write</text>
<!-- celery&#45;&gt;minio -->
<g id="edge12" class="edge">
<title>celery&#45;&gt;minio</title>
<path fill="none" stroke="black" d="M261.12,-225.95C261.12,-194.24 261.12,-139 261.12,-139 261.12,-139 300.75,-139 300.75,-139"/>
<polygon fill="black" stroke="black" points="300.75,-142.5 310.75,-139 300.75,-135.5 300.75,-142.5"/>
<text xml:space="preserve" text-anchor="middle" x="302.75" y="-178.67" font-family="Helvetica,sans-Serif" font-size="10.00">S3 API</text>
<text xml:space="preserve" text-anchor="middle" x="302.75" y="-165.92" font-family="Helvetica,sans-Serif" font-size="10.00">download/upload</text>
</g>
<!-- lambda -->
<g id="node8" class="node">
<g id="node9" class="node">
<title>lambda</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M192.75,-328.19C192.75,-328.19 145.25,-328.19 145.25,-328.19 139.25,-328.19 133.25,-322.19 133.25,-316.19 133.25,-316.19 133.25,-297.69 133.25,-297.69 133.25,-291.69 139.25,-285.69 145.25,-285.69 145.25,-285.69 192.75,-285.69 192.75,-285.69 198.75,-285.69 204.75,-291.69 204.75,-297.69 204.75,-297.69 204.75,-316.19 204.75,-316.19 204.75,-322.19 198.75,-328.19 192.75,-328.19"/>
<text xml:space="preserve" text-anchor="middle" x="169" y="-310.89" font-family="Helvetica,sans-Serif" font-size="14.00">Lambda</text>
<text xml:space="preserve" text-anchor="middle" x="169" y="-293.64" font-family="Helvetica,sans-Serif" font-size="14.00">(cloud)</text>
<path fill="none" stroke="black" d="M541,-268.94C541,-268.94 423,-268.94 423,-268.94 417,-268.94 411,-262.94 411,-256.94 411,-256.94 411,-238.44 411,-238.44 411,-232.44 417,-226.44 423,-226.44 423,-226.44 541,-226.44 541,-226.44 547,-226.44 553,-232.44 553,-238.44 553,-238.44 553,-256.94 553,-256.94 553,-262.94 547,-268.94 541,-268.94"/>
<text xml:space="preserve" text-anchor="middle" x="482" y="-251.64" font-family="Helvetica,sans-Serif" font-size="14.00">Lambda</text>
<text xml:space="preserve" text-anchor="middle" x="482" y="-234.39" font-family="Helvetica,sans-Serif" font-size="14.00">FFmpeg container</text>
</g>
<!-- sqs -->
<g id="node11" class="node">
<title>sqs</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M538,-69.12C538,-72.06 523.66,-74.44 506,-74.44 488.34,-74.44 474,-72.06 474,-69.12 474,-69.12 474,-21.31 474,-21.31 474,-18.38 488.34,-16 506,-16 523.66,-16 538,-18.38 538,-21.31 538,-21.31 538,-69.12 538,-69.12"/>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M538,-69.12C538,-66.19 523.66,-63.81 506,-63.81 488.34,-63.81 474,-66.19 474,-69.12"/>
<text xml:space="preserve" text-anchor="middle" x="506" y="-49.17" font-family="Helvetica,sans-Serif" font-size="14.00">SQS</text>
<text xml:space="preserve" text-anchor="middle" x="506" y="-31.92" font-family="Helvetica,sans-Serif" font-size="14.00">(cloud)</text>
<!-- step_functions&#45;&gt;lambda -->
<g id="edge14" class="edge">
<title>step_functions&#45;&gt;lambda</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M493.81,-326.87C493.81,-326.87 493.81,-280.83 493.81,-280.83"/>
<polygon fill="black" stroke="black" points="497.31,-280.83 493.81,-270.83 490.31,-280.83 497.31,-280.83"/>
</g>
<!-- lambda&#45;&gt;sqs -->
<!-- lambda&#45;&gt;fastapi -->
<g id="edge16" class="edge">
<title>lambda&#45;&gt;sqs</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M187.5,-285.28C187.5,-267.07 187.5,-244 187.5,-244 187.5,-244 477.75,-244 477.75,-244 477.75,-244 477.75,-84.37 477.75,-84.37"/>
<polygon fill="black" stroke="black" points="481.25,-84.37 477.75,-74.37 474.25,-84.37 481.25,-84.37"/>
<text xml:space="preserve" text-anchor="middle" x="415" y="-177.81" font-family="Helvetica,sans-Serif" font-size="10.00">queue</text>
<title>lambda&#45;&gt;fastapi</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M418.75,-269.3C418.75,-322.78 418.75,-457 418.75,-457 418.75,-457 417.66,-457 417.66,-457"/>
<polygon fill="black" stroke="black" points="419.37,-453.5 409.37,-457 419.37,-460.5 419.37,-453.5"/>
<text xml:space="preserve" text-anchor="middle" x="359.12" y="-379.69" font-family="Helvetica,sans-Serif" font-size="10.00">callback</text>
<text xml:space="preserve" text-anchor="middle" x="359.12" y="-366.94" font-family="Helvetica,sans-Serif" font-size="10.00">POST /jobs/{id}/callback</text>
</g>
<!-- s3 -->
<g id="node13" class="node">
<title>s3</title>
<polygon fill="none" stroke="black" stroke-dasharray="5,2" points="80,-66.47 77,-70.47 56,-70.47 53,-66.47 16,-66.47 16,-23.97 80,-23.97 80,-66.47"/>
<text xml:space="preserve" text-anchor="middle" x="48" y="-49.17" font-family="Helvetica,sans-Serif" font-size="14.00">S3</text>
<text xml:space="preserve" text-anchor="middle" x="48" y="-31.92" font-family="Helvetica,sans-Serif" font-size="14.00">(cloud)</text>
<polygon fill="none" stroke="black" stroke-dasharray="5,2" points="596.25,-157.22 593.25,-161.22 572.25,-161.22 569.25,-157.22 473.75,-157.22 473.75,-121.22 596.25,-121.22 596.25,-157.22"/>
<text xml:space="preserve" text-anchor="middle" x="535" y="-134.54" font-family="Helvetica,sans-Serif" font-size="14.00">AWS S3 (cloud)</text>
</g>
<!-- lambda&#45;&gt;s3 -->
<g id="edge17" class="edge">
<g id="edge15" class="edge">
<title>lambda&#45;&gt;s3</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M133.02,-307C97.36,-307 48,-307 48,-307 48,-307 48,-78.15 48,-78.15"/>
<polygon fill="black" stroke="black" points="51.5,-78.15 48,-68.15 44.5,-78.15 51.5,-78.15"/>
<text xml:space="preserve" text-anchor="middle" x="80.75" y="-177.81" font-family="Helvetica,sans-Serif" font-size="10.00">read/write</text>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M513.38,-226C513.38,-226 513.38,-169.14 513.38,-169.14"/>
<polygon fill="black" stroke="black" points="516.88,-169.14 513.38,-159.14 509.88,-169.14 516.88,-169.14"/>
<text xml:space="preserve" text-anchor="middle" x="471.75" y="-200.82" font-family="Helvetica,sans-Serif" font-size="10.00">download/upload</text>
</g>
<!-- bucket_in -->
<g id="node14" class="node">
<title>bucket_in</title>
<polygon fill="none" stroke="black" points="413.5,-52 310.5,-52 310.5,-16 419.5,-16 419.5,-46 413.5,-52"/>
<polyline fill="none" stroke="black" points="413.5,-52 413.5,-46"/>
<polyline fill="none" stroke="black" points="419.5,-46 413.5,-46"/>
<text xml:space="preserve" text-anchor="middle" x="365" y="-29.32" font-family="Helvetica,sans-Serif" font-size="14.00">mpr&#45;media&#45;in</text>
</g>
<!-- minio&#45;&gt;bucket_in -->
<g id="edge17" class="edge">
<title>minio&#45;&gt;bucket_in</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M364,-117.67C364,-98.43 364,-70.56 364,-52.36"/>
</g>
<!-- bucket_out -->
<g id="node15" class="node">
<title>bucket_out</title>
<polygon fill="none" stroke="black" points="590.38,-52 477.62,-52 477.62,-16 596.38,-16 596.38,-46 590.38,-52"/>
<polyline fill="none" stroke="black" points="590.38,-52 590.38,-46"/>
<polyline fill="none" stroke="black" points="596.38,-46 590.38,-46"/>
<text xml:space="preserve" text-anchor="middle" x="537" y="-29.32" font-family="Helvetica,sans-Serif" font-size="14.00">mpr&#45;media&#45;out</text>
</g>
<!-- minio&#45;&gt;bucket_out -->
<g id="edge18" class="edge">
<title>minio&#45;&gt;bucket_out</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M415.9,-145C428.08,-145 437.58,-145 437.58,-145 437.58,-145 437.58,-40 437.58,-40 437.58,-40 456.11,-40 477.16,-40"/>
</g>
<!-- s3&#45;&gt;bucket_in -->
<g id="edge19" class="edge">
<title>s3&#45;&gt;bucket_in</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M473.27,-133C463.03,-133 455.67,-133 455.67,-133 455.67,-133 455.67,-28 455.67,-28 455.67,-28 438.93,-28 419.83,-28"/>
</g>
<!-- s3&#45;&gt;bucket_out -->
<g id="edge20" class="edge">
<title>s3&#45;&gt;bucket_out</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M536.94,-120.89C536.94,-101.7 536.94,-71.72 536.94,-52.47"/>
</g>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 20 KiB

After

Width:  |  Height:  |  Size: 21 KiB

View File

@@ -10,13 +10,13 @@ digraph data_model {
graph [splines=ortho, nodesep=0.6, ranksep=1.2]
MediaAsset [label="{MediaAsset|id: UUID (PK)\lfilename: str\lfile_path: str\lfile_size: int?\lstatus: pending/ready/error\lerror_message: str?\l|duration: float?\lvideo_codec: str?\laudio_codec: str?\lwidth: int?\lheight: int?\lframerate: float?\lbitrate: int?\lproperties: JSON\l|comments: str\ltags: JSON[]\l|created_at: datetime\lupdated_at: datetime\l}"]
MediaAsset [label="{MediaAsset|id: UUID (PK)\lfilename: str\lfile_path: str (S3 key)\lfile_size: int?\lstatus: pending/ready/error\lerror_message: str?\l|duration: float?\lvideo_codec: str?\laudio_codec: str?\lwidth: int?\lheight: int?\lframerate: float?\lbitrate: int?\lproperties: JSON\l|comments: str\ltags: JSON[]\l|created_at: datetime\lupdated_at: datetime\l}"]
TranscodePreset [label="{TranscodePreset|id: UUID (PK)\lname: str (unique)\ldescription: str\lis_builtin: bool\l|container: str\l|video_codec: str\lvideo_bitrate: str?\lvideo_crf: int?\lvideo_preset: str?\lresolution: str?\lframerate: float?\l|audio_codec: str\laudio_bitrate: str?\laudio_channels: int?\laudio_samplerate: int?\l|extra_args: JSON[]\l|created_at: datetime\lupdated_at: datetime\l}"]
TranscodeJob [label="{TranscodeJob|id: UUID (PK)\l|source_asset_id: UUID (FK)\l|preset_id: UUID? (FK)\lpreset_snapshot: JSON\l|trim_start: float?\ltrim_end: float?\l|output_filename: str\loutput_path: str?\loutput_asset_id: UUID? (FK)\l|status: pending/processing/...\lprogress: float (0-100)\lcurrent_frame: int?\lcurrent_time: float?\lspeed: str?\lerror_message: str?\l|celery_task_id: str?\lpriority: int\l|created_at: datetime\lstarted_at: datetime?\lcompleted_at: datetime?\l}"]
TranscodeJob [label="{TranscodeJob|id: UUID (PK)\l|source_asset_id: UUID (FK)\l|preset_id: UUID? (FK)\lpreset_snapshot: JSON\l|trim_start: float?\ltrim_end: float?\l|output_filename: str\loutput_path: str? (S3 key)\loutput_asset_id: UUID? (FK)\l|status: pending/processing/...\lprogress: float (0-100)\lcurrent_frame: int?\lcurrent_time: float?\lspeed: str?\lerror_message: str?\l|celery_task_id: str?\lexecution_arn: str?\lpriority: int\l|created_at: datetime\lstarted_at: datetime?\lcompleted_at: datetime?\l}"]
MediaAsset -> TranscodeJob [label="1:N source_asset"]
TranscodePreset -> TranscodeJob [label="1:N preset"]
TranscodeJob -> MediaAsset [label="1:1 output_asset", style=dashed]
MediaAsset -> TranscodeJob [xlabel="1:N source_asset"]
TranscodePreset -> TranscodeJob [xlabel="1:N preset"]
TranscodeJob -> MediaAsset [xlabel="1:1 output_asset", style=dashed]
}

View File

@@ -1,15 +1,15 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<!-- Generated by graphviz version 14.1.1 (0)
<!-- Generated by graphviz version 14.1.2 (0)
-->
<!-- Title: data_model Pages: 1 -->
<svg width="2218pt" height="286pt"
viewBox="0.00 0.00 2218.00 286.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<svg width="2134pt" height="286pt"
viewBox="0.00 0.00 2134.00 286.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 282)">
<title>data_model</title>
<polygon fill="white" stroke="none" points="-4,4 -4,-282 2213.5,-282 2213.5,4 -4,4"/>
<text xml:space="preserve" text-anchor="middle" x="1104.75" y="-258.8" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">MPR &#45; Data Model</text>
<polygon fill="white" stroke="none" points="-4,4 -4,-282 2130.25,-282 2130.25,4 -4,4"/>
<text xml:space="preserve" text-anchor="middle" x="1063.12" y="-258.8" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">MPR &#45; Data Model</text>
<!-- MediaAsset -->
<g id="node1" class="node">
<title>MediaAsset</title>
@@ -18,7 +18,7 @@
<polyline fill="none" stroke="black" points="197.75,-134 197.75,-250"/>
<text xml:space="preserve" text-anchor="start" x="205.75" y="-222.05" font-family="Helvetica,sans-Serif" font-size="11.00">id: UUID (PK)</text>
<text xml:space="preserve" text-anchor="start" x="205.75" y="-208.55" font-family="Helvetica,sans-Serif" font-size="11.00">filename: str</text>
<text xml:space="preserve" text-anchor="start" x="205.75" y="-195.05" font-family="Helvetica,sans-Serif" font-size="11.00">file_path: str</text>
<text xml:space="preserve" text-anchor="start" x="205.75" y="-195.05" font-family="Helvetica,sans-Serif" font-size="11.00">file_path: str (S3 key)</text>
<text xml:space="preserve" text-anchor="start" x="205.75" y="-181.55" font-family="Helvetica,sans-Serif" font-size="11.00">file_size: int?</text>
<text xml:space="preserve" text-anchor="start" x="205.75" y="-168.05" font-family="Helvetica,sans-Serif" font-size="11.00">status: pending/ready/error</text>
<text xml:space="preserve" text-anchor="start" x="205.75" y="-154.55" font-family="Helvetica,sans-Serif" font-size="11.00">error_message: str?</text>
@@ -41,43 +41,44 @@
<!-- TranscodeJob -->
<g id="node3" class="node">
<title>TranscodeJob</title>
<polygon fill="none" stroke="black" points="995.25,-86.5 995.25,-175.5 2209.5,-175.5 2209.5,-86.5 995.25,-86.5"/>
<text xml:space="preserve" text-anchor="middle" x="1039.25" y="-127.3" font-family="Helvetica,sans-Serif" font-size="11.00">TranscodeJob</text>
<polyline fill="none" stroke="black" points="1083.25,-86.5 1083.25,-175.5"/>
<text xml:space="preserve" text-anchor="start" x="1091.25" y="-127.3" font-family="Helvetica,sans-Serif" font-size="11.00">id: UUID (PK)</text>
<polyline fill="none" stroke="black" points="1171.25,-86.5 1171.25,-175.5"/>
<text xml:space="preserve" text-anchor="start" x="1179.25" y="-127.3" font-family="Helvetica,sans-Serif" font-size="11.00">source_asset_id: UUID (FK)</text>
<polyline fill="none" stroke="black" points="1335.75,-86.5 1335.75,-175.5"/>
<text xml:space="preserve" text-anchor="start" x="1343.75" y="-134.05" font-family="Helvetica,sans-Serif" font-size="11.00">preset_id: UUID? (FK)</text>
<text xml:space="preserve" text-anchor="start" x="1343.75" y="-120.55" font-family="Helvetica,sans-Serif" font-size="11.00">preset_snapshot: JSON</text>
<polyline fill="none" stroke="black" points="1477,-86.5 1477,-175.5"/>
<text xml:space="preserve" text-anchor="start" x="1485" y="-134.05" font-family="Helvetica,sans-Serif" font-size="11.00">trim_start: float?</text>
<text xml:space="preserve" text-anchor="start" x="1485" y="-120.55" font-family="Helvetica,sans-Serif" font-size="11.00">trim_end: float?</text>
<polyline fill="none" stroke="black" points="1585.25,-86.5 1585.25,-175.5"/>
<text xml:space="preserve" text-anchor="start" x="1593.25" y="-140.8" font-family="Helvetica,sans-Serif" font-size="11.00">output_filename: str</text>
<text xml:space="preserve" text-anchor="start" x="1593.25" y="-127.3" font-family="Helvetica,sans-Serif" font-size="11.00">output_path: str?</text>
<text xml:space="preserve" text-anchor="start" x="1593.25" y="-113.8" font-family="Helvetica,sans-Serif" font-size="11.00">output_asset_id: UUID? (FK)</text>
<polyline fill="none" stroke="black" points="1755,-86.5 1755,-175.5"/>
<text xml:space="preserve" text-anchor="start" x="1763" y="-161.05" font-family="Helvetica,sans-Serif" font-size="11.00">status: pending/processing/...</text>
<text xml:space="preserve" text-anchor="start" x="1763" y="-147.55" font-family="Helvetica,sans-Serif" font-size="11.00">progress: float (0&#45;100)</text>
<text xml:space="preserve" text-anchor="start" x="1763" y="-134.05" font-family="Helvetica,sans-Serif" font-size="11.00">current_frame: int?</text>
<text xml:space="preserve" text-anchor="start" x="1763" y="-120.55" font-family="Helvetica,sans-Serif" font-size="11.00">current_time: float?</text>
<text xml:space="preserve" text-anchor="start" x="1763" y="-107.05" font-family="Helvetica,sans-Serif" font-size="11.00">speed: str?</text>
<text xml:space="preserve" text-anchor="start" x="1763" y="-93.55" font-family="Helvetica,sans-Serif" font-size="11.00">error_message: str?</text>
<polyline fill="none" stroke="black" points="1934.5,-86.5 1934.5,-175.5"/>
<text xml:space="preserve" text-anchor="start" x="1942.5" y="-134.05" font-family="Helvetica,sans-Serif" font-size="11.00">celery_task_id: str?</text>
<text xml:space="preserve" text-anchor="start" x="1942.5" y="-120.55" font-family="Helvetica,sans-Serif" font-size="11.00">priority: int</text>
<polyline fill="none" stroke="black" points="2056.25,-86.5 2056.25,-175.5"/>
<text xml:space="preserve" text-anchor="start" x="2064.25" y="-140.8" font-family="Helvetica,sans-Serif" font-size="11.00">created_at: datetime</text>
<text xml:space="preserve" text-anchor="start" x="2064.25" y="-127.3" font-family="Helvetica,sans-Serif" font-size="11.00">started_at: datetime?</text>
<text xml:space="preserve" text-anchor="start" x="2064.25" y="-113.8" font-family="Helvetica,sans-Serif" font-size="11.00">completed_at: datetime?</text>
<polygon fill="none" stroke="black" points="912,-147.5 912,-236.5 2126.25,-236.5 2126.25,-147.5 912,-147.5"/>
<text xml:space="preserve" text-anchor="middle" x="956" y="-188.3" font-family="Helvetica,sans-Serif" font-size="11.00">TranscodeJob</text>
<polyline fill="none" stroke="black" points="1000,-147.5 1000,-236.5"/>
<text xml:space="preserve" text-anchor="start" x="1008" y="-188.3" font-family="Helvetica,sans-Serif" font-size="11.00">id: UUID (PK)</text>
<polyline fill="none" stroke="black" points="1088,-147.5 1088,-236.5"/>
<text xml:space="preserve" text-anchor="start" x="1096" y="-188.3" font-family="Helvetica,sans-Serif" font-size="11.00">source_asset_id: UUID (FK)</text>
<polyline fill="none" stroke="black" points="1252.5,-147.5 1252.5,-236.5"/>
<text xml:space="preserve" text-anchor="start" x="1260.5" y="-195.05" font-family="Helvetica,sans-Serif" font-size="11.00">preset_id: UUID? (FK)</text>
<text xml:space="preserve" text-anchor="start" x="1260.5" y="-181.55" font-family="Helvetica,sans-Serif" font-size="11.00">preset_snapshot: JSON</text>
<polyline fill="none" stroke="black" points="1393.75,-147.5 1393.75,-236.5"/>
<text xml:space="preserve" text-anchor="start" x="1401.75" y="-195.05" font-family="Helvetica,sans-Serif" font-size="11.00">trim_start: float?</text>
<text xml:space="preserve" text-anchor="start" x="1401.75" y="-181.55" font-family="Helvetica,sans-Serif" font-size="11.00">trim_end: float?</text>
<polyline fill="none" stroke="black" points="1502,-147.5 1502,-236.5"/>
<text xml:space="preserve" text-anchor="start" x="1510" y="-201.8" font-family="Helvetica,sans-Serif" font-size="11.00">output_filename: str</text>
<text xml:space="preserve" text-anchor="start" x="1510" y="-188.3" font-family="Helvetica,sans-Serif" font-size="11.00">output_path: str? (S3 key)</text>
<text xml:space="preserve" text-anchor="start" x="1510" y="-174.8" font-family="Helvetica,sans-Serif" font-size="11.00">output_asset_id: UUID? (FK)</text>
<polyline fill="none" stroke="black" points="1671.75,-147.5 1671.75,-236.5"/>
<text xml:space="preserve" text-anchor="start" x="1679.75" y="-222.05" font-family="Helvetica,sans-Serif" font-size="11.00">status: pending/processing/...</text>
<text xml:space="preserve" text-anchor="start" x="1679.75" y="-208.55" font-family="Helvetica,sans-Serif" font-size="11.00">progress: float (0&#45;100)</text>
<text xml:space="preserve" text-anchor="start" x="1679.75" y="-195.05" font-family="Helvetica,sans-Serif" font-size="11.00">current_frame: int?</text>
<text xml:space="preserve" text-anchor="start" x="1679.75" y="-181.55" font-family="Helvetica,sans-Serif" font-size="11.00">current_time: float?</text>
<text xml:space="preserve" text-anchor="start" x="1679.75" y="-168.05" font-family="Helvetica,sans-Serif" font-size="11.00">speed: str?</text>
<text xml:space="preserve" text-anchor="start" x="1679.75" y="-154.55" font-family="Helvetica,sans-Serif" font-size="11.00">error_message: str?</text>
<polyline fill="none" stroke="black" points="1851.25,-147.5 1851.25,-236.5"/>
<text xml:space="preserve" text-anchor="start" x="1859.25" y="-201.8" font-family="Helvetica,sans-Serif" font-size="11.00">celery_task_id: str?</text>
<text xml:space="preserve" text-anchor="start" x="1859.25" y="-188.3" font-family="Helvetica,sans-Serif" font-size="11.00">execution_arn: str?</text>
<text xml:space="preserve" text-anchor="start" x="1859.25" y="-174.8" font-family="Helvetica,sans-Serif" font-size="11.00">priority: int</text>
<polyline fill="none" stroke="black" points="1973,-147.5 1973,-236.5"/>
<text xml:space="preserve" text-anchor="start" x="1981" y="-201.8" font-family="Helvetica,sans-Serif" font-size="11.00">created_at: datetime</text>
<text xml:space="preserve" text-anchor="start" x="1981" y="-188.3" font-family="Helvetica,sans-Serif" font-size="11.00">started_at: datetime?</text>
<text xml:space="preserve" text-anchor="start" x="1981" y="-174.8" font-family="Helvetica,sans-Serif" font-size="11.00">completed_at: datetime?</text>
</g>
<!-- MediaAsset&#45;&gt;TranscodeJob -->
<g id="edge1" class="edge">
<title>MediaAsset&#45;&gt;TranscodeJob</title>
<path fill="none" stroke="black" d="M708.15,-147.67C708.15,-147.67 983.49,-147.67 983.49,-147.67"/>
<polygon fill="black" stroke="black" points="983.49,-151.17 993.49,-147.67 983.49,-144.17 983.49,-151.17"/>
<text xml:space="preserve" text-anchor="middle" x="910.62" y="-195.25" font-family="Helvetica,sans-Serif" font-size="10.00">1:N source_asset</text>
<path fill="none" stroke="black" d="M708.33,-192C708.33,-192 900.24,-192 900.24,-192"/>
<polygon fill="black" stroke="black" points="900.24,-195.5 910.24,-192 900.24,-188.5 900.24,-195.5"/>
<text xml:space="preserve" text-anchor="middle" x="762.66" y="-182.5" font-family="Helvetica,sans-Serif" font-size="10.00">1:N source_asset</text>
</g>
<!-- TranscodePreset -->
<g id="node2" class="node">
@@ -112,16 +113,16 @@
<!-- TranscodePreset&#45;&gt;TranscodeJob -->
<g id="edge2" class="edge">
<title>TranscodePreset&#45;&gt;TranscodeJob</title>
<path fill="none" stroke="black" d="M766.5,-89.89C766.5,-101.97 766.5,-111.75 766.5,-111.75 766.5,-111.75 983.39,-111.75 983.39,-111.75"/>
<polygon fill="black" stroke="black" points="983.39,-115.25 993.39,-111.75 983.39,-108.25 983.39,-115.25"/>
<text xml:space="preserve" text-anchor="middle" x="910.62" y="-48.25" font-family="Helvetica,sans-Serif" font-size="10.00">1:N preset</text>
<path fill="none" stroke="black" d="M767.25,-89.95C767.25,-125.61 767.25,-169.5 767.25,-169.5 767.25,-169.5 900.26,-169.5 900.26,-169.5"/>
<polygon fill="black" stroke="black" points="900.26,-173 910.26,-169.5 900.26,-166 900.26,-173"/>
<text xml:space="preserve" text-anchor="middle" x="768.85" y="-160" font-family="Helvetica,sans-Serif" font-size="10.00">1:N preset</text>
</g>
<!-- TranscodeJob&#45;&gt;MediaAsset -->
<g id="edge3" class="edge">
<title>TranscodeJob&#45;&gt;MediaAsset</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M995.06,-161.83C995.06,-161.83 719.99,-161.83 719.99,-161.83"/>
<polygon fill="black" stroke="black" points="719.99,-158.33 709.99,-161.83 719.99,-165.33 719.99,-158.33"/>
<text xml:space="preserve" text-anchor="middle" x="910.62" y="-134.25" font-family="Helvetica,sans-Serif" font-size="10.00">1:1 output_asset</text>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M911.86,-214.5C911.86,-214.5 719.76,-214.5 719.76,-214.5"/>
<polygon fill="black" stroke="black" points="719.76,-211 709.76,-214.5 719.76,-218 719.76,-211"/>
<text xml:space="preserve" text-anchor="middle" x="775.31" y="-205" font-family="Helvetica,sans-Serif" font-size="10.00">1:1 output_asset</text>
</g>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 12 KiB

After

Width:  |  Height:  |  Size: 13 KiB

View File

@@ -3,7 +3,6 @@ digraph job_flow {
node [shape=box, style=rounded, fontname="Helvetica"]
edge [fontname="Helvetica", fontsize=10]
// Title
labelloc="t"
label="MPR - Job Flow"
fontsize=16
@@ -11,7 +10,19 @@ digraph job_flow {
graph [splines=ortho, nodesep=0.6, ranksep=0.6]
// States
// API entry points
subgraph cluster_api {
label="API Entry Points"
style=dashed
color=gray
rest_create [label="POST /api/jobs/", shape=ellipse]
gql_create [label="mutation createJob", shape=ellipse]
rest_cancel [label="POST /api/jobs/{id}/cancel", shape=ellipse]
rest_callback [label="POST /api/jobs/{id}/callback", shape=ellipse]
}
// Job states
subgraph cluster_states {
label="Job States"
style=filled
@@ -24,78 +35,70 @@ digraph job_flow {
cancelled [label="CANCELLED", fillcolor="#6c757d", style="filled,rounded", fontcolor=white]
}
// Transitions
pending -> processing [label="worker picks up"]
processing -> completed [label="success"]
processing -> failed [label="error"]
pending -> cancelled [label="user cancels"]
processing -> cancelled [label="user cancels"]
failed -> pending [label="retry"]
// State transitions
pending -> processing [xlabel="worker picks up"]
processing -> completed [xlabel="success"]
processing -> failed [xlabel="error"]
pending -> cancelled [xlabel="user cancels"]
processing -> cancelled [xlabel="user cancels"]
failed -> pending [xlabel="retry"]
// API actions
subgraph cluster_api {
label="API Actions"
style=dashed
color=gray
rest_create -> pending
gql_create -> pending
rest_cancel -> cancelled [style=dashed]
create_job [label="POST /jobs/", shape=ellipse]
cancel_job [label="POST /jobs/{id}/cancel", shape=ellipse]
retry_job [label="POST /jobs/{id}/retry", shape=ellipse]
}
create_job -> pending
cancel_job -> cancelled [style=dashed]
retry_job -> pending [style=dashed]
// Executor layer
subgraph cluster_executor {
label="Executor Layer"
// Executor dispatch
subgraph cluster_dispatch {
label="Executor Dispatch"
style=filled
fillcolor="#fff8e8"
executor [label="Executor\n(abstract)", shape=diamond]
local [label="LocalExecutor\nCelery + FFmpeg"]
lambda_exec [label="LambdaExecutor\nSQS + Lambda"]
dispatch [label="MPR_EXECUTOR", shape=diamond]
}
processing -> executor
executor -> local [label="MPR_EXECUTOR=local"]
executor -> lambda_exec [label="MPR_EXECUTOR=lambda", style=dashed]
pending -> dispatch
// FFmpeg operations
subgraph cluster_ffmpeg {
label="FFmpeg Operations"
// Local path
subgraph cluster_local {
label="Local Mode (Celery)"
style=filled
fillcolor="#e8f4e8"
transcode [label="Transcode\n(with preset)"]
trim [label="Trim\n(-c:v copy -c:a copy)"]
celery_task [label="Celery Task\n(transcode queue)"]
s3_download [label="S3 Download\n(MinIO)"]
ffmpeg_local [label="FFmpeg\ntranscode/trim"]
s3_upload [label="S3 Upload\n(MinIO)"]
db_update [label="DB Update\n(update_job_progress)"]
}
local -> transcode
local -> trim
dispatch -> celery_task [xlabel="local"]
celery_task -> s3_download
s3_download -> ffmpeg_local
ffmpeg_local -> s3_upload
s3_upload -> db_update
db_update -> completed [style=dotted]
// gRPC streaming
subgraph cluster_grpc {
label="gRPC Communication"
// Lambda path
subgraph cluster_lambda {
label="Lambda Mode (AWS)"
style=filled
fillcolor="#e8e8f8"
fillcolor="#fde8d0"
grpc_stream [label="StreamProgress\n(server streaming)", shape=parallelogram]
grpc_submit [label="SubmitJob\n(unary)", shape=parallelogram]
grpc_cancel [label="CancelJob\n(unary)", shape=parallelogram]
sfn_start [label="Step Functions\nstart_execution"]
lambda_fn [label="Lambda\nFFmpeg container"]
s3_dl_aws [label="S3 Download\n(AWS)"]
ffmpeg_aws [label="FFmpeg\ntranscode/trim"]
s3_ul_aws [label="S3 Upload\n(AWS)"]
callback [label="HTTP Callback\nPOST /jobs/{id}/callback"]
}
// Progress tracking via gRPC
progress [label="Progress Updates\n(gRPC → Redis → DB)", shape=note]
transcode -> progress [style=dotted]
trim -> progress [style=dotted]
progress -> grpc_stream [style=dotted, label="stream to client"]
grpc_stream -> processing [style=dotted, label="update status"]
dispatch -> sfn_start [xlabel="lambda"]
sfn_start -> lambda_fn
lambda_fn -> s3_dl_aws
s3_dl_aws -> ffmpeg_aws
ffmpeg_aws -> s3_ul_aws
s3_ul_aws -> callback
callback -> completed [style=dotted]
// gRPC job control
create_job -> grpc_submit [label="via gRPC"]
grpc_submit -> pending [style=dashed]
cancel_job -> grpc_cancel [label="via gRPC"]
grpc_cancel -> cancelled [style=dashed]
rest_callback -> completed [style=dashed, xlabel="Lambda reports"]
}

View File

@@ -1,296 +1,329 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<!-- Generated by graphviz version 14.1.1 (0)
<!-- Generated by graphviz version 14.1.2 (0)
-->
<!-- Title: job_flow Pages: 1 -->
<svg width="1398pt" height="843pt"
viewBox="0.00 0.00 1398.00 843.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 838.75)">
<svg width="1621pt" height="655pt"
viewBox="0.00 0.00 1621.00 655.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 650.5)">
<title>job_flow</title>
<polygon fill="white" stroke="none" points="-4,4 -4,-838.75 1394,-838.75 1394,4 -4,4"/>
<text xml:space="preserve" text-anchor="middle" x="695" y="-815.55" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">MPR &#45; Job Flow</text>
<polygon fill="white" stroke="none" points="-4,4 -4,-650.5 1617,-650.5 1617,4 -4,4"/>
<text xml:space="preserve" text-anchor="middle" x="806.5" y="-627.3" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">MPR &#45; Job Flow</text>
<g id="clust1" class="cluster">
<title>cluster_states</title>
<polygon fill="#f8f8f8" stroke="black" points="774,-8 774,-297.5 1154,-297.5 1154,-8 774,-8"/>
<text xml:space="preserve" text-anchor="middle" x="964" y="-278.3" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Job States</text>
<title>cluster_api</title>
<polygon fill="none" stroke="gray" stroke-dasharray="5,2" points="297,-269.75 297,-349.25 1395,-349.25 1395,-269.75 297,-269.75"/>
<text xml:space="preserve" text-anchor="middle" x="846" y="-330.05" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">API Entry Points</text>
</g>
<g id="clust2" class="cluster">
<title>cluster_api</title>
<polygon fill="none" stroke="gray" stroke-dasharray="5,2" points="674,-360 674,-439.5 1382,-439.5 1382,-360 674,-360"/>
<text xml:space="preserve" text-anchor="middle" x="1028" y="-420.3" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">API Actions</text>
<title>cluster_states</title>
<polygon fill="#f8f8f8" stroke="black" points="572,-11.25 572,-261.75 939,-261.75 939,-11.25 572,-11.25"/>
<text xml:space="preserve" text-anchor="middle" x="755.5" y="-242.55" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Job States</text>
</g>
<g id="clust3" class="cluster">
<title>cluster_executor</title>
<polygon fill="#fff8e8" stroke="black" points="8,-571.5 8,-799.25 352,-799.25 352,-571.5 8,-571.5"/>
<text xml:space="preserve" text-anchor="middle" x="180" y="-780.05" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Executor Layer</text>
<title>cluster_dispatch</title>
<polygon fill="#fff8e8" stroke="black" points="103,-531.5 103,-611 377,-611 377,-531.5 103,-531.5"/>
<text xml:space="preserve" text-anchor="middle" x="240" y="-591.8" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Executor Dispatch</text>
</g>
<g id="clust4" class="cluster">
<title>cluster_ffmpeg</title>
<polygon fill="#e8f4e8" stroke="black" points="73,-462.5 73,-548.5 393,-548.5 393,-462.5 73,-462.5"/>
<text xml:space="preserve" text-anchor="middle" x="233" y="-529.3" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">FFmpeg Operations</text>
<title>cluster_local</title>
<polygon fill="#e8f4e8" stroke="black" points="8,-93.5 8,-523.5 203,-523.5 203,-93.5 8,-93.5"/>
<text xml:space="preserve" text-anchor="middle" x="105.5" y="-504.3" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Local Mode (Celery)</text>
</g>
<g id="clust5" class="cluster">
<title>cluster_grpc</title>
<polygon fill="#e8e8f8" stroke="black" points="8,-193.5 8,-322 766,-322 766,-193.5 8,-193.5"/>
<text xml:space="preserve" text-anchor="middle" x="387" y="-302.8" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">gRPC Communication</text>
<title>cluster_lambda</title>
<polygon fill="#fde8d0" stroke="black" points="1403,-8 1403,-523.5 1605,-523.5 1605,-8 1403,-8"/>
<text xml:space="preserve" text-anchor="middle" x="1504" y="-504.3" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Lambda Mode (AWS)</text>
</g>
<!-- rest_create -->
<g id="node1" class="node">
<title>rest_create</title>
<ellipse fill="none" stroke="black" cx="389" cy="-295.75" rx="84.35" ry="18"/>
<text xml:space="preserve" text-anchor="middle" x="389" y="-291.07" font-family="Helvetica,sans-Serif" font-size="14.00">POST /api/jobs/</text>
</g>
<!-- pending -->
<g id="node1" class="node">
<g id="node5" class="node">
<title>pending</title>
<path fill="#ffc107" stroke="black" d="M971.88,-262C971.88,-262 916.12,-262 916.12,-262 910.12,-262 904.12,-256 904.12,-250 904.12,-250 904.12,-238 904.12,-238 904.12,-232 910.12,-226 916.12,-226 916.12,-226 971.88,-226 971.88,-226 977.88,-226 983.88,-232 983.88,-238 983.88,-238 983.88,-250 983.88,-250 983.88,-256 977.88,-262 971.88,-262"/>
<text xml:space="preserve" text-anchor="middle" x="944" y="-239.32" font-family="Helvetica,sans-Serif" font-size="14.00">PENDING</text>
<path fill="#ffc107" stroke="black" d="M647.88,-226.25C647.88,-226.25 592.12,-226.25 592.12,-226.25 586.12,-226.25 580.12,-220.25 580.12,-214.25 580.12,-214.25 580.12,-202.25 580.12,-202.25 580.12,-196.25 586.12,-190.25 592.12,-190.25 592.12,-190.25 647.88,-190.25 647.88,-190.25 653.88,-190.25 659.88,-196.25 659.88,-202.25 659.88,-202.25 659.88,-214.25 659.88,-214.25 659.88,-220.25 653.88,-226.25 647.88,-226.25"/>
<text xml:space="preserve" text-anchor="middle" x="620" y="-203.57" font-family="Helvetica,sans-Serif" font-size="14.00">PENDING</text>
</g>
<!-- rest_create&#45;&gt;pending -->
<g id="edge7" class="edge">
<title>rest_create&#45;&gt;pending</title>
<path fill="none" stroke="black" d="M389,-277.61C389,-253.52 389,-214 389,-214 389,-214 568.25,-214 568.25,-214"/>
<polygon fill="black" stroke="black" points="568.25,-217.5 578.25,-214 568.25,-210.5 568.25,-217.5"/>
</g>
<!-- gql_create -->
<g id="node2" class="node">
<title>gql_create</title>
<ellipse fill="none" stroke="black" cx="620" cy="-295.75" rx="103.29" ry="18"/>
<text xml:space="preserve" text-anchor="middle" x="620" y="-291.07" font-family="Helvetica,sans-Serif" font-size="14.00">mutation createJob</text>
</g>
<!-- gql_create&#45;&gt;pending -->
<g id="edge8" class="edge">
<title>gql_create&#45;&gt;pending</title>
<path fill="none" stroke="black" d="M620,-277.62C620,-277.62 620,-238.17 620,-238.17"/>
<polygon fill="black" stroke="black" points="623.5,-238.17 620,-228.17 616.5,-238.17 623.5,-238.17"/>
</g>
<!-- rest_cancel -->
<g id="node3" class="node">
<title>rest_cancel</title>
<ellipse fill="none" stroke="black" cx="1247" cy="-295.75" rx="140.12" ry="18"/>
<text xml:space="preserve" text-anchor="middle" x="1247" y="-291.07" font-family="Helvetica,sans-Serif" font-size="14.00">POST /api/jobs/{id}/cancel</text>
</g>
<!-- cancelled -->
<g id="node9" class="node">
<title>cancelled</title>
<path fill="#6c757d" stroke="black" d="M918.62,-55.25C918.62,-55.25 843.38,-55.25 843.38,-55.25 837.38,-55.25 831.38,-49.25 831.38,-43.25 831.38,-43.25 831.38,-31.25 831.38,-31.25 831.38,-25.25 837.38,-19.25 843.38,-19.25 843.38,-19.25 918.62,-19.25 918.62,-19.25 924.62,-19.25 930.62,-25.25 930.62,-31.25 930.62,-31.25 930.62,-43.25 930.62,-43.25 930.62,-49.25 924.62,-55.25 918.62,-55.25"/>
<text xml:space="preserve" text-anchor="middle" x="881" y="-32.58" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">CANCELLED</text>
</g>
<!-- rest_cancel&#45;&gt;cancelled -->
<g id="edge9" class="edge">
<title>rest_cancel&#45;&gt;cancelled</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M1247,-277.56C1247,-218.66 1247,-37 1247,-37 1247,-37 942.64,-37 942.64,-37"/>
<polygon fill="black" stroke="black" points="942.64,-33.5 932.64,-37 942.64,-40.5 942.64,-33.5"/>
</g>
<!-- rest_callback -->
<g id="node4" class="node">
<title>rest_callback</title>
<ellipse fill="none" stroke="black" cx="915" cy="-295.75" rx="148.54" ry="18"/>
<text xml:space="preserve" text-anchor="middle" x="915" y="-291.07" font-family="Helvetica,sans-Serif" font-size="14.00">POST /api/jobs/{id}/callback</text>
</g>
<!-- completed -->
<g id="node7" class="node">
<title>completed</title>
<path fill="#28a745" stroke="black" d="M776.75,-55.25C776.75,-55.25 699.25,-55.25 699.25,-55.25 693.25,-55.25 687.25,-49.25 687.25,-43.25 687.25,-43.25 687.25,-31.25 687.25,-31.25 687.25,-25.25 693.25,-19.25 699.25,-19.25 699.25,-19.25 776.75,-19.25 776.75,-19.25 782.75,-19.25 788.75,-25.25 788.75,-31.25 788.75,-31.25 788.75,-43.25 788.75,-43.25 788.75,-49.25 782.75,-55.25 776.75,-55.25"/>
<text xml:space="preserve" text-anchor="middle" x="738" y="-32.58" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">COMPLETED</text>
</g>
<!-- rest_callback&#45;&gt;completed -->
<g id="edge24" class="edge">
<title>rest_callback&#45;&gt;completed</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M783.42,-287.15C783.42,-287.15 783.42,-67.24 783.42,-67.24"/>
<polygon fill="black" stroke="black" points="786.92,-67.24 783.42,-57.24 779.92,-67.24 786.92,-67.24"/>
<text xml:space="preserve" text-anchor="middle" x="745.17" y="-180.44" font-family="Helvetica,sans-Serif" font-size="10.00">Lambda reports</text>
</g>
<!-- processing -->
<g id="node2" class="node">
<g id="node6" class="node">
<title>processing</title>
<path fill="#17a2b8" stroke="black" d="M877.75,-144.75C877.75,-144.75 794.25,-144.75 794.25,-144.75 788.25,-144.75 782.25,-138.75 782.25,-132.75 782.25,-132.75 782.25,-120.75 782.25,-120.75 782.25,-114.75 788.25,-108.75 794.25,-108.75 794.25,-108.75 877.75,-108.75 877.75,-108.75 883.75,-108.75 889.75,-114.75 889.75,-120.75 889.75,-120.75 889.75,-132.75 889.75,-132.75 889.75,-138.75 883.75,-144.75 877.75,-144.75"/>
<text xml:space="preserve" text-anchor="middle" x="836" y="-122.08" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">PROCESSING</text>
<path fill="#17a2b8" stroke="black" d="M768.75,-140.75C768.75,-140.75 685.25,-140.75 685.25,-140.75 679.25,-140.75 673.25,-134.75 673.25,-128.75 673.25,-128.75 673.25,-116.75 673.25,-116.75 673.25,-110.75 679.25,-104.75 685.25,-104.75 685.25,-104.75 768.75,-104.75 768.75,-104.75 774.75,-104.75 780.75,-110.75 780.75,-116.75 780.75,-116.75 780.75,-128.75 780.75,-128.75 780.75,-134.75 774.75,-140.75 768.75,-140.75"/>
<text xml:space="preserve" text-anchor="middle" x="727" y="-118.08" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">PROCESSING</text>
</g>
<!-- pending&#45;&gt;processing -->
<g id="edge1" class="edge">
<title>pending&#45;&gt;processing</title>
<path fill="none" stroke="black" d="M920.04,-225.68C920.04,-194.87 920.04,-136 920.04,-136 920.04,-136 901.69,-136 901.69,-136"/>
<polygon fill="black" stroke="black" points="901.69,-132.5 891.69,-136 901.69,-139.5 901.69,-132.5"/>
<text xml:space="preserve" text-anchor="middle" x="902.25" y="-170" font-family="Helvetica,sans-Serif" font-size="10.00">worker picks up</text>
</g>
<!-- cancelled -->
<g id="node5" class="node">
<title>cancelled</title>
<path fill="#6c757d" stroke="black" d="M1122.62,-52C1122.62,-52 1047.38,-52 1047.38,-52 1041.38,-52 1035.38,-46 1035.38,-40 1035.38,-40 1035.38,-28 1035.38,-28 1035.38,-22 1041.38,-16 1047.38,-16 1047.38,-16 1122.62,-16 1122.62,-16 1128.62,-16 1134.62,-22 1134.62,-28 1134.62,-28 1134.62,-40 1134.62,-40 1134.62,-46 1128.62,-52 1122.62,-52"/>
<text xml:space="preserve" text-anchor="middle" x="1085" y="-29.32" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">CANCELLED</text>
<path fill="none" stroke="black" d="M654.58,-189.87C654.58,-166.46 654.58,-129 654.58,-129 654.58,-129 661.34,-129 661.34,-129"/>
<polygon fill="black" stroke="black" points="661.34,-132.5 671.34,-129 661.34,-125.5 661.34,-132.5"/>
<text xml:space="preserve" text-anchor="middle" x="616.33" y="-159.3" font-family="Helvetica,sans-Serif" font-size="10.00">worker picks up</text>
</g>
<!-- pending&#45;&gt;cancelled -->
<g id="edge4" class="edge">
<title>pending&#45;&gt;cancelled</title>
<path fill="none" stroke="black" d="M984.17,-238C1022.83,-238 1075.49,-238 1075.49,-238 1075.49,-238 1075.49,-63.98 1075.49,-63.98"/>
<polygon fill="black" stroke="black" points="1078.99,-63.98 1075.49,-53.98 1071.99,-63.98 1078.99,-63.98"/>
<text xml:space="preserve" text-anchor="middle" x="1115.38" y="-123.62" font-family="Helvetica,sans-Serif" font-size="10.00">user cancels</text>
<path fill="none" stroke="black" d="M660.36,-208C737.33,-208 897.54,-208 897.54,-208 897.54,-208 897.54,-67.04 897.54,-67.04"/>
<polygon fill="black" stroke="black" points="901.04,-67.04 897.54,-57.04 894.04,-67.04 901.04,-67.04"/>
<text xml:space="preserve" text-anchor="middle" x="819.06" y="-211.25" font-family="Helvetica,sans-Serif" font-size="10.00">user cancels</text>
</g>
<!-- completed -->
<g id="node3" class="node">
<title>completed</title>
<path fill="#28a745" stroke="black" d="M871.75,-52C871.75,-52 794.25,-52 794.25,-52 788.25,-52 782.25,-46 782.25,-40 782.25,-40 782.25,-28 782.25,-28 782.25,-22 788.25,-16 794.25,-16 794.25,-16 871.75,-16 871.75,-16 877.75,-16 883.75,-22 883.75,-28 883.75,-28 883.75,-40 883.75,-40 883.75,-46 877.75,-52 871.75,-52"/>
<text xml:space="preserve" text-anchor="middle" x="833" y="-29.32" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">COMPLETED</text>
<!-- dispatch -->
<g id="node10" class="node">
<title>dispatch</title>
<path fill="none" stroke="black" d="M228.12,-573.84C228.12,-573.84 122.92,-559.16 122.92,-559.16 116.98,-558.33 116.98,-556.67 122.92,-555.84 122.92,-555.84 228.12,-541.16 228.12,-541.16 234.06,-540.33 245.94,-540.33 251.88,-541.16 251.88,-541.16 357.08,-555.84 357.08,-555.84 363.02,-556.67 363.02,-558.33 357.08,-559.16 357.08,-559.16 251.88,-573.84 251.88,-573.84 245.94,-574.67 234.06,-574.67 228.12,-573.84"/>
<text xml:space="preserve" text-anchor="middle" x="240" y="-552.83" font-family="Helvetica,sans-Serif" font-size="14.00">MPR_EXECUTOR</text>
</g>
<!-- pending&#45;&gt;dispatch -->
<g id="edge10" class="edge">
<title>pending&#45;&gt;dispatch</title>
<path fill="none" stroke="black" d="M579.92,-202C483.92,-202 248.76,-202 248.76,-202 248.76,-202 248.76,-528.84 248.76,-528.84"/>
<polygon fill="black" stroke="black" points="245.26,-528.84 248.76,-538.84 252.26,-528.84 245.26,-528.84"/>
</g>
<!-- processing&#45;&gt;completed -->
<g id="edge2" class="edge">
<title>processing&#45;&gt;completed</title>
<path fill="none" stroke="black" d="M833,-108.43C833,-108.43 833,-63.8 833,-63.8"/>
<polygon fill="black" stroke="black" points="836.5,-63.8 833,-53.8 829.5,-63.8 836.5,-63.8"/>
<text xml:space="preserve" text-anchor="middle" x="844.12" y="-77.25" font-family="Helvetica,sans-Serif" font-size="10.00">success</text>
<path fill="none" stroke="black" d="M734,-104.62C734,-104.62 734,-67.16 734,-67.16"/>
<polygon fill="black" stroke="black" points="737.5,-67.16 734,-57.16 730.5,-67.16 737.5,-67.16"/>
<text xml:space="preserve" text-anchor="middle" x="714.88" y="-89.14" font-family="Helvetica,sans-Serif" font-size="10.00">success</text>
</g>
<!-- failed -->
<g id="node4" class="node">
<g id="node8" class="node">
<title>failed</title>
<path fill="#dc3545" stroke="black" d="M980,-52C980,-52 940,-52 940,-52 934,-52 928,-46 928,-40 928,-40 928,-28 928,-28 928,-22 934,-16 940,-16 940,-16 980,-16 980,-16 986,-16 992,-22 992,-28 992,-28 992,-40 992,-40 992,-46 986,-52 980,-52"/>
<text xml:space="preserve" text-anchor="middle" x="960" y="-29.32" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">FAILED</text>
<path fill="#dc3545" stroke="black" d="M632,-55.25C632,-55.25 592,-55.25 592,-55.25 586,-55.25 580,-49.25 580,-43.25 580,-43.25 580,-31.25 580,-31.25 580,-25.25 586,-19.25 592,-19.25 592,-19.25 632,-19.25 632,-19.25 638,-19.25 644,-25.25 644,-31.25 644,-31.25 644,-43.25 644,-43.25 644,-49.25 638,-55.25 632,-55.25"/>
<text xml:space="preserve" text-anchor="middle" x="612" y="-32.58" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">FAILED</text>
</g>
<!-- processing&#45;&gt;failed -->
<g id="edge3" class="edge">
<title>processing&#45;&gt;failed</title>
<path fill="none" stroke="black" d="M890.02,-118C918.1,-118 946.62,-118 946.62,-118 946.62,-118 946.62,-63.74 946.62,-63.74"/>
<polygon fill="black" stroke="black" points="950.13,-63.74 946.63,-53.74 943.13,-63.74 950.13,-63.74"/>
<text xml:space="preserve" text-anchor="middle" x="922.62" y="-77.25" font-family="Helvetica,sans-Serif" font-size="10.00">error</text>
<path fill="none" stroke="black" d="M680.25,-104.62C680.25,-77.88 680.25,-31 680.25,-31 680.25,-31 655.64,-31 655.64,-31"/>
<polygon fill="black" stroke="black" points="655.64,-27.5 645.64,-31 655.64,-34.5 655.64,-27.5"/>
<text xml:space="preserve" text-anchor="middle" x="668.62" y="-58.76" font-family="Helvetica,sans-Serif" font-size="10.00">error</text>
</g>
<!-- processing&#45;&gt;cancelled -->
<g id="edge5" class="edge">
<title>processing&#45;&gt;cancelled</title>
<path fill="none" stroke="black" d="M890.24,-127C953.27,-127 1048.75,-127 1048.75,-127 1048.75,-127 1048.75,-63.89 1048.75,-63.89"/>
<polygon fill="black" stroke="black" points="1052.25,-63.89 1048.75,-53.89 1045.25,-63.89 1052.25,-63.89"/>
<text xml:space="preserve" text-anchor="middle" x="1012.38" y="-77.25" font-family="Helvetica,sans-Serif" font-size="10.00">user cancels</text>
</g>
<!-- executor -->
<g id="node9" class="node">
<title>executor</title>
<path fill="none" stroke="black" d="M89.31,-758.31C89.31,-758.31 27.19,-726.69 27.19,-726.69 21.85,-723.97 21.85,-718.53 27.19,-715.81 27.19,-715.81 89.31,-684.19 89.31,-684.19 94.65,-681.47 105.35,-681.47 110.69,-684.19 110.69,-684.19 172.81,-715.81 172.81,-715.81 178.15,-718.53 178.15,-723.97 172.81,-726.69 172.81,-726.69 110.69,-758.31 110.69,-758.31 105.35,-761.03 94.65,-761.03 89.31,-758.31"/>
<text xml:space="preserve" text-anchor="middle" x="100" y="-725.2" font-family="Helvetica,sans-Serif" font-size="14.00">Executor</text>
<text xml:space="preserve" text-anchor="middle" x="100" y="-707.95" font-family="Helvetica,sans-Serif" font-size="14.00">(abstract)</text>
</g>
<!-- processing&#45;&gt;executor -->
<g id="edge10" class="edge">
<title>processing&#45;&gt;executor</title>
<path fill="none" stroke="black" d="M836.12,-145.19C836.12,-245.49 836.12,-721 836.12,-721 836.12,-721 195.6,-721 195.6,-721"/>
<polygon fill="black" stroke="black" points="195.6,-717.5 185.6,-721 195.6,-724.5 195.6,-717.5"/>
<path fill="none" stroke="black" d="M780.93,-123C819.44,-123 864.46,-123 864.46,-123 864.46,-123 864.46,-66.95 864.46,-66.95"/>
<polygon fill="black" stroke="black" points="867.96,-66.95 864.46,-56.95 860.96,-66.95 867.96,-66.95"/>
<text xml:space="preserve" text-anchor="middle" x="820.35" y="-126.25" font-family="Helvetica,sans-Serif" font-size="10.00">user cancels</text>
</g>
<!-- failed&#45;&gt;pending -->
<g id="edge6" class="edge">
<title>failed&#45;&gt;pending</title>
<path fill="none" stroke="black" d="M965.25,-52.27C965.25,-52.27 965.25,-214.11 965.25,-214.11"/>
<polygon fill="black" stroke="black" points="961.75,-214.11 965.25,-224.11 968.75,-214.11 961.75,-214.11"/>
<text xml:space="preserve" text-anchor="middle" x="987.62" y="-123.62" font-family="Helvetica,sans-Serif" font-size="10.00">retry</text>
<path fill="none" stroke="black" d="M612.06,-55.55C612.06,-55.55 612.06,-178.31 612.06,-178.31"/>
<polygon fill="black" stroke="black" points="608.56,-178.31 612.06,-188.31 615.56,-178.31 608.56,-178.31"/>
<text xml:space="preserve" text-anchor="middle" x="600.44" y="-120.18" font-family="Helvetica,sans-Serif" font-size="10.00">retry</text>
</g>
<!-- create_job -->
<g id="node6" class="node">
<title>create_job</title>
<ellipse fill="none" stroke="black" cx="748" cy="-386" rx="66.47" ry="18"/>
<text xml:space="preserve" text-anchor="middle" x="748" y="-381.32" font-family="Helvetica,sans-Serif" font-size="14.00">POST /jobs/</text>
</g>
<!-- create_job&#45;&gt;pending -->
<g id="edge7" class="edge">
<title>create_job&#45;&gt;pending</title>
<path fill="none" stroke="black" d="M798.36,-373.89C798.36,-339.55 798.36,-244 798.36,-244 798.36,-244 892.3,-244 892.3,-244"/>
<polygon fill="black" stroke="black" points="892.3,-247.5 902.3,-244 892.3,-240.5 892.3,-247.5"/>
</g>
<!-- grpc_submit -->
<g id="node15" class="node">
<title>grpc_submit</title>
<path fill="none" stroke="black" d="M528.46,-286.5C528.46,-286.5 408.56,-286.5 408.56,-286.5 402.56,-286.5 394.16,-281 391.77,-275.5 391.77,-275.5 364.33,-212.5 364.33,-212.5 361.94,-207 365.54,-201.5 371.54,-201.5 371.54,-201.5 491.44,-201.5 491.44,-201.5 497.44,-201.5 505.84,-207 508.23,-212.5 508.23,-212.5 535.67,-275.5 535.67,-275.5 538.06,-281 534.46,-286.5 528.46,-286.5"/>
<text xml:space="preserve" text-anchor="middle" x="450" y="-247.95" font-family="Helvetica,sans-Serif" font-size="14.00">SubmitJob</text>
<text xml:space="preserve" text-anchor="middle" x="450" y="-230.7" font-family="Helvetica,sans-Serif" font-size="14.00">(unary)</text>
</g>
<!-- create_job&#45;&gt;grpc_submit -->
<g id="edge19" class="edge">
<title>create_job&#45;&gt;grpc_submit</title>
<path fill="none" stroke="black" d="M681.06,-386C596.67,-386 462.48,-386 462.48,-386 462.48,-386 462.48,-298.5 462.48,-298.5"/>
<polygon fill="black" stroke="black" points="465.98,-298.5 462.48,-288.5 458.98,-298.5 465.98,-298.5"/>
<text xml:space="preserve" text-anchor="middle" x="620.75" y="-333.25" font-family="Helvetica,sans-Serif" font-size="10.00">via gRPC</text>
</g>
<!-- cancel_job -->
<g id="node7" class="node">
<title>cancel_job</title>
<ellipse fill="none" stroke="black" cx="980" cy="-386" rx="122.23" ry="18"/>
<text xml:space="preserve" text-anchor="middle" x="980" y="-381.32" font-family="Helvetica,sans-Serif" font-size="14.00">POST /jobs/{id}/cancel</text>
</g>
<!-- cancel_job&#45;&gt;cancelled -->
<g id="edge8" class="edge">
<title>cancel_job&#45;&gt;cancelled</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M1088.86,-377.65C1088.86,-377.65 1088.86,-63.86 1088.86,-63.86"/>
<polygon fill="black" stroke="black" points="1092.36,-63.86 1088.86,-53.86 1085.36,-63.86 1092.36,-63.86"/>
</g>
<!-- grpc_cancel -->
<g id="node16" class="node">
<title>grpc_cancel</title>
<path fill="none" stroke="black" d="M746.35,-286.5C746.35,-286.5 631.4,-286.5 631.4,-286.5 625.4,-286.5 617.07,-280.97 614.75,-275.44 614.75,-275.44 588.31,-212.56 588.31,-212.56 585.98,-207.03 589.65,-201.5 595.65,-201.5 595.65,-201.5 710.6,-201.5 710.6,-201.5 716.6,-201.5 724.93,-207.03 727.25,-212.56 727.25,-212.56 753.69,-275.44 753.69,-275.44 756.02,-280.97 752.35,-286.5 746.35,-286.5"/>
<text xml:space="preserve" text-anchor="middle" x="671" y="-247.95" font-family="Helvetica,sans-Serif" font-size="14.00">CancelJob</text>
<text xml:space="preserve" text-anchor="middle" x="671" y="-230.7" font-family="Helvetica,sans-Serif" font-size="14.00">(unary)</text>
</g>
<!-- cancel_job&#45;&gt;grpc_cancel -->
<g id="edge21" class="edge">
<title>cancel_job&#45;&gt;grpc_cancel</title>
<path fill="none" stroke="black" d="M873.76,-376.83C873.76,-350.09 873.76,-274 873.76,-274 873.76,-274 764.98,-274 764.98,-274"/>
<polygon fill="black" stroke="black" points="764.98,-270.5 754.98,-274 764.98,-277.5 764.98,-270.5"/>
<text xml:space="preserve" text-anchor="middle" x="870.75" y="-333.25" font-family="Helvetica,sans-Serif" font-size="10.00">via gRPC</text>
</g>
<!-- retry_job -->
<g id="node8" class="node">
<title>retry_job</title>
<ellipse fill="none" stroke="black" cx="1260" cy="-386" rx="114.34" ry="18"/>
<text xml:space="preserve" text-anchor="middle" x="1260" y="-381.32" font-family="Helvetica,sans-Serif" font-size="14.00">POST /jobs/{id}/retry</text>
</g>
<!-- retry_job&#45;&gt;pending -->
<g id="edge9" class="edge">
<title>retry_job&#45;&gt;pending</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M1260,-367.66C1260,-330.54 1260,-250 1260,-250 1260,-250 995.86,-250 995.86,-250"/>
<polygon fill="black" stroke="black" points="995.86,-246.5 985.86,-250 995.86,-253.5 995.86,-246.5"/>
</g>
<!-- local -->
<g id="node10" class="node">
<title>local</title>
<path fill="none" stroke="black" d="M316.75,-622C316.75,-622 203.25,-622 203.25,-622 197.25,-622 191.25,-616 191.25,-610 191.25,-610 191.25,-591.5 191.25,-591.5 191.25,-585.5 197.25,-579.5 203.25,-579.5 203.25,-579.5 316.75,-579.5 316.75,-579.5 322.75,-579.5 328.75,-585.5 328.75,-591.5 328.75,-591.5 328.75,-610 328.75,-610 328.75,-616 322.75,-622 316.75,-622"/>
<text xml:space="preserve" text-anchor="middle" x="260" y="-604.7" font-family="Helvetica,sans-Serif" font-size="14.00">LocalExecutor</text>
<text xml:space="preserve" text-anchor="middle" x="260" y="-587.45" font-family="Helvetica,sans-Serif" font-size="14.00">Celery + FFmpeg</text>
</g>
<!-- executor&#45;&gt;local -->
<g id="edge11" class="edge">
<title>executor&#45;&gt;local</title>
<path fill="none" stroke="black" d="M165.81,-711.81C165.81,-683.47 165.81,-601 165.81,-601 165.81,-601 179.54,-601 179.54,-601"/>
<polygon fill="black" stroke="black" points="179.54,-604.5 189.54,-601 179.54,-597.5 179.54,-604.5"/>
<text xml:space="preserve" text-anchor="middle" x="287.88" y="-647.25" font-family="Helvetica,sans-Serif" font-size="10.00">MPR_EXECUTOR=local</text>
</g>
<!-- lambda_exec -->
<!-- celery_task -->
<g id="node11" class="node">
<title>lambda_exec</title>
<path fill="none" stroke="black" d="M136.12,-622C136.12,-622 27.88,-622 27.88,-622 21.88,-622 15.88,-616 15.88,-610 15.88,-610 15.88,-591.5 15.88,-591.5 15.88,-585.5 21.88,-579.5 27.88,-579.5 27.88,-579.5 136.12,-579.5 136.12,-579.5 142.12,-579.5 148.12,-585.5 148.12,-591.5 148.12,-591.5 148.12,-610 148.12,-610 148.12,-616 142.12,-622 136.12,-622"/>
<text xml:space="preserve" text-anchor="middle" x="82" y="-604.7" font-family="Helvetica,sans-Serif" font-size="14.00">LambdaExecutor</text>
<text xml:space="preserve" text-anchor="middle" x="82" y="-587.45" font-family="Helvetica,sans-Serif" font-size="14.00">SQS + Lambda</text>
<title>celery_task</title>
<path fill="none" stroke="black" d="M162.75,-488C162.75,-488 43.25,-488 43.25,-488 37.25,-488 31.25,-482 31.25,-476 31.25,-476 31.25,-457.5 31.25,-457.5 31.25,-451.5 37.25,-445.5 43.25,-445.5 43.25,-445.5 162.75,-445.5 162.75,-445.5 168.75,-445.5 174.75,-451.5 174.75,-457.5 174.75,-457.5 174.75,-476 174.75,-476 174.75,-482 168.75,-488 162.75,-488"/>
<text xml:space="preserve" text-anchor="middle" x="103" y="-470.7" font-family="Helvetica,sans-Serif" font-size="14.00">Celery Task</text>
<text xml:space="preserve" text-anchor="middle" x="103" y="-453.45" font-family="Helvetica,sans-Serif" font-size="14.00">(transcode queue)</text>
</g>
<!-- executor&#45;&gt;lambda_exec -->
<g id="edge12" class="edge">
<title>executor&#45;&gt;lambda_exec</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M82.31,-687.36C82.31,-687.36 82.31,-633.77 82.31,-633.77"/>
<polygon fill="black" stroke="black" points="85.81,-633.77 82.31,-623.77 78.81,-633.77 85.81,-633.77"/>
<text xml:space="preserve" text-anchor="middle" x="121.62" y="-647.25" font-family="Helvetica,sans-Serif" font-size="10.00">MPR_EXECUTOR=lambda</text>
<!-- dispatch&#45;&gt;celery_task -->
<g id="edge11" class="edge">
<title>dispatch&#45;&gt;celery_task</title>
<path fill="none" stroke="black" d="M142.89,-552.62C142.89,-552.62 142.89,-499.67 142.89,-499.67"/>
<polygon fill="black" stroke="black" points="146.39,-499.67 142.89,-489.67 139.39,-499.67 146.39,-499.67"/>
<text xml:space="preserve" text-anchor="middle" x="131.27" y="-529.4" font-family="Helvetica,sans-Serif" font-size="10.00">local</text>
</g>
<!-- transcode -->
<g id="node12" class="node">
<title>transcode</title>
<path fill="none" stroke="black" d="M172.88,-513C172.88,-513 93.12,-513 93.12,-513 87.12,-513 81.12,-507 81.12,-501 81.12,-501 81.12,-482.5 81.12,-482.5 81.12,-476.5 87.12,-470.5 93.12,-470.5 93.12,-470.5 172.88,-470.5 172.88,-470.5 178.88,-470.5 184.88,-476.5 184.88,-482.5 184.88,-482.5 184.88,-501 184.88,-501 184.88,-507 178.88,-513 172.88,-513"/>
<text xml:space="preserve" text-anchor="middle" x="133" y="-495.7" font-family="Helvetica,sans-Serif" font-size="14.00">Transcode</text>
<text xml:space="preserve" text-anchor="middle" x="133" y="-478.45" font-family="Helvetica,sans-Serif" font-size="14.00">(with preset)</text>
<!-- sfn_start -->
<g id="node16" class="node">
<title>sfn_start</title>
<path fill="none" stroke="black" d="M1525.88,-488C1525.88,-488 1428.12,-488 1428.12,-488 1422.12,-488 1416.12,-482 1416.12,-476 1416.12,-476 1416.12,-457.5 1416.12,-457.5 1416.12,-451.5 1422.12,-445.5 1428.12,-445.5 1428.12,-445.5 1525.88,-445.5 1525.88,-445.5 1531.88,-445.5 1537.88,-451.5 1537.88,-457.5 1537.88,-457.5 1537.88,-476 1537.88,-476 1537.88,-482 1531.88,-488 1525.88,-488"/>
<text xml:space="preserve" text-anchor="middle" x="1477" y="-470.7" font-family="Helvetica,sans-Serif" font-size="14.00">Step Functions</text>
<text xml:space="preserve" text-anchor="middle" x="1477" y="-453.45" font-family="Helvetica,sans-Serif" font-size="14.00">start_execution</text>
</g>
<!-- local&#45;&gt;transcode -->
<g id="edge13" class="edge">
<title>local&#45;&gt;transcode</title>
<path fill="none" stroke="black" d="M209.38,-579C209.38,-547.27 209.38,-492 209.38,-492 209.38,-492 196.72,-492 196.72,-492"/>
<polygon fill="black" stroke="black" points="196.72,-488.5 186.72,-492 196.72,-495.5 196.72,-488.5"/>
</g>
<!-- trim -->
<g id="node13" class="node">
<title>trim</title>
<path fill="none" stroke="black" d="M372.5,-513C372.5,-513 239.5,-513 239.5,-513 233.5,-513 227.5,-507 227.5,-501 227.5,-501 227.5,-482.5 227.5,-482.5 227.5,-476.5 233.5,-470.5 239.5,-470.5 239.5,-470.5 372.5,-470.5 372.5,-470.5 378.5,-470.5 384.5,-476.5 384.5,-482.5 384.5,-482.5 384.5,-501 384.5,-501 384.5,-507 378.5,-513 372.5,-513"/>
<text xml:space="preserve" text-anchor="middle" x="306" y="-495.7" font-family="Helvetica,sans-Serif" font-size="14.00">Trim</text>
<text xml:space="preserve" text-anchor="middle" x="306" y="-478.45" font-family="Helvetica,sans-Serif" font-size="14.00">(&#45;c:v copy &#45;c:a copy)</text>
</g>
<!-- local&#45;&gt;trim -->
<g id="edge14" class="edge">
<title>local&#45;&gt;trim</title>
<path fill="none" stroke="black" d="M278.12,-579.22C278.12,-579.22 278.12,-524.75 278.12,-524.75"/>
<polygon fill="black" stroke="black" points="281.63,-524.75 278.13,-514.75 274.63,-524.75 281.63,-524.75"/>
</g>
<!-- progress -->
<g id="node17" class="node">
<title>progress</title>
<polygon fill="none" stroke="black" points="241.5,-407.25 84.5,-407.25 84.5,-364.75 247.5,-364.75 247.5,-401.25 241.5,-407.25"/>
<polyline fill="none" stroke="black" points="241.5,-407.25 241.5,-401.25"/>
<polyline fill="none" stroke="black" points="247.5,-401.25 241.5,-401.25"/>
<text xml:space="preserve" text-anchor="middle" x="166" y="-389.95" font-family="Helvetica,sans-Serif" font-size="14.00">Progress Updates</text>
<text xml:space="preserve" text-anchor="middle" x="166" y="-372.7" font-family="Helvetica,sans-Serif" font-size="14.00">(gRPC → Redis → DB)</text>
</g>
<!-- transcode&#45;&gt;progress -->
<g id="edge15" class="edge">
<title>transcode&#45;&gt;progress</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M134.69,-470.09C134.69,-470.09 134.69,-419.14 134.69,-419.14"/>
<polygon fill="black" stroke="black" points="138.19,-419.14 134.69,-409.14 131.19,-419.14 138.19,-419.14"/>
</g>
<!-- trim&#45;&gt;progress -->
<g id="edge16" class="edge">
<title>trim&#45;&gt;progress</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M237.5,-470.09C237.5,-470.09 237.5,-419.14 237.5,-419.14"/>
<polygon fill="black" stroke="black" points="241,-419.14 237.5,-409.14 234,-419.14 241,-419.14"/>
</g>
<!-- grpc_stream -->
<g id="node14" class="node">
<title>grpc_stream</title>
<path fill="none" stroke="black" d="M304.33,-286.5C304.33,-286.5 89.19,-286.5 89.19,-286.5 83.19,-286.5 73.67,-281.64 70.15,-276.78 70.15,-276.78 22.71,-211.22 22.71,-211.22 19.19,-206.36 21.67,-201.5 27.67,-201.5 27.67,-201.5 242.81,-201.5 242.81,-201.5 248.81,-201.5 258.33,-206.36 261.85,-211.22 261.85,-211.22 309.29,-276.78 309.29,-276.78 312.81,-281.64 310.33,-286.5 304.33,-286.5"/>
<text xml:space="preserve" text-anchor="middle" x="166" y="-247.95" font-family="Helvetica,sans-Serif" font-size="14.00">StreamProgress</text>
<text xml:space="preserve" text-anchor="middle" x="166" y="-230.7" font-family="Helvetica,sans-Serif" font-size="14.00">(server streaming)</text>
</g>
<!-- grpc_stream&#45;&gt;processing -->
<g id="edge18" class="edge">
<title>grpc_stream&#45;&gt;processing</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M166,-201.1C166,-167.71 166,-127 166,-127 166,-127 770.51,-127 770.51,-127"/>
<polygon fill="black" stroke="black" points="770.51,-130.5 780.51,-127 770.51,-123.5 770.51,-130.5"/>
<text xml:space="preserve" text-anchor="middle" x="476.38" y="-170" font-family="Helvetica,sans-Serif" font-size="10.00">update status</text>
</g>
<!-- grpc_submit&#45;&gt;pending -->
<g id="edge20" class="edge">
<title>grpc_submit&#45;&gt;pending</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M450,-201.06C450,-186.11 450,-173 450,-173 450,-173 912.08,-173 912.08,-173 912.08,-173 912.08,-214.2 912.08,-214.2"/>
<polygon fill="black" stroke="black" points="908.58,-214.2 912.08,-224.2 915.58,-214.2 908.58,-214.2"/>
</g>
<!-- grpc_cancel&#45;&gt;cancelled -->
<g id="edge22" class="edge">
<title>grpc_cancel&#45;&gt;cancelled</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M728.29,-214C836.93,-214 1062.12,-214 1062.12,-214 1062.12,-214 1062.12,-63.76 1062.12,-63.76"/>
<polygon fill="black" stroke="black" points="1065.62,-63.76 1062.12,-53.76 1058.62,-63.76 1065.62,-63.76"/>
</g>
<!-- progress&#45;&gt;grpc_stream -->
<g id="edge17" class="edge">
<title>progress&#45;&gt;grpc_stream</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M166,-364.43C166,-364.43 166,-298.49 166,-298.49"/>
<polygon fill="black" stroke="black" points="169.5,-298.49 166,-288.49 162.5,-298.49 169.5,-298.49"/>
<text xml:space="preserve" text-anchor="middle" x="204.62" y="-333.25" font-family="Helvetica,sans-Serif" font-size="10.00">stream to client</text>
</g>
<!-- dispatch&#45;&gt;sfn_start -->
<g id="edge24" class="edge">
<title>dispatch&#45;&gt;sfn_start</title>
<path fill="none" stroke="black" d="M336.81,-552.63C336.81,-533.84 336.81,-467 336.81,-467 336.81,-467 1404.18,-467 1404.18,-467"/>
<polygon fill="black" stroke="black" points="1404.18,-470.5 1414.18,-467 1404.18,-463.5 1404.18,-470.5"/>
<text xml:space="preserve" text-anchor="middle" x="809.3" y="-470.25" font-family="Helvetica,sans-Serif" font-size="10.00">lambda</text>
</g>
<!-- s3_download -->
<g id="node12" class="node">
<title>s3_download</title>
<path fill="none" stroke="black" d="M144.38,-402.5C144.38,-402.5 61.62,-402.5 61.62,-402.5 55.62,-402.5 49.62,-396.5 49.62,-390.5 49.62,-390.5 49.62,-372 49.62,-372 49.62,-366 55.62,-360 61.62,-360 61.62,-360 144.38,-360 144.38,-360 150.38,-360 156.38,-366 156.38,-372 156.38,-372 156.38,-390.5 156.38,-390.5 156.38,-396.5 150.38,-402.5 144.38,-402.5"/>
<text xml:space="preserve" text-anchor="middle" x="103" y="-385.2" font-family="Helvetica,sans-Serif" font-size="14.00">S3 Download</text>
<text xml:space="preserve" text-anchor="middle" x="103" y="-367.95" font-family="Helvetica,sans-Serif" font-size="14.00">(MinIO)</text>
</g>
<!-- celery_task&#45;&gt;s3_download -->
<g id="edge12" class="edge">
<title>celery_task&#45;&gt;s3_download</title>
<path fill="none" stroke="black" d="M103,-445.17C103,-445.17 103,-414.33 103,-414.33"/>
<polygon fill="black" stroke="black" points="106.5,-414.33 103,-404.33 99.5,-414.33 106.5,-414.33"/>
</g>
<!-- ffmpeg_local -->
<g id="node13" class="node">
<title>ffmpeg_local</title>
<path fill="none" stroke="black" d="M153,-317C153,-317 59,-317 59,-317 53,-317 47,-311 47,-305 47,-305 47,-286.5 47,-286.5 47,-280.5 53,-274.5 59,-274.5 59,-274.5 153,-274.5 153,-274.5 159,-274.5 165,-280.5 165,-286.5 165,-286.5 165,-305 165,-305 165,-311 159,-317 153,-317"/>
<text xml:space="preserve" text-anchor="middle" x="106" y="-299.7" font-family="Helvetica,sans-Serif" font-size="14.00">FFmpeg</text>
<text xml:space="preserve" text-anchor="middle" x="106" y="-282.45" font-family="Helvetica,sans-Serif" font-size="14.00">transcode/trim</text>
</g>
<!-- s3_download&#45;&gt;ffmpeg_local -->
<g id="edge13" class="edge">
<title>s3_download&#45;&gt;ffmpeg_local</title>
<path fill="none" stroke="black" d="M103,-359.67C103,-359.67 103,-328.83 103,-328.83"/>
<polygon fill="black" stroke="black" points="106.5,-328.83 103,-318.83 99.5,-328.83 106.5,-328.83"/>
</g>
<!-- s3_upload -->
<g id="node14" class="node">
<title>s3_upload</title>
<path fill="none" stroke="black" d="M138.62,-229.5C138.62,-229.5 75.38,-229.5 75.38,-229.5 69.38,-229.5 63.38,-223.5 63.38,-217.5 63.38,-217.5 63.38,-199 63.38,-199 63.38,-193 69.38,-187 75.38,-187 75.38,-187 138.62,-187 138.62,-187 144.62,-187 150.62,-193 150.62,-199 150.62,-199 150.62,-217.5 150.62,-217.5 150.62,-223.5 144.62,-229.5 138.62,-229.5"/>
<text xml:space="preserve" text-anchor="middle" x="107" y="-212.2" font-family="Helvetica,sans-Serif" font-size="14.00">S3 Upload</text>
<text xml:space="preserve" text-anchor="middle" x="107" y="-194.95" font-family="Helvetica,sans-Serif" font-size="14.00">(MinIO)</text>
</g>
<!-- ffmpeg_local&#45;&gt;s3_upload -->
<g id="edge14" class="edge">
<title>ffmpeg_local&#45;&gt;s3_upload</title>
<path fill="none" stroke="black" d="M107,-274.12C107,-274.12 107,-241.45 107,-241.45"/>
<polygon fill="black" stroke="black" points="110.5,-241.45 107,-231.45 103.5,-241.45 110.5,-241.45"/>
</g>
<!-- db_update -->
<g id="node15" class="node">
<title>db_update</title>
<path fill="none" stroke="black" d="M180.88,-144C180.88,-144 35.12,-144 35.12,-144 29.12,-144 23.12,-138 23.12,-132 23.12,-132 23.12,-113.5 23.12,-113.5 23.12,-107.5 29.12,-101.5 35.12,-101.5 35.12,-101.5 180.88,-101.5 180.88,-101.5 186.88,-101.5 192.88,-107.5 192.88,-113.5 192.88,-113.5 192.88,-132 192.88,-132 192.88,-138 186.88,-144 180.88,-144"/>
<text xml:space="preserve" text-anchor="middle" x="108" y="-126.7" font-family="Helvetica,sans-Serif" font-size="14.00">DB Update</text>
<text xml:space="preserve" text-anchor="middle" x="108" y="-109.45" font-family="Helvetica,sans-Serif" font-size="14.00">(update_job_progress)</text>
</g>
<!-- s3_upload&#45;&gt;db_update -->
<g id="edge15" class="edge">
<title>s3_upload&#45;&gt;db_update</title>
<path fill="none" stroke="black" d="M107,-186.67C107,-186.67 107,-155.83 107,-155.83"/>
<polygon fill="black" stroke="black" points="110.5,-155.83 107,-145.83 103.5,-155.83 110.5,-155.83"/>
</g>
<!-- db_update&#45;&gt;completed -->
<g id="edge16" class="edge">
<title>db_update&#45;&gt;completed</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M193.17,-117C345.61,-117 649.29,-117 649.29,-117 649.29,-117 649.29,-43 649.29,-43 649.29,-43 675.4,-43 675.4,-43"/>
<polygon fill="black" stroke="black" points="675.4,-46.5 685.4,-43 675.4,-39.5 675.4,-46.5"/>
</g>
<!-- lambda_fn -->
<g id="node17" class="node">
<title>lambda_fn</title>
<path fill="none" stroke="black" d="M1546,-402.5C1546,-402.5 1428,-402.5 1428,-402.5 1422,-402.5 1416,-396.5 1416,-390.5 1416,-390.5 1416,-372 1416,-372 1416,-366 1422,-360 1428,-360 1428,-360 1546,-360 1546,-360 1552,-360 1558,-366 1558,-372 1558,-372 1558,-390.5 1558,-390.5 1558,-396.5 1552,-402.5 1546,-402.5"/>
<text xml:space="preserve" text-anchor="middle" x="1487" y="-385.2" font-family="Helvetica,sans-Serif" font-size="14.00">Lambda</text>
<text xml:space="preserve" text-anchor="middle" x="1487" y="-367.95" font-family="Helvetica,sans-Serif" font-size="14.00">FFmpeg container</text>
</g>
<!-- sfn_start&#45;&gt;lambda_fn -->
<g id="edge18" class="edge">
<title>sfn_start&#45;&gt;lambda_fn</title>
<path fill="none" stroke="black" d="M1477,-445.17C1477,-445.17 1477,-414.33 1477,-414.33"/>
<polygon fill="black" stroke="black" points="1480.5,-414.33 1477,-404.33 1473.5,-414.33 1480.5,-414.33"/>
</g>
<!-- s3_dl_aws -->
<g id="node18" class="node">
<title>s3_dl_aws</title>
<path fill="none" stroke="black" d="M1534.38,-317C1534.38,-317 1451.62,-317 1451.62,-317 1445.62,-317 1439.62,-311 1439.62,-305 1439.62,-305 1439.62,-286.5 1439.62,-286.5 1439.62,-280.5 1445.62,-274.5 1451.62,-274.5 1451.62,-274.5 1534.38,-274.5 1534.38,-274.5 1540.38,-274.5 1546.38,-280.5 1546.38,-286.5 1546.38,-286.5 1546.38,-305 1546.38,-305 1546.38,-311 1540.38,-317 1534.38,-317"/>
<text xml:space="preserve" text-anchor="middle" x="1493" y="-299.7" font-family="Helvetica,sans-Serif" font-size="14.00">S3 Download</text>
<text xml:space="preserve" text-anchor="middle" x="1493" y="-282.45" font-family="Helvetica,sans-Serif" font-size="14.00">(AWS)</text>
</g>
<!-- lambda_fn&#45;&gt;s3_dl_aws -->
<g id="edge19" class="edge">
<title>lambda_fn&#45;&gt;s3_dl_aws</title>
<path fill="none" stroke="black" d="M1493,-359.67C1493,-359.67 1493,-328.83 1493,-328.83"/>
<polygon fill="black" stroke="black" points="1496.5,-328.83 1493,-318.83 1489.5,-328.83 1496.5,-328.83"/>
</g>
<!-- ffmpeg_aws -->
<g id="node19" class="node">
<title>ffmpeg_aws</title>
<path fill="none" stroke="black" d="M1545,-229.5C1545,-229.5 1451,-229.5 1451,-229.5 1445,-229.5 1439,-223.5 1439,-217.5 1439,-217.5 1439,-199 1439,-199 1439,-193 1445,-187 1451,-187 1451,-187 1545,-187 1545,-187 1551,-187 1557,-193 1557,-199 1557,-199 1557,-217.5 1557,-217.5 1557,-223.5 1551,-229.5 1545,-229.5"/>
<text xml:space="preserve" text-anchor="middle" x="1498" y="-212.2" font-family="Helvetica,sans-Serif" font-size="14.00">FFmpeg</text>
<text xml:space="preserve" text-anchor="middle" x="1498" y="-194.95" font-family="Helvetica,sans-Serif" font-size="14.00">transcode/trim</text>
</g>
<!-- s3_dl_aws&#45;&gt;ffmpeg_aws -->
<g id="edge20" class="edge">
<title>s3_dl_aws&#45;&gt;ffmpeg_aws</title>
<path fill="none" stroke="black" d="M1493,-274.12C1493,-274.12 1493,-241.45 1493,-241.45"/>
<polygon fill="black" stroke="black" points="1496.5,-241.45 1493,-231.45 1489.5,-241.45 1496.5,-241.45"/>
</g>
<!-- s3_ul_aws -->
<g id="node20" class="node">
<title>s3_ul_aws</title>
<path fill="none" stroke="black" d="M1532.62,-144C1532.62,-144 1469.38,-144 1469.38,-144 1463.38,-144 1457.38,-138 1457.38,-132 1457.38,-132 1457.38,-113.5 1457.38,-113.5 1457.38,-107.5 1463.38,-101.5 1469.38,-101.5 1469.38,-101.5 1532.62,-101.5 1532.62,-101.5 1538.62,-101.5 1544.62,-107.5 1544.62,-113.5 1544.62,-113.5 1544.62,-132 1544.62,-132 1544.62,-138 1538.62,-144 1532.62,-144"/>
<text xml:space="preserve" text-anchor="middle" x="1501" y="-126.7" font-family="Helvetica,sans-Serif" font-size="14.00">S3 Upload</text>
<text xml:space="preserve" text-anchor="middle" x="1501" y="-109.45" font-family="Helvetica,sans-Serif" font-size="14.00">(AWS)</text>
</g>
<!-- ffmpeg_aws&#45;&gt;s3_ul_aws -->
<g id="edge21" class="edge">
<title>ffmpeg_aws&#45;&gt;s3_ul_aws</title>
<path fill="none" stroke="black" d="M1501,-186.67C1501,-186.67 1501,-155.83 1501,-155.83"/>
<polygon fill="black" stroke="black" points="1504.5,-155.83 1501,-145.83 1497.5,-155.83 1504.5,-155.83"/>
</g>
<!-- callback -->
<g id="node21" class="node">
<title>callback</title>
<path fill="none" stroke="black" d="M1585.12,-58.5C1585.12,-58.5 1422.88,-58.5 1422.88,-58.5 1416.88,-58.5 1410.88,-52.5 1410.88,-46.5 1410.88,-46.5 1410.88,-28 1410.88,-28 1410.88,-22 1416.88,-16 1422.88,-16 1422.88,-16 1585.12,-16 1585.12,-16 1591.12,-16 1597.12,-22 1597.12,-28 1597.12,-28 1597.12,-46.5 1597.12,-46.5 1597.12,-52.5 1591.12,-58.5 1585.12,-58.5"/>
<text xml:space="preserve" text-anchor="middle" x="1504" y="-41.2" font-family="Helvetica,sans-Serif" font-size="14.00">HTTP Callback</text>
<text xml:space="preserve" text-anchor="middle" x="1504" y="-23.95" font-family="Helvetica,sans-Serif" font-size="14.00">POST /jobs/{id}/callback</text>
</g>
<!-- s3_ul_aws&#45;&gt;callback -->
<g id="edge22" class="edge">
<title>s3_ul_aws&#45;&gt;callback</title>
<path fill="none" stroke="black" d="M1501,-101.17C1501,-101.17 1501,-70.33 1501,-70.33"/>
<polygon fill="black" stroke="black" points="1504.5,-70.33 1501,-60.33 1497.5,-70.33 1504.5,-70.33"/>
</g>
<!-- callback&#45;&gt;completed -->
<g id="edge23" class="edge">
<title>callback&#45;&gt;completed</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M1427.5,-58.88C1427.5,-69.48 1427.5,-80 1427.5,-80 1427.5,-80 786.08,-80 786.08,-80 786.08,-80 786.08,-67.14 786.08,-67.14"/>
<polygon fill="black" stroke="black" points="789.58,-67.14 786.08,-57.14 782.58,-67.14 789.58,-67.14"/>
</g>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 21 KiB

After

Width:  |  Height:  |  Size: 24 KiB

View File

@@ -0,0 +1,122 @@
# Media Storage Architecture
## Overview
MPR uses **S3-compatible storage** everywhere. Locally via MinIO, in production via AWS S3. The same boto3 code and S3 keys work in both environments - the only difference is the `S3_ENDPOINT_URL` env var.
## Storage Strategy
### S3 Buckets
| Bucket | Env Var | Purpose |
|--------|---------|---------|
| `mpr-media-in` | `S3_BUCKET_IN` | Source media files |
| `mpr-media-out` | `S3_BUCKET_OUT` | Transcoded/trimmed output |
### S3 Keys as File Paths
- **Database**: Stores S3 object keys (e.g., `video1.mp4`, `subfolder/video3.mp4`)
- **Local dev**: MinIO serves these via S3 API on port 9000
- **AWS**: Real S3, same keys, different endpoint
### Why S3 Everywhere?
1. **Identical code paths** - no branching between local and cloud
2. **Seamless executor switching** - Celery and Lambda both use boto3
3. **Cloud-native** - ready for production without refactoring
## Local Development (MinIO)
### Configuration
```bash
S3_ENDPOINT_URL=http://minio:9000
S3_BUCKET_IN=mpr-media-in
S3_BUCKET_OUT=mpr-media-out
AWS_ACCESS_KEY_ID=minioadmin
AWS_SECRET_ACCESS_KEY=minioadmin
```
### How It Works
- MinIO runs as a Docker container (port 9000 API, port 9001 console)
- `minio-init` container creates buckets and sets public read access on startup
- Nginx proxies `/media/in/` and `/media/out/` to MinIO buckets
- Upload files via MinIO Console (http://localhost:9001) or `mc` CLI
### Upload Files to MinIO
```bash
# Using mc CLI
mc alias set local http://localhost:9000 minioadmin minioadmin
mc cp video.mp4 local/mpr-media-in/
# Using aws CLI with endpoint override
aws --endpoint-url http://localhost:9000 s3 cp video.mp4 s3://mpr-media-in/
```
## AWS Production (S3)
### Configuration
```bash
# No S3_ENDPOINT_URL = uses real AWS S3
S3_BUCKET_IN=mpr-media-in
S3_BUCKET_OUT=mpr-media-out
AWS_REGION=us-east-1
AWS_ACCESS_KEY_ID=<real-key>
AWS_SECRET_ACCESS_KEY=<real-secret>
```
### Upload Files to S3
```bash
aws s3 cp video.mp4 s3://mpr-media-in/
aws s3 sync /local/media/ s3://mpr-media-in/
```
## Storage Module
`core/storage.py` provides all S3 operations:
```python
from core.storage import (
get_s3_client, # boto3 client (MinIO or AWS)
list_objects, # List bucket contents, filter by extension
download_file, # Download S3 object to local path
download_to_temp, # Download to temp file (caller cleans up)
upload_file, # Upload local file to S3
get_presigned_url, # Generate presigned URL
BUCKET_IN, # Input bucket name
BUCKET_OUT, # Output bucket name
)
```
## API Endpoints
### Scan Media (REST)
```http
POST /api/assets/scan
```
Lists objects in `S3_BUCKET_IN`, registers new media files.
### Scan Media (GraphQL)
```graphql
mutation { scanMediaFolder { found registered skipped files } }
```
## Job Flow with S3
### Local Mode (Celery)
1. Celery task receives `source_key` and `output_key`
2. Downloads source from `S3_BUCKET_IN` to temp file
3. Runs FFmpeg locally
4. Uploads result to `S3_BUCKET_OUT`
5. Cleans up temp files
### Lambda Mode (AWS)
1. Step Functions invokes Lambda with S3 keys
2. Lambda downloads source from `S3_BUCKET_IN` to `/tmp`
3. Runs FFmpeg in container
4. Uploads result to `S3_BUCKET_OUT`
5. Calls back to API with result
Both paths use the same S3 buckets and key structure.
## Supported File Types
**Video:** `.mp4`, `.mkv`, `.avi`, `.mov`, `.webm`, `.flv`, `.wmv`, `.m4v`
**Audio:** `.mp3`, `.wav`, `.flac`, `.aac`, `.ogg`, `.m4a`

View File

@@ -1,19 +1,23 @@
<!DOCTYPE html>
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>MPR - Architecture</title>
<link rel="stylesheet" href="styles.css">
</head>
<body>
<link rel="stylesheet" href="styles.css" />
</head>
<body>
<h1>MPR - Media Processor</h1>
<p>A web-based media transcoding tool with professional architecture.</p>
<p>
Media transcoding platform with dual execution modes: local (Celery
+ MinIO) and cloud (AWS Step Functions + Lambda + S3).
</p>
<nav>
<a href="#overview">System Overview</a>
<a href="#data-model">Data Model</a>
<a href="#job-flow">Job Flow</a>
<a href="#media-storage">Media Storage</a>
</nav>
<h2 id="overview">System Overview</h2>
@@ -21,20 +25,42 @@
<div class="diagram">
<h3>Architecture</h3>
<object type="image/svg+xml" data="01-system-overview.svg">
<img src="01-system-overview.svg" alt="System Overview">
<img src="01-system-overview.svg" alt="System Overview" />
</object>
<a href="01-system-overview.svg" target="_blank">Open full size</a>
<a href="01-system-overview.svg" target="_blank"
>Open full size</a
>
</div>
</div>
<div class="legend">
<h3>Components</h3>
<ul>
<li><span class="color-box" style="background: #e8f4f8"></span> Reverse Proxy (nginx)</li>
<li><span class="color-box" style="background: #f0f8e8"></span> Application Layer (Django, FastAPI, UI)</li>
<li><span class="color-box" style="background: #fff8e8"></span> Worker Layer (Celery, Lambda)</li>
<li><span class="color-box" style="background: #f8e8f0"></span> Data Layer (PostgreSQL, Redis, SQS)</li>
<li><span class="color-box" style="background: #f0f0f0"></span> Storage (Local FS, S3)</li>
<li>
<span class="color-box" style="background: #e8f4f8"></span>
Reverse Proxy (nginx)
</li>
<li>
<span class="color-box" style="background: #f0f8e8"></span>
Application Layer (Django Admin, FastAPI + GraphQL, Timeline
UI)
</li>
<li>
<span class="color-box" style="background: #fff8e8"></span>
Worker Layer (Celery local mode)
</li>
<li>
<span class="color-box" style="background: #fde8d0"></span>
AWS (Step Functions, Lambda - cloud mode)
</li>
<li>
<span class="color-box" style="background: #f8e8f0"></span>
Data Layer (PostgreSQL, Redis)
</li>
<li>
<span class="color-box" style="background: #f0f0f0"></span>
S3 Storage (MinIO local / AWS S3 cloud)
</li>
</ul>
</div>
@@ -43,7 +69,7 @@
<div class="diagram">
<h3>Entity Relationships</h3>
<object type="image/svg+xml" data="02-data-model.svg">
<img src="02-data-model.svg" alt="Data Model">
<img src="02-data-model.svg" alt="Data Model" />
</object>
<a href="02-data-model.svg" target="_blank">Open full size</a>
</div>
@@ -52,9 +78,19 @@
<div class="legend">
<h3>Entities</h3>
<ul>
<li><span class="color-box" style="background: #4a90d9"></span> MediaAsset - Video/audio files with metadata</li>
<li><span class="color-box" style="background: #50b050"></span> TranscodePreset - Encoding configurations</li>
<li><span class="color-box" style="background: #d9534f"></span> TranscodeJob - Processing queue items</li>
<li>
<span class="color-box" style="background: #4a90d9"></span>
MediaAsset - Video/audio files (S3 keys as paths)
</li>
<li>
<span class="color-box" style="background: #50b050"></span>
TranscodePreset - Encoding configurations
</li>
<li>
<span class="color-box" style="background: #d9534f"></span>
TranscodeJob - Processing queue (celery_task_id or
execution_arn)
</li>
</ul>
</div>
@@ -63,7 +99,7 @@
<div class="diagram">
<h3>Job Lifecycle</h3>
<object type="image/svg+xml" data="03-job-flow.svg">
<img src="03-job-flow.svg" alt="Job Flow">
<img src="03-job-flow.svg" alt="Job Flow" />
</object>
<a href="03-job-flow.svg" target="_blank">Open full size</a>
</div>
@@ -72,30 +108,84 @@
<div class="legend">
<h3>Job States</h3>
<ul>
<li><span class="color-box" style="background: #ffc107"></span> PENDING - Waiting in queue</li>
<li><span class="color-box" style="background: #17a2b8"></span> PROCESSING - Worker executing</li>
<li><span class="color-box" style="background: #28a745"></span> COMPLETED - Success</li>
<li><span class="color-box" style="background: #dc3545"></span> FAILED - Error occurred</li>
<li><span class="color-box" style="background: #6c757d"></span> CANCELLED - User cancelled</li>
<li>
<span class="color-box" style="background: #ffc107"></span>
PENDING - Waiting in queue
</li>
<li>
<span class="color-box" style="background: #17a2b8"></span>
PROCESSING - Worker executing
</li>
<li>
<span class="color-box" style="background: #28a745"></span>
COMPLETED - Success
</li>
<li>
<span class="color-box" style="background: #dc3545"></span>
FAILED - Error occurred
</li>
<li>
<span class="color-box" style="background: #6c757d"></span>
CANCELLED - User cancelled
</li>
</ul>
<h3>Execution Modes</h3>
<ul>
<li>
<span class="color-box" style="background: #e8f4e8"></span>
Local: Celery + MinIO (S3 API) + FFmpeg
</li>
<li>
<span class="color-box" style="background: #fde8d0"></span>
Lambda: Step Functions + Lambda + AWS S3
</li>
</ul>
</div>
<h2>Quick Reference</h2>
<pre><code># Generate SVGs from DOT files
dot -Tsvg 01-system-overview.dot -o 01-system-overview.svg
dot -Tsvg 02-data-model.dot -o 02-data-model.svg
dot -Tsvg 03-job-flow.dot -o 03-job-flow.svg
<h2 id="media-storage">Media Storage</h2>
<div class="diagram-container">
<p>
MPR separates media into input and output paths for flexible
storage configuration.
</p>
<p>
<a href="04-media-storage.md" target="_blank"
>View Media Storage Documentation →</a
>
</p>
</div>
# Or generate all at once
for f in *.dot; do dot -Tsvg "$f" -o "${f%.dot}.svg"; done</code></pre>
<h2>API Interfaces</h2>
<pre><code># REST API
http://mpr.local.ar/api/docs - Swagger UI
POST /api/assets/scan - Scan S3 bucket for media
POST /api/jobs/ - Create transcode job
POST /api/jobs/{id}/callback - Lambda completion callback
# GraphQL (GraphiQL)
http://mpr.local.ar/graphql - GraphiQL IDE
query { assets { id filename } }
mutation { createJob(input: {...}) { id status } }
mutation { scanMediaFolder { found registered } }</code></pre>
<h2>Access Points</h2>
<pre><code># Add to /etc/hosts
<pre><code># Local development
127.0.0.1 mpr.local.ar
# URLs
http://mpr.local.ar/admin - Django Admin
http://mpr.local.ar/api - FastAPI (docs at /api/docs)
http://mpr.local.ar/ui - Timeline UI</code></pre>
</body>
http://mpr.local.ar/api/docs - FastAPI Swagger
http://mpr.local.ar/graphql - GraphiQL
http://mpr.local.ar/ - Timeline UI
http://localhost:9001 - MinIO Console
# AWS deployment
https://mpr.mcrn.ar/ - Production</code></pre>
<h2>Quick Reference</h2>
<pre><code># Render SVGs from DOT files
for f in *.dot; do dot -Tsvg "$f" -o "${f%.dot}.svg"; done
# Switch executor mode
MPR_EXECUTOR=local # Celery + MinIO
MPR_EXECUTOR=lambda # Step Functions + Lambda + S3</code></pre>
</body>
</html>

232
docs/index.html Normal file
View File

@@ -0,0 +1,232 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>MPR - Architecture</title>
<link rel="stylesheet" href="architecture/styles.css" />
</head>
<body>
<h1>MPR - Media Processor</h1>
<p>
A web-based media transcoding tool with professional architecture.
</p>
<nav>
<a href="#overview">System Overview</a>
<a href="#data-model">Data Model</a>
<a href="#job-flow">Job Flow</a>
<a href="#media-storage">Media Storage</a>
</nav>
<h2 id="overview">System Overview</h2>
<div class="diagram-container">
<div class="diagram">
<h3>Architecture</h3>
<object
type="image/svg+xml"
data="architecture/01-system-overview.svg"
>
<img
src="architecture/01-system-overview.svg"
alt="System Overview"
/>
</object>
<a href="architecture/01-system-overview.svg" target="_blank"
>Open full size</a
>
</div>
</div>
<div class="legend">
<h3>Components</h3>
<ul>
<li>
<span class="color-box" style="background: #e8f4f8"></span>
Reverse Proxy (nginx)
</li>
<li>
<span class="color-box" style="background: #f0f8e8"></span>
Application Layer (Django, FastAPI, UI)
</li>
<li>
<span class="color-box" style="background: #fff8e8"></span>
Worker Layer (Celery, Lambda)
</li>
<li>
<span class="color-box" style="background: #f8e8f0"></span>
Data Layer (PostgreSQL, Redis, SQS)
</li>
<li>
<span class="color-box" style="background: #f0f0f0"></span>
Storage (Local FS, S3)
</li>
</ul>
</div>
<h2 id="data-model">Data Model</h2>
<div class="diagram-container">
<div class="diagram">
<h3>Entity Relationships</h3>
<object
type="image/svg+xml"
data="architecture/02-data-model.svg"
>
<img
src="architecture/02-data-model.svg"
alt="Data Model"
/>
</object>
<a href="architecture/02-data-model.svg" target="_blank"
>Open full size</a
>
</div>
</div>
<div class="legend">
<h3>Entities</h3>
<ul>
<li>
<span class="color-box" style="background: #4a90d9"></span>
MediaAsset - Video/audio files with metadata
</li>
<li>
<span class="color-box" style="background: #50b050"></span>
TranscodePreset - Encoding configurations
</li>
<li>
<span class="color-box" style="background: #d9534f"></span>
TranscodeJob - Processing queue items
</li>
</ul>
</div>
<h2 id="job-flow">Job Flow</h2>
<div class="diagram-container">
<div class="diagram">
<h3>Job Lifecycle</h3>
<object
type="image/svg+xml"
data="architecture/03-job-flow.svg"
>
<img src="architecture/03-job-flow.svg" alt="Job Flow" />
</object>
<a href="architecture/03-job-flow.svg" target="_blank"
>Open full size</a
>
</div>
</div>
<div class="legend">
<h3>Job States</h3>
<ul>
<li>
<span class="color-box" style="background: #ffc107"></span>
PENDING - Waiting in queue
</li>
<li>
<span class="color-box" style="background: #17a2b8"></span>
PROCESSING - Worker executing
</li>
<li>
<span class="color-box" style="background: #28a745"></span>
COMPLETED - Success
</li>
<li>
<span class="color-box" style="background: #dc3545"></span>
FAILED - Error occurred
</li>
<li>
<span class="color-box" style="background: #6c757d"></span>
CANCELLED - User cancelled
</li>
</ul>
</div>
<h2 id="media-storage">Media Storage</h2>
<div class="diagram-container">
<p>
MPR separates media into <strong>input</strong> and
<strong>output</strong> paths, each independently configurable.
File paths are stored
<strong>relative to their respective root</strong> to ensure
portability between local development and cloud deployments (AWS
S3, etc.).
</p>
</div>
<div class="legend">
<h3>Input / Output Separation</h3>
<ul>
<li>
<span class="color-box" style="background: #4a90d9"></span>
<code>MEDIA_IN</code> - Source media files to process
</li>
<li>
<span class="color-box" style="background: #50b050"></span>
<code>MEDIA_OUT</code> - Transcoded/trimmed output files
</li>
</ul>
<p><strong>Why Relative Paths?</strong></p>
<ul>
<li>Portability: Same database works locally and in cloud</li>
<li>Flexibility: Easy to switch between storage backends</li>
<li>Simplicity: No need to update paths when migrating</li>
</ul>
</div>
<div class="legend">
<h3>Local Development</h3>
<pre><code>MEDIA_IN=/app/media/in
MEDIA_OUT=/app/media/out
/app/media/
├── in/ # Source files
│ ├── video1.mp4
│ └── subfolder/video3.mp4
└── out/ # Transcoded output
└── video1_h264.mp4</code></pre>
</div>
<div class="legend">
<h3>AWS/Cloud Deployment</h3>
<pre><code>MEDIA_IN=s3://source-bucket/media/
MEDIA_OUT=s3://output-bucket/transcoded/
MEDIA_BASE_URL=https://source-bucket.s3.amazonaws.com/media/</code></pre>
<p>
Database paths remain unchanged (already relative). Just upload
files to S3 and update environment variables.
</p>
</div>
<div class="legend">
<h3>API Endpoints</h3>
<ul>
<li>
<code>POST /api/assets/scan</code> - Recursively scans
MEDIA_IN and registers video/audio files
</li>
<li>
<code>POST /api/jobs/</code> - Creates transcoding job with
source asset, preset, and optional trim times
</li>
</ul>
<p><strong>Supported File Types:</strong></p>
<p>
Video: mp4, mkv, avi, mov, webm, flv, wmv, m4v<br />
Audio: mp3, wav, flac, aac, ogg, m4a
</p>
</div>
<h2>Quick Reference</h2>
<h2>Access Points</h2>
<pre><code># Add to /etc/hosts
127.0.0.1 mpr.local.ar
# URLs
http://mpr.local.ar/admin - Django Admin
http://mpr.local.ar/api - FastAPI (docs at /api/docs)
http://mpr.local.ar/ui - Timeline UI</code></pre>
</body>
</html>

125
docs/media-storage.html Normal file
View File

@@ -0,0 +1,125 @@
<h1>Media Storage Architecture</h1>
<h2>Overview</h2>
<p>MPR separates media into <strong>input</strong> and <strong>output</strong> paths, each independently configurable. File paths are stored <strong>relative to their respective root</strong> to ensure portability between local development and cloud deployments (AWS S3, etc.).</p>
<h2>Storage Strategy</h2>
<h3>Input / Output Separation</h3>
<p>| Env Var | Default Path | Purpose |
|------|---------|---------|
| <code>MEDIA_IN</code> | <code>/app/media/in</code> | Source media files to process |
| <code>MEDIA_OUT</code> | <code>/app/media/out</code> | Transcoded/trimmed output files |</p>
<p>These can point to different locations or even different servers/buckets in production.</p>
<h3>File Path Storage</h3>
<ul>
<li><strong>Database</strong>: Stores only the relative path (e.g., <code>videos/sample.mp4</code>)</li>
<li><strong>Input Root</strong>: Configurable via <code>MEDIA_IN</code> env var</li>
<li><strong>Output Root</strong>: Configurable via <code>MEDIA_OUT</code> env var</li>
<li><strong>Serving</strong>: Base URL configurable via <code>MEDIA_BASE_URL</code> env var</li>
</ul>
<h3>Why Relative Paths?</h3>
<ol>
<li><strong>Portability</strong>: Same database works locally and in cloud</li>
<li><strong>Flexibility</strong>: Easy to switch between storage backends</li>
<li><strong>Simplicity</strong>: No need to update paths when migrating</li>
</ol>
<h2>Local Development</h2>
<h3>Configuration</h3>
<pre><code>MEDIA_IN=/app/media/in
MEDIA_OUT=/app/media/out</code></pre>
<h3>File Structure</h3>
<pre><code>/app/media/
├── in/                  # Source files
│   ├── video1.mp4
│   ├── video2.mp4
│   └── subfolder/
│       └── video3.mp4
└── out/                 # Transcoded output
    ├── video1_h264.mp4
    └── video2_trimmed.mp4</code></pre>
<h3>Database Storage</h3>
<pre><code># Source assets (scanned from media/in)
filename: video1.mp4
file_path: video1.mp4

filename: video3.mp4
file_path: subfolder/video3.mp4</code></pre>
<h3>URL Serving</h3>
<ul>
<li>Nginx serves input via <code>location /media/in { alias /app/media/in; }</code></li>
<li>Nginx serves output via <code>location /media/out { alias /app/media/out; }</code></li>
<li>Frontend accesses: <code>http://mpr.local.ar/media/in/video1.mp4</code></li>
<li>Video player: <code>&lt;video src="/media/in/video1.mp4" /&gt;</code></li>
</ul>
<h2>AWS/Cloud Deployment</h2>
<h3>S3 Configuration</h3>
<pre><code># Input and output can be different buckets/paths
MEDIA_IN=s3://source-bucket/media/
MEDIA_OUT=s3://output-bucket/transcoded/
MEDIA_BASE_URL=https://source-bucket.s3.amazonaws.com/media/</code></pre>
<h3>S3 Structure</h3>
<pre><code>s3://source-bucket/media/
├── video1.mp4
└── subfolder/
    └── video3.mp4

s3://output-bucket/transcoded/
├── video1_h264.mp4
└── video2_trimmed.mp4</code></pre>
<h3>Database Storage (Same!)</h3>
<pre><code>filename: video1.mp4
file_path: video1.mp4

filename: video3.mp4
file_path: subfolder/video3.mp4</code></pre>
<h2>API Endpoints</h2>
<h3>Scan Media Folder</h3>
<pre><code>POST /api/assets/scan</code></pre>
<p><strong>Behavior:</strong></p>
<ol>
<li>Recursively scans <code>MEDIA_IN</code> directory</li>
<li>Finds all video/audio files (mp4, mkv, avi, mov, mp3, wav, etc.)</li>
<li>Stores paths <strong>relative to MEDIA_IN</strong></li>
<li>Skips already-registered files (by filename)</li>
<li>Returns summary: <code>{ found, registered, skipped, files }</code></li>
</ol>
<h3>Create Job</h3>
<pre><code>POST /api/jobs/
Content-Type: application/json

{
  "source_asset_id": "uuid",
  "preset_id": "uuid",
  "trim_start": 10.0,
  "trim_end": 30.0
}</code></pre>
<p><strong>Behavior:</strong></p>
<ul>
<li>Server sets <code>output_path</code> using <code>MEDIA_OUT</code> + generated filename</li>
<li>Output goes to the output directory, not alongside source files</li>
</ul>
<h2>Migration Guide</h2>
<h3>Moving from Local to S3</h3>
<ol>
<li>
<p><strong>Upload source files to S3:</strong></p>
<pre><code>aws s3 sync /app/media/in/ s3://source-bucket/media/
aws s3 sync /app/media/out/ s3://output-bucket/transcoded/</code></pre>
</li>
<li>
<p><strong>Update environment variables:</strong></p>
<pre><code>MEDIA_IN=s3://source-bucket/media/
MEDIA_OUT=s3://output-bucket/transcoded/
MEDIA_BASE_URL=https://source-bucket.s3.amazonaws.com/media/</code></pre>
</li>
<li>
<p><strong>Database paths remain unchanged</strong> (already relative)</p>
</li>
</ol>
<h2>Supported File Types</h2>
<p><strong>Video:</strong> <code>.mp4</code>, <code>.mkv</code>, <code>.avi</code>, <code>.mov</code>, <code>.webm</code>, <code>.flv</code>, <code>.wmv</code>, <code>.m4v</code>
<strong>Audio:</strong> <code>.mp3</code>, <code>.wav</code>, <code>.flac</code>, <code>.aac</code>, <code>.ogg</code>, <code>.m4a</code></p>

View File

@@ -1,150 +0,0 @@
# Media Storage Architecture
## Overview
MPR separates media into **input** and **output** paths, each independently configurable. File paths are stored **relative to their respective root** to ensure portability between local development and cloud deployments (AWS S3, etc.).
## Storage Strategy
### Input / Output Separation
| Path | Env Var | Purpose |
|------|---------|---------|
| `MEDIA_IN` | `/app/media/in` | Source media files to process |
| `MEDIA_OUT` | `/app/media/out` | Transcoded/trimmed output files |
These can point to different locations or even different servers/buckets in production.
### File Path Storage
- **Database**: Stores only the relative path (e.g., `videos/sample.mp4`)
- **Input Root**: Configurable via `MEDIA_IN` env var
- **Output Root**: Configurable via `MEDIA_OUT` env var
- **Serving**: Base URL configurable via `MEDIA_BASE_URL` env var
### Why Relative Paths?
1. **Portability**: Same database works locally and in cloud
2. **Flexibility**: Easy to switch between storage backends
3. **Simplicity**: No need to update paths when migrating
## Local Development
### Configuration
```bash
MEDIA_IN=/app/media/in
MEDIA_OUT=/app/media/out
```
### File Structure
```
/app/media/
├── in/ # Source files
│ ├── video1.mp4
│ ├── video2.mp4
│ └── subfolder/
│ └── video3.mp4
└── out/ # Transcoded output
├── video1_h264.mp4
└── video2_trimmed.mp4
```
### Database Storage
```
# Source assets (scanned from media/in)
filename: video1.mp4
file_path: video1.mp4
filename: video3.mp4
file_path: subfolder/video3.mp4
```
### URL Serving
- Nginx serves input via `location /media/in { alias /app/media/in; }`
- Nginx serves output via `location /media/out { alias /app/media/out; }`
- Frontend accesses: `http://mpr.local.ar/media/in/video1.mp4`
- Video player: `<video src="/media/in/video1.mp4" />`
## AWS/Cloud Deployment
### S3 Configuration
```bash
# Input and output can be different buckets/paths
MEDIA_IN=s3://source-bucket/media/
MEDIA_OUT=s3://output-bucket/transcoded/
MEDIA_BASE_URL=https://source-bucket.s3.amazonaws.com/media/
```
### S3 Structure
```
s3://source-bucket/media/
├── video1.mp4
└── subfolder/
└── video3.mp4
s3://output-bucket/transcoded/
├── video1_h264.mp4
└── video2_trimmed.mp4
```
### Database Storage (Same!)
```
filename: video1.mp4
file_path: video1.mp4
filename: video3.mp4
file_path: subfolder/video3.mp4
```
## API Endpoints
### Scan Media Folder
```http
POST /api/assets/scan
```
**Behavior:**
1. Recursively scans `MEDIA_IN` directory
2. Finds all video/audio files (mp4, mkv, avi, mov, mp3, wav, etc.)
3. Stores paths **relative to MEDIA_IN**
4. Skips already-registered files (by filename)
5. Returns summary: `{ found, registered, skipped, files }`
### Create Job
```http
POST /api/jobs/
Content-Type: application/json
{
"source_asset_id": "uuid",
"preset_id": "uuid",
"trim_start": 10.0,
"trim_end": 30.0
}
```
**Behavior:**
- Server sets `output_path` using `MEDIA_OUT` + generated filename
- Output goes to the output directory, not alongside source files
## Migration Guide
### Moving from Local to S3
1. **Upload source files to S3:**
```bash
aws s3 sync /app/media/in/ s3://source-bucket/media/
aws s3 sync /app/media/out/ s3://output-bucket/transcoded/
```
2. **Update environment variables:**
```bash
MEDIA_IN=s3://source-bucket/media/
MEDIA_OUT=s3://output-bucket/transcoded/
MEDIA_BASE_URL=https://source-bucket.s3.amazonaws.com/media/
```
3. **Database paths remain unchanged** (already relative)
## Supported File Types
**Video:** `.mp4`, `.mkv`, `.avi`, `.mov`, `.webm`, `.flv`, `.wmv`, `.m4v`
**Audio:** `.mp3`, `.wav`, `.flac`, `.aac`, `.ogg`, `.m4a`

View File

@@ -16,10 +16,10 @@ Output formats:
- prisma: Prisma schema
Usage:
python -m modelgen from-config -c config.json -o models.py
python -m modelgen from-schema -o models/ --targets pydantic,typescript
python -m modelgen extract --source /path/to/django --targets pydantic
python -m modelgen list-formats
python -m soleprint.station.tools.modelgen from-config -c config.json -o models.py
python -m soleprint.station.tools.modelgen from-schema -o models/ --targets pydantic,typescript
python -m soleprint.station.tools.modelgen extract --source /path/to/django --targets pydantic
python -m soleprint.station.tools.modelgen list-formats
"""
__version__ = "0.2.0"

View File

@@ -16,10 +16,11 @@ Output formats:
- prisma: Prisma schema
Usage:
python -m modelgen --help
python -m modelgen from-config -c config.json -o models.py
python -m modelgen from-schema -o models/ --targets pydantic,typescript
python -m modelgen extract --source /path/to/django --targets pydantic
python -m soleprint.station.tools.modelgen --help
python -m soleprint.station.tools.modelgen from-config -c config.json -o models.py
python -m soleprint.station.tools.modelgen from-schema -o models/ --targets pydantic,typescript
python -m soleprint.station.tools.modelgen extract --source /path/to/django --targets pydantic
python -m soleprint.station.tools.modelgen generate --config schema/modelgen.json
"""
import argparse
@@ -177,6 +178,47 @@ def cmd_extract(args):
print("Done!")
def cmd_generate(args):
    """Generate all targets from a JSON config file."""
    import json

    from .loader import load_schema

    config_path = Path(args.config)
    if not config_path.exists():
        print(f"Error: Config file not found: {config_path}", file=sys.stderr)
        sys.exit(1)
    config = json.loads(config_path.read_text())

    # Paths in the config are resolved relative to the current working directory.
    schema_path = Path(config["schema"])
    if not schema_path.exists():
        print(f"Error: Schema folder not found: {schema_path}", file=sys.stderr)
        sys.exit(1)

    print(f"Loading schema: {schema_path}")
    for spec in config["targets"]:
        target = spec["target"]
        if target not in GENERATORS:
            print(f"Warning: Unknown target '{target}', skipping", file=sys.stderr)
            continue
        output = Path(spec["output"])
        include = set(spec.get("include", []))
        # Re-load the schema per target so each one's include filter applies.
        schema = load_schema(schema_path, include=include or None)
        generator = GENERATORS[target](name_map=spec.get("name_map", {}))
        print(f"Generating {target} to: {output}")
        generator.generate(schema, output)
    print("Done!")
def cmd_list_formats(args):
"""List available output formats."""
print("Available output formats:")
@@ -295,6 +337,21 @@ def main():
)
extract_parser.set_defaults(func=cmd_extract)
# generate command (config-driven multi-target)
gen_parser = subparsers.add_parser(
"generate",
help="Generate all targets from a JSON config file",
)
gen_parser.add_argument(
"--config",
"-c",
type=str,
required=True,
help="Path to generation config file (e.g., schema/modelgen.json)",
)
gen_parser.set_defaults(func=cmd_generate)
# list-formats command
formats_parser = subparsers.add_parser(
"list-formats",

View File

@@ -7,12 +7,14 @@ Supported generators:
- TypeScriptGenerator: TypeScript interfaces
- ProtobufGenerator: Protocol Buffer definitions
- PrismaGenerator: Prisma schema
- GrapheneGenerator: Graphene ObjectType/InputObjectType classes
"""
from typing import Dict, Type
from .base import BaseGenerator
from .django import DjangoGenerator
from .graphene import GrapheneGenerator
from .prisma import PrismaGenerator
from .protobuf import ProtobufGenerator
from .pydantic import PydanticGenerator
@@ -27,12 +29,14 @@ GENERATORS: Dict[str, Type[BaseGenerator]] = {
"protobuf": ProtobufGenerator,
"proto": ProtobufGenerator, # Alias
"prisma": PrismaGenerator,
"graphene": GrapheneGenerator,
}
__all__ = [
"BaseGenerator",
"PydanticGenerator",
"DjangoGenerator",
"GrapheneGenerator",
"TypeScriptGenerator",
"ProtobufGenerator",
"PrismaGenerator",

View File

@@ -6,12 +6,19 @@ Abstract base class for all code generators.
from abc import ABC, abstractmethod
from pathlib import Path
from typing import Any
from typing import Any, Dict
class BaseGenerator(ABC):
"""Abstract base for code generators."""
def __init__(self, name_map: Dict[str, str] = None):
self.name_map = name_map or {}
def map_name(self, name: str) -> str:
"""Apply name_map to a model name."""
return self.name_map.get(name, name)
@abstractmethod
def generate(self, models: Any, output_path: Path) -> None:
"""Generate code for the given models to the specified path."""

View File

@@ -224,7 +224,8 @@ class DjangoGenerator(BaseGenerator):
if default is not dc.MISSING and isinstance(default, Enum):
extra.append(f"default={enum_name}.{default.name}")
return DJANGO_TYPES["enum"].format(
enum_name=enum_name, opts=", " + ", ".join(extra) if extra else ""
enum_name=enum_name,
opts=", " + ", ".join(extra) if extra else ""
)
# Text fields (based on name heuristics)

View File

@@ -0,0 +1,236 @@
"""
Graphene Generator
Generates graphene ObjectType and InputObjectType classes from model definitions.
Only generates type definitions — queries, mutations, and resolvers are hand-written.
"""
from enum import Enum
from pathlib import Path
from typing import Any, List, get_type_hints
from ..helpers import get_origin_name, get_type_name, unwrap_optional
from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
from ..types import GRAPHENE_RESOLVERS
from .base import BaseGenerator
class GrapheneGenerator(BaseGenerator):
    """Generates graphene type definition files.

    Emits only type definitions (graphene.Enum, ObjectType, InputObjectType);
    queries, mutations, and resolvers remain hand-written.
    """

    def file_extension(self) -> str:
        """Return the file extension of generated output."""
        return ".py"

    def generate(self, models, output_path: Path) -> None:
        """Generate graphene types to output_path.

        Accepts a SchemaLoader (object with ``.models``, ``.enums``,
        ``.api_models``), a ``(models, enums)`` tuple, or a list of
        Python dataclasses.

        Raises:
            ValueError: if *models* is none of the supported shapes.
        """
        output_path.parent.mkdir(parents=True, exist_ok=True)
        if hasattr(models, "models"):
            # SchemaLoader
            content = self._generate_from_definitions(
                models.models,
                getattr(models, "enums", []),
                getattr(models, "api_models", []),
            )
        elif isinstance(models, tuple):
            content = self._generate_from_definitions(models[0], models[1], [])
        elif isinstance(models, list):
            content = self._generate_from_dataclasses(models)
        else:
            raise ValueError(f"Unsupported input type: {type(models)}")
        output_path.write_text(content)

    def _generate_from_definitions(
        self,
        models: List[ModelDefinition],
        enums: List[EnumDefinition],
        api_models: List[ModelDefinition],
    ) -> str:
        """Generate from ModelDefinition objects."""
        lines = self._generate_header()

        # Generate enums as graphene.Enum
        for enum_def in enums:
            lines.extend(self._generate_enum(enum_def))
            lines.append("")
            lines.append("")

        # Generate domain models as ObjectType
        for model_def in models:
            lines.extend(self._generate_object_type(model_def))
            lines.append("")
            lines.append("")

        # API models — request types as InputObjectType, others as ObjectType
        for model_def in api_models:
            if model_def.name.endswith("Request"):
                lines.extend(self._generate_input_type(model_def))
            else:
                lines.extend(self._generate_object_type(model_def))
            lines.append("")
            lines.append("")

        return "\n".join(lines).rstrip() + "\n"

    def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
        """Generate from Python dataclasses."""
        lines = self._generate_header()

        # Emit each referenced Enum exactly once, before the object types.
        enums_generated = set()
        for cls in dataclasses:
            hints = get_type_hints(cls)
            for type_hint in hints.values():
                base, _ = unwrap_optional(type_hint)
                if isinstance(base, type) and issubclass(base, Enum):
                    if base.__name__ not in enums_generated:
                        lines.extend(self._generate_enum_from_python(base))
                        lines.append("")
                        lines.append("")
                        enums_generated.add(base.__name__)

        for cls in dataclasses:
            lines.extend(self._generate_object_type_from_dataclass(cls))
            lines.append("")
            lines.append("")

        return "\n".join(lines).rstrip() + "\n"

    def _generate_header(self) -> List[str]:
        """Return the header lines for a generated file."""
        return [
            '"""',
            "Graphene Types - GENERATED FILE",
            "",
            "Do not edit directly. Regenerate using modelgen.",
            '"""',
            "",
            "import graphene",
            "",
            "",
        ]

    def _generate_enum(self, enum_def: EnumDefinition) -> List[str]:
        """Generate graphene.Enum from EnumDefinition."""
        lines = [f"class {enum_def.name}(graphene.Enum):"]
        for name, value in enum_def.values:
            lines.append(f'    {name} = "{value}"')
        return lines

    def _generate_enum_from_python(self, enum_cls: type) -> List[str]:
        """Generate graphene.Enum from a Python Enum class."""
        lines = [f"class {enum_cls.__name__}(graphene.Enum):"]
        for member in enum_cls:
            lines.append(f'    {member.name} = "{member.value}"')
        return lines

    def _generate_object_type(self, model_def: ModelDefinition) -> List[str]:
        """Generate graphene.ObjectType from ModelDefinition."""
        name = model_def.name
        # Append Type suffix if not already present
        type_name = f"{name}Type" if not name.endswith("Type") else name
        lines = [f"class {type_name}(graphene.ObjectType):"]
        if model_def.docstring:
            # First line of the docstring only, to keep output compact.
            doc = model_def.docstring.strip().split("\n")[0]
            lines.append(f'    """{doc}"""')
            lines.append("")
        if not model_def.fields:
            lines.append("    pass")
        else:
            for field in model_def.fields:
                graphene_type = self._resolve_type(field.type_hint, field.optional)
                lines.append(f"    {field.name} = {graphene_type}")
        return lines

    def _generate_input_type(self, model_def: ModelDefinition) -> List[str]:
        """Generate graphene.InputObjectType from ModelDefinition."""
        import dataclasses as dc

        name = model_def.name
        # Convert FooRequest -> FooInput
        if name.endswith("Request"):
            input_name = name[: -len("Request")] + "Input"
        else:
            input_name = f"{name}Input"
        lines = [f"class {input_name}(graphene.InputObjectType):"]
        if model_def.docstring:
            doc = model_def.docstring.strip().split("\n")[0]
            lines.append(f'    """{doc}"""')
            lines.append("")
        if not model_def.fields:
            lines.append("    pass")
        else:
            for field in model_def.fields:
                graphene_type = self._resolve_type(field.type_hint, field.optional)
                # Required only if not optional AND no default value
                has_default = field.default is not dc.MISSING
                if not field.optional and not has_default:
                    graphene_type = self._make_required(graphene_type)
                elif has_default and not field.optional:
                    graphene_type = self._add_default(graphene_type, field.default)
                lines.append(f"    {field.name} = {graphene_type}")
        return lines

    def _generate_object_type_from_dataclass(self, cls: type) -> List[str]:
        """Generate graphene.ObjectType from a dataclass."""
        type_name = f"{cls.__name__}Type"
        lines = [f"class {type_name}(graphene.ObjectType):"]
        hints = get_type_hints(cls)
        for name, type_hint in hints.items():
            if name.startswith("_"):
                continue
            lines.append(f"    {name} = {self._resolve_type(type_hint, False)}")
        return lines

    def _resolve_type(self, type_hint: Any, optional: bool) -> str:
        """Resolve a Python type hint to a graphene field call string."""
        base, is_optional = unwrap_optional(type_hint)
        optional = optional or is_optional
        origin = get_origin_name(base)
        type_name = get_type_name(base)
        # Look up resolver by origin ('list'/'dict'), name ('UUID', ...),
        # the type itself (str/int/...), then fall back to the enum handler.
        resolver = (
            GRAPHENE_RESOLVERS.get(origin)
            or GRAPHENE_RESOLVERS.get(type_name)
            or GRAPHENE_RESOLVERS.get(base)
            or (
                GRAPHENE_RESOLVERS["enum"]
                if isinstance(base, type) and issubclass(base, Enum)
                else None
            )
        )
        result = resolver(base) if resolver else "graphene.String"
        # List types already have () syntax from resolver
        if result.startswith("graphene.List("):
            return result
        # Scalar types: add () call
        return f"{result}()"

    def _make_required(self, field_str: str) -> str:
        """Add required=True to a scalar graphene field call."""
        if field_str.endswith("()"):
            return field_str[:-1] + "required=True)"
        # NOTE(review): List(...) fields are left untouched, as before.
        return field_str

    def _add_default(self, field_str: str, default: Any) -> str:
        """Add default_value to a scalar graphene field call."""
        if callable(default):
            # default_factory — skip, graphene doesn't support factories
            return field_str
        if isinstance(default, Enum):
            # Enums are rendered as strings in GQL; repr() of an Enum member
            # ("<Status.ACTIVE: 'active'>") is not valid Python source, so
            # emit the underlying value instead.
            default = default.value
        if field_str.endswith("()"):
            return field_str[:-1] + f"default_value={default!r})"
        return field_str

View File

@@ -2,8 +2,12 @@
Pydantic Generator
Generates Pydantic BaseModel classes from model definitions.
Supports two output modes:
- File output: flat models (backwards compatible)
- Directory output: CRUD variants (Create/Update/Response) per model
"""
import dataclasses as dc
from enum import Enum
from pathlib import Path
from typing import Any, List, get_type_hints
@@ -13,6 +17,13 @@ from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
from ..types import PYDANTIC_RESOLVERS
from .base import BaseGenerator
# Fields to skip per CRUD variant
SKIP_FIELDS = {
"Create": {"id", "created_at", "updated_at", "status", "error_message"},
"Update": {"id", "created_at", "updated_at"},
"Response": set(),
}
class PydanticGenerator(BaseGenerator):
"""Generates Pydantic model files."""
@@ -21,52 +32,187 @@ class PydanticGenerator(BaseGenerator):
return ".py"
def generate(self, models, output_path: Path) -> None:
"""Generate Pydantic models to output_path."""
"""Generate Pydantic models to output_path.
If output_path is a directory (or doesn't end in .py), generate
multi-file CRUD variants. Otherwise, generate flat models to a
single file.
"""
output_path = Path(output_path)
if output_path.suffix != ".py":
# Directory mode: CRUD variants
self._generate_crud_directory(models, output_path)
else:
# File mode: flat models (backwards compatible)
self._generate_flat_file(models, output_path)
def _generate_flat_file(self, models, output_path: Path) -> None:
"""Generate flat models to a single file (original behavior)."""
output_path.parent.mkdir(parents=True, exist_ok=True)
# Detect input type and generate accordingly
if hasattr(models, "get_shared_component"):
# ConfigLoader (soleprint config)
content = self._generate_from_config(models)
elif hasattr(models, "models"):
# SchemaLoader
content = self._generate_from_definitions(
models.models, getattr(models, "enums", [])
)
elif isinstance(models, tuple):
# (models, enums) tuple from extractor
content = self._generate_from_definitions(models[0], models[1])
elif isinstance(models, list):
# List of dataclasses (MPR style)
content = self._generate_from_dataclasses(models)
else:
raise ValueError(f"Unsupported input type: {type(models)}")
output_path.write_text(content)
def _generate_from_definitions(
self, models: List[ModelDefinition], enums: List[EnumDefinition]
) -> str:
"""Generate from ModelDefinition objects (schema/extract mode)."""
lines = self._generate_header()
def _generate_crud_directory(self, models, output_dir: Path) -> None:
"""Generate CRUD variant files in a directory."""
output_dir.mkdir(parents=True, exist_ok=True)
# Generate enums
for enum_def in enums:
if hasattr(models, "models"):
model_defs = models.models
enum_defs = getattr(models, "enums", [])
elif isinstance(models, tuple):
model_defs = models[0]
enum_defs = models[1]
else:
raise ValueError(f"Unsupported input type for CRUD mode: {type(models)}")
# base.py
base_content = "\n".join([
'"""Pydantic Base Schema - GENERATED FILE"""',
"",
"from pydantic import BaseModel, ConfigDict",
"",
"",
"class BaseSchema(BaseModel):",
' """Base schema with ORM mode."""',
" model_config = ConfigDict(from_attributes=True)",
"",
])
(output_dir / "base.py").write_text(base_content)
# Per-model files
imports = ["from .base import BaseSchema"]
all_exports = ['"BaseSchema"']
for model_def in model_defs:
mapped = self.map_name(model_def.name)
module_name = mapped.lower()
lines = [
f'"""{model_def.name} Schemas - GENERATED FILE"""',
"",
"from datetime import datetime",
"from enum import Enum",
"from typing import Any, Dict, List, Optional",
"from uuid import UUID",
"",
"from .base import BaseSchema",
"",
]
# Inline enums used by this model
model_enums = self._collect_model_enums(model_def, enum_defs)
for enum_def in model_enums:
lines.append("")
lines.extend(self._generate_enum(enum_def))
lines.append("")
# Generate models
# CRUD variants
for suffix in ["Create", "Update", "Response"]:
lines.append("")
lines.extend(self._generate_crud_model(model_def, mapped, suffix))
lines.append("")
content = "\n".join(lines)
(output_dir / f"{module_name}.py").write_text(content)
# Track imports
imports.append(
f"from .{module_name} import {mapped}Create, {mapped}Update, {mapped}Response"
)
all_exports.extend([
f'"{mapped}Create"', f'"{mapped}Update"', f'"{mapped}Response"'
])
for enum_def in model_enums:
imports.append(f"from .{module_name} import {enum_def.name}")
all_exports.append(f'"{enum_def.name}"')
# __init__.py
init_content = "\n".join([
'"""API Schemas - GENERATED FILE"""',
"",
*imports,
"",
f"__all__ = [{', '.join(all_exports)}]",
"",
])
(output_dir / "__init__.py").write_text(init_content)
def _collect_model_enums(
self, model_def: ModelDefinition, enum_defs: List[EnumDefinition]
) -> List[EnumDefinition]:
"""Find enums referenced by a model's fields."""
enum_names = set()
for field in model_def.fields:
base, _ = unwrap_optional(field.type_hint)
if isinstance(base, type) and issubclass(base, Enum):
enum_names.add(base.__name__)
return [e for e in enum_defs if e.name in enum_names]
def _generate_crud_model(
self, model_def: ModelDefinition, mapped_name: str, suffix: str
) -> List[str]:
"""Generate a single CRUD variant (Create/Update/Response)."""
class_name = f"{mapped_name}{suffix}"
skip = SKIP_FIELDS.get(suffix, set())
lines = [
f"class {class_name}(BaseSchema):",
f' """{class_name} schema."""',
]
has_fields = False
for field in model_def.fields:
if field.name.startswith("_") or field.name in skip:
continue
has_fields = True
py_type = self._resolve_type(field.type_hint, field.optional)
# Update variant: all fields optional
if suffix == "Update" and "Optional" not in py_type:
py_type = f"Optional[{py_type}]"
default = self._format_default(field.default, "Optional" in py_type)
lines.append(f" {field.name}: {py_type}{default}")
if not has_fields:
lines.append(" pass")
return lines
# =========================================================================
# Flat file generation (original behavior)
# =========================================================================
def _generate_from_definitions(
self, models: List[ModelDefinition], enums: List[EnumDefinition]
) -> str:
lines = self._generate_header()
for enum_def in enums:
lines.extend(self._generate_enum(enum_def))
lines.append("")
for model_def in models:
lines.extend(self._generate_model_from_definition(model_def))
lines.append("")
return "\n".join(lines)
def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
"""Generate from Python dataclasses (MPR style)."""
lines = self._generate_header()
# Collect and generate enums first
enums_generated = set()
for cls in dataclasses:
hints = get_type_hints(cls)
@@ -77,16 +223,12 @@ class PydanticGenerator(BaseGenerator):
lines.extend(self._generate_enum_from_python(base))
lines.append("")
enums_generated.add(base.__name__)
# Generate models
for cls in dataclasses:
lines.extend(self._generate_model_from_dataclass(cls))
lines.append("")
return "\n".join(lines)
def _generate_header(self) -> List[str]:
"""Generate file header."""
return [
'"""',
"Pydantic Models - GENERATED FILE",
@@ -104,27 +246,23 @@ class PydanticGenerator(BaseGenerator):
]
def _generate_enum(self, enum_def: EnumDefinition) -> List[str]:
"""Generate Pydantic enum from EnumDefinition."""
lines = [f"class {enum_def.name}(str, Enum):"]
for name, value in enum_def.values:
lines.append(f' {name} = "{value}"')
return lines
def _generate_enum_from_python(self, enum_cls: type) -> List[str]:
"""Generate Pydantic enum from Python Enum."""
lines = [f"class {enum_cls.__name__}(str, Enum):"]
for member in enum_cls:
lines.append(f' {member.name} = "{member.value}"')
return lines
def _generate_model_from_definition(self, model_def: ModelDefinition) -> List[str]:
"""Generate Pydantic model from ModelDefinition."""
docstring = model_def.docstring or model_def.name
lines = [
f"class {model_def.name}(BaseModel):",
f' """{docstring.strip().split(chr(10))[0]}"""',
]
if not model_def.fields:
lines.append(" pass")
else:
@@ -132,46 +270,34 @@ class PydanticGenerator(BaseGenerator):
py_type = self._resolve_type(field.type_hint, field.optional)
default = self._format_default(field.default, field.optional)
lines.append(f" {field.name}: {py_type}{default}")
return lines
def _generate_model_from_dataclass(self, cls: type) -> List[str]:
"""Generate Pydantic model from a dataclass."""
import dataclasses as dc
docstring = cls.__doc__ or cls.__name__
lines = [
f"class {cls.__name__}(BaseModel):",
f' """{docstring.strip().split(chr(10))[0]}"""',
]
hints = get_type_hints(cls)
fields = {f.name: f for f in dc.fields(cls)}
for name, type_hint in hints.items():
if name.startswith("_"):
continue
field = fields.get(name)
default_val = dc.MISSING
if field:
if field.default is not dc.MISSING:
default_val = field.default
py_type = self._resolve_type(type_hint, False)
default = self._format_default(default_val, "Optional" in py_type)
lines.append(f" {name}: {py_type}{default}")
return lines
def _resolve_type(self, type_hint: Any, optional: bool) -> str:
"""Resolve Python type to Pydantic type string."""
base, is_optional = unwrap_optional(type_hint)
optional = optional or is_optional
origin = get_origin_name(base)
type_name = get_type_name(base)
# Look up resolver
resolver = (
PYDANTIC_RESOLVERS.get(origin)
or PYDANTIC_RESOLVERS.get(type_name)
@@ -182,14 +308,10 @@ class PydanticGenerator(BaseGenerator):
else None
)
)
result = resolver(base) if resolver else "str"
return f"Optional[{result}]" if optional else result
def _format_default(self, default: Any, optional: bool) -> str:
"""Format default value for field."""
import dataclasses as dc
if optional:
return " = None"
if default is dc.MISSING or default is None:
@@ -204,7 +326,6 @@ class PydanticGenerator(BaseGenerator):
def _generate_from_config(self, config) -> str:
"""Generate from ConfigLoader (soleprint config.json mode)."""
# Get component names from config
config_comp = config.get_shared_component("config")
data_comp = config.get_shared_component("data")

View File

@@ -26,11 +26,10 @@ class TypeScriptGenerator(BaseGenerator):
# Handle different input types
if hasattr(models, "models"):
# SchemaLoader
# SchemaLoader — include api_models if present
all_models = models.models + getattr(models, "api_models", [])
content = self._generate_from_definitions(
models.models,
getattr(models, "enums", []),
api_models=getattr(models, "api_models", []),
all_models, getattr(models, "enums", [])
)
elif isinstance(models, tuple):
# (models, enums) tuple
@@ -44,10 +43,7 @@ class TypeScriptGenerator(BaseGenerator):
output_path.write_text(content)
def _generate_from_definitions(
self,
models: List[ModelDefinition],
enums: List[EnumDefinition],
api_models: List[ModelDefinition] = None,
self, models: List[ModelDefinition], enums: List[EnumDefinition]
) -> str:
"""Generate from ModelDefinition objects."""
lines = self._generate_header()
@@ -63,14 +59,6 @@ class TypeScriptGenerator(BaseGenerator):
lines.extend(self._generate_interface_from_definition(model_def))
lines.append("")
# Generate API request/response interfaces
if api_models:
lines.append("// API request/response types")
lines.append("")
for model_def in api_models:
lines.extend(self._generate_interface_from_definition(model_def))
lines.append("")
return "\n".join(lines)
def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:

View File

@@ -5,6 +5,7 @@ Loads Python dataclasses from a schema/ folder.
Expects the folder to have an __init__.py that exports:
- DATACLASSES: List of dataclass types to generate
- ENUMS: List of Enum types to include
- API_MODELS: (optional) List of API request/response types
- GRPC_MESSAGES: (optional) List of gRPC message types
- GRPC_SERVICE: (optional) gRPC service definition dict
"""
@@ -88,7 +89,7 @@ class SchemaLoader:
for cls in dataclasses:
self.models.append(self._parse_dataclass(cls))
# Extract API_MODELS (TypeScript-only request/response types)
# Extract API_MODELS (request/response types)
if load_all or "api" in include:
api_models = getattr(module, "API_MODELS", [])
for cls in api_models:

View File

@@ -137,3 +137,36 @@ PRISMA_SPECIAL: dict[str, str] = {
"created_at": "DateTime @default(now())",
"updated_at": "DateTime @updatedAt",
}
# =============================================================================
# Graphene Type Resolvers
# =============================================================================

# Inner element types that graphene.List can carry as typed scalars.
_GRAPHENE_LIST_INNER: dict[Any, str] = {
    str: "graphene.String",
    int: "graphene.Int",
    float: "graphene.Float",
    bool: "graphene.Boolean",
}


def _resolve_graphene_list(base: Any) -> str:
    """Resolve a List[T] hint to a ``graphene.List(...)`` call string.

    Unparameterized lists and unknown inner types fall back to
    ``graphene.List(graphene.String)``.
    """
    args = get_args(base)
    inner = args[0] if args else None
    return f"graphene.List({_GRAPHENE_LIST_INNER.get(inner, 'graphene.String')})"


# Python type / marker -> callable returning the graphene field class name.
GRAPHENE_RESOLVERS: dict[Any, Callable[[Any], str]] = {
    str: lambda _: "graphene.String",
    int: lambda _: "graphene.Int",
    float: lambda _: "graphene.Float",
    bool: lambda _: "graphene.Boolean",
    "UUID": lambda _: "graphene.UUID",
    "datetime": lambda _: "graphene.DateTime",
    "dict": lambda _: "graphene.JSONString",
    "list": _resolve_graphene_list,
    "enum": lambda _: "graphene.String",  # Enums exposed as strings in GQL
}

View File

@@ -96,6 +96,7 @@ class TranscodeJob(models.Model):
speed = models.CharField(max_length=255, null=True, blank=True)
error_message = models.TextField(blank=True, default='')
celery_task_id = models.CharField(max_length=255, null=True, blank=True)
execution_arn = models.CharField(max_length=255, null=True, blank=True)
priority = models.IntegerField(default=0)
created_at = models.DateTimeField(auto_now_add=True)
started_at = models.DateTimeField(null=True, blank=True)

View File

@@ -19,6 +19,13 @@ ffmpeg-python>=0.2.0
grpcio>=1.60.0
grpcio-tools>=1.60.0
# AWS
boto3>=1.34.0
# GraphQL
graphene>=3.3
starlette-graphene3>=0.6.0
# Testing
pytest>=7.4.0
pytest-django>=4.7.0

View File

@@ -4,7 +4,7 @@ MPR Schema Definitions - Source of Truth
This package defines the core data models as Python dataclasses.
These definitions are used to generate:
- Django ORM models (mpr/media_assets/models.py)
- Pydantic schemas (api/schemas/*.py)
- Pydantic schemas (api/schema/*.py)
- TypeScript types (ui/timeline/src/types.ts)
- Protobuf definitions (grpc/protos/worker.proto)

View File

@@ -1,718 +0,0 @@
#!/usr/bin/env python3
"""
MPR Model Generator
Generates framework-specific models from schema/models/:
- Django ORM models -> mpr/media_assets/models.py
- Pydantic schemas -> api/schemas/*.py
- TypeScript types -> ui/timeline/src/types.ts
- Protobuf -> grpc/protos/worker.proto
Usage:
python schema/generate.py [--django] [--pydantic] [--typescript] [--proto] [--all]
"""
import argparse
import dataclasses as dc
import subprocess
import sys
from enum import Enum
from pathlib import Path
from typing import Any, Callable, Union, get_args, get_origin, get_type_hints
PROJECT_ROOT = Path(__file__).parent.parent
sys.path.insert(0, str(PROJECT_ROOT))
from schema.models import API_MODELS, DATACLASSES, ENUMS, GRPC_MESSAGES, GRPC_SERVICE
# =============================================================================
# Type Dispatch Tables
# =============================================================================

# Python type / marker -> Django field template. Values are .format()-ed by
# the Django generator with {opts}, {max_length}, {default}, {enum_name}.
DJANGO_TYPES: dict[Any, str] = {
    str: "models.CharField(max_length={max_length}{opts})",
    int: "models.IntegerField({opts})",
    float: "models.FloatField({opts})",
    bool: "models.BooleanField(default={default})",
    "UUID": "models.UUIDField({opts})",
    "datetime": "models.DateTimeField({opts})",
    "dict": "models.JSONField(default=dict, blank=True)",
    "list": "models.JSONField(default=list, blank=True)",
    "text": "models.TextField(blank=True, default='')",
    "bigint": "models.BigIntegerField({opts})",
    "enum": "models.CharField(max_length=20, choices={enum_name}.choices{opts})",
}

# Field names with fixed Django definitions regardless of their Python type.
DJANGO_SPECIAL: dict[str, str] = {
    "id": "models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)",
    "created_at": "models.DateTimeField(auto_now_add=True)",
    "updated_at": "models.DateTimeField(auto_now=True)",
}

# Python type / marker -> callable producing the Pydantic annotation string.
PYDANTIC_RESOLVERS: dict[Any, Callable[[Any], str]] = {
    str: lambda _: "str",
    int: lambda _: "int",
    float: lambda _: "float",
    bool: lambda _: "bool",
    "UUID": lambda _: "UUID",
    "datetime": lambda _: "datetime",
    "dict": lambda _: "Dict[str, Any]",
    "list": lambda base: f"List[{get_list_inner(base)}]",
    "enum": lambda base: base.__name__,
}

# Python type / marker -> callable producing the TypeScript type string.
# Note UUID/datetime both map to plain strings on the TS side.
TS_RESOLVERS: dict[Any, Callable[[Any], str]] = {
    str: lambda _: "string",
    int: lambda _: "number",
    float: lambda _: "number",
    bool: lambda _: "boolean",
    "UUID": lambda _: "string",
    "datetime": lambda _: "string",
    "dict": lambda _: "Record<string, unknown>",
    # List[T] -> "T[]", falling back to "string[]" when unparameterized.
    "list": lambda base: (
        f"{TS_RESOLVERS.get(get_args(base)[0], lambda _: 'string')(None)}[]"
        if get_args(base)
        else "string[]"
    ),
    "enum": lambda base: base.__name__,
}

# Python type / marker -> callable producing the protobuf field type string.
PROTO_RESOLVERS: dict[Any, Callable[[Any], str]] = {
    str: lambda _: "string",
    int: lambda _: "int32",
    float: lambda _: "float",
    bool: lambda _: "bool",
    # List[T] -> "repeated T", defaulting to "repeated string".
    "list": lambda base: (
        f"repeated {PROTO_RESOLVERS.get(get_args(base)[0], lambda _: 'string')(None)}"
        if get_args(base)
        else "repeated string"
    ),
}
# =============================================================================
# Type Helpers
# =============================================================================
def unwrap_optional(type_hint: Any) -> tuple[Any, bool]:
"""Unwrap Optional[T] -> (T, True) or (T, False) if not optional."""
origin = get_origin(type_hint)
if origin is Union:
args = [a for a in get_args(type_hint) if a is not type(None)]
return (args[0] if args else str, True)
return (type_hint, False)
def get_origin_name(type_hint: Any) -> str | None:
"""Get origin type name: 'dict', 'list', or None."""
origin = get_origin(type_hint)
if origin is dict:
return "dict"
if origin is list:
return "list"
return None
def get_type_name(type_hint: Any) -> str | None:
"""Get type name for special types like UUID, datetime."""
if hasattr(type_hint, "__name__"):
return type_hint.__name__
return None
def get_list_inner(type_hint: Any) -> str:
    """Name the element type of a List[T] hint.

    Only scalar builtins are recognized; anything else (including an
    unparameterized list) falls back to 'str'.
    """
    scalar_names = {str: "str", int: "int", float: "float", bool: "bool"}
    args = get_args(type_hint)
    if not args:
        return "str"
    return scalar_names.get(args[0], "str")
def get_field_default(field: dc.Field) -> Any:
    """Return the field's plain default value, or dc.MISSING when unset.

    default_factory is deliberately not consulted: factory defaults
    (dict/list) are handled by the container branches of the resolvers.
    """
    # field.default is already dc.MISSING when no plain default was given,
    # so it can be returned directly.
    return field.default
def format_opts(optional: bool, extra: list[str] | None = None) -> str:
"""Format field options string."""
parts = []
if optional:
parts.append("null=True, blank=True")
if extra:
parts.extend(extra)
return ", ".join(parts)
# =============================================================================
# Django Generator
# =============================================================================
def resolve_django_type(name: str, type_hint: Any, default: Any) -> str:
    """Resolve Python type to Django field.

    Args:
        name: Field name; drives the name-based special cases below
            (id/timestamps, text-ish names, big-int names, path/filename
            max lengths).
        type_hint: Dataclass annotation (possibly Optional[...]).
        default: Dataclass default value, or dc.MISSING when unset.

    Returns:
        A Django field definition as source text, e.g.
        "models.CharField(max_length=255)".
    """
    # Special fields: fixed definitions win over type-based resolution.
    if name in DJANGO_SPECIAL:
        return DJANGO_SPECIAL[name]
    base, optional = unwrap_optional(type_hint)
    origin = get_origin_name(base)
    type_name = get_type_name(base)
    opts = format_opts(optional)
    # Container types -> JSONField (opts unused: JSON fields get fixed defaults)
    if origin == "dict":
        return DJANGO_TYPES["dict"]
    if origin == "list":
        return DJANGO_TYPES["list"]
    # UUID / datetime
    if type_name == "UUID":
        return DJANGO_TYPES["UUID"].format(opts=opts)
    if type_name == "datetime":
        return DJANGO_TYPES["datetime"].format(opts=opts)
    # Enum -> CharField with choices; optional/default folded into extra opts
    if isinstance(base, type) and issubclass(base, Enum):
        enum_name = base.__name__
        extra = []
        if optional:
            extra.append("null=True, blank=True")
        if default is not dc.MISSING and isinstance(default, Enum):
            extra.append(f"default={enum_name}.{default.name}")
        return DJANGO_TYPES["enum"].format(
            enum_name=enum_name, opts=", " + ", ".join(extra) if extra else ""
        )
    # Text fields: long-form string fields recognized by name substring
    if base is str and any(x in name for x in ("message", "comments", "description")):
        return DJANGO_TYPES["text"]
    # BigInt fields: byte/bit counts can exceed 32-bit IntegerField range
    if base is int and name in ("file_size", "bitrate"):
        return DJANGO_TYPES["bigint"].format(opts=opts)
    # Basic types
    if base is str:
        # Paths get the widest columns, filenames medium, everything else 255.
        max_length = 1000 if "path" in name else 500 if "filename" in name else 255
        return DJANGO_TYPES[str].format(
            max_length=max_length, opts=", " + opts if opts else ""
        )
    if base is int:
        extra = [opts] if opts else []
        if default is not dc.MISSING and not callable(default):
            extra.append(f"default={default}")
        return DJANGO_TYPES[int].format(opts=", ".join(extra))
    if base is float:
        extra = [opts] if opts else []
        if default is not dc.MISSING and not callable(default):
            extra.append(f"default={default}")
        return DJANGO_TYPES[float].format(opts=", ".join(extra))
    if base is bool:
        default_val = default if default is not dc.MISSING else False
        return DJANGO_TYPES[bool].format(default=default_val)
    # Fallback: unknown types become plain CharFields
    return DJANGO_TYPES[str].format(max_length=255, opts=", " + opts if opts else "")
def generate_django_enum(enum_cls: type) -> list[str]:
    """Generate Django TextChoices enum.

    Returns source lines: a class header plus one member per enum value,
    with a human-readable label derived from the member name.
    """
    lines = [f"class {enum_cls.__name__}(models.TextChoices):"]
    for member in enum_cls:
        # "FOO_BAR" -> "Foo Bar" display label
        label = member.name.replace("_", " ").title()
        lines.append(f'    {member.name} = "{member.value}", "{label}"')
    return lines
def generate_django_model(cls: type) -> list[str]:
    """Generate Django model lines from dataclass.

    Emits one field per public dataclass attribute (resolved via
    resolve_django_type), a Meta ordering on -created_at, and a __str__
    based on the best available display field.
    """
    lines = [
        f"class {cls.__name__}(models.Model):",
        # First line of the dataclass docstring becomes the model docstring.
        f'    """{(cls.__doc__ or cls.__name__).strip().split(chr(10))[0]}"""',
        "",
    ]
    hints = get_type_hints(cls)
    fields = {f.name: f for f in dc.fields(cls)}
    # Fields (underscore-prefixed attributes are treated as private)
    for name, type_hint in hints.items():
        if name.startswith("_"):
            continue
        field = fields.get(name)
        default = get_field_default(field) if field else dc.MISSING
        django_field = resolve_django_type(name, type_hint, default)
        lines.append(f"    {name} = {django_field}")
    # Meta and __str__
    lines.extend(
        [
            "",
            "    class Meta:",
            '        ordering = ["-created_at"]',
            "",
            "    def __str__(self):",
        ]
    )
    # Prefer filename, then name, then the UUID for display.
    if "filename" in hints:
        lines.append("        return self.filename")
    elif "name" in hints:
        lines.append("        return self.name")
    else:
        lines.append("        return str(self.id)")
    return lines
def generate_django() -> str:
    """Generate complete Django models file.

    Layout: module docstring + imports, then all TextChoices enums
    (models reference them, so they come first), then all models.
    """
    header = [
        '"""',
        "Django ORM Models - GENERATED FILE",
        "",
        "Do not edit directly. Modify schema/models/*.py and run:",
        "  python schema/generate.py --django",
        '"""',
        "",
        "import uuid",
        "from django.db import models",
        "",
    ]
    # Generate enums first
    body = []
    for enum_cls in ENUMS:
        body.extend(generate_django_enum(enum_cls))
        body.extend(["", ""])
    # Generate models
    for cls in DATACLASSES:
        body.extend(generate_django_model(cls))
        body.extend(["", ""])
    return "\n".join(header + body)
# =============================================================================
# Pydantic Generator
# =============================================================================
def resolve_pydantic_type(type_hint: Any) -> str:
    """Resolve a Python annotation to its Pydantic type string.

    Lookup order mirrors the resolver table: container origin
    ('dict'/'list'), special type name ('UUID'/'datetime'), builtin
    scalar, then Enum subclass. Unknown types fall back to 'str';
    optional hints are wrapped in Optional[...].
    """
    base, optional = unwrap_optional(type_hint)
    resolver = PYDANTIC_RESOLVERS.get(get_origin_name(base))
    if resolver is None:
        resolver = PYDANTIC_RESOLVERS.get(get_type_name(base))
    if resolver is None:
        resolver = PYDANTIC_RESOLVERS.get(base)
    if resolver is None and isinstance(base, type) and issubclass(base, Enum):
        resolver = PYDANTIC_RESOLVERS["enum"]
    rendered = resolver(base) if resolver else "str"
    return f"Optional[{rendered}]" if optional else rendered
def generate_pydantic_schema(cls: type, suffix: str) -> list[str]:
    """Generate Pydantic schema lines from dataclass.

    Args:
        cls: Domain dataclass to render.
        suffix: "Create", "Update", or "Response"; selects which
            server-managed fields are skipped and whether every field
            becomes optional (Update is PATCH-like).
    """
    # NOTE: `name` is reused as the field-loop variable below; the class-name
    # value is only needed here to build class_name.
    name = cls.__name__.replace("Transcode", "").replace("Media", "")
    class_name = f"{name}{suffix}"
    # Server-managed fields excluded from write schemas.
    skip_fields = {
        "Create": {"id", "created_at", "updated_at", "status", "error_message"},
        "Update": {"id", "created_at", "updated_at"},
        "Response": set(),
    }
    lines = [
        f"class {class_name}(BaseSchema):",
        f'    """{class_name} schema."""',
    ]
    hints = get_type_hints(cls)
    fields = {f.name: f for f in dc.fields(cls)}
    for name, type_hint in hints.items():
        if name.startswith("_") or name in skip_fields.get(suffix, set()):
            continue
        py_type = resolve_pydantic_type(type_hint)
        # Update schemas: all fields optional
        if suffix == "Update" and "Optional" not in py_type:
            py_type = f"Optional[{py_type}]"
        field = fields.get(name)
        default = get_field_default(field) if field else dc.MISSING
        if "Optional" in py_type:
            lines.append(f"    {name}: {py_type} = None")
        elif default is not dc.MISSING and not callable(default):
            if isinstance(default, str):
                lines.append(f'    {name}: {py_type} = "{default}"')
            elif isinstance(default, Enum):
                lines.append(
                    f"    {name}: {py_type} = {default.__class__.__name__}.{default.name}"
                )
            else:
                # repr() round-trips ints/floats/bools correctly.
                lines.append(f"    {name}: {py_type} = {default!r}")
        else:
            lines.append(f"    {name}: {py_type}")
    return lines
def generate_pydantic() -> dict[str, str]:
    """Generate all Pydantic schema files.

    Returns:
        Mapping of filename -> file content for api/schemas/: base.py, one
        module per domain model (Create/Update/Response schemas plus the
        model's status enum, if any), and an __init__.py re-exporting them.
    """
    files = {}
    # base.py
    files["base.py"] = "\n".join(
        [
            '"""Pydantic Base Schema - GENERATED FILE"""',
            "",
            "from pydantic import BaseModel, ConfigDict",
            "",
            "",
            "class BaseSchema(BaseModel):",
            '    """Base schema with ORM mode."""',
            "    model_config = ConfigDict(from_attributes=True)",
            "",
        ]
    )
    # Schema files per model
    for cls in DATACLASSES:
        # TranscodeJob -> job.py, MediaAsset -> asset.py, etc.
        module_name = cls.__name__.replace("Transcode", "").replace("Media", "").lower()
        lines = [
            f'"""{cls.__name__} Schemas - GENERATED FILE"""',
            "",
            "from datetime import datetime",
            "from enum import Enum",
            "from typing import Any, Dict, List, Optional",
            "from uuid import UUID",
            "",
            "from .base import BaseSchema",
            "",
        ]
        # Add enum if present (only the first enum-typed field is emitted)
        hints = get_type_hints(cls)
        for type_hint in hints.values():
            base, _ = unwrap_optional(type_hint)
            if isinstance(base, type) and issubclass(base, Enum):
                lines.extend(
                    [
                        "",
                        f"class {base.__name__}(str, Enum):",
                    ]
                )
                for m in base:
                    lines.append(f'    {m.name} = "{m.value}"')
                lines.append("")
                break
        # Schemas
        for suffix in ["Create", "Update", "Response"]:
            lines.append("")
            lines.extend(generate_pydantic_schema(cls, suffix))
        lines.append("")
        files[f"{module_name}.py"] = "\n".join(lines)
    # __init__.py
    imports = ["from .base import BaseSchema"]
    all_exports = ['"BaseSchema"']
    for cls in DATACLASSES:
        name = cls.__name__.replace("Transcode", "").replace("Media", "")
        module = name.lower()
        imports.append(
            f"from .{module} import {name}Create, {name}Update, {name}Response"
        )
        all_exports.extend([f'"{name}Create"', f'"{name}Update"', f'"{name}Response"'])
        # Add enum export (first enum-typed field only, matching above)
        hints = get_type_hints(cls)
        for type_hint in hints.values():
            base, _ = unwrap_optional(type_hint)
            if isinstance(base, type) and issubclass(base, Enum):
                imports.append(f"from .{module} import {base.__name__}")
                all_exports.append(f'"{base.__name__}"')
                break
    files["__init__.py"] = "\n".join(
        [
            '"""API Schemas - GENERATED FILE"""',
            "",
            *imports,
            "",
            f"__all__ = [{', '.join(all_exports)}]",
            "",
        ]
    )
    return files
# =============================================================================
# TypeScript Generator
# =============================================================================
def resolve_ts_type(type_hint: Any) -> str:
    """Resolve a Python annotation to a TypeScript type string.

    Lookup order: container origin, special type name, builtin scalar,
    then Enum subclass; unknown types fall back to 'string'. Optional
    hints render as "T | null".
    """
    base, optional = unwrap_optional(type_hint)
    resolver = None
    for key in (get_origin_name(base), get_type_name(base), base):
        candidate = TS_RESOLVERS.get(key)
        if candidate is not None:
            resolver = candidate
            break
    if resolver is None and isinstance(base, type) and issubclass(base, Enum):
        resolver = TS_RESOLVERS["enum"]
    rendered = resolver(base) if resolver else "string"
    return f"{rendered} | null" if optional else rendered
def generate_ts_interface(cls: type) -> list[str]:
    """Generate TypeScript interface lines from dataclass.

    Underscore-prefixed attributes are treated as private and omitted.
    """
    lines = [f"export interface {cls.__name__} {{"]
    for name, type_hint in get_type_hints(cls).items():
        if name.startswith("_"):
            continue
        ts_type = resolve_ts_type(type_hint)
        lines.append(f"  {name}: {ts_type};")
    lines.append("}")
    return lines
def generate_typescript() -> str:
    """Generate complete TypeScript file.

    Layout: header comment, enums as string-literal union types, one
    interface per domain dataclass, then the API request/response
    interfaces.
    """
    lines = [
        "/**",
        " * MPR TypeScript Types - GENERATED FILE",
        " *",
        " * Do not edit directly. Modify schema/models/*.py and run:",
        " * python schema/generate.py --typescript",
        " */",
        "",
    ]
    # Enums as union types
    for enum in ENUMS:
        values = " | ".join(f'"{m.value}"' for m in enum)
        lines.append(f"export type {enum.__name__} = {values};")
        lines.append("")
    # Interfaces - domain models
    for cls in DATACLASSES:
        lines.extend(generate_ts_interface(cls))
        lines.append("")
    # Interfaces - API request/response models
    lines.append("// API Request/Response Types")
    lines.append("")
    for cls in API_MODELS:
        lines.extend(generate_ts_interface(cls))
        lines.append("")
    return "\n".join(lines)
# =============================================================================
# Proto Generator
# =============================================================================
def resolve_proto_type(type_hint: Any) -> tuple[str, bool]:
    """Resolve Python type to proto type. Returns (type, is_optional).

    Unknown types degrade to "string" (e.g. UUID/datetime serialized as
    text). Repeated fields are never marked optional: proto3 forbids the
    `optional` keyword on repeated fields.
    """
    base, optional = unwrap_optional(type_hint)
    resolver = PROTO_RESOLVERS.get(get_origin_name(base)) or PROTO_RESOLVERS.get(base)
    if resolver is None:
        return "string", optional
    rendered = resolver(base)
    if rendered.startswith("repeated"):
        return rendered, False
    return rendered, optional
def generate_proto_message(cls: type) -> list[str]:
    """Generate proto message lines from dataclass.

    Field numbers are assigned by declaration order starting at 1, so
    reordering dataclass fields changes the wire format.
    """
    lines = [f"message {cls.__name__} {{"]
    hints = get_type_hints(cls)
    if not hints:
        lines.append("  // Empty")
    else:
        for i, (name, type_hint) in enumerate(hints.items(), 1):
            proto_type, optional = resolve_proto_type(type_hint)
            # proto3: `optional` keyword is invalid on repeated fields.
            prefix = (
                "optional "
                if optional and not proto_type.startswith("repeated")
                else ""
            )
            lines.append(f"  {prefix}{proto_type} {name} = {i};")
    lines.append("}")
    return lines
def generate_proto() -> str:
    """Generate complete proto file.

    Renders the service definition from GRPC_SERVICE followed by all
    message types in GRPC_MESSAGES.
    """
    lines = [
        "// MPR Worker Service - GENERATED FILE",
        "//",
        "// Do not edit directly. Modify schema/models/grpc.py and run:",
        "// python schema/generate.py --proto",
        "",
        'syntax = "proto3";',
        "",
        f"package {GRPC_SERVICE['package']};",
        "",
        f"service {GRPC_SERVICE['name']} {{",
    ]
    # Methods
    for m in GRPC_SERVICE["methods"]:
        req = m["request"].__name__
        resp = m["response"].__name__
        # Server-streaming RPCs are marked with `stream` on the response.
        returns = f"stream {resp}" if m["stream_response"] else resp
        lines.append(f"  rpc {m['name']}({req}) returns ({returns});")
    lines.extend(["}", ""])
    # Messages
    for cls in GRPC_MESSAGES:
        lines.extend(generate_proto_message(cls))
        lines.append("")
    return "\n".join(lines)
# =============================================================================
# Writers
# =============================================================================
def write_file(path: Path, content: str) -> None:
    """Persist *content* at *path*, creating parent directories, and log it."""
    parent = path.parent
    parent.mkdir(parents=True, exist_ok=True)
    path.write_text(content)
    print(f"  {path}")
def write_django(output_dir: Path) -> None:
    """Write the generated Django models module under *output_dir*."""
    target = output_dir / "mpr" / "media_assets" / "models.py"
    write_file(target, generate_django())
def write_pydantic(output_dir: Path) -> None:
    """Write all generated Pydantic schema modules under *output_dir*."""
    schemas_dir = output_dir / "api" / "schemas"
    generated = generate_pydantic()
    for filename in generated:
        write_file(schemas_dir / filename, generated[filename])
def write_typescript(output_dir: Path) -> None:
    """Write the generated TypeScript types file under *output_dir*."""
    ts_path = output_dir / "ui" / "timeline" / "src" / "types.ts"
    write_file(ts_path, generate_typescript())
def write_proto(output_dir: Path) -> None:
    """Write proto and generate stubs.

    Writes grpc/protos/worker.proto, then shells out to grpc_tools.protoc
    to regenerate the Python stubs next to it. A missing grpcio-tools
    install is reported as a warning rather than an error, so the proto
    file itself is still updated.
    """
    proto_dir = output_dir / "grpc" / "protos"
    proto_path = proto_dir / "worker.proto"
    write_file(proto_path, generate_proto())
    # Generate Python stubs
    grpc_dir = output_dir / "grpc"
    result = subprocess.run(
        [
            sys.executable,
            "-m",
            "grpc_tools.protoc",
            f"-I{proto_dir}",
            f"--python_out={grpc_dir}",
            f"--grpc_python_out={grpc_dir}",
            str(proto_path),
        ],
        capture_output=True,
        text=True,
    )
    if result.returncode == 0:
        print(f"  {grpc_dir}/worker_pb2.py")
        print(f"  {grpc_dir}/worker_pb2_grpc.py")
    else:
        print("  Warning: grpc_tools failed - pip install grpcio-tools")
# =============================================================================
# Main
# =============================================================================
def main() -> None:
    """CLI entry point: parse target flags and run the selected generators."""
    parser = argparse.ArgumentParser(description="Generate from schema")
    for flag in ("--django", "--pydantic", "--typescript", "--proto", "--all"):
        parser.add_argument(flag, action="store_true")
    parser.add_argument("--output", type=Path, default=PROJECT_ROOT)
    args = parser.parse_args()
    # No explicit target selected -> generate everything.
    if not any([args.django, args.pydantic, args.typescript, args.proto, args.all]):
        args.all = True
    print(f"Generating to {args.output}\n")
    targets: list[tuple[bool, str, Callable]] = [
        (args.django or args.all, "Django", write_django),
        (args.pydantic or args.all, "Pydantic", write_pydantic),
        (args.typescript or args.all, "TypeScript", write_typescript),
        (args.proto or args.all, "Proto", write_proto),
    ]
    for enabled, label, writer in targets:
        if not enabled:
            continue
        print(f"{label}:")
        writer(args.output)
        print()
    print("Done!")
# Allow running the generator directly as a script.
if __name__ == "__main__":
    main()

35
schema/modelgen.json Normal file
View File

@@ -0,0 +1,35 @@
{
"schema": "schema/models",
"targets": [
{
"target": "django",
"output": "mpr/media_assets/models.py",
"include": ["dataclasses", "enums"]
},
{
"target": "pydantic",
"output": "api/schema/",
"include": ["dataclasses", "enums"],
"name_map": {
"TranscodeJob": "Job",
"MediaAsset": "Asset",
"TranscodePreset": "Preset"
}
},
{
"target": "graphene",
"output": "api/schema/graphql.py",
"include": ["dataclasses", "enums", "api"]
},
{
"target": "typescript",
"output": "ui/timeline/src/types.ts",
"include": ["dataclasses", "enums", "api"]
},
{
"target": "protobuf",
"output": "rpc/protos/worker.proto",
"include": ["grpc"]
}
]
}

View File

@@ -5,7 +5,7 @@ This module exports all dataclasses, enums, and constants that the generator
should process. Add new models here to have them included in generation.
"""
from .api import CreateJobRequest, SystemStatus
from .api import CreateJobRequest, ScanResult, SystemStatus
from .grpc import (
GRPC_SERVICE,
CancelRequest,
@@ -26,7 +26,7 @@ DATACLASSES = [MediaAsset, TranscodePreset, TranscodeJob]
# API request/response models - generates TypeScript only (no Django)
# WorkerStatus from grpc.py is reused here
API_MODELS = [CreateJobRequest, SystemStatus, WorkerStatus]
API_MODELS = [CreateJobRequest, SystemStatus, ScanResult, WorkerStatus]
# Status enums - included in generated code
ENUMS = [AssetStatus, JobStatus]
@@ -50,6 +50,7 @@ __all__ = [
"TranscodeJob",
# API Models
"CreateJobRequest",
"ScanResult",
"SystemStatus",
# Enums
"AssetStatus",

View File

@@ -5,8 +5,8 @@ These are separate from the main domain models and represent
the shape of data sent to/from the API endpoints.
"""
from dataclasses import dataclass
from typing import Optional
from dataclasses import dataclass, field
from typing import List, Optional
from uuid import UUID
@@ -19,6 +19,7 @@ class CreateJobRequest:
trim_start: Optional[float] = None # seconds
trim_end: Optional[float] = None # seconds
output_filename: Optional[str] = None
priority: int = 0
@dataclass
@@ -29,4 +30,14 @@ class SystemStatus:
version: str
@dataclass
class ScanResult:
"""Result of scanning the media input bucket."""
found: int = 0
registered: int = 0
skipped: int = 0
files: List[str] = field(default_factory=list)
# Note: WorkerStatus is defined in grpc.py and reused here

View File

@@ -63,6 +63,7 @@ class TranscodeJob:
# Worker tracking
celery_task_id: Optional[str] = None
execution_arn: Optional[str] = None # AWS Step Functions execution ARN
priority: int = 0 # Lower = higher priority
# Timestamps

View File

@@ -110,7 +110,16 @@ class LocalExecutor(Executor):
class LambdaExecutor(Executor):
"""Execute jobs via AWS Lambda (future implementation)."""
"""Execute jobs via AWS Step Functions + Lambda."""
def __init__(self):
import boto3
region = os.environ.get("AWS_REGION", "us-east-1")
self.sfn = boto3.client("stepfunctions", region_name=region)
self.state_machine_arn = os.environ["STEP_FUNCTION_ARN"]
self.callback_url = os.environ.get("CALLBACK_URL", "")
self.callback_api_key = os.environ.get("CALLBACK_API_KEY", "")
def run(
self,
@@ -123,8 +132,36 @@ class LambdaExecutor(Executor):
duration: Optional[float] = None,
progress_callback: Optional[Callable[[int, Dict[str, Any]], None]] = None,
) -> bool:
"""Execute job via AWS Lambda."""
raise NotImplementedError("LambdaExecutor not yet implemented")
"""Start a Step Functions execution for this job."""
import json
payload = {
"job_id": job_id,
"source_key": source_path,
"output_key": output_path,
"preset": preset,
"trim_start": trim_start,
"trim_end": trim_end,
"duration": duration,
"callback_url": self.callback_url,
"api_key": self.callback_api_key,
}
response = self.sfn.start_execution(
stateMachineArn=self.state_machine_arn,
name=f"mpr-{job_id}",
input=json.dumps(payload),
)
# Store execution ARN on the job
execution_arn = response["executionArn"]
try:
from mpr.media_assets.models import TranscodeJob
TranscodeJob.objects.filter(id=job_id).update(execution_arn=execution_arn)
except Exception:
pass
return True
# Executor registry

148
task/lambda_handler.py Normal file
View File

@@ -0,0 +1,148 @@
"""
AWS Lambda handler for media transcoding.
Receives a job payload from Step Functions, downloads source from S3,
runs FFmpeg, uploads result to S3, and calls back to the API.
Uses the same core/ffmpeg module as the local Celery worker.
"""
import json
import logging
import os
import tempfile
from pathlib import Path
import boto3
import requests
logger = logging.getLogger()
logger.setLevel(logging.INFO)
# S3 config
S3_BUCKET_IN = os.environ.get("S3_BUCKET_IN", "mpr-media-in")
S3_BUCKET_OUT = os.environ.get("S3_BUCKET_OUT", "mpr-media-out")
AWS_REGION = os.environ.get("AWS_REGION", "us-east-1")
s3 = boto3.client("s3", region_name=AWS_REGION)
def handler(event, context):
    """
    Lambda entry point.

    Event payload (from Step Functions):
    {
        "job_id": "uuid",
        "source_key": "path/to/source.mp4",
        "output_key": "output_filename.mp4",
        "preset": {...} or null,
        "trim_start": float or null,
        "trim_end": float or null,
        "duration": float or null,
        "callback_url": "https://mpr.mcrn.ar/api",
        "api_key": "secret"
    }

    Downloads the source from S3, runs FFmpeg, uploads the result, and
    reports success/failure back to the API. Temp files in /tmp are always
    cleaned up (Lambda containers are reused between invocations).
    """
    job_id = event["job_id"]
    source_key = event["source_key"]
    output_key = event["output_key"]
    preset = event.get("preset")
    trim_start = event.get("trim_start")
    trim_end = event.get("trim_end")
    duration = event.get("duration")
    callback_url = event.get("callback_url", "")
    api_key = event.get("api_key", "")

    logger.info(f"Starting job {job_id}: {source_key} -> {output_key}")

    # Use mkstemp instead of the deprecated, race-prone mktemp: the file is
    # created atomically. This also matches the Celery worker in task/tasks.py.
    ext_in = Path(source_key).suffix or ".mp4"
    fd_in, tmp_source = tempfile.mkstemp(suffix=ext_in, dir="/tmp")
    os.close(fd_in)
    ext_out = Path(output_key).suffix or ".mp4"
    fd_out, tmp_output = tempfile.mkstemp(suffix=ext_out, dir="/tmp")
    os.close(fd_out)

    try:
        # Download inside the try so a fetch failure still cleans up the
        # temp files and notifies the API.
        logger.info(f"Downloading s3://{S3_BUCKET_IN}/{source_key}")
        s3.download_file(S3_BUCKET_IN, source_key, tmp_source)

        # Import ffmpeg module (bundled in container)
        from core.ffmpeg.transcode import TranscodeConfig, transcode

        if preset:
            config = TranscodeConfig(
                input_path=tmp_source,
                output_path=tmp_output,
                video_codec=preset.get("video_codec", "libx264"),
                video_bitrate=preset.get("video_bitrate"),
                video_crf=preset.get("video_crf"),
                video_preset=preset.get("video_preset"),
                resolution=preset.get("resolution"),
                framerate=preset.get("framerate"),
                audio_codec=preset.get("audio_codec", "aac"),
                audio_bitrate=preset.get("audio_bitrate"),
                audio_channels=preset.get("audio_channels"),
                audio_samplerate=preset.get("audio_samplerate"),
                container=preset.get("container", "mp4"),
                extra_args=preset.get("extra_args", []),
                trim_start=trim_start,
                trim_end=trim_end,
            )
        else:
            # No preset: stream-copy (remux/trim only, no re-encode).
            config = TranscodeConfig(
                input_path=tmp_source,
                output_path=tmp_output,
                video_codec="copy",
                audio_codec="copy",
                trim_start=trim_start,
                trim_end=trim_end,
            )

        success = transcode(config, duration=duration)
        if not success:
            raise RuntimeError("Transcode returned False")

        # Upload result to S3
        logger.info(f"Uploading s3://{S3_BUCKET_OUT}/{output_key}")
        s3.upload_file(tmp_output, S3_BUCKET_OUT, output_key)

        result = {"status": "completed", "job_id": job_id, "output_key": output_key}
        # Callback to API (best-effort; failures are only logged)
        _callback(callback_url, job_id, api_key, {"status": "completed"})
        return result
    except Exception as e:
        logger.exception(f"Job {job_id} failed: {e}")
        _callback(callback_url, job_id, api_key, {
            "status": "failed",
            "error": str(e),
        })
        return {"status": "failed", "job_id": job_id, "error": str(e)}
    finally:
        for f in [tmp_source, tmp_output]:
            try:
                os.unlink(f)
            except OSError:
                pass
def _callback(callback_url, job_id, api_key, payload):
    """POST the job result back to the API; best-effort, failures only logged."""
    if not callback_url:
        return
    headers = {"X-API-Key": api_key} if api_key else {}
    try:
        resp = requests.post(
            f"{callback_url}/jobs/{job_id}/callback",
            json=payload,
            headers=headers,
            timeout=10,
        )
        logger.info(f"Callback response: {resp.status_code}")
    except Exception as e:
        logger.warning(f"Callback failed: {e}")

View File

@@ -8,21 +8,19 @@ from typing import Any, Dict, Optional
from celery import shared_task
from core.storage import BUCKET_IN, BUCKET_OUT, download_to_temp, upload_file
from rpc.server import update_job_progress
from task.executor import get_executor
logger = logging.getLogger(__name__)
# Media paths from environment
MEDIA_ROOT = os.environ.get("MEDIA_ROOT", "/app/media")
@shared_task(bind=True, queue="transcode", max_retries=3, default_retry_delay=60)
def run_transcode_job(
self,
job_id: str,
source_path: str,
output_path: str,
source_key: str,
output_key: str,
preset: Optional[Dict[str, Any]] = None,
trim_start: Optional[float] = None,
trim_end: Optional[float] = None,
@@ -31,25 +29,25 @@ def run_transcode_job(
"""
Celery task to run a transcode/trim job.
Args:
job_id: Unique job identifier
source_path: Path to source file
output_path: Path for output file
preset: Transcode preset dict (optional)
trim_start: Trim start time in seconds (optional)
trim_end: Trim end time in seconds (optional)
duration: Source duration for progress calculation
Returns:
Result dict with status and output_path
Downloads source from S3, runs FFmpeg, uploads result to S3.
"""
logger.info(f"Starting job {job_id}: {source_path} -> {output_path}")
logger.info(f"Starting job {job_id}: {source_key} -> {output_key}")
# Update status to processing
update_job_progress(job_id, progress=0, status="processing")
# Download source from S3 to temp file
logger.info(f"Downloading {source_key} from {BUCKET_IN}")
tmp_source = download_to_temp(BUCKET_IN, source_key)
# Create temp output path with same extension
import tempfile
from pathlib import Path
ext = Path(output_key).suffix or ".mp4"
fd, tmp_output = tempfile.mkstemp(suffix=ext)
os.close(fd)
def progress_callback(percent: int, details: Dict[str, Any]) -> None:
"""Update gRPC progress state."""
update_job_progress(
job_id,
progress=percent,
@@ -61,8 +59,8 @@ def run_transcode_job(
executor = get_executor()
success = executor.run(
job_id=job_id,
source_path=source_path,
output_path=output_path,
source_path=tmp_source,
output_path=tmp_output,
preset=preset,
trim_start=trim_start,
trim_end=trim_end,
@@ -71,12 +69,16 @@ def run_transcode_job(
)
if success:
# Upload result to S3
logger.info(f"Uploading {output_key} to {BUCKET_OUT}")
upload_file(tmp_output, BUCKET_OUT, output_key)
logger.info(f"Job {job_id} completed successfully")
update_job_progress(job_id, progress=100, status="completed")
return {
"status": "completed",
"job_id": job_id,
"output_path": output_path,
"output_key": output_key,
}
else:
raise RuntimeError("Executor returned False")
@@ -85,7 +87,6 @@ def run_transcode_job(
logger.exception(f"Job {job_id} failed: {e}")
update_job_progress(job_id, progress=0, status="failed", error=str(e))
# Retry on transient errors
if self.request.retries < self.max_retries:
raise self.retry(exc=e)
@@ -94,3 +95,11 @@ def run_transcode_job(
"job_id": job_id,
"error": str(e),
}
finally:
# Clean up temp files
for f in [tmp_source, tmp_output]:
try:
os.unlink(f)
except OSError:
pass

View File

@@ -66,20 +66,20 @@ export interface TranscodeJob {
speed: string | null;
error_message: string | null;
celery_task_id: string | null;
execution_arn: string | null;
priority: number;
created_at: string | null;
started_at: string | null;
completed_at: string | null;
}
// API request/response types
export interface CreateJobRequest {
source_asset_id: string;
preset_id: string | null;
trim_start: number | null;
trim_end: number | null;
output_filename: string | null;
priority: number;
}
export interface SystemStatus {
@@ -87,6 +87,13 @@ export interface SystemStatus {
version: string;
}
export interface ScanResult {
found: number;
registered: number;
skipped: number;
files: string[];
}
export interface WorkerStatus {
available: boolean;
active_jobs: number;