Compare commits

...

8 Commits

Author SHA1 Message Date
72e4113529 updated modelgen tool 2026-02-06 20:18:45 -03:00
8f5d407e0e fine tuning models 2026-02-06 18:46:27 -03:00
e642908abb shoehorning graphql, step functions and lamdas. aws deployment scripts 2026-02-06 18:25:42 -03:00
013587d108 plug task enqueing properly 2026-02-06 10:49:05 -03:00
2cf6c89fbb ui video selector 2026-02-06 09:41:50 -03:00
daabd15c19 update docs 2026-02-06 09:23:36 -03:00
2e6ed4e37a scan media folder 2026-02-06 09:06:10 -03:00
68622bd6b1 fixed model names and generator 2026-02-06 08:51:35 -03:00
62 changed files with 3653 additions and 1854 deletions

6
.gitignore vendored
View File

@@ -17,8 +17,10 @@ env/
*.pot *.pot
*.pyc *.pyc
db.sqlite3 db.sqlite3
media/* media/in/*
!media/.gitkeep !media/in/.gitkeep
media/out/*
!media/out/.gitkeep
# Node # Node
node_modules/ node_modules/

View File

@@ -76,23 +76,39 @@ docker compose exec django python manage.py createsuperuser
## Code Generation ## Code Generation
Models are defined in `schema/models/` and generate: Models are defined as dataclasses in `schema/models/` and generated via `modelgen`:
- Django ORM models - **Django ORM** models (`--include dataclasses,enums`)
- Pydantic schemas - **Pydantic** schemas (`--include dataclasses,enums`)
- TypeScript types - **TypeScript** types (`--include dataclasses,enums,api`)
- Protobuf definitions - **Protobuf** definitions (`--include grpc`)
Each target only gets the model groups it needs via the `--include` flag.
```bash ```bash
# Regenerate all # Regenerate all targets
python schema/generate.py --all bash ctrl/generate.sh
# Or specific targets
python schema/generate.py --django
python schema/generate.py --pydantic
python schema/generate.py --typescript
python schema/generate.py --proto
``` ```
## Media Storage
MPR separates media into **input** (`MEDIA_IN`) and **output** (`MEDIA_OUT`) paths, each independently configurable. File paths are stored relative for cloud portability.
### Local Development
- Source files: `/app/media/in/video.mp4`
- Output files: `/app/media/out/video_h264.mp4`
- Served via: `http://mpr.local.ar/media/in/video.mp4` (nginx alias)
### AWS/Cloud Deployment
Input and output can be different buckets/locations:
```bash
MEDIA_IN=s3://source-bucket/media/
MEDIA_OUT=s3://output-bucket/transcoded/
```
**Scan Endpoint**: `POST /api/assets/scan` recursively scans `MEDIA_IN` and registers new files with relative paths.
See [docs/media-storage.md](docs/media-storage.md) for full details.
## Project Structure ## Project Structure
``` ```
@@ -105,18 +121,20 @@ mpr/
├── ctrl/ # Docker & deployment ├── ctrl/ # Docker & deployment
│ ├── docker-compose.yml │ ├── docker-compose.yml
│ └── nginx.conf │ └── nginx.conf
├── docs/ # Architecture diagrams ├── media/
├── grpc/ # gRPC server & client │ ├── in/ # Source media files
│ └── out/ # Transcoded output
├── rpc/ # gRPC server & client
│ └── protos/ # Protobuf definitions (generated) │ └── protos/ # Protobuf definitions (generated)
├── mpr/ # Django project ├── mpr/ # Django project
│ └── media_assets/ # Django app │ └── media_assets/ # Django app
├── schema/ # Source of truth ├── schema/ # Source of truth
│ └── models/ # Dataclass definitions │ └── models/ # Dataclass definitions
├── ui/ # Frontend ├── task/ # Celery job execution
── timeline/ # React app ── executor.py # Executor abstraction
└── worker/ # Job execution │ └── tasks.py # Celery tasks
├── executor.py # Executor abstraction └── ui/ # Frontend
└── tasks.py # Celery tasks └── timeline/ # React app
``` ```
## Environment Variables ## Environment Variables
@@ -130,6 +148,10 @@ See `ctrl/.env.template` for all configuration options.
| `GRPC_HOST` | grpc | gRPC server hostname | | `GRPC_HOST` | grpc | gRPC server hostname |
| `GRPC_PORT` | 50051 | gRPC server port | | `GRPC_PORT` | 50051 | gRPC server port |
| `MPR_EXECUTOR` | local | Executor type (local/lambda) | | `MPR_EXECUTOR` | local | Executor type (local/lambda) |
| `MEDIA_IN` | /app/media/in | Source media files directory |
| `MEDIA_OUT` | /app/media/out | Transcoded output directory |
| `MEDIA_BASE_URL` | /media/ | Base URL for serving media (use S3 URL for cloud) |
| `VITE_ALLOWED_HOSTS` | - | Comma-separated allowed hosts for Vite dev server |
## License ## License

251
api/graphql.py Normal file
View File

@@ -0,0 +1,251 @@
"""
GraphQL API using graphene, mounted on FastAPI/Starlette.
Provides the same data as the REST API but via GraphQL queries and mutations.
Uses Django ORM directly for data access.
Types are generated from schema/ via modelgen — see api/schema/graphql.py.
"""
import os
import graphene
from api.schema.graphql import (
CreateJobInput,
MediaAssetType,
ScanResultType,
SystemStatusType,
TranscodeJobType,
TranscodePresetType,
)
from core.storage import BUCKET_IN, list_objects
# Media extensions (same as assets route)
# NOTE(review): duplicated from the assets route module — consider moving to a
# shared constants module so the two lists cannot drift apart.
VIDEO_EXTS = {".mp4", ".mkv", ".avi", ".mov", ".webm", ".flv", ".wmv", ".m4v"}
AUDIO_EXTS = {".mp3", ".wav", ".flac", ".aac", ".ogg", ".m4a"}
MEDIA_EXTS = VIDEO_EXTS | AUDIO_EXTS
# ---------------------------------------------------------------------------
# Queries
# ---------------------------------------------------------------------------
class Query(graphene.ObjectType):
    # Root query type: read-only access to assets, jobs, presets and a
    # simple system-status probe. (No class docstring on purpose — graphene
    # would expose it as the GraphQL type description.)
    assets = graphene.List(
        MediaAssetType,
        status=graphene.String(),
        search=graphene.String(),
    )
    asset = graphene.Field(MediaAssetType, id=graphene.UUID(required=True))
    jobs = graphene.List(
        TranscodeJobType,
        status=graphene.String(),
        source_asset_id=graphene.UUID(),
    )
    job = graphene.Field(TranscodeJobType, id=graphene.UUID(required=True))
    presets = graphene.List(TranscodePresetType)
    system_status = graphene.Field(SystemStatusType)

    def resolve_assets(self, info, status=None, search=None):
        # Lazy import: Django must already be configured when this runs.
        from mpr.media_assets.models import MediaAsset

        queryset = MediaAsset.objects.all()
        if status:
            queryset = queryset.filter(status=status)
        if search:
            queryset = queryset.filter(filename__icontains=search)
        return queryset

    def resolve_asset(self, info, id):
        from mpr.media_assets.models import MediaAsset

        # .first() returns None for an unknown id — same observable result
        # as the try/get/except-DoesNotExist pattern used elsewhere.
        return MediaAsset.objects.filter(id=id).first()

    def resolve_jobs(self, info, status=None, source_asset_id=None):
        from mpr.media_assets.models import TranscodeJob

        queryset = TranscodeJob.objects.all()
        if status:
            queryset = queryset.filter(status=status)
        if source_asset_id:
            queryset = queryset.filter(source_asset_id=source_asset_id)
        return queryset

    def resolve_job(self, info, id):
        from mpr.media_assets.models import TranscodeJob

        return TranscodeJob.objects.filter(id=id).first()

    def resolve_presets(self, info):
        from mpr.media_assets.models import TranscodePreset

        return TranscodePreset.objects.all()

    def resolve_system_status(self, info):
        # Static health payload; version is hard-coded for now.
        return {"status": "ok", "version": "0.1.0"}
# ---------------------------------------------------------------------------
# Mutations
# ---------------------------------------------------------------------------
class ScanMediaFolder(graphene.Mutation):
    # Scan the input bucket for media files and register new ones as assets.
    # Mirrors POST /api/assets/scan: already-registered filenames are counted
    # as skipped; per-file create errors are logged (previously swallowed by
    # a bare `except Exception: pass`, inconsistent with the REST endpoint).

    class Arguments:
        pass

    Output = ScanResultType

    def mutate(self, info):
        from mpr.media_assets.models import MediaAsset

        objects = list_objects(BUCKET_IN, extensions=MEDIA_EXTS)
        # Dedup against what is already registered, keyed by filename.
        existing = set(MediaAsset.objects.values_list("filename", flat=True))
        registered = []
        skipped = []
        for obj in objects:
            if obj["filename"] in existing:
                skipped.append(obj["filename"])
                continue
            try:
                MediaAsset.objects.create(
                    filename=obj["filename"],
                    file_path=obj["key"],
                    file_size=obj["size"],
                )
                registered.append(obj["filename"])
            except Exception as e:
                # Surface the failure like the REST scan endpoint does so a
                # broken file is not dropped silently.
                print(f"Error registering {obj['filename']}: {e}")
        return ScanResultType(
            found=len(objects),
            registered=len(registered),
            skipped=len(skipped),
            files=registered,
        )
class CreateJob(graphene.Mutation):
    # GraphQL counterpart of POST /api/jobs: creates a TranscodeJob row and
    # dispatches it to the configured executor (Celery locally, Step
    # Functions/Lambda on AWS).
    # NOTE(review): this duplicates the REST create_job + dispatch logic in
    # the jobs route — consider extracting a shared service helper.

    class Arguments:
        input = CreateJobInput(required=True)

    Output = TranscodeJobType

    def mutate(self, info, input):
        from pathlib import Path
        from mpr.media_assets.models import MediaAsset, TranscodeJob, TranscodePreset

        # Resolve the source asset; raising here surfaces as a GraphQL error.
        try:
            source = MediaAsset.objects.get(id=input.source_asset_id)
        except MediaAsset.DoesNotExist:
            raise Exception("Source asset not found")

        preset = None
        preset_snapshot = {}
        if input.preset_id:
            try:
                preset = TranscodePreset.objects.get(id=input.preset_id)
                # Snapshot the preset fields at creation time so later preset
                # edits do not retroactively change this job.
                preset_snapshot = {
                    "name": preset.name,
                    "container": preset.container,
                    "video_codec": preset.video_codec,
                    "audio_codec": preset.audio_codec,
                }
            except TranscodePreset.DoesNotExist:
                raise Exception("Preset not found")

        # NOTE(review): truthiness check treats trim_start/trim_end == 0.0 the
        # same as "not provided" — confirm whether a 0.0 trim point should be
        # a valid job on its own before tightening this to `is None` checks.
        if not preset and not input.trim_start and not input.trim_end:
            raise Exception("Must specify preset_id or trim_start/trim_end")

        # Default output name: "<source stem>_output.<container ext>".
        output_filename = input.output_filename
        if not output_filename:
            stem = Path(source.filename).stem
            ext = preset_snapshot.get("container", "mp4") if preset else "mp4"
            output_filename = f"{stem}_output.{ext}"

        job = TranscodeJob.objects.create(
            source_asset_id=source.id,
            preset_id=preset.id if preset else None,
            preset_snapshot=preset_snapshot,
            trim_start=input.trim_start,
            trim_end=input.trim_end,
            output_filename=output_filename,
            # Output path doubles as the S3 key in the output bucket.
            output_path=output_filename,
            priority=input.priority or 0,
        )

        # Dispatch
        executor_mode = os.environ.get("MPR_EXECUTOR", "local")
        if executor_mode == "lambda":
            from task.executor import get_executor
            get_executor().run(
                job_id=str(job.id),
                source_path=source.file_path,
                output_path=output_filename,
                preset=preset_snapshot or None,
                trim_start=input.trim_start,
                trim_end=input.trim_end,
                duration=source.duration,
            )
        else:
            from task.tasks import run_transcode_job
            result = run_transcode_job.delay(
                job_id=str(job.id),
                source_key=source.file_path,
                output_key=output_filename,
                preset=preset_snapshot or None,
                trim_start=input.trim_start,
                trim_end=input.trim_end,
                duration=source.duration,
            )
            job.celery_task_id = result.id
            # Persist only the task id to avoid clobbering concurrent updates.
            job.save(update_fields=["celery_task_id"])
        return job
class CancelJob(graphene.Mutation):
    # Cancel a job that has not yet finished (pending or processing only).

    class Arguments:
        id = graphene.UUID(required=True)

    Output = TranscodeJobType

    def mutate(self, info, id):
        from mpr.media_assets.models import TranscodeJob

        try:
            job = TranscodeJob.objects.get(id=id)
        except TranscodeJob.DoesNotExist:
            raise Exception("Job not found")

        cancellable = ("pending", "processing")
        if job.status not in cancellable:
            raise Exception(f"Cannot cancel job with status: {job.status}")

        job.status = "cancelled"
        job.save(update_fields=["status"])
        return job
class Mutation(graphene.ObjectType):
    # Root mutation type: write operations mirroring the REST endpoints.
    scan_media_folder = ScanMediaFolder.Field()
    create_job = CreateJob.Field()
    cancel_job = CancelJob.Field()
# ---------------------------------------------------------------------------
# Schema
# ---------------------------------------------------------------------------
# Executable schema mounted on the FastAPI app (see api/main).
schema = graphene.Schema(query=Query, mutation=Mutation)

View File

@@ -20,7 +20,9 @@ django.setup()
from fastapi import FastAPI from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware from fastapi.middleware.cors import CORSMiddleware
from api.graphql import schema as graphql_schema
from api.routes import assets_router, jobs_router, presets_router, system_router from api.routes import assets_router, jobs_router, presets_router, system_router
from starlette_graphene3 import GraphQLApp, make_graphiql_handler
app = FastAPI( app = FastAPI(
title="MPR API", title="MPR API",
@@ -45,6 +47,9 @@ app.include_router(assets_router, prefix="/api")
app.include_router(presets_router, prefix="/api") app.include_router(presets_router, prefix="/api")
app.include_router(jobs_router, prefix="/api") app.include_router(jobs_router, prefix="/api")
# GraphQL
app.mount("/graphql", GraphQLApp(schema=graphql_schema, on_get=make_graphiql_handler()))
@app.get("/") @app.get("/")
def root(): def root():

View File

@@ -8,37 +8,27 @@ from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query from fastapi import APIRouter, Depends, HTTPException, Query
from api.deps import get_asset from api.deps import get_asset
from api.schemas import AssetCreate, AssetResponse, AssetUpdate from api.schema import AssetCreate, AssetResponse, AssetUpdate
from core.storage import BUCKET_IN, list_objects
router = APIRouter(prefix="/assets", tags=["assets"]) router = APIRouter(prefix="/assets", tags=["assets"])
# Supported media extensions
VIDEO_EXTS = {".mp4", ".mkv", ".avi", ".mov", ".webm", ".flv", ".wmv", ".m4v"}
AUDIO_EXTS = {".mp3", ".wav", ".flac", ".aac", ".ogg", ".m4a"}
MEDIA_EXTS = VIDEO_EXTS | AUDIO_EXTS
@router.post("/", response_model=AssetResponse, status_code=201) @router.post("/", response_model=AssetResponse, status_code=201)
def create_asset(data: AssetCreate): def create_asset(data: AssetCreate):
""" """Register a media file as an asset."""
Register a media file as an asset.
The file must exist on disk. A probe task will be queued
to extract metadata asynchronously.
"""
from pathlib import Path
from mpr.media_assets.models import MediaAsset from mpr.media_assets.models import MediaAsset
# Validate file exists
path = Path(data.file_path)
if not path.exists():
raise HTTPException(status_code=400, detail="File not found")
# Create asset
asset = MediaAsset.objects.create( asset = MediaAsset.objects.create(
filename=data.filename or path.name, filename=data.filename or data.file_path.split("/")[-1],
file_path=str(path.absolute()), file_path=data.file_path,
file_size=path.stat().st_size, file_size=data.file_size,
) )
# TODO: Queue probe task via gRPC/Celery
return asset return asset
@@ -52,10 +42,8 @@ def list_assets(
from mpr.media_assets.models import MediaAsset from mpr.media_assets.models import MediaAsset
qs = MediaAsset.objects.all() qs = MediaAsset.objects.all()
if status: if status:
qs = qs.filter(status=status) qs = qs.filter(status=status)
return list(qs[offset : offset + limit]) return list(qs[offset : offset + limit])
@@ -88,3 +76,42 @@ def update_asset(asset_id: UUID, data: AssetUpdate, asset=Depends(get_asset)):
def delete_asset(asset_id: UUID, asset=Depends(get_asset)): def delete_asset(asset_id: UUID, asset=Depends(get_asset)):
"""Delete an asset.""" """Delete an asset."""
asset.delete() asset.delete()
@router.post("/scan", response_model=dict)
def scan_media_folder():
    """
    Scan the S3 media-in bucket for new video/audio files and register them as assets.
    """
    from mpr.media_assets.models import MediaAsset

    # Pull every media object from the input bucket, then dedup by filename
    # against assets that are already registered.
    objects = list_objects(BUCKET_IN, extensions=MEDIA_EXTS)
    existing_filenames = set(MediaAsset.objects.values_list("filename", flat=True))

    registered_files: list = []
    skipped_files: list = []
    for obj in objects:
        name = obj["filename"]
        if name in existing_filenames:
            skipped_files.append(name)
            continue
        try:
            MediaAsset.objects.create(
                filename=name,
                file_path=obj["key"],
                file_size=obj["size"],
            )
        except Exception as e:
            # Best-effort: log and keep scanning the remaining objects.
            print(f"Error registering {obj['filename']}: {e}")
        else:
            registered_files.append(name)

    return {
        "found": len(objects),
        "registered": len(registered_files),
        "skipped": len(skipped_files),
        "files": registered_files,
    }

View File

@@ -2,17 +2,20 @@
Job endpoints - transcode/trim job management. Job endpoints - transcode/trim job management.
""" """
import json import os
from pathlib import Path
from typing import Optional from typing import Optional
from uuid import UUID from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query from fastapi import APIRouter, Depends, Header, HTTPException, Query
from api.deps import get_asset, get_job, get_preset from api.deps import get_asset, get_job, get_preset
from api.schemas import JobCreate, JobResponse from api.schema import JobCreate, JobResponse
router = APIRouter(prefix="/jobs", tags=["jobs"]) router = APIRouter(prefix="/jobs", tags=["jobs"])
CALLBACK_API_KEY = os.environ.get("CALLBACK_API_KEY", "")
@router.post("/", response_model=JobResponse, status_code=201) @router.post("/", response_model=JobResponse, status_code=201)
def create_job(data: JobCreate): def create_job(data: JobCreate):
@@ -30,16 +33,12 @@ def create_job(data: JobCreate):
except MediaAsset.DoesNotExist: except MediaAsset.DoesNotExist:
raise HTTPException(status_code=404, detail="Source asset not found") raise HTTPException(status_code=404, detail="Source asset not found")
if source.status != "ready":
raise HTTPException(status_code=400, detail="Source asset is not ready")
# Get preset if specified # Get preset if specified
preset = None preset = None
preset_snapshot = {} preset_snapshot = {}
if data.preset_id: if data.preset_id:
try: try:
preset = TranscodePreset.objects.get(id=data.preset_id) preset = TranscodePreset.objects.get(id=data.preset_id)
# Snapshot preset at job creation time
preset_snapshot = { preset_snapshot = {
"name": preset.name, "name": preset.name,
"container": preset.container, "container": preset.container,
@@ -64,31 +63,112 @@ def create_job(data: JobCreate):
status_code=400, detail="Must specify preset_id or trim_start/trim_end" status_code=400, detail="Must specify preset_id or trim_start/trim_end"
) )
# Generate output filename # Generate output filename - stored as S3 key in output bucket
output_filename = data.output_filename output_filename = data.output_filename
if not output_filename: if not output_filename:
from pathlib import Path
stem = Path(source.filename).stem stem = Path(source.filename).stem
ext = preset_snapshot.get("container", "mp4") if preset else "mp4" ext = preset_snapshot.get("container", "mp4") if preset else "mp4"
output_filename = f"{stem}_output.{ext}" output_filename = f"{stem}_output.{ext}"
# Create job # Create job
job = TranscodeJob.objects.create( job = TranscodeJob.objects.create(
source_asset=source, source_asset_id=source.id,
preset=preset, preset_id=preset.id if preset else None,
preset_snapshot=preset_snapshot, preset_snapshot=preset_snapshot,
trim_start=data.trim_start, trim_start=data.trim_start,
trim_end=data.trim_end, trim_end=data.trim_end,
output_filename=output_filename, output_filename=output_filename,
output_path=output_filename, # S3 key in output bucket
priority=data.priority or 0, priority=data.priority or 0,
) )
# TODO: Submit job via gRPC # Dispatch based on executor mode
executor_mode = os.environ.get("MPR_EXECUTOR", "local")
if executor_mode == "lambda":
_dispatch_lambda(job, source, preset_snapshot)
else:
_dispatch_celery(job, source, preset_snapshot)
return job return job
def _dispatch_celery(job, source, preset_snapshot):
    """Dispatch job to Celery worker."""
    from task.tasks import run_transcode_job

    # Queue the transcode and remember the task id so the job row can be
    # correlated with the Celery task later.
    async_result = run_transcode_job.delay(
        job_id=str(job.id),
        source_key=source.file_path,
        output_key=job.output_filename,
        preset=preset_snapshot or None,
        trim_start=job.trim_start,
        trim_end=job.trim_end,
        duration=source.duration,
    )
    job.celery_task_id = async_result.id
    job.save(update_fields=["celery_task_id"])
def _dispatch_lambda(job, source, preset_snapshot):
    """Dispatch job to AWS Step Functions."""
    from task.executor import get_executor

    # Fire-and-forget: completion is reported back via the callback endpoint.
    get_executor().run(
        job_id=str(job.id),
        source_path=source.file_path,
        output_path=job.output_filename,
        preset=preset_snapshot or None,
        trim_start=job.trim_start,
        trim_end=job.trim_end,
        duration=source.duration,
    )
@router.post("/{job_id}/callback")
def job_callback(
    job_id: UUID,
    payload: dict,
    x_api_key: Optional[str] = Header(None),
):
    """
    Callback endpoint for Lambda to report job completion.
    Protected by API key (constant-time comparison; check is skipped when
    CALLBACK_API_KEY is unset, i.e. local development).
    """
    import hmac

    # hmac.compare_digest avoids the timing side-channel of `!=` on a secret.
    if CALLBACK_API_KEY and not hmac.compare_digest(x_api_key or "", CALLBACK_API_KEY):
        raise HTTPException(status_code=403, detail="Invalid API key")

    from django.utils import timezone
    from mpr.media_assets.models import TranscodeJob

    try:
        job = TranscodeJob.objects.get(id=job_id)
    except TranscodeJob.DoesNotExist:
        raise HTTPException(status_code=404, detail="Job not found")

    # Absent/unknown status is treated as a failure report.
    status = payload.get("status", "failed")
    job.status = status
    job.progress = 100.0 if status == "completed" else job.progress
    update_fields = ["status", "progress"]
    if payload.get("error"):
        job.error_message = payload["error"]
        update_fields.append("error_message")
    # Both terminal states stamp the completion time (previously two
    # identical branches).
    if status in ("completed", "failed"):
        job.completed_at = timezone.now()
        update_fields.append("completed_at")
    job.save(update_fields=update_fields)
    return {"ok": True}
@router.get("/", response_model=list[JobResponse]) @router.get("/", response_model=list[JobResponse])
def list_jobs( def list_jobs(
status: Optional[str] = Query(None, description="Filter by status"), status: Optional[str] = Query(None, description="Filter by status"),
@@ -100,12 +180,10 @@ def list_jobs(
from mpr.media_assets.models import TranscodeJob from mpr.media_assets.models import TranscodeJob
qs = TranscodeJob.objects.all() qs = TranscodeJob.objects.all()
if status: if status:
qs = qs.filter(status=status) qs = qs.filter(status=status)
if source_asset_id: if source_asset_id:
qs = qs.filter(source_asset_id=source_asset_id) qs = qs.filter(source_asset_id=source_asset_id)
return list(qs[offset : offset + limit]) return list(qs[offset : offset + limit])
@@ -136,11 +214,8 @@ def cancel_job(job_id: UUID, job=Depends(get_job)):
status_code=400, detail=f"Cannot cancel job with status: {job.status}" status_code=400, detail=f"Cannot cancel job with status: {job.status}"
) )
# TODO: Cancel via gRPC
job.status = "cancelled" job.status = "cancelled"
job.save(update_fields=["status"]) job.save(update_fields=["status"])
return job return job
@@ -155,6 +230,4 @@ def retry_job(job_id: UUID, job=Depends(get_job)):
job.error_message = None job.error_message = None
job.save(update_fields=["status", "progress", "error_message"]) job.save(update_fields=["status", "progress", "error_message"])
# TODO: Resubmit via gRPC
return job return job

View File

@@ -7,7 +7,7 @@ from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException from fastapi import APIRouter, Depends, HTTPException
from api.deps import get_preset from api.deps import get_preset
from api.schemas import PresetCreate, PresetResponse, PresetUpdate from api.schema import PresetCreate, PresetResponse, PresetUpdate
router = APIRouter(prefix="/presets", tags=["presets"]) router = APIRouter(prefix="/presets", tags=["presets"])

View File

@@ -28,7 +28,7 @@ class AssetCreate(BaseSchema):
bitrate: Optional[int] = None bitrate: Optional[int] = None
properties: Dict[str, Any] properties: Dict[str, Any]
comments: str = "" comments: str = ""
tags: List[str] tags: List[str] = Field(default_factory=list)
class AssetUpdate(BaseSchema): class AssetUpdate(BaseSchema):
"""AssetUpdate schema.""" """AssetUpdate schema."""
@@ -65,6 +65,6 @@ class AssetResponse(BaseSchema):
bitrate: Optional[int] = None bitrate: Optional[int] = None
properties: Dict[str, Any] properties: Dict[str, Any]
comments: str = "" comments: str = ""
tags: List[str] tags: List[str] = Field(default_factory=list)
created_at: Optional[datetime] = None created_at: Optional[datetime] = None
updated_at: Optional[datetime] = None updated_at: Optional[datetime] = None

129
api/schema/graphql.py Normal file
View File

@@ -0,0 +1,129 @@
"""
Graphene Types - GENERATED FILE
Do not edit directly. Regenerate using modelgen.
"""
import graphene


# Lifecycle states for a media asset (mirrors the schema/models enum).
class AssetStatus(graphene.Enum):
    PENDING = "pending"
    READY = "ready"
    ERROR = "error"


# Lifecycle states for a transcode job (mirrors the schema/models enum).
class JobStatus(graphene.Enum):
    PENDING = "pending"
    PROCESSING = "processing"
    COMPLETED = "completed"
    FAILED = "failed"
    CANCELLED = "cancelled"


class MediaAssetType(graphene.ObjectType):
    """A video/audio file registered in the system."""
    # Identity and storage location (file_path is a bucket-relative key).
    id = graphene.UUID()
    filename = graphene.String()
    file_path = graphene.String()
    status = graphene.String()
    error_message = graphene.String()
    # Probed media metadata (populated asynchronously; may be null).
    file_size = graphene.Int()
    duration = graphene.Float()
    video_codec = graphene.String()
    audio_codec = graphene.String()
    width = graphene.Int()
    height = graphene.Int()
    framerate = graphene.Float()
    bitrate = graphene.Int()
    properties = graphene.JSONString()
    comments = graphene.String()
    tags = graphene.List(graphene.String)
    created_at = graphene.DateTime()
    updated_at = graphene.DateTime()


class TranscodePresetType(graphene.ObjectType):
    """A reusable transcoding configuration (like Handbrake presets)."""
    id = graphene.UUID()
    name = graphene.String()
    description = graphene.String()
    is_builtin = graphene.Boolean()
    # Video encoding settings.
    container = graphene.String()
    video_codec = graphene.String()
    video_bitrate = graphene.String()
    video_crf = graphene.Int()
    video_preset = graphene.String()
    resolution = graphene.String()
    framerate = graphene.Float()
    # Audio encoding settings.
    audio_codec = graphene.String()
    audio_bitrate = graphene.String()
    audio_channels = graphene.Int()
    audio_samplerate = graphene.Int()
    extra_args = graphene.List(graphene.String)
    created_at = graphene.DateTime()
    updated_at = graphene.DateTime()


class TranscodeJobType(graphene.ObjectType):
    """A transcoding or trimming job in the queue."""
    id = graphene.UUID()
    source_asset_id = graphene.UUID()
    preset_id = graphene.UUID()
    # Copy of the preset taken at job-creation time.
    preset_snapshot = graphene.JSONString()
    trim_start = graphene.Float()
    trim_end = graphene.Float()
    output_filename = graphene.String()
    output_path = graphene.String()
    output_asset_id = graphene.UUID()
    # Live progress reported by the worker.
    status = graphene.String()
    progress = graphene.Float()
    current_frame = graphene.Int()
    current_time = graphene.Float()
    speed = graphene.String()
    error_message = graphene.String()
    # Executor correlation ids (Celery task or Step Functions execution).
    celery_task_id = graphene.String()
    execution_arn = graphene.String()
    priority = graphene.Int()
    created_at = graphene.DateTime()
    started_at = graphene.DateTime()
    completed_at = graphene.DateTime()


class CreateJobInput(graphene.InputObjectType):
    """Request body for creating a transcode/trim job."""
    source_asset_id = graphene.UUID(required=True)
    preset_id = graphene.UUID()
    trim_start = graphene.Float()
    trim_end = graphene.Float()
    output_filename = graphene.String()
    priority = graphene.Int(default_value=0)


class SystemStatusType(graphene.ObjectType):
    """System status response."""
    status = graphene.String()
    version = graphene.String()


class ScanResultType(graphene.ObjectType):
    """Result of scanning the media input bucket."""
    found = graphene.Int()
    registered = graphene.Int()
    skipped = graphene.Int()
    files = graphene.List(graphene.String)


class WorkerStatusType(graphene.ObjectType):
    """Worker health and capabilities."""
    available = graphene.Boolean()
    active_jobs = graphene.Int()
    supported_codecs = graphene.List(graphene.String)
    gpu_available = graphene.Boolean()

View File

@@ -31,6 +31,7 @@ class JobCreate(BaseSchema):
current_time: Optional[float] = None current_time: Optional[float] = None
speed: Optional[str] = None speed: Optional[str] = None
celery_task_id: Optional[str] = None celery_task_id: Optional[str] = None
execution_arn: Optional[str] = None
priority: int = 0 priority: int = 0
started_at: Optional[datetime] = None started_at: Optional[datetime] = None
completed_at: Optional[datetime] = None completed_at: Optional[datetime] = None
@@ -52,6 +53,7 @@ class JobUpdate(BaseSchema):
speed: Optional[str] = None speed: Optional[str] = None
error_message: Optional[str] = None error_message: Optional[str] = None
celery_task_id: Optional[str] = None celery_task_id: Optional[str] = None
execution_arn: Optional[str] = None
priority: Optional[int] = None priority: Optional[int] = None
started_at: Optional[datetime] = None started_at: Optional[datetime] = None
completed_at: Optional[datetime] = None completed_at: Optional[datetime] = None
@@ -74,6 +76,7 @@ class JobResponse(BaseSchema):
speed: Optional[str] = None speed: Optional[str] = None
error_message: Optional[str] = None error_message: Optional[str] = None
celery_task_id: Optional[str] = None celery_task_id: Optional[str] = None
execution_arn: Optional[str] = None
priority: int = 0 priority: int = 0
created_at: Optional[datetime] = None created_at: Optional[datetime] = None
started_at: Optional[datetime] = None started_at: Optional[datetime] = None

View File

@@ -24,7 +24,7 @@ class PresetCreate(BaseSchema):
audio_bitrate: Optional[str] = None audio_bitrate: Optional[str] = None
audio_channels: Optional[int] = None audio_channels: Optional[int] = None
audio_samplerate: Optional[int] = None audio_samplerate: Optional[int] = None
extra_args: List[str] extra_args: List[str] = Field(default_factory=list)
class PresetUpdate(BaseSchema): class PresetUpdate(BaseSchema):
"""PresetUpdate schema.""" """PresetUpdate schema."""
@@ -61,6 +61,6 @@ class PresetResponse(BaseSchema):
audio_bitrate: Optional[str] = None audio_bitrate: Optional[str] = None
audio_channels: Optional[int] = None audio_channels: Optional[int] = None
audio_samplerate: Optional[int] = None audio_samplerate: Optional[int] = None
extra_args: List[str] extra_args: List[str] = Field(default_factory=list)
created_at: Optional[datetime] = None created_at: Optional[datetime] = None
updated_at: Optional[datetime] = None updated_at: Optional[datetime] = None

View File

@@ -1,89 +0,0 @@
"""
Pydantic Models - GENERATED FILE
Do not edit directly. Regenerate using modelgen.
"""
from datetime import datetime
from enum import Enum
from typing import Any, Dict, List, Optional
from uuid import UUID
from pydantic import BaseModel, Field
class AssetStatus(str, Enum):
PENDING = "pending"
READY = "ready"
ERROR = "error"
class JobStatus(str, Enum):
PENDING = "pending"
PROCESSING = "processing"
COMPLETED = "completed"
FAILED = "failed"
CANCELLED = "cancelled"
class MediaAsset(BaseModel):
"""A video/audio file registered in the system."""
id: UUID
filename: str
file_path: str
status: AssetStatus = "AssetStatus.PENDING"
error_message: Optional[str] = None
file_size: Optional[int] = None
duration: Optional[float] = None
video_codec: Optional[str] = None
audio_codec: Optional[str] = None
width: Optional[int] = None
height: Optional[int] = None
framerate: Optional[float] = None
bitrate: Optional[int] = None
properties: Dict[str, Any]
comments: str = ""
tags: List[str] = Field(default_factory=list)
created_at: Optional[datetime] = None
updated_at: Optional[datetime] = None
class TranscodePreset(BaseModel):
"""A reusable transcoding configuration (like Handbrake presets)."""
id: UUID
name: str
description: str = ""
is_builtin: bool = False
container: str = "mp4"
video_codec: str = "libx264"
video_bitrate: Optional[str] = None
video_crf: Optional[int] = None
video_preset: Optional[str] = None
resolution: Optional[str] = None
framerate: Optional[float] = None
audio_codec: str = "aac"
audio_bitrate: Optional[str] = None
audio_channels: Optional[int] = None
audio_samplerate: Optional[int] = None
extra_args: List[str] = Field(default_factory=list)
created_at: Optional[datetime] = None
updated_at: Optional[datetime] = None
class TranscodeJob(BaseModel):
"""A transcoding or trimming job in the queue."""
id: UUID
source_asset_id: UUID
preset_id: Optional[UUID] = None
preset_snapshot: Dict[str, Any]
trim_start: Optional[float] = None
trim_end: Optional[float] = None
output_filename: str = ""
output_path: Optional[str] = None
output_asset_id: Optional[UUID] = None
status: JobStatus = "JobStatus.PENDING"
progress: float = 0.0
current_frame: Optional[int] = None
current_time: Optional[float] = None
speed: Optional[str] = None
error_message: Optional[str] = None
celery_task_id: Optional[str] = None
priority: int = 0
created_at: Optional[datetime] = None
started_at: Optional[datetime] = None
completed_at: Optional[datetime] = None

90
core/storage.py Normal file
View File

@@ -0,0 +1,90 @@
"""
S3 storage layer.
Uses MinIO locally (S3-compatible) and real AWS S3 in production.
The only difference is S3_ENDPOINT_URL: set for MinIO, omit for AWS.
"""
import os
import tempfile
from pathlib import Path
from typing import Optional
import boto3
from botocore.config import Config
# Bucket names; overridable via env so AWS deployments can point elsewhere.
BUCKET_IN = os.environ.get("S3_BUCKET_IN", "mpr-media-in")
BUCKET_OUT = os.environ.get("S3_BUCKET_OUT", "mpr-media-out")
def get_s3_client():
    """Get a boto3 S3 client. Works with both MinIO and real AWS S3."""
    client_kwargs = {
        "region_name": os.environ.get("AWS_REGION", "us-east-1"),
        "config": Config(signature_version="s3v4"),
    }
    endpoint = os.environ.get("S3_ENDPOINT_URL")
    if endpoint:
        # MinIO mode: explicit endpoint plus static credentials (defaults
        # match the local docker-compose setup). Without an endpoint, boto3
        # falls back to its normal AWS credential chain.
        client_kwargs["endpoint_url"] = endpoint
        client_kwargs["aws_access_key_id"] = os.environ.get("AWS_ACCESS_KEY_ID", "minioadmin")
        client_kwargs["aws_secret_access_key"] = os.environ.get("AWS_SECRET_ACCESS_KEY", "minioadmin")
    return boto3.client("s3", **client_kwargs)
def list_objects(bucket: str, prefix: str = "", extensions: Optional[set] = None) -> list[dict]:
    """List objects under *prefix* in *bucket*, optionally filtered by file extension."""
    client = get_s3_client()
    results = []
    request = {"Bucket": bucket, "Prefix": prefix}
    while True:
        page = client.list_objects_v2(**request)
        for entry in page.get("Contents", []):
            key = entry["Key"]
            # When an extension filter is given, drop keys whose suffix is not allowed.
            if extensions and Path(key).suffix.lower() not in extensions:
                continue
            results.append({
                "key": key,
                "size": entry["Size"],
                "filename": Path(key).name,
            })
        if not page.get("IsTruncated"):
            return results
        # Paginate: continue from where the previous page stopped.
        request["ContinuationToken"] = page["NextContinuationToken"]
def download_file(bucket: str, key: str, local_path: str) -> str:
    """Fetch s3://bucket/key into *local_path*, creating parent directories."""
    Path(local_path).parent.mkdir(parents=True, exist_ok=True)
    get_s3_client().download_file(bucket, key, local_path)
    return local_path
def download_to_temp(bucket: str, key: str) -> str:
    """Download an S3 object into a fresh temp file (extension preserved).

    The caller is responsible for deleting the returned path.
    """
    handle, tmp_path = tempfile.mkstemp(suffix=Path(key).suffix)
    os.close(handle)  # we only need the path; download_file reopens it
    return download_file(bucket, key, tmp_path)
def upload_file(local_path: str, bucket: str, key: str) -> None:
    """Push a local file up to s3://bucket/key."""
    get_s3_client().upload_file(local_path, bucket, key)
def get_presigned_url(bucket: str, key: str, expires: int = 3600) -> str:
    """Return a time-limited GET URL for an S3 object (default 1 hour)."""
    client = get_s3_client()
    params = {"Bucket": bucket, "Key": key}
    return client.generate_presigned_url("get_object", Params=params, ExpiresIn=expires)

View File

@@ -27,5 +27,13 @@ GRPC_HOST=grpc
GRPC_PORT=50051 GRPC_PORT=50051
GRPC_MAX_WORKERS=10 GRPC_MAX_WORKERS=10
# S3 Storage (MinIO locally, real S3 on AWS)
S3_ENDPOINT_URL=http://minio:9000
S3_BUCKET_IN=mpr-media-in
S3_BUCKET_OUT=mpr-media-out
AWS_REGION=us-east-1
AWS_ACCESS_KEY_ID=minioadmin
AWS_SECRET_ACCESS_KEY=minioadmin
# Vite # Vite
VITE_ALLOWED_HOSTS=your-domain.local VITE_ALLOWED_HOSTS=your-domain.local

View File

@@ -1,18 +1,17 @@
#!/bin/bash #!/bin/bash
# Deploy MPR to remote server via rsync # MPR Deploy Script
# Uses project .gitignore for excludes
# #
# Usage: ./ctrl/deploy.sh [--restart] [--dry-run] # Usage: ./ctrl/deploy.sh <command> [options]
# #
# Examples: # Commands:
# ./ctrl/deploy.sh # Sync files only # rsync [--restart] [--dry-run] Sync to remote server via rsync
# ./ctrl/deploy.sh --restart # Sync and restart services # aws Deploy AWS infrastructure (Lambda, Step Functions, S3)
# ./ctrl/deploy.sh --dry-run # Preview sync
set -e set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)" PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
cd "$PROJECT_ROOT"
source "$SCRIPT_DIR/.env" 2>/dev/null || true source "$SCRIPT_DIR/.env" 2>/dev/null || true
@@ -21,56 +20,268 @@ GREEN='\033[0;32m'
YELLOW='\033[1;33m' YELLOW='\033[1;33m'
NC='\033[0m' NC='\033[0m'
if [ -z "$SERVER" ] || [ -z "$REMOTE_PATH" ]; then # ─── Rsync Deploy ─────────────────────────────────────────────────────────────
deploy_rsync() {
if [ -z "${SERVER:-}" ] || [ -z "${REMOTE_PATH:-}" ]; then
echo -e "${RED}Error: SERVER and REMOTE_PATH must be set in ctrl/.env${NC}" echo -e "${RED}Error: SERVER and REMOTE_PATH must be set in ctrl/.env${NC}"
echo "Example:" echo "Example:"
echo " SERVER=user@host" echo " SERVER=user@host"
echo " REMOTE_PATH=~/mpr" echo " REMOTE_PATH=~/mpr"
exit 1 exit 1
fi fi
RESTART=false RESTART=false
DRY_RUN="" DRY_RUN=""
while [ $# -gt 0 ]; do while [ $# -gt 0 ]; do
case "$1" in case "$1" in
--restart) --restart) RESTART=true; shift ;;
RESTART=true --dry-run) DRY_RUN="--dry-run"; shift ;;
shift *) echo "Unknown option: $1"; exit 1 ;;
;;
--dry-run)
DRY_RUN="--dry-run"
shift
;;
*)
echo "Unknown option: $1"
exit 1
;;
esac esac
done done
echo -e "${GREEN}=== Deploying MPR to $SERVER:$REMOTE_PATH ===${NC}" echo -e "${GREEN}=== Deploying MPR to $SERVER:$REMOTE_PATH ===${NC}"
# Sync files using .gitignore for excludes echo -e "${YELLOW}Syncing files...${NC}"
echo -e "${YELLOW}Syncing files...${NC}" rsync -avz --delete $DRY_RUN \
rsync -avz --delete $DRY_RUN \
--filter=':- .gitignore' \ --filter=':- .gitignore' \
--exclude='.git' \ --exclude='.git' \
--exclude='media/*' \ --exclude='media/*' \
--exclude='ctrl/.env' \ --exclude='ctrl/.env' \
"$PROJECT_ROOT/" "$SERVER:$REMOTE_PATH/" "$PROJECT_ROOT/" "$SERVER:$REMOTE_PATH/"
if [ -n "$DRY_RUN" ]; then if [ -n "$DRY_RUN" ]; then
echo -e "${YELLOW}Dry run - no changes made${NC}" echo -e "${YELLOW}Dry run - no changes made${NC}"
exit 0 exit 0
fi fi
# Copy env template if .env doesn't exist on remote ssh "$SERVER" "[ -f $REMOTE_PATH/ctrl/.env ] || cp $REMOTE_PATH/ctrl/.env.template $REMOTE_PATH/ctrl/.env"
ssh "$SERVER" "[ -f $REMOTE_PATH/ctrl/.env ] || cp $REMOTE_PATH/ctrl/.env.template $REMOTE_PATH/ctrl/.env"
if [ "$RESTART" = true ]; then if [ "$RESTART" = true ]; then
echo -e "${YELLOW}Restarting services...${NC}" echo -e "${YELLOW}Restarting services...${NC}"
ssh "$SERVER" "cd $REMOTE_PATH/ctrl && docker compose down && docker compose up -d --build" ssh "$SERVER" "cd $REMOTE_PATH/ctrl && docker compose down && docker compose up -d --build"
fi fi
echo -e "${GREEN}Done!${NC}" echo -e "${GREEN}Done!${NC}"
}
# ─── AWS Deploy ────────────────────────────────────────────────────────────────
deploy_aws() {
    # Deploy the full AWS side of MPR: S3 buckets, IAM roles, ECR repo,
    # the FFmpeg Lambda container, and the Step Functions state machine.
    # Idempotent: each resource is created only if it does not already exist.
    REGION="${AWS_REGION:-us-east-1}"
    ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text)
    PROJECT="mpr"

    # S3
    BUCKET_IN="${S3_BUCKET_IN:-mpr-media-in}"
    BUCKET_OUT="${S3_BUCKET_OUT:-mpr-media-out}"

    # ECR
    ECR_REPO="${PROJECT}-transcode"
    ECR_URI="${ACCOUNT_ID}.dkr.ecr.${REGION}.amazonaws.com/${ECR_REPO}"

    # Lambda
    LAMBDA_NAME="${PROJECT}-transcode"
    LAMBDA_TIMEOUT=900
    LAMBDA_MEMORY=2048

    # Step Functions
    SFN_NAME="${PROJECT}-transcode"

    # IAM
    LAMBDA_ROLE_NAME="${PROJECT}-lambda-role"
    SFN_ROLE_NAME="${PROJECT}-sfn-role"

    # Callback (echoed in the summary for the application .env)
    CALLBACK_URL="${CALLBACK_URL:-https://mpr.mcrn.ar/api}"
    CALLBACK_API_KEY="${CALLBACK_API_KEY:-changeme}"

    echo -e "${GREEN}=== Deploying MPR to AWS ($REGION, account $ACCOUNT_ID) ===${NC}"

    # ─── S3 Buckets ───────────────────────────────────────────────────────
    echo -e "${YELLOW}Creating S3 buckets...${NC}"
    for bucket in "$BUCKET_IN" "$BUCKET_OUT"; do
        if ! aws s3api head-bucket --bucket "$bucket" 2>/dev/null; then
            # BUG FIX: CreateBucket rejects LocationConstraint="us-east-1"
            # (the default region!) — the parameter must be omitted there.
            if [ "$REGION" = "us-east-1" ]; then
                aws s3api create-bucket \
                    --bucket "$bucket" \
                    --region "$REGION"
            else
                aws s3api create-bucket \
                    --bucket "$bucket" \
                    --region "$REGION" \
                    --create-bucket-configuration LocationConstraint="$REGION"
            fi
            echo " Created $bucket"
        else
            echo " $bucket already exists"
        fi
    done

    # ─── IAM Roles ────────────────────────────────────────────────────────
    echo -e "${YELLOW}Creating IAM roles...${NC}"
    if ! aws iam get-role --role-name "$LAMBDA_ROLE_NAME" 2>/dev/null; then
        aws iam create-role \
            --role-name "$LAMBDA_ROLE_NAME" \
            --assume-role-policy-document '{
                "Version": "2012-10-17",
                "Statement": [{
                    "Effect": "Allow",
                    "Principal": {"Service": "lambda.amazonaws.com"},
                    "Action": "sts:AssumeRole"
                }]
            }'
        aws iam attach-role-policy \
            --role-name "$LAMBDA_ROLE_NAME" \
            --policy-arn arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole
        aws iam put-role-policy \
            --role-name "$LAMBDA_ROLE_NAME" \
            --policy-name "${PROJECT}-s3-access" \
            --policy-document '{
                "Version": "2012-10-17",
                "Statement": [{
                    "Effect": "Allow",
                    "Action": ["s3:GetObject", "s3:PutObject"],
                    "Resource": [
                        "arn:aws:s3:::'"$BUCKET_IN"'/*",
                        "arn:aws:s3:::'"$BUCKET_OUT"'/*"
                    ]
                }]
            }'
        echo " Created $LAMBDA_ROLE_NAME"
        echo " Waiting for role to propagate..."
        # Newly created IAM roles are eventually consistent; give them time
        # before Lambda tries to assume the role.
        sleep 10
    else
        echo " $LAMBDA_ROLE_NAME already exists"
    fi
    LAMBDA_ROLE_ARN=$(aws iam get-role --role-name "$LAMBDA_ROLE_NAME" --query Role.Arn --output text)

    if ! aws iam get-role --role-name "$SFN_ROLE_NAME" 2>/dev/null; then
        aws iam create-role \
            --role-name "$SFN_ROLE_NAME" \
            --assume-role-policy-document '{
                "Version": "2012-10-17",
                "Statement": [{
                    "Effect": "Allow",
                    "Principal": {"Service": "states.amazonaws.com"},
                    "Action": "sts:AssumeRole"
                }]
            }'
        aws iam put-role-policy \
            --role-name "$SFN_ROLE_NAME" \
            --policy-name "${PROJECT}-sfn-invoke-lambda" \
            --policy-document '{
                "Version": "2012-10-17",
                "Statement": [{
                    "Effect": "Allow",
                    "Action": "lambda:InvokeFunction",
                    "Resource": "arn:aws:lambda:'"$REGION"':'"$ACCOUNT_ID"':function:'"$LAMBDA_NAME"'"
                }]
            }'
        echo " Created $SFN_ROLE_NAME"
        sleep 10
    else
        echo " $SFN_ROLE_NAME already exists"
    fi
    SFN_ROLE_ARN=$(aws iam get-role --role-name "$SFN_ROLE_NAME" --query Role.Arn --output text)

    # ─── ECR Repository ──────────────────────────────────────────────────
    echo -e "${YELLOW}Setting up ECR...${NC}"
    if ! aws ecr describe-repositories --repository-names "$ECR_REPO" --region "$REGION" 2>/dev/null; then
        aws ecr create-repository --repository-name "$ECR_REPO" --region "$REGION"
        echo " Created ECR repo $ECR_REPO"
    else
        echo " ECR repo $ECR_REPO already exists"
    fi

    # ─── Build & Push Lambda Image ───────────────────────────────────────
    echo -e "${YELLOW}Building Lambda container image...${NC}"
    docker build -f ctrl/lambda/Dockerfile -t "${ECR_REPO}:latest" .

    echo -e "${YELLOW}Pushing to ECR...${NC}"
    aws ecr get-login-password --region "$REGION" | \
        docker login --username AWS --password-stdin "${ACCOUNT_ID}.dkr.ecr.${REGION}.amazonaws.com"
    docker tag "${ECR_REPO}:latest" "${ECR_URI}:latest"
    docker push "${ECR_URI}:latest"

    # ─── Lambda Function ─────────────────────────────────────────────────
    echo -e "${YELLOW}Deploying Lambda function...${NC}"
    LAMBDA_ARN="arn:aws:lambda:${REGION}:${ACCOUNT_ID}:function:${LAMBDA_NAME}"
    if aws lambda get-function --function-name "$LAMBDA_NAME" --region "$REGION" 2>/dev/null; then
        # NOTE: only the image is updated here; environment variables set at
        # creation time are left untouched on redeploys.
        aws lambda update-function-code \
            --function-name "$LAMBDA_NAME" \
            --image-uri "${ECR_URI}:latest" \
            --region "$REGION"
        echo " Updated $LAMBDA_NAME"
    else
        # BUG FIX: AWS_REGION is a reserved Lambda runtime variable and cannot
        # be passed via --environment (create-function fails with
        # InvalidParameterValueException); the runtime provides it automatically.
        aws lambda create-function \
            --function-name "$LAMBDA_NAME" \
            --package-type Image \
            --code ImageUri="${ECR_URI}:latest" \
            --role "$LAMBDA_ROLE_ARN" \
            --timeout "$LAMBDA_TIMEOUT" \
            --memory-size "$LAMBDA_MEMORY" \
            --environment "Variables={S3_BUCKET_IN=${BUCKET_IN},S3_BUCKET_OUT=${BUCKET_OUT}}" \
            --region "$REGION"
        echo " Created $LAMBDA_NAME"
    fi

    # ─── Step Functions ───────────────────────────────────────────────────
    echo -e "${YELLOW}Deploying Step Functions state machine...${NC}"
    # Substitute the concrete Lambda ARN into the state-machine template.
    SFN_DEFINITION=$(sed "s|\${TranscodeLambdaArn}|${LAMBDA_ARN}|g" ctrl/state-machine.json)
    SFN_ARN="arn:aws:states:${REGION}:${ACCOUNT_ID}:stateMachine:${SFN_NAME}"
    if aws stepfunctions describe-state-machine --state-machine-arn "$SFN_ARN" --region "$REGION" 2>/dev/null; then
        aws stepfunctions update-state-machine \
            --state-machine-arn "$SFN_ARN" \
            --definition "$SFN_DEFINITION" \
            --region "$REGION"
        echo " Updated $SFN_NAME"
    else
        aws stepfunctions create-state-machine \
            --name "$SFN_NAME" \
            --definition "$SFN_DEFINITION" \
            --role-arn "$SFN_ROLE_ARN" \
            --region "$REGION"
        echo " Created $SFN_NAME"
    fi

    # ─── Summary ──────────────────────────────────────────────────────────
    echo ""
    echo -e "${GREEN}Deployment complete!${NC}"
    echo ""
    echo "Add these to your .env:"
    echo " MPR_EXECUTOR=lambda"
    echo " STEP_FUNCTION_ARN=${SFN_ARN}"
    echo " LAMBDA_FUNCTION_ARN=${LAMBDA_ARN}"
    echo " S3_BUCKET_IN=${BUCKET_IN}"
    echo " S3_BUCKET_OUT=${BUCKET_OUT}"
    echo " CALLBACK_URL=${CALLBACK_URL}"
    echo " CALLBACK_API_KEY=${CALLBACK_API_KEY}"
}
# ─── Main ──────────────────────────────────────────────────────────────────────
# Route the first CLI argument to the matching deploy_* function.
subcommand="${1:-}"
shift || true

if [ "$subcommand" = "rsync" ]; then
    deploy_rsync "$@"
elif [ "$subcommand" = "aws" ]; then
    deploy_aws "$@"
else
    echo "Usage: ./ctrl/deploy.sh <command> [options]"
    echo ""
    echo "Commands:"
    echo " rsync [--restart] [--dry-run] Sync to remote server"
    echo " aws Deploy AWS infrastructure"
    exit 1
fi

View File

@@ -5,6 +5,12 @@ x-common-env: &common-env
DEBUG: 1 DEBUG: 1
GRPC_HOST: grpc GRPC_HOST: grpc
GRPC_PORT: 50051 GRPC_PORT: 50051
S3_ENDPOINT_URL: http://minio:9000
S3_BUCKET_IN: mpr-media-in
S3_BUCKET_OUT: mpr-media-out
AWS_ACCESS_KEY_ID: minioadmin
AWS_SECRET_ACCESS_KEY: minioadmin
AWS_REGION: us-east-1
x-healthcheck-defaults: &healthcheck-defaults x-healthcheck-defaults: &healthcheck-defaults
interval: 5s interval: 5s
@@ -40,17 +46,46 @@ services:
<<: *healthcheck-defaults <<: *healthcheck-defaults
test: ["CMD", "redis-cli", "ping"] test: ["CMD", "redis-cli", "ping"]
minio:
image: minio/minio
command: ["server", "/data", "--console-address", ":9001"]
ports:
- "9000:9000"
- "9001:9001"
environment:
MINIO_ROOT_USER: minioadmin
MINIO_ROOT_PASSWORD: minioadmin
volumes:
- minio-data:/data
healthcheck:
<<: *healthcheck-defaults
test: ["CMD", "mc", "ready", "local"]
minio-init:
image: minio/mc
depends_on:
minio:
condition: service_healthy
entrypoint: ["/bin/sh", "-c"]
command:
- |
mc alias set local http://minio:9000 minioadmin minioadmin
mc mb --ignore-existing local/mpr-media-in
mc mb --ignore-existing local/mpr-media-out
mc anonymous set download local/mpr-media-in
mc anonymous set download local/mpr-media-out
nginx: nginx:
image: nginx:alpine image: nginx:alpine
ports: ports:
- "80:80" - "80:80"
volumes: volumes:
- ./nginx.conf:/etc/nginx/nginx.conf:ro - ./nginx.conf:/etc/nginx/nginx.conf:ro
- ../media:/app/media:ro
depends_on: depends_on:
- django - django
- fastapi - fastapi
- timeline - timeline
- minio
# ============================================================================= # =============================================================================
# Application Services # Application Services
@@ -70,7 +105,6 @@ services:
<<: *common-env <<: *common-env
volumes: volumes:
- ..:/app - ..:/app
- ../media:/app/media
depends_on: depends_on:
postgres: postgres:
condition: service_healthy condition: service_healthy
@@ -88,7 +122,6 @@ services:
<<: *common-env <<: *common-env
volumes: volumes:
- ..:/app - ..:/app
- ../media:/app/media
depends_on: depends_on:
postgres: postgres:
condition: service_healthy condition: service_healthy
@@ -108,7 +141,6 @@ services:
GRPC_MAX_WORKERS: 10 GRPC_MAX_WORKERS: 10
volumes: volumes:
- ..:/app - ..:/app
- ../media:/app/media
depends_on: depends_on:
postgres: postgres:
condition: service_healthy condition: service_healthy
@@ -119,13 +151,12 @@ services:
build: build:
context: .. context: ..
dockerfile: ctrl/Dockerfile dockerfile: ctrl/Dockerfile
command: celery -A mpr worker -l info -Q default -c 2 command: celery -A mpr worker -l info -Q transcode -c 2
environment: environment:
<<: *common-env <<: *common-env
MPR_EXECUTOR: local MPR_EXECUTOR: local
volumes: volumes:
- ..:/app - ..:/app
- ../media:/app/media
depends_on: depends_on:
postgres: postgres:
condition: service_healthy condition: service_healthy
@@ -148,6 +179,7 @@ services:
volumes: volumes:
postgres-data: postgres-data:
redis-data: redis-data:
minio-data:
networks: networks:
default: default:

View File

@@ -1,36 +1,12 @@
#!/bin/bash #!/bin/bash
# Model generation script for MPR # Model generation script for MPR
# Generates Django, Pydantic, TypeScript, and Protobuf from schema/models # Generates all targets from schema/modelgen.json config
set -e set -e
cd "$(dirname "$0")/.." cd "$(dirname "$0")/.."
echo "Generating models from schema/models..." echo "Generating models from schema/models..."
python -m modelgen generate --config schema/modelgen.json
# Django ORM models
python -m modelgen from-schema \
--schema schema/models \
--output mpr/media_assets/models.py \
--targets django
# Pydantic schemas for FastAPI
python -m modelgen from-schema \
--schema schema/models \
--output api/schemas/models.py \
--targets pydantic
# TypeScript types for Timeline UI
python -m modelgen from-schema \
--schema schema/models \
--output ui/timeline/src/types.ts \
--targets typescript
# Protobuf for gRPC
python -m modelgen from-schema \
--schema schema/models \
--output rpc/protos/worker.proto \
--targets proto
# Generate gRPC stubs from proto # Generate gRPC stubs from proto
echo "Generating gRPC stubs..." echo "Generating gRPC stubs..."

21
ctrl/lambda/Dockerfile Normal file
View File

@@ -0,0 +1,21 @@
# Lambda container image for MPR transcoding: AWS Lambda Python base image
# plus a static ffmpeg/ffprobe build and the task handler code.
FROM public.ecr.aws/lambda/python:3.11
# Install ffmpeg static binary
# NOTE(review): the tarball is fetched over HTTPS but not checksum-verified
# and tracks "release" (unpinned) — consider pinning a version and verifying
# the published checksum.
RUN yum install -y tar xz && \
    curl -L https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz -o /tmp/ffmpeg.tar.xz && \
    tar -xf /tmp/ffmpeg.tar.xz -C /tmp && \
    cp /tmp/ffmpeg-*-amd64-static/ffmpeg /usr/local/bin/ffmpeg && \
    cp /tmp/ffmpeg-*-amd64-static/ffprobe /usr/local/bin/ffprobe && \
    rm -rf /tmp/ffmpeg* && \
    yum clean all
# Install Python dependencies
COPY ctrl/lambda/requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Copy application code
# Only the handler and the shared core/ package are shipped; the rest of the
# repo stays out of the image.
COPY task/lambda_handler.py ${LAMBDA_TASK_ROOT}/task/lambda_handler.py
COPY task/__init__.py ${LAMBDA_TASK_ROOT}/task/__init__.py
COPY core/ ${LAMBDA_TASK_ROOT}/core/
# Lambda entry point: module.path.function resolved by the runtime.
CMD ["task.lambda_handler.handler"]

View File

@@ -0,0 +1,2 @@
ffmpeg-python>=0.2.0
requests>=2.31.0

View File

@@ -21,6 +21,10 @@ http {
server timeline:5173; server timeline:5173;
} }
upstream minio {
server minio:9000;
}
server { server {
listen 80; listen 80;
server_name mpr.local.ar; server_name mpr.local.ar;
@@ -67,10 +71,15 @@ http {
proxy_set_header Host $host; proxy_set_header Host $host;
} }
# Media files # Media files - proxied from MinIO (local) or S3 (AWS)
location /media { location /media/in/ {
alias /app/media; proxy_pass http://minio/mpr-media-in/;
autoindex on; proxy_set_header Host $http_host;
}
location /media/out/ {
proxy_pass http://minio/mpr-media-out/;
proxy_set_header Host $http_host;
} }
# Default to Timeline UI # Default to Timeline UI

39
ctrl/state-machine.json Normal file
View File

@@ -0,0 +1,39 @@
{
"Comment": "MPR Transcode Job - orchestrates Lambda-based media transcoding",
"StartAt": "Transcode",
"States": {
"Transcode": {
"Type": "Task",
"Resource": "${TranscodeLambdaArn}",
"TimeoutSeconds": 900,
"Retry": [
{
"ErrorEquals": ["States.TaskFailed", "Lambda.ServiceException"],
"IntervalSeconds": 10,
"MaxAttempts": 2,
"BackoffRate": 2.0
}
],
"Catch": [
{
"ErrorEquals": ["States.ALL"],
"Next": "HandleError",
"ResultPath": "$.error"
}
],
"Next": "Done"
},
"HandleError": {
"Type": "Pass",
"Parameters": {
"status": "failed",
"job_id.$": "$.job_id",
"error.$": "$.error.Cause"
},
"Next": "Done"
},
"Done": {
"Type": "Succeed"
}
}
}

View File

@@ -3,13 +3,11 @@ digraph system_overview {
node [shape=box, style=rounded, fontname="Helvetica"] node [shape=box, style=rounded, fontname="Helvetica"]
edge [fontname="Helvetica", fontsize=10] edge [fontname="Helvetica", fontsize=10]
// Title
labelloc="t" labelloc="t"
label="MPR - System Overview" label="MPR - System Overview"
fontsize=16 fontsize=16
fontname="Helvetica-Bold" fontname="Helvetica-Bold"
// Styling
graph [splines=ortho, nodesep=0.8, ranksep=0.8] graph [splines=ortho, nodesep=0.8, ranksep=0.8]
// External // External
@@ -18,7 +16,7 @@ digraph system_overview {
style=dashed style=dashed
color=gray color=gray
browser [label="Browser\nmpr.local.ar", shape=ellipse] browser [label="Browser\nmpr.local.ar / mpr.mcrn.ar", shape=ellipse]
} }
// Nginx reverse proxy // Nginx reverse proxy
@@ -37,7 +35,7 @@ digraph system_overview {
fillcolor="#f0f8e8" fillcolor="#f0f8e8"
django [label="Django\n/admin\nport 8701"] django [label="Django\n/admin\nport 8701"]
fastapi [label="FastAPI\n/api\nport 8702"] fastapi [label="FastAPI\n/api + /graphql\nport 8702"]
timeline [label="Timeline UI\n/ui\nport 5173"] timeline [label="Timeline UI\n/ui\nport 5173"]
} }
@@ -48,8 +46,17 @@ digraph system_overview {
fillcolor="#fff8e8" fillcolor="#fff8e8"
grpc_server [label="gRPC Server\nport 50051"] grpc_server [label="gRPC Server\nport 50051"]
celery [label="Celery Worker\n(local)"] celery [label="Celery Worker\n(local mode)"]
lambda [label="Lambda\n(cloud)", style="dashed,rounded"] }
// AWS layer
subgraph cluster_aws {
label="AWS (lambda mode)"
style=filled
fillcolor="#fde8d0"
step_functions [label="Step Functions\nstate machine"]
lambda [label="Lambda\nFFmpeg container"]
} }
// Data layer // Data layer
@@ -58,48 +65,50 @@ digraph system_overview {
style=filled style=filled
fillcolor="#f8e8f0" fillcolor="#f8e8f0"
postgres [label="PostgreSQL\nport 5433", shape=cylinder] postgres [label="PostgreSQL\nport 5436", shape=cylinder]
redis [label="Redis\nport 6380", shape=cylinder] redis [label="Redis\nport 6381", shape=cylinder]
sqs [label="SQS\n(cloud)", shape=cylinder, style=dashed]
} }
// Storage // Storage
subgraph cluster_storage { subgraph cluster_storage {
label="File Storage" label="S3 Storage"
style=filled style=filled
fillcolor="#f0f0f0" fillcolor="#f0f0f0"
local_fs [label="Local FS\n/media", shape=folder] minio [label="MinIO (local)\nport 9000", shape=folder]
s3 [label="S3\n(cloud)", shape=folder, style=dashed] s3 [label="AWS S3 (cloud)", shape=folder, style="dashed,rounded"]
bucket_in [label="mpr-media-in", shape=note]
bucket_out [label="mpr-media-out", shape=note]
} }
// Connections // Connections
browser -> nginx browser -> nginx
nginx -> django [label="/admin"] nginx -> django [xlabel="/admin"]
nginx -> fastapi [label="/api"] nginx -> fastapi [xlabel="/api, /graphql"]
nginx -> timeline [label="/ui"] nginx -> timeline [xlabel="/ui"]
nginx -> minio [xlabel="/media/*"]
// Django uses FastAPI for operations (single API gateway) timeline -> fastapi [xlabel="REST API"]
django -> fastapi [label="job operations"]
django -> postgres [label="CRUD only"]
// Timeline UI uses FastAPI
timeline -> fastapi [label="REST API"]
// FastAPI is the single API gateway
fastapi -> postgres fastapi -> postgres
fastapi -> redis [label="job status"] fastapi -> grpc_server [xlabel="gRPC\nprogress"]
fastapi -> grpc_server [label="gRPC\nprogress streaming"]
// Worker layer // Local mode
grpc_server -> celery [label="task dispatch"] grpc_server -> celery [xlabel="task dispatch"]
celery -> redis [label="queue"] celery -> redis [xlabel="queue"]
celery -> postgres [label="job updates"] celery -> postgres [xlabel="job updates"]
celery -> grpc_server [label="progress\ncallbacks", style=dotted] celery -> minio [xlabel="S3 API\ndownload/upload"]
celery -> local_fs [label="read/write"]
// Cloud (future) // Lambda mode
lambda -> sqs [label="queue", style=dashed] fastapi -> step_functions [xlabel="boto3\nstart_execution", style=dashed]
lambda -> s3 [label="read/write", style=dashed] step_functions -> lambda [style=dashed]
lambda -> s3 [xlabel="download/upload", style=dashed]
lambda -> fastapi [xlabel="callback\nPOST /jobs/{id}/callback", style=dashed]
// Storage details
minio -> bucket_in [style=dotted, arrowhead=none]
minio -> bucket_out [style=dotted, arrowhead=none]
s3 -> bucket_in [style=dotted, arrowhead=none]
s3 -> bucket_out [style=dotted, arrowhead=none]
} }

View File

@@ -1,260 +1,293 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?> <?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" <!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"> "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<!-- Generated by graphviz version 14.1.1 (0) <!-- Generated by graphviz version 14.1.2 (0)
--> -->
<!-- Title: system_overview Pages: 1 --> <!-- Title: system_overview Pages: 1 -->
<svg width="843pt" height="957pt" <svg width="620pt" height="903pt"
viewBox="0.00 0.00 843.00 957.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> viewBox="0.00 0.00 620.00 903.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 952.79)"> <g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 898.54)">
<title>system_overview</title> <title>system_overview</title>
<polygon fill="white" stroke="none" points="-4,4 -4,-952.79 838.5,-952.79 838.5,4 -4,4"/> <polygon fill="white" stroke="none" points="-4,4 -4,-898.54 616,-898.54 616,4 -4,4"/>
<text xml:space="preserve" text-anchor="middle" x="417.25" y="-929.59" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">MPR &#45; System Overview</text> <text xml:space="preserve" text-anchor="middle" x="306" y="-875.34" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">MPR &#45; System Overview</text>
<g id="clust1" class="cluster"> <g id="clust1" class="cluster">
<title>cluster_external</title> <title>cluster_external</title>
<polygon fill="none" stroke="gray" stroke-dasharray="5,2" points="478,-809.69 478,-913.29 632,-913.29 632,-809.69 478,-809.69"/> <polygon fill="none" stroke="gray" stroke-dasharray="5,2" points="246,-755.44 246,-859.04 540,-859.04 540,-755.44 246,-755.44"/>
<text xml:space="preserve" text-anchor="middle" x="555" y="-894.09" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">External</text> <text xml:space="preserve" text-anchor="middle" x="393" y="-839.84" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">External</text>
</g> </g>
<g id="clust2" class="cluster"> <g id="clust2" class="cluster">
<title>cluster_proxy</title> <title>cluster_proxy</title>
<polygon fill="#e8f4f8" stroke="black" points="482,-693.69 482,-779.69 628,-779.69 628,-693.69 482,-693.69"/> <polygon fill="#e8f4f8" stroke="black" points="320,-654.94 320,-740.94 466,-740.94 466,-654.94 320,-654.94"/>
<text xml:space="preserve" text-anchor="middle" x="555" y="-760.49" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Reverse Proxy</text> <text xml:space="preserve" text-anchor="middle" x="393" y="-721.74" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Reverse Proxy</text>
</g> </g>
<g id="clust3" class="cluster"> <g id="clust3" class="cluster">
<title>cluster_apps</title> <title>cluster_apps</title>
<polygon fill="#f0f8e8" stroke="black" points="352,-418.19 352,-651.94 606,-651.94 606,-418.19 352,-418.19"/> <polygon fill="#f0f8e8" stroke="black" points="278,-419.44 278,-640.44 532,-640.44 532,-419.44 278,-419.44"/>
<text xml:space="preserve" text-anchor="middle" x="479" y="-632.74" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Application Layer</text> <text xml:space="preserve" text-anchor="middle" x="405" y="-621.24" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Application Layer</text>
</g> </g>
<g id="clust4" class="cluster"> <g id="clust4" class="cluster">
<title>cluster_workers</title> <title>cluster_workers</title>
<polygon fill="#fff8e8" stroke="black" points="125,-151.69 125,-363.69 374,-363.69 374,-151.69 125,-151.69"/> <polygon fill="#fff8e8" stroke="black" points="142,-218.44 142,-404.94 280,-404.94 280,-218.44 142,-218.44"/>
<text xml:space="preserve" text-anchor="middle" x="249.5" y="-344.49" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Worker Layer</text> <text xml:space="preserve" text-anchor="middle" x="211" y="-385.74" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Worker Layer</text>
</g> </g>
<g id="clust5" class="cluster"> <g id="clust5" class="cluster">
<title>cluster_data</title> <title>cluster_aws</title>
<polygon fill="#f8e8f0" stroke="black" points="322,-8 322,-109.94 700,-109.94 700,-8 322,-8"/> <polygon fill="#fde8d0" stroke="black" points="383,-218.44 383,-404.94 581,-404.94 581,-218.44 383,-218.44"/>
<text xml:space="preserve" text-anchor="middle" x="511" y="-90.74" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Data Layer</text> <text xml:space="preserve" text-anchor="middle" x="482" y="-385.74" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">AWS (lambda mode)</text>
</g> </g>
<g id="clust6" class="cluster"> <g id="clust6" class="cluster">
<title>cluster_data</title>
<polygon fill="#f8e8f0" stroke="black" points="8,-102 8,-203.94 263,-203.94 263,-102 8,-102"/>
<text xml:space="preserve" text-anchor="middle" x="135.5" y="-184.74" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Data Layer</text>
</g>
<g id="clust7" class="cluster">
<title>cluster_storage</title> <title>cluster_storage</title>
<polygon fill="#f0f0f0" stroke="black" points="8,-15.97 8,-101.97 218,-101.97 218,-15.97 8,-15.97"/> <polygon fill="#f0f0f0" stroke="black" points="302,-8 302,-195.97 604,-195.97 604,-8 302,-8"/>
<text xml:space="preserve" text-anchor="middle" x="113" y="-82.77" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">File Storage</text> <text xml:space="preserve" text-anchor="middle" x="453" y="-176.77" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">S3 Storage</text>
</g> </g>
<!-- browser --> <!-- browser -->
<g id="node1" class="node"> <g id="node1" class="node">
<title>browser</title> <title>browser</title>
<ellipse fill="none" stroke="black" cx="555" cy="-847.74" rx="69.12" ry="30.05"/> <ellipse fill="none" stroke="black" cx="393" cy="-793.49" rx="139.12" ry="30.05"/>
<text xml:space="preserve" text-anchor="middle" x="555" y="-851.69" font-family="Helvetica,sans-Serif" font-size="14.00">Browser</text> <text xml:space="preserve" text-anchor="middle" x="393" y="-797.44" font-family="Helvetica,sans-Serif" font-size="14.00">Browser</text>
<text xml:space="preserve" text-anchor="middle" x="555" y="-834.44" font-family="Helvetica,sans-Serif" font-size="14.00">mpr.local.ar</text> <text xml:space="preserve" text-anchor="middle" x="393" y="-780.19" font-family="Helvetica,sans-Serif" font-size="14.00">mpr.local.ar / mpr.mcrn.ar</text>
</g> </g>
<!-- nginx --> <!-- nginx -->
<g id="node2" class="node"> <g id="node2" class="node">
<title>nginx</title> <title>nginx</title>
<path fill="none" stroke="black" d="M576.5,-744.19C576.5,-744.19 533.5,-744.19 533.5,-744.19 527.5,-744.19 521.5,-738.19 521.5,-732.19 521.5,-732.19 521.5,-713.69 521.5,-713.69 521.5,-707.69 527.5,-701.69 533.5,-701.69 533.5,-701.69 576.5,-701.69 576.5,-701.69 582.5,-701.69 588.5,-707.69 588.5,-713.69 588.5,-713.69 588.5,-732.19 588.5,-732.19 588.5,-738.19 582.5,-744.19 576.5,-744.19"/> <path fill="none" stroke="black" d="M414.5,-705.44C414.5,-705.44 371.5,-705.44 371.5,-705.44 365.5,-705.44 359.5,-699.44 359.5,-693.44 359.5,-693.44 359.5,-674.94 359.5,-674.94 359.5,-668.94 365.5,-662.94 371.5,-662.94 371.5,-662.94 414.5,-662.94 414.5,-662.94 420.5,-662.94 426.5,-668.94 426.5,-674.94 426.5,-674.94 426.5,-693.44 426.5,-693.44 426.5,-699.44 420.5,-705.44 414.5,-705.44"/>
<text xml:space="preserve" text-anchor="middle" x="555" y="-726.89" font-family="Helvetica,sans-Serif" font-size="14.00">nginx</text> <text xml:space="preserve" text-anchor="middle" x="393" y="-688.14" font-family="Helvetica,sans-Serif" font-size="14.00">nginx</text>
<text xml:space="preserve" text-anchor="middle" x="555" y="-709.64" font-family="Helvetica,sans-Serif" font-size="14.00">port 80</text> <text xml:space="preserve" text-anchor="middle" x="393" y="-670.89" font-family="Helvetica,sans-Serif" font-size="14.00">port 80</text>
</g> </g>
<!-- browser&#45;&gt;nginx --> <!-- browser&#45;&gt;nginx -->
<g id="edge1" class="edge"> <g id="edge1" class="edge">
<title>browser&#45;&gt;nginx</title> <title>browser&#45;&gt;nginx</title>
<path fill="none" stroke="black" d="M555,-817.21C555,-817.21 555,-756.06 555,-756.06"/> <path fill="none" stroke="black" d="M393,-763.04C393,-763.04 393,-717.33 393,-717.33"/>
<polygon fill="black" stroke="black" points="558.5,-756.06 555,-746.06 551.5,-756.06 558.5,-756.06"/> <polygon fill="black" stroke="black" points="396.5,-717.33 393,-707.33 389.5,-717.33 396.5,-717.33"/>
</g> </g>
<!-- django --> <!-- django -->
<g id="node3" class="node"> <g id="node3" class="node">
<title>django</title> <title>django</title>
<path fill="none" stroke="black" d="M585.5,-616.44C585.5,-616.44 524.5,-616.44 524.5,-616.44 518.5,-616.44 512.5,-610.44 512.5,-604.44 512.5,-604.44 512.5,-568.69 512.5,-568.69 512.5,-562.69 518.5,-556.69 524.5,-556.69 524.5,-556.69 585.5,-556.69 585.5,-556.69 591.5,-556.69 597.5,-562.69 597.5,-568.69 597.5,-568.69 597.5,-604.44 597.5,-604.44 597.5,-610.44 591.5,-616.44 585.5,-616.44"/> <path fill="none" stroke="black" d="M359.5,-604.94C359.5,-604.94 298.5,-604.94 298.5,-604.94 292.5,-604.94 286.5,-598.94 286.5,-592.94 286.5,-592.94 286.5,-557.19 286.5,-557.19 286.5,-551.19 292.5,-545.19 298.5,-545.19 298.5,-545.19 359.5,-545.19 359.5,-545.19 365.5,-545.19 371.5,-551.19 371.5,-557.19 371.5,-557.19 371.5,-592.94 371.5,-592.94 371.5,-598.94 365.5,-604.94 359.5,-604.94"/>
<text xml:space="preserve" text-anchor="middle" x="555" y="-599.14" font-family="Helvetica,sans-Serif" font-size="14.00">Django</text> <text xml:space="preserve" text-anchor="middle" x="329" y="-587.64" font-family="Helvetica,sans-Serif" font-size="14.00">Django</text>
<text xml:space="preserve" text-anchor="middle" x="555" y="-581.89" font-family="Helvetica,sans-Serif" font-size="14.00">/admin</text> <text xml:space="preserve" text-anchor="middle" x="329" y="-570.39" font-family="Helvetica,sans-Serif" font-size="14.00">/admin</text>
<text xml:space="preserve" text-anchor="middle" x="555" y="-564.64" font-family="Helvetica,sans-Serif" font-size="14.00">port 8701</text> <text xml:space="preserve" text-anchor="middle" x="329" y="-553.14" font-family="Helvetica,sans-Serif" font-size="14.00">port 8701</text>
</g> </g>
<!-- nginx&#45;&gt;django --> <!-- nginx&#45;&gt;django -->
<g id="edge2" class="edge"> <g id="edge2" class="edge">
<title>nginx&#45;&gt;django</title> <title>nginx&#45;&gt;django</title>
<path fill="none" stroke="black" d="M555,-701.33C555,-701.33 555,-628.2 555,-628.2"/> <path fill="none" stroke="black" d="M365.5,-662.63C365.5,-662.63 365.5,-616.77 365.5,-616.77"/>
<polygon fill="black" stroke="black" points="558.5,-628.2 555,-618.2 551.5,-628.2 558.5,-628.2"/> <polygon fill="black" stroke="black" points="369,-616.77 365.5,-606.77 362,-616.77 369,-616.77"/>
<text xml:space="preserve" text-anchor="middle" x="571.88" y="-663.19" font-family="Helvetica,sans-Serif" font-size="10.00">/admin</text> <text xml:space="preserve" text-anchor="middle" x="348.62" y="-642.95" font-family="Helvetica,sans-Serif" font-size="10.00">/admin</text>
</g> </g>
<!-- fastapi --> <!-- fastapi -->
<g id="node4" class="node"> <g id="node4" class="node">
<title>fastapi</title> <title>fastapi</title>
<path fill="none" stroke="black" d="M554.5,-485.94C554.5,-485.94 493.5,-485.94 493.5,-485.94 487.5,-485.94 481.5,-479.94 481.5,-473.94 481.5,-473.94 481.5,-438.19 481.5,-438.19 481.5,-432.19 487.5,-426.19 493.5,-426.19 493.5,-426.19 554.5,-426.19 554.5,-426.19 560.5,-426.19 566.5,-432.19 566.5,-438.19 566.5,-438.19 566.5,-473.94 566.5,-473.94 566.5,-479.94 560.5,-485.94 554.5,-485.94"/> <path fill="none" stroke="black" d="M395.5,-487.19C395.5,-487.19 298.5,-487.19 298.5,-487.19 292.5,-487.19 286.5,-481.19 286.5,-475.19 286.5,-475.19 286.5,-439.44 286.5,-439.44 286.5,-433.44 292.5,-427.44 298.5,-427.44 298.5,-427.44 395.5,-427.44 395.5,-427.44 401.5,-427.44 407.5,-433.44 407.5,-439.44 407.5,-439.44 407.5,-475.19 407.5,-475.19 407.5,-481.19 401.5,-487.19 395.5,-487.19"/>
<text xml:space="preserve" text-anchor="middle" x="524" y="-468.64" font-family="Helvetica,sans-Serif" font-size="14.00">FastAPI</text> <text xml:space="preserve" text-anchor="middle" x="347" y="-469.89" font-family="Helvetica,sans-Serif" font-size="14.00">FastAPI</text>
<text xml:space="preserve" text-anchor="middle" x="524" y="-451.39" font-family="Helvetica,sans-Serif" font-size="14.00">/api</text> <text xml:space="preserve" text-anchor="middle" x="347" y="-452.64" font-family="Helvetica,sans-Serif" font-size="14.00">/api + /graphql</text>
<text xml:space="preserve" text-anchor="middle" x="524" y="-434.14" font-family="Helvetica,sans-Serif" font-size="14.00">port 8702</text> <text xml:space="preserve" text-anchor="middle" x="347" y="-435.39" font-family="Helvetica,sans-Serif" font-size="14.00">port 8702</text>
</g> </g>
<!-- nginx&#45;&gt;fastapi --> <!-- nginx&#45;&gt;fastapi -->
<g id="edge3" class="edge"> <g id="edge3" class="edge">
<title>nginx&#45;&gt;fastapi</title> <title>nginx&#45;&gt;fastapi</title>
<path fill="none" stroke="black" d="M521.02,-716C511.47,-716 503.63,-716 503.63,-716 503.63,-716 503.63,-497.9 503.63,-497.9"/> <path fill="none" stroke="black" d="M383.5,-662.84C383.5,-662.84 383.5,-498.82 383.5,-498.82"/>
<polygon fill="black" stroke="black" points="507.13,-497.9 503.63,-487.9 500.13,-497.9 507.13,-497.9"/> <polygon fill="black" stroke="black" points="387,-498.82 383.5,-488.82 380,-498.82 387,-498.82"/>
<text xml:space="preserve" text-anchor="middle" x="723" y="-583.44" font-family="Helvetica,sans-Serif" font-size="10.00">/api</text> <text xml:space="preserve" text-anchor="middle" x="399.44" y="-571.33" font-family="Helvetica,sans-Serif" font-size="10.00">/api, /graphql</text>
</g> </g>
<!-- timeline --> <!-- timeline -->
<g id="node5" class="node"> <g id="node5" class="node">
<title>timeline</title> <title>timeline</title>
<path fill="none" stroke="black" d="M442,-616.44C442,-616.44 372,-616.44 372,-616.44 366,-616.44 360,-610.44 360,-604.44 360,-604.44 360,-568.69 360,-568.69 360,-562.69 366,-556.69 372,-556.69 372,-556.69 442,-556.69 442,-556.69 448,-556.69 454,-562.69 454,-568.69 454,-568.69 454,-604.44 454,-604.44 454,-610.44 448,-616.44 442,-616.44"/> <path fill="none" stroke="black" d="M512,-604.94C512,-604.94 442,-604.94 442,-604.94 436,-604.94 430,-598.94 430,-592.94 430,-592.94 430,-557.19 430,-557.19 430,-551.19 436,-545.19 442,-545.19 442,-545.19 512,-545.19 512,-545.19 518,-545.19 524,-551.19 524,-557.19 524,-557.19 524,-592.94 524,-592.94 524,-598.94 518,-604.94 512,-604.94"/>
<text xml:space="preserve" text-anchor="middle" x="407" y="-599.14" font-family="Helvetica,sans-Serif" font-size="14.00">Timeline UI</text> <text xml:space="preserve" text-anchor="middle" x="477" y="-587.64" font-family="Helvetica,sans-Serif" font-size="14.00">Timeline UI</text>
<text xml:space="preserve" text-anchor="middle" x="407" y="-581.89" font-family="Helvetica,sans-Serif" font-size="14.00">/ui</text> <text xml:space="preserve" text-anchor="middle" x="477" y="-570.39" font-family="Helvetica,sans-Serif" font-size="14.00">/ui</text>
<text xml:space="preserve" text-anchor="middle" x="407" y="-564.64" font-family="Helvetica,sans-Serif" font-size="14.00">port 5173</text> <text xml:space="preserve" text-anchor="middle" x="477" y="-553.14" font-family="Helvetica,sans-Serif" font-size="14.00">port 5173</text>
</g> </g>
<!-- nginx&#45;&gt;timeline --> <!-- nginx&#45;&gt;timeline -->
<g id="edge4" class="edge"> <g id="edge4" class="edge">
<title>nginx&#45;&gt;timeline</title> <title>nginx&#45;&gt;timeline</title>
<path fill="none" stroke="black" d="M521.05,-730C477.35,-730 407,-730 407,-730 407,-730 407,-628.15 407,-628.15"/> <path fill="none" stroke="black" d="M422.62,-662.67C422.62,-633.49 422.62,-585 422.62,-585 422.62,-585 423.34,-585 423.34,-585"/>
<polygon fill="black" stroke="black" points="410.5,-628.15 407,-618.15 403.5,-628.15 410.5,-628.15"/> <polygon fill="black" stroke="black" points="418.22,-588.5 428.22,-585 418.22,-581.5 418.22,-588.5"/>
<text xml:space="preserve" text-anchor="middle" x="450" y="-663.19" font-family="Helvetica,sans-Serif" font-size="10.00">/ui</text> <text xml:space="preserve" text-anchor="middle" x="416.62" y="-613.98" font-family="Helvetica,sans-Serif" font-size="10.00">/ui</text>
</g> </g>
<!-- django&#45;&gt;fastapi --> <!-- minio -->
<g id="node12" class="node">
<title>minio</title>
<polygon fill="none" stroke="black" points="415.5,-160.47 412.5,-164.47 391.5,-164.47 388.5,-160.47 312.5,-160.47 312.5,-117.97 415.5,-117.97 415.5,-160.47"/>
<text xml:space="preserve" text-anchor="middle" x="364" y="-143.17" font-family="Helvetica,sans-Serif" font-size="14.00">MinIO (local)</text>
<text xml:space="preserve" text-anchor="middle" x="364" y="-125.92" font-family="Helvetica,sans-Serif" font-size="14.00">port 9000</text>
</g>
<!-- nginx&#45;&gt;minio -->
<g id="edge5" class="edge"> <g id="edge5" class="edge">
<title>django&#45;&gt;fastapi</title> <title>nginx&#45;&gt;minio</title>
<path fill="none" stroke="black" d="M539.5,-556.3C539.5,-556.3 539.5,-497.68 539.5,-497.68"/> <path fill="none" stroke="black" d="M414.88,-662.68C414.88,-596.12 414.88,-398 414.88,-398 414.88,-398 344.17,-398 344.17,-398 344.17,-398 344.17,-172.35 344.17,-172.35"/>
<polygon fill="black" stroke="black" points="543,-497.68 539.5,-487.68 536,-497.68 543,-497.68"/> <polygon fill="black" stroke="black" points="347.67,-172.35 344.17,-162.35 340.67,-172.35 347.67,-172.35"/>
<text xml:space="preserve" text-anchor="middle" x="561.88" y="-518.19" font-family="Helvetica,sans-Serif" font-size="10.00">job operations</text> <text xml:space="preserve" text-anchor="middle" x="378.03" y="-401.25" font-family="Helvetica,sans-Serif" font-size="10.00">/media/*</text>
</g>
<!-- postgres -->
<g id="node9" class="node">
<title>postgres</title>
<path fill="none" stroke="black" d="M691.75,-69.12C691.75,-72.06 670.35,-74.44 644,-74.44 617.65,-74.44 596.25,-72.06 596.25,-69.12 596.25,-69.12 596.25,-21.31 596.25,-21.31 596.25,-18.38 617.65,-16 644,-16 670.35,-16 691.75,-18.38 691.75,-21.31 691.75,-21.31 691.75,-69.12 691.75,-69.12"/>
<path fill="none" stroke="black" d="M691.75,-69.12C691.75,-66.19 670.35,-63.81 644,-63.81 617.65,-63.81 596.25,-66.19 596.25,-69.12"/>
<text xml:space="preserve" text-anchor="middle" x="644" y="-49.17" font-family="Helvetica,sans-Serif" font-size="14.00">PostgreSQL</text>
<text xml:space="preserve" text-anchor="middle" x="644" y="-31.92" font-family="Helvetica,sans-Serif" font-size="14.00">port 5433</text>
</g>
<!-- django&#45;&gt;postgres -->
<g id="edge6" class="edge">
<title>django&#45;&gt;postgres</title>
<path fill="none" stroke="black" d="M597.82,-587C607.63,-587 615.25,-587 615.25,-587 615.25,-587 615.25,-85.86 615.25,-85.86"/>
<polygon fill="black" stroke="black" points="618.75,-85.86 615.25,-75.86 611.75,-85.86 618.75,-85.86"/>
<text xml:space="preserve" text-anchor="middle" x="808.25" y="-303.81" font-family="Helvetica,sans-Serif" font-size="10.00">CRUD only</text>
</g> </g>
<!-- grpc_server --> <!-- grpc_server -->
<g id="node6" class="node"> <g id="node6" class="node">
<title>grpc_server</title> <title>grpc_server</title>
<path fill="none" stroke="black" d="M353.5,-328.19C353.5,-328.19 274.5,-328.19 274.5,-328.19 268.5,-328.19 262.5,-322.19 262.5,-316.19 262.5,-316.19 262.5,-297.69 262.5,-297.69 262.5,-291.69 268.5,-285.69 274.5,-285.69 274.5,-285.69 353.5,-285.69 353.5,-285.69 359.5,-285.69 365.5,-291.69 365.5,-297.69 365.5,-297.69 365.5,-316.19 365.5,-316.19 365.5,-322.19 359.5,-328.19 353.5,-328.19"/> <path fill="none" stroke="black" d="M246.5,-369.44C246.5,-369.44 167.5,-369.44 167.5,-369.44 161.5,-369.44 155.5,-363.44 155.5,-357.44 155.5,-357.44 155.5,-338.94 155.5,-338.94 155.5,-332.94 161.5,-326.94 167.5,-326.94 167.5,-326.94 246.5,-326.94 246.5,-326.94 252.5,-326.94 258.5,-332.94 258.5,-338.94 258.5,-338.94 258.5,-357.44 258.5,-357.44 258.5,-363.44 252.5,-369.44 246.5,-369.44"/>
<text xml:space="preserve" text-anchor="middle" x="314" y="-310.89" font-family="Helvetica,sans-Serif" font-size="14.00">gRPC Server</text> <text xml:space="preserve" text-anchor="middle" x="207" y="-352.14" font-family="Helvetica,sans-Serif" font-size="14.00">gRPC Server</text>
<text xml:space="preserve" text-anchor="middle" x="314" y="-293.64" font-family="Helvetica,sans-Serif" font-size="14.00">port 50051</text> <text xml:space="preserve" text-anchor="middle" x="207" y="-334.89" font-family="Helvetica,sans-Serif" font-size="14.00">port 50051</text>
</g> </g>
<!-- fastapi&#45;&gt;grpc_server --> <!-- fastapi&#45;&gt;grpc_server -->
<g id="edge10" class="edge"> <g id="edge8" class="edge">
<title>fastapi&#45;&gt;grpc_server</title> <title>fastapi&#45;&gt;grpc_server</title>
<path fill="none" stroke="black" d="M509.75,-425.9C509.75,-382.34 509.75,-307 509.75,-307 509.75,-307 377.46,-307 377.46,-307"/> <path fill="none" stroke="black" d="M298.5,-427.06C298.5,-392.59 298.5,-341 298.5,-341 298.5,-341 270.41,-341 270.41,-341"/>
<polygon fill="black" stroke="black" points="377.46,-303.5 367.46,-307 377.46,-310.5 377.46,-303.5"/> <polygon fill="black" stroke="black" points="270.41,-337.5 260.41,-341 270.41,-344.5 270.41,-337.5"/>
<text xml:space="preserve" text-anchor="middle" x="398.25" y="-387.69" font-family="Helvetica,sans-Serif" font-size="10.00">gRPC</text> <text xml:space="preserve" text-anchor="middle" x="319.5" y="-385.98" font-family="Helvetica,sans-Serif" font-size="10.00">gRPC</text>
<text xml:space="preserve" text-anchor="middle" x="398.25" y="-374.94" font-family="Helvetica,sans-Serif" font-size="10.00">progress streaming</text> <text xml:space="preserve" text-anchor="middle" x="319.5" y="-373.23" font-family="Helvetica,sans-Serif" font-size="10.00">progress</text>
</g>
<!-- step_functions -->
<g id="node8" class="node">
<title>step_functions</title>
<path fill="none" stroke="black" d="M541.38,-369.44C541.38,-369.44 446.62,-369.44 446.62,-369.44 440.62,-369.44 434.62,-363.44 434.62,-357.44 434.62,-357.44 434.62,-338.94 434.62,-338.94 434.62,-332.94 440.62,-326.94 446.62,-326.94 446.62,-326.94 541.38,-326.94 541.38,-326.94 547.38,-326.94 553.38,-332.94 553.38,-338.94 553.38,-338.94 553.38,-357.44 553.38,-357.44 553.38,-363.44 547.38,-369.44 541.38,-369.44"/>
<text xml:space="preserve" text-anchor="middle" x="494" y="-352.14" font-family="Helvetica,sans-Serif" font-size="14.00">Step Functions</text>
<text xml:space="preserve" text-anchor="middle" x="494" y="-334.89" font-family="Helvetica,sans-Serif" font-size="14.00">state machine</text>
</g>
<!-- fastapi&#45;&gt;step_functions -->
<g id="edge13" class="edge">
<title>fastapi&#45;&gt;step_functions</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M375.83,-427.17C375.83,-396.99 375.83,-355 375.83,-355 375.83,-355 422.71,-355 422.71,-355"/>
<polygon fill="black" stroke="black" points="422.71,-358.5 432.71,-355 422.71,-351.5 422.71,-358.5"/>
<text xml:space="preserve" text-anchor="middle" x="338.33" y="-358.15" font-family="Helvetica,sans-Serif" font-size="10.00">boto3</text>
<text xml:space="preserve" text-anchor="middle" x="338.33" y="-345.4" font-family="Helvetica,sans-Serif" font-size="10.00">start_execution</text>
</g>
<!-- postgres -->
<g id="node10" class="node">
<title>postgres</title>
<path fill="none" stroke="black" d="M111.75,-163.12C111.75,-166.06 90.35,-168.44 64,-168.44 37.65,-168.44 16.25,-166.06 16.25,-163.12 16.25,-163.12 16.25,-115.31 16.25,-115.31 16.25,-112.38 37.65,-110 64,-110 90.35,-110 111.75,-112.38 111.75,-115.31 111.75,-115.31 111.75,-163.12 111.75,-163.12"/>
<path fill="none" stroke="black" d="M111.75,-163.12C111.75,-160.19 90.35,-157.81 64,-157.81 37.65,-157.81 16.25,-160.19 16.25,-163.12"/>
<text xml:space="preserve" text-anchor="middle" x="64" y="-143.17" font-family="Helvetica,sans-Serif" font-size="14.00">PostgreSQL</text>
<text xml:space="preserve" text-anchor="middle" x="64" y="-125.92" font-family="Helvetica,sans-Serif" font-size="14.00">port 5436</text>
</g> </g>
<!-- fastapi&#45;&gt;postgres --> <!-- fastapi&#45;&gt;postgres -->
<g id="edge8" class="edge"> <g id="edge7" class="edge">
<title>fastapi&#45;&gt;postgres</title> <title>fastapi&#45;&gt;postgres</title>
<path fill="none" stroke="black" d="M552.25,-425.84C552.25,-330.91 552.25,-45 552.25,-45 552.25,-45 584.46,-45 584.46,-45"/> <path fill="none" stroke="black" d="M286.29,-457C203.13,-457 64,-457 64,-457 64,-457 64,-180.34 64,-180.34"/>
<polygon fill="black" stroke="black" points="584.46,-48.5 594.46,-45 584.46,-41.5 584.46,-48.5"/> <polygon fill="black" stroke="black" points="67.5,-180.34 64,-170.34 60.5,-180.34 67.5,-180.34"/>
</g>
<!-- redis -->
<g id="node10" class="node">
<title>redis</title>
<path fill="none" stroke="black" d="M415.5,-69.12C415.5,-72.06 396.45,-74.44 373,-74.44 349.55,-74.44 330.5,-72.06 330.5,-69.12 330.5,-69.12 330.5,-21.31 330.5,-21.31 330.5,-18.38 349.55,-16 373,-16 396.45,-16 415.5,-18.38 415.5,-21.31 415.5,-21.31 415.5,-69.12 415.5,-69.12"/>
<path fill="none" stroke="black" d="M415.5,-69.12C415.5,-66.19 396.45,-63.81 373,-63.81 349.55,-63.81 330.5,-66.19 330.5,-69.12"/>
<text xml:space="preserve" text-anchor="middle" x="373" y="-49.17" font-family="Helvetica,sans-Serif" font-size="14.00">Redis</text>
<text xml:space="preserve" text-anchor="middle" x="373" y="-31.92" font-family="Helvetica,sans-Serif" font-size="14.00">port 6380</text>
</g>
<!-- fastapi&#45;&gt;redis -->
<g id="edge9" class="edge">
<title>fastapi&#45;&gt;redis</title>
<path fill="none" stroke="black" d="M481.02,-456C442,-456 390.5,-456 390.5,-456 390.5,-456 390.5,-86.27 390.5,-86.27"/>
<polygon fill="black" stroke="black" points="394,-86.27 390.5,-76.27 387,-86.27 394,-86.27"/>
<text xml:space="preserve" text-anchor="middle" x="542" y="-240.81" font-family="Helvetica,sans-Serif" font-size="10.00">job status</text>
</g> </g>
<!-- timeline&#45;&gt;fastapi --> <!-- timeline&#45;&gt;fastapi -->
<g id="edge7" class="edge"> <g id="edge6" class="edge">
<title>timeline&#45;&gt;fastapi</title> <title>timeline&#45;&gt;fastapi</title>
<path fill="none" stroke="black" d="M454.47,-587C475.15,-587 494.75,-587 494.75,-587 494.75,-587 494.75,-497.94 494.75,-497.94"/> <path fill="none" stroke="black" d="M429.59,-565C411.66,-565 395.5,-565 395.5,-565 395.5,-565 395.5,-499.11 395.5,-499.11"/>
<polygon fill="black" stroke="black" points="498.25,-497.94 494.75,-487.94 491.25,-497.94 498.25,-497.94"/> <polygon fill="black" stroke="black" points="399,-499.11 395.5,-489.11 392,-499.11 399,-499.11"/>
<text xml:space="preserve" text-anchor="middle" x="440.75" y="-518.19" font-family="Helvetica,sans-Serif" font-size="10.00">REST API</text> <text xml:space="preserve" text-anchor="middle" x="406.38" y="-539.6" font-family="Helvetica,sans-Serif" font-size="10.00">REST API</text>
</g> </g>
<!-- celery --> <!-- celery -->
<g id="node7" class="node"> <g id="node7" class="node">
<title>celery</title> <title>celery</title>
<path fill="none" stroke="black" d="M271.75,-202.19C271.75,-202.19 182.25,-202.19 182.25,-202.19 176.25,-202.19 170.25,-196.19 170.25,-190.19 170.25,-190.19 170.25,-171.69 170.25,-171.69 170.25,-165.69 176.25,-159.69 182.25,-159.69 182.25,-159.69 271.75,-159.69 271.75,-159.69 277.75,-159.69 283.75,-165.69 283.75,-171.69 283.75,-171.69 283.75,-190.19 283.75,-190.19 283.75,-196.19 277.75,-202.19 271.75,-202.19"/> <path fill="none" stroke="black" d="M255.75,-268.94C255.75,-268.94 166.25,-268.94 166.25,-268.94 160.25,-268.94 154.25,-262.94 154.25,-256.94 154.25,-256.94 154.25,-238.44 154.25,-238.44 154.25,-232.44 160.25,-226.44 166.25,-226.44 166.25,-226.44 255.75,-226.44 255.75,-226.44 261.75,-226.44 267.75,-232.44 267.75,-238.44 267.75,-238.44 267.75,-256.94 267.75,-256.94 267.75,-262.94 261.75,-268.94 255.75,-268.94"/>
<text xml:space="preserve" text-anchor="middle" x="227" y="-184.89" font-family="Helvetica,sans-Serif" font-size="14.00">Celery Worker</text> <text xml:space="preserve" text-anchor="middle" x="211" y="-251.64" font-family="Helvetica,sans-Serif" font-size="14.00">Celery Worker</text>
<text xml:space="preserve" text-anchor="middle" x="227" y="-167.64" font-family="Helvetica,sans-Serif" font-size="14.00">(local)</text> <text xml:space="preserve" text-anchor="middle" x="211" y="-234.39" font-family="Helvetica,sans-Serif" font-size="14.00">(local mode)</text>
</g> </g>
<!-- grpc_server&#45;&gt;celery --> <!-- grpc_server&#45;&gt;celery -->
<g id="edge11" class="edge"> <g id="edge9" class="edge">
<title>grpc_server&#45;&gt;celery</title> <title>grpc_server&#45;&gt;celery</title>
<path fill="none" stroke="black" d="M269.58,-285.28C269.58,-285.28 269.58,-213.83 269.58,-213.83"/> <path fill="none" stroke="black" d="M207,-326.87C207,-326.87 207,-280.83 207,-280.83"/>
<polygon fill="black" stroke="black" points="273.08,-213.83 269.58,-203.83 266.08,-213.83 273.08,-213.83"/> <polygon fill="black" stroke="black" points="210.5,-280.83 207,-270.83 203.5,-280.83 210.5,-280.83"/>
<text xml:space="preserve" text-anchor="middle" x="223.62" y="-240.81" font-family="Helvetica,sans-Serif" font-size="10.00">task dispatch</text> <text xml:space="preserve" text-anchor="middle" x="174.38" y="-307.1" font-family="Helvetica,sans-Serif" font-size="10.00">task dispatch</text>
</g>
<!-- celery&#45;&gt;grpc_server -->
<g id="edge14" class="edge">
<title>celery&#45;&gt;grpc_server</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M276.67,-202.6C276.67,-202.6 276.67,-274.05 276.67,-274.05"/>
<polygon fill="black" stroke="black" points="273.17,-274.05 276.67,-284.05 280.17,-274.05 273.17,-274.05"/>
<text xml:space="preserve" text-anchor="middle" x="341.88" y="-247.19" font-family="Helvetica,sans-Serif" font-size="10.00">progress</text>
<text xml:space="preserve" text-anchor="middle" x="341.88" y="-234.44" font-family="Helvetica,sans-Serif" font-size="10.00">callbacks</text>
</g> </g>
<!-- celery&#45;&gt;postgres --> <!-- celery&#45;&gt;postgres -->
<g id="edge13" class="edge"> <g id="edge11" class="edge">
<title>celery&#45;&gt;postgres</title> <title>celery&#45;&gt;postgres</title>
<path fill="none" stroke="black" d="M284.21,-188C390.19,-188 606.37,-188 606.37,-188 606.37,-188 606.37,-84.94 606.37,-84.94"/> <path fill="none" stroke="black" d="M161.88,-225.95C161.88,-194.24 161.88,-139 161.88,-139 161.88,-139 123.59,-139 123.59,-139"/>
<polygon fill="black" stroke="black" points="609.87,-84.94 606.37,-74.94 602.87,-84.94 609.87,-84.94"/> <polygon fill="black" stroke="black" points="123.59,-135.5 113.59,-139 123.59,-142.5 123.59,-135.5"/>
<text xml:space="preserve" text-anchor="middle" x="392.5" y="-121.19" font-family="Helvetica,sans-Serif" font-size="10.00">job updates</text> <text xml:space="preserve" text-anchor="middle" x="133.38" y="-166.59" font-family="Helvetica,sans-Serif" font-size="10.00">job updates</text>
</g>
<!-- redis -->
<g id="node11" class="node">
<title>redis</title>
<path fill="none" stroke="black" d="M254.5,-163.12C254.5,-166.06 235.45,-168.44 212,-168.44 188.55,-168.44 169.5,-166.06 169.5,-163.12 169.5,-163.12 169.5,-115.31 169.5,-115.31 169.5,-112.38 188.55,-110 212,-110 235.45,-110 254.5,-112.38 254.5,-115.31 254.5,-115.31 254.5,-163.12 254.5,-163.12"/>
<path fill="none" stroke="black" d="M254.5,-163.12C254.5,-160.19 235.45,-157.81 212,-157.81 188.55,-157.81 169.5,-160.19 169.5,-163.12"/>
<text xml:space="preserve" text-anchor="middle" x="212" y="-143.17" font-family="Helvetica,sans-Serif" font-size="14.00">Redis</text>
<text xml:space="preserve" text-anchor="middle" x="212" y="-125.92" font-family="Helvetica,sans-Serif" font-size="14.00">port 6381</text>
</g> </g>
<!-- celery&#45;&gt;redis --> <!-- celery&#45;&gt;redis -->
<g id="edge12" class="edge"> <g id="edge10" class="edge">
<title>celery&#45;&gt;redis</title> <title>celery&#45;&gt;redis</title>
<path fill="none" stroke="black" d="M283.96,-174C315.34,-174 348,-174 348,-174 348,-174 348,-85.95 348,-85.95"/> <path fill="none" stroke="black" d="M212,-226C212,-226 212,-180.19 212,-180.19"/>
<polygon fill="black" stroke="black" points="351.5,-85.95 348,-75.95 344.5,-85.95 351.5,-85.95"/> <polygon fill="black" stroke="black" points="215.5,-180.19 212,-170.19 208.5,-180.19 215.5,-180.19"/>
<text xml:space="preserve" text-anchor="middle" x="286" y="-121.19" font-family="Helvetica,sans-Serif" font-size="10.00">queue</text> <text xml:space="preserve" text-anchor="middle" x="197" y="-206.34" font-family="Helvetica,sans-Serif" font-size="10.00">queue</text>
</g> </g>
<!-- local_fs --> <!-- celery&#45;&gt;minio -->
<g id="node12" class="node"> <g id="edge12" class="edge">
<title>local_fs</title> <title>celery&#45;&gt;minio</title>
<polygon fill="none" stroke="black" points="210.12,-66.47 207.12,-70.47 186.12,-70.47 183.12,-66.47 137.88,-66.47 137.88,-23.97 210.12,-23.97 210.12,-66.47"/> <path fill="none" stroke="black" d="M261.12,-225.95C261.12,-194.24 261.12,-139 261.12,-139 261.12,-139 300.75,-139 300.75,-139"/>
<text xml:space="preserve" text-anchor="middle" x="174" y="-49.17" font-family="Helvetica,sans-Serif" font-size="14.00">Local FS</text> <polygon fill="black" stroke="black" points="300.75,-142.5 310.75,-139 300.75,-135.5 300.75,-142.5"/>
<text xml:space="preserve" text-anchor="middle" x="174" y="-31.92" font-family="Helvetica,sans-Serif" font-size="14.00">/media</text> <text xml:space="preserve" text-anchor="middle" x="302.75" y="-178.67" font-family="Helvetica,sans-Serif" font-size="10.00">S3 API</text>
</g> <text xml:space="preserve" text-anchor="middle" x="302.75" y="-165.92" font-family="Helvetica,sans-Serif" font-size="10.00">download/upload</text>
<!-- celery&#45;&gt;local_fs -->
<g id="edge15" class="edge">
<title>celery&#45;&gt;local_fs</title>
<path fill="none" stroke="black" d="M190.19,-159.43C190.19,-159.43 190.19,-78.14 190.19,-78.14"/>
<polygon fill="black" stroke="black" points="193.69,-78.14 190.19,-68.14 186.69,-78.14 193.69,-78.14"/>
<text xml:space="preserve" text-anchor="middle" x="182.75" y="-121.19" font-family="Helvetica,sans-Serif" font-size="10.00">read/write</text>
</g> </g>
<!-- lambda --> <!-- lambda -->
<g id="node8" class="node"> <g id="node9" class="node">
<title>lambda</title> <title>lambda</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M192.75,-328.19C192.75,-328.19 145.25,-328.19 145.25,-328.19 139.25,-328.19 133.25,-322.19 133.25,-316.19 133.25,-316.19 133.25,-297.69 133.25,-297.69 133.25,-291.69 139.25,-285.69 145.25,-285.69 145.25,-285.69 192.75,-285.69 192.75,-285.69 198.75,-285.69 204.75,-291.69 204.75,-297.69 204.75,-297.69 204.75,-316.19 204.75,-316.19 204.75,-322.19 198.75,-328.19 192.75,-328.19"/> <path fill="none" stroke="black" d="M541,-268.94C541,-268.94 423,-268.94 423,-268.94 417,-268.94 411,-262.94 411,-256.94 411,-256.94 411,-238.44 411,-238.44 411,-232.44 417,-226.44 423,-226.44 423,-226.44 541,-226.44 541,-226.44 547,-226.44 553,-232.44 553,-238.44 553,-238.44 553,-256.94 553,-256.94 553,-262.94 547,-268.94 541,-268.94"/>
<text xml:space="preserve" text-anchor="middle" x="169" y="-310.89" font-family="Helvetica,sans-Serif" font-size="14.00">Lambda</text> <text xml:space="preserve" text-anchor="middle" x="482" y="-251.64" font-family="Helvetica,sans-Serif" font-size="14.00">Lambda</text>
<text xml:space="preserve" text-anchor="middle" x="169" y="-293.64" font-family="Helvetica,sans-Serif" font-size="14.00">(cloud)</text> <text xml:space="preserve" text-anchor="middle" x="482" y="-234.39" font-family="Helvetica,sans-Serif" font-size="14.00">FFmpeg container</text>
</g> </g>
<!-- sqs --> <!-- step_functions&#45;&gt;lambda -->
<g id="node11" class="node"> <g id="edge14" class="edge">
<title>sqs</title> <title>step_functions&#45;&gt;lambda</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M538,-69.12C538,-72.06 523.66,-74.44 506,-74.44 488.34,-74.44 474,-72.06 474,-69.12 474,-69.12 474,-21.31 474,-21.31 474,-18.38 488.34,-16 506,-16 523.66,-16 538,-18.38 538,-21.31 538,-21.31 538,-69.12 538,-69.12"/> <path fill="none" stroke="black" stroke-dasharray="5,2" d="M493.81,-326.87C493.81,-326.87 493.81,-280.83 493.81,-280.83"/>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M538,-69.12C538,-66.19 523.66,-63.81 506,-63.81 488.34,-63.81 474,-66.19 474,-69.12"/> <polygon fill="black" stroke="black" points="497.31,-280.83 493.81,-270.83 490.31,-280.83 497.31,-280.83"/>
<text xml:space="preserve" text-anchor="middle" x="506" y="-49.17" font-family="Helvetica,sans-Serif" font-size="14.00">SQS</text>
<text xml:space="preserve" text-anchor="middle" x="506" y="-31.92" font-family="Helvetica,sans-Serif" font-size="14.00">(cloud)</text>
</g> </g>
<!-- lambda&#45;&gt;sqs --> <!-- lambda&#45;&gt;fastapi -->
<g id="edge16" class="edge"> <g id="edge16" class="edge">
<title>lambda&#45;&gt;sqs</title> <title>lambda&#45;&gt;fastapi</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M187.5,-285.28C187.5,-267.07 187.5,-244 187.5,-244 187.5,-244 477.75,-244 477.75,-244 477.75,-244 477.75,-84.37 477.75,-84.37"/> <path fill="none" stroke="black" stroke-dasharray="5,2" d="M418.75,-269.3C418.75,-322.78 418.75,-457 418.75,-457 418.75,-457 417.66,-457 417.66,-457"/>
<polygon fill="black" stroke="black" points="481.25,-84.37 477.75,-74.37 474.25,-84.37 481.25,-84.37"/> <polygon fill="black" stroke="black" points="419.37,-453.5 409.37,-457 419.37,-460.5 419.37,-453.5"/>
<text xml:space="preserve" text-anchor="middle" x="415" y="-177.81" font-family="Helvetica,sans-Serif" font-size="10.00">queue</text> <text xml:space="preserve" text-anchor="middle" x="359.12" y="-379.69" font-family="Helvetica,sans-Serif" font-size="10.00">callback</text>
<text xml:space="preserve" text-anchor="middle" x="359.12" y="-366.94" font-family="Helvetica,sans-Serif" font-size="10.00">POST /jobs/{id}/callback</text>
</g> </g>
<!-- s3 --> <!-- s3 -->
<g id="node13" class="node"> <g id="node13" class="node">
<title>s3</title> <title>s3</title>
<polygon fill="none" stroke="black" stroke-dasharray="5,2" points="80,-66.47 77,-70.47 56,-70.47 53,-66.47 16,-66.47 16,-23.97 80,-23.97 80,-66.47"/> <polygon fill="none" stroke="black" stroke-dasharray="5,2" points="596.25,-157.22 593.25,-161.22 572.25,-161.22 569.25,-157.22 473.75,-157.22 473.75,-121.22 596.25,-121.22 596.25,-157.22"/>
<text xml:space="preserve" text-anchor="middle" x="48" y="-49.17" font-family="Helvetica,sans-Serif" font-size="14.00">S3</text> <text xml:space="preserve" text-anchor="middle" x="535" y="-134.54" font-family="Helvetica,sans-Serif" font-size="14.00">AWS S3 (cloud)</text>
<text xml:space="preserve" text-anchor="middle" x="48" y="-31.92" font-family="Helvetica,sans-Serif" font-size="14.00">(cloud)</text>
</g> </g>
<!-- lambda&#45;&gt;s3 --> <!-- lambda&#45;&gt;s3 -->
<g id="edge17" class="edge"> <g id="edge15" class="edge">
<title>lambda&#45;&gt;s3</title> <title>lambda&#45;&gt;s3</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M133.02,-307C97.36,-307 48,-307 48,-307 48,-307 48,-78.15 48,-78.15"/> <path fill="none" stroke="black" stroke-dasharray="5,2" d="M513.38,-226C513.38,-226 513.38,-169.14 513.38,-169.14"/>
<polygon fill="black" stroke="black" points="51.5,-78.15 48,-68.15 44.5,-78.15 51.5,-78.15"/> <polygon fill="black" stroke="black" points="516.88,-169.14 513.38,-159.14 509.88,-169.14 516.88,-169.14"/>
<text xml:space="preserve" text-anchor="middle" x="80.75" y="-177.81" font-family="Helvetica,sans-Serif" font-size="10.00">read/write</text> <text xml:space="preserve" text-anchor="middle" x="471.75" y="-200.82" font-family="Helvetica,sans-Serif" font-size="10.00">download/upload</text>
</g>
<!-- bucket_in -->
<g id="node14" class="node">
<title>bucket_in</title>
<polygon fill="none" stroke="black" points="413.5,-52 310.5,-52 310.5,-16 419.5,-16 419.5,-46 413.5,-52"/>
<polyline fill="none" stroke="black" points="413.5,-52 413.5,-46"/>
<polyline fill="none" stroke="black" points="419.5,-46 413.5,-46"/>
<text xml:space="preserve" text-anchor="middle" x="365" y="-29.32" font-family="Helvetica,sans-Serif" font-size="14.00">mpr&#45;media&#45;in</text>
</g>
<!-- minio&#45;&gt;bucket_in -->
<g id="edge17" class="edge">
<title>minio&#45;&gt;bucket_in</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M364,-117.67C364,-98.43 364,-70.56 364,-52.36"/>
</g>
<!-- bucket_out -->
<g id="node15" class="node">
<title>bucket_out</title>
<polygon fill="none" stroke="black" points="590.38,-52 477.62,-52 477.62,-16 596.38,-16 596.38,-46 590.38,-52"/>
<polyline fill="none" stroke="black" points="590.38,-52 590.38,-46"/>
<polyline fill="none" stroke="black" points="596.38,-46 590.38,-46"/>
<text xml:space="preserve" text-anchor="middle" x="537" y="-29.32" font-family="Helvetica,sans-Serif" font-size="14.00">mpr&#45;media&#45;out</text>
</g>
<!-- minio&#45;&gt;bucket_out -->
<g id="edge18" class="edge">
<title>minio&#45;&gt;bucket_out</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M415.9,-145C428.08,-145 437.58,-145 437.58,-145 437.58,-145 437.58,-40 437.58,-40 437.58,-40 456.11,-40 477.16,-40"/>
</g>
<!-- s3&#45;&gt;bucket_in -->
<g id="edge19" class="edge">
<title>s3&#45;&gt;bucket_in</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M473.27,-133C463.03,-133 455.67,-133 455.67,-133 455.67,-133 455.67,-28 455.67,-28 455.67,-28 438.93,-28 419.83,-28"/>
</g>
<!-- s3&#45;&gt;bucket_out -->
<g id="edge20" class="edge">
<title>s3&#45;&gt;bucket_out</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M536.94,-120.89C536.94,-101.7 536.94,-71.72 536.94,-52.47"/>
</g> </g>
</g> </g>
</svg> </svg>

Before

Width:  |  Height:  |  Size: 20 KiB

After

Width:  |  Height:  |  Size: 21 KiB

View File

@@ -10,13 +10,13 @@ digraph data_model {
graph [splines=ortho, nodesep=0.6, ranksep=1.2] graph [splines=ortho, nodesep=0.6, ranksep=1.2]
MediaAsset [label="{MediaAsset|id: UUID (PK)\lfilename: str\lfile_path: str\lfile_size: int?\lstatus: pending/ready/error\lerror_message: str?\l|duration: float?\lvideo_codec: str?\laudio_codec: str?\lwidth: int?\lheight: int?\lframerate: float?\lbitrate: int?\lproperties: JSON\l|comments: str\ltags: JSON[]\l|created_at: datetime\lupdated_at: datetime\l}"] MediaAsset [label="{MediaAsset|id: UUID (PK)\lfilename: str\lfile_path: str (S3 key)\lfile_size: int?\lstatus: pending/ready/error\lerror_message: str?\l|duration: float?\lvideo_codec: str?\laudio_codec: str?\lwidth: int?\lheight: int?\lframerate: float?\lbitrate: int?\lproperties: JSON\l|comments: str\ltags: JSON[]\l|created_at: datetime\lupdated_at: datetime\l}"]
TranscodePreset [label="{TranscodePreset|id: UUID (PK)\lname: str (unique)\ldescription: str\lis_builtin: bool\l|container: str\l|video_codec: str\lvideo_bitrate: str?\lvideo_crf: int?\lvideo_preset: str?\lresolution: str?\lframerate: float?\l|audio_codec: str\laudio_bitrate: str?\laudio_channels: int?\laudio_samplerate: int?\l|extra_args: JSON[]\l|created_at: datetime\lupdated_at: datetime\l}"] TranscodePreset [label="{TranscodePreset|id: UUID (PK)\lname: str (unique)\ldescription: str\lis_builtin: bool\l|container: str\l|video_codec: str\lvideo_bitrate: str?\lvideo_crf: int?\lvideo_preset: str?\lresolution: str?\lframerate: float?\l|audio_codec: str\laudio_bitrate: str?\laudio_channels: int?\laudio_samplerate: int?\l|extra_args: JSON[]\l|created_at: datetime\lupdated_at: datetime\l}"]
TranscodeJob [label="{TranscodeJob|id: UUID (PK)\l|source_asset_id: UUID (FK)\l|preset_id: UUID? (FK)\lpreset_snapshot: JSON\l|trim_start: float?\ltrim_end: float?\l|output_filename: str\loutput_path: str?\loutput_asset_id: UUID? (FK)\l|status: pending/processing/...\lprogress: float (0-100)\lcurrent_frame: int?\lcurrent_time: float?\lspeed: str?\lerror_message: str?\l|celery_task_id: str?\lpriority: int\l|created_at: datetime\lstarted_at: datetime?\lcompleted_at: datetime?\l}"] TranscodeJob [label="{TranscodeJob|id: UUID (PK)\l|source_asset_id: UUID (FK)\l|preset_id: UUID? (FK)\lpreset_snapshot: JSON\l|trim_start: float?\ltrim_end: float?\l|output_filename: str\loutput_path: str? (S3 key)\loutput_asset_id: UUID? (FK)\l|status: pending/processing/...\lprogress: float (0-100)\lcurrent_frame: int?\lcurrent_time: float?\lspeed: str?\lerror_message: str?\l|celery_task_id: str?\lexecution_arn: str?\lpriority: int\l|created_at: datetime\lstarted_at: datetime?\lcompleted_at: datetime?\l}"]
MediaAsset -> TranscodeJob [label="1:N source_asset"] MediaAsset -> TranscodeJob [xlabel="1:N source_asset"]
TranscodePreset -> TranscodeJob [label="1:N preset"] TranscodePreset -> TranscodeJob [xlabel="1:N preset"]
TranscodeJob -> MediaAsset [label="1:1 output_asset", style=dashed] TranscodeJob -> MediaAsset [xlabel="1:1 output_asset", style=dashed]
} }

View File

@@ -1,15 +1,15 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?> <?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" <!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"> "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<!-- Generated by graphviz version 14.1.1 (0) <!-- Generated by graphviz version 14.1.2 (0)
--> -->
<!-- Title: data_model Pages: 1 --> <!-- Title: data_model Pages: 1 -->
<svg width="2218pt" height="286pt" <svg width="2134pt" height="286pt"
viewBox="0.00 0.00 2218.00 286.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> viewBox="0.00 0.00 2134.00 286.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 282)"> <g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 282)">
<title>data_model</title> <title>data_model</title>
<polygon fill="white" stroke="none" points="-4,4 -4,-282 2213.5,-282 2213.5,4 -4,4"/> <polygon fill="white" stroke="none" points="-4,4 -4,-282 2130.25,-282 2130.25,4 -4,4"/>
<text xml:space="preserve" text-anchor="middle" x="1104.75" y="-258.8" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">MPR &#45; Data Model</text> <text xml:space="preserve" text-anchor="middle" x="1063.12" y="-258.8" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">MPR &#45; Data Model</text>
<!-- MediaAsset --> <!-- MediaAsset -->
<g id="node1" class="node"> <g id="node1" class="node">
<title>MediaAsset</title> <title>MediaAsset</title>
@@ -18,7 +18,7 @@
<polyline fill="none" stroke="black" points="197.75,-134 197.75,-250"/> <polyline fill="none" stroke="black" points="197.75,-134 197.75,-250"/>
<text xml:space="preserve" text-anchor="start" x="205.75" y="-222.05" font-family="Helvetica,sans-Serif" font-size="11.00">id: UUID (PK)</text> <text xml:space="preserve" text-anchor="start" x="205.75" y="-222.05" font-family="Helvetica,sans-Serif" font-size="11.00">id: UUID (PK)</text>
<text xml:space="preserve" text-anchor="start" x="205.75" y="-208.55" font-family="Helvetica,sans-Serif" font-size="11.00">filename: str</text> <text xml:space="preserve" text-anchor="start" x="205.75" y="-208.55" font-family="Helvetica,sans-Serif" font-size="11.00">filename: str</text>
<text xml:space="preserve" text-anchor="start" x="205.75" y="-195.05" font-family="Helvetica,sans-Serif" font-size="11.00">file_path: str</text> <text xml:space="preserve" text-anchor="start" x="205.75" y="-195.05" font-family="Helvetica,sans-Serif" font-size="11.00">file_path: str (S3 key)</text>
<text xml:space="preserve" text-anchor="start" x="205.75" y="-181.55" font-family="Helvetica,sans-Serif" font-size="11.00">file_size: int?</text> <text xml:space="preserve" text-anchor="start" x="205.75" y="-181.55" font-family="Helvetica,sans-Serif" font-size="11.00">file_size: int?</text>
<text xml:space="preserve" text-anchor="start" x="205.75" y="-168.05" font-family="Helvetica,sans-Serif" font-size="11.00">status: pending/ready/error</text> <text xml:space="preserve" text-anchor="start" x="205.75" y="-168.05" font-family="Helvetica,sans-Serif" font-size="11.00">status: pending/ready/error</text>
<text xml:space="preserve" text-anchor="start" x="205.75" y="-154.55" font-family="Helvetica,sans-Serif" font-size="11.00">error_message: str?</text> <text xml:space="preserve" text-anchor="start" x="205.75" y="-154.55" font-family="Helvetica,sans-Serif" font-size="11.00">error_message: str?</text>
@@ -41,43 +41,44 @@
<!-- TranscodeJob --> <!-- TranscodeJob -->
<g id="node3" class="node"> <g id="node3" class="node">
<title>TranscodeJob</title> <title>TranscodeJob</title>
<polygon fill="none" stroke="black" points="995.25,-86.5 995.25,-175.5 2209.5,-175.5 2209.5,-86.5 995.25,-86.5"/> <polygon fill="none" stroke="black" points="912,-147.5 912,-236.5 2126.25,-236.5 2126.25,-147.5 912,-147.5"/>
<text xml:space="preserve" text-anchor="middle" x="1039.25" y="-127.3" font-family="Helvetica,sans-Serif" font-size="11.00">TranscodeJob</text> <text xml:space="preserve" text-anchor="middle" x="956" y="-188.3" font-family="Helvetica,sans-Serif" font-size="11.00">TranscodeJob</text>
<polyline fill="none" stroke="black" points="1083.25,-86.5 1083.25,-175.5"/> <polyline fill="none" stroke="black" points="1000,-147.5 1000,-236.5"/>
<text xml:space="preserve" text-anchor="start" x="1091.25" y="-127.3" font-family="Helvetica,sans-Serif" font-size="11.00">id: UUID (PK)</text> <text xml:space="preserve" text-anchor="start" x="1008" y="-188.3" font-family="Helvetica,sans-Serif" font-size="11.00">id: UUID (PK)</text>
<polyline fill="none" stroke="black" points="1171.25,-86.5 1171.25,-175.5"/> <polyline fill="none" stroke="black" points="1088,-147.5 1088,-236.5"/>
<text xml:space="preserve" text-anchor="start" x="1179.25" y="-127.3" font-family="Helvetica,sans-Serif" font-size="11.00">source_asset_id: UUID (FK)</text> <text xml:space="preserve" text-anchor="start" x="1096" y="-188.3" font-family="Helvetica,sans-Serif" font-size="11.00">source_asset_id: UUID (FK)</text>
<polyline fill="none" stroke="black" points="1335.75,-86.5 1335.75,-175.5"/> <polyline fill="none" stroke="black" points="1252.5,-147.5 1252.5,-236.5"/>
<text xml:space="preserve" text-anchor="start" x="1343.75" y="-134.05" font-family="Helvetica,sans-Serif" font-size="11.00">preset_id: UUID? (FK)</text> <text xml:space="preserve" text-anchor="start" x="1260.5" y="-195.05" font-family="Helvetica,sans-Serif" font-size="11.00">preset_id: UUID? (FK)</text>
<text xml:space="preserve" text-anchor="start" x="1343.75" y="-120.55" font-family="Helvetica,sans-Serif" font-size="11.00">preset_snapshot: JSON</text> <text xml:space="preserve" text-anchor="start" x="1260.5" y="-181.55" font-family="Helvetica,sans-Serif" font-size="11.00">preset_snapshot: JSON</text>
<polyline fill="none" stroke="black" points="1477,-86.5 1477,-175.5"/> <polyline fill="none" stroke="black" points="1393.75,-147.5 1393.75,-236.5"/>
<text xml:space="preserve" text-anchor="start" x="1485" y="-134.05" font-family="Helvetica,sans-Serif" font-size="11.00">trim_start: float?</text> <text xml:space="preserve" text-anchor="start" x="1401.75" y="-195.05" font-family="Helvetica,sans-Serif" font-size="11.00">trim_start: float?</text>
<text xml:space="preserve" text-anchor="start" x="1485" y="-120.55" font-family="Helvetica,sans-Serif" font-size="11.00">trim_end: float?</text> <text xml:space="preserve" text-anchor="start" x="1401.75" y="-181.55" font-family="Helvetica,sans-Serif" font-size="11.00">trim_end: float?</text>
<polyline fill="none" stroke="black" points="1585.25,-86.5 1585.25,-175.5"/> <polyline fill="none" stroke="black" points="1502,-147.5 1502,-236.5"/>
<text xml:space="preserve" text-anchor="start" x="1593.25" y="-140.8" font-family="Helvetica,sans-Serif" font-size="11.00">output_filename: str</text> <text xml:space="preserve" text-anchor="start" x="1510" y="-201.8" font-family="Helvetica,sans-Serif" font-size="11.00">output_filename: str</text>
<text xml:space="preserve" text-anchor="start" x="1593.25" y="-127.3" font-family="Helvetica,sans-Serif" font-size="11.00">output_path: str?</text> <text xml:space="preserve" text-anchor="start" x="1510" y="-188.3" font-family="Helvetica,sans-Serif" font-size="11.00">output_path: str? (S3 key)</text>
<text xml:space="preserve" text-anchor="start" x="1593.25" y="-113.8" font-family="Helvetica,sans-Serif" font-size="11.00">output_asset_id: UUID? (FK)</text> <text xml:space="preserve" text-anchor="start" x="1510" y="-174.8" font-family="Helvetica,sans-Serif" font-size="11.00">output_asset_id: UUID? (FK)</text>
<polyline fill="none" stroke="black" points="1755,-86.5 1755,-175.5"/> <polyline fill="none" stroke="black" points="1671.75,-147.5 1671.75,-236.5"/>
<text xml:space="preserve" text-anchor="start" x="1763" y="-161.05" font-family="Helvetica,sans-Serif" font-size="11.00">status: pending/processing/...</text> <text xml:space="preserve" text-anchor="start" x="1679.75" y="-222.05" font-family="Helvetica,sans-Serif" font-size="11.00">status: pending/processing/...</text>
<text xml:space="preserve" text-anchor="start" x="1763" y="-147.55" font-family="Helvetica,sans-Serif" font-size="11.00">progress: float (0&#45;100)</text> <text xml:space="preserve" text-anchor="start" x="1679.75" y="-208.55" font-family="Helvetica,sans-Serif" font-size="11.00">progress: float (0&#45;100)</text>
<text xml:space="preserve" text-anchor="start" x="1763" y="-134.05" font-family="Helvetica,sans-Serif" font-size="11.00">current_frame: int?</text> <text xml:space="preserve" text-anchor="start" x="1679.75" y="-195.05" font-family="Helvetica,sans-Serif" font-size="11.00">current_frame: int?</text>
<text xml:space="preserve" text-anchor="start" x="1763" y="-120.55" font-family="Helvetica,sans-Serif" font-size="11.00">current_time: float?</text> <text xml:space="preserve" text-anchor="start" x="1679.75" y="-181.55" font-family="Helvetica,sans-Serif" font-size="11.00">current_time: float?</text>
<text xml:space="preserve" text-anchor="start" x="1763" y="-107.05" font-family="Helvetica,sans-Serif" font-size="11.00">speed: str?</text> <text xml:space="preserve" text-anchor="start" x="1679.75" y="-168.05" font-family="Helvetica,sans-Serif" font-size="11.00">speed: str?</text>
<text xml:space="preserve" text-anchor="start" x="1763" y="-93.55" font-family="Helvetica,sans-Serif" font-size="11.00">error_message: str?</text> <text xml:space="preserve" text-anchor="start" x="1679.75" y="-154.55" font-family="Helvetica,sans-Serif" font-size="11.00">error_message: str?</text>
<polyline fill="none" stroke="black" points="1934.5,-86.5 1934.5,-175.5"/> <polyline fill="none" stroke="black" points="1851.25,-147.5 1851.25,-236.5"/>
<text xml:space="preserve" text-anchor="start" x="1942.5" y="-134.05" font-family="Helvetica,sans-Serif" font-size="11.00">celery_task_id: str?</text> <text xml:space="preserve" text-anchor="start" x="1859.25" y="-201.8" font-family="Helvetica,sans-Serif" font-size="11.00">celery_task_id: str?</text>
<text xml:space="preserve" text-anchor="start" x="1942.5" y="-120.55" font-family="Helvetica,sans-Serif" font-size="11.00">priority: int</text> <text xml:space="preserve" text-anchor="start" x="1859.25" y="-188.3" font-family="Helvetica,sans-Serif" font-size="11.00">execution_arn: str?</text>
<polyline fill="none" stroke="black" points="2056.25,-86.5 2056.25,-175.5"/> <text xml:space="preserve" text-anchor="start" x="1859.25" y="-174.8" font-family="Helvetica,sans-Serif" font-size="11.00">priority: int</text>
<text xml:space="preserve" text-anchor="start" x="2064.25" y="-140.8" font-family="Helvetica,sans-Serif" font-size="11.00">created_at: datetime</text> <polyline fill="none" stroke="black" points="1973,-147.5 1973,-236.5"/>
<text xml:space="preserve" text-anchor="start" x="2064.25" y="-127.3" font-family="Helvetica,sans-Serif" font-size="11.00">started_at: datetime?</text> <text xml:space="preserve" text-anchor="start" x="1981" y="-201.8" font-family="Helvetica,sans-Serif" font-size="11.00">created_at: datetime</text>
<text xml:space="preserve" text-anchor="start" x="2064.25" y="-113.8" font-family="Helvetica,sans-Serif" font-size="11.00">completed_at: datetime?</text> <text xml:space="preserve" text-anchor="start" x="1981" y="-188.3" font-family="Helvetica,sans-Serif" font-size="11.00">started_at: datetime?</text>
<text xml:space="preserve" text-anchor="start" x="1981" y="-174.8" font-family="Helvetica,sans-Serif" font-size="11.00">completed_at: datetime?</text>
</g> </g>
<!-- MediaAsset&#45;&gt;TranscodeJob --> <!-- MediaAsset&#45;&gt;TranscodeJob -->
<g id="edge1" class="edge"> <g id="edge1" class="edge">
<title>MediaAsset&#45;&gt;TranscodeJob</title> <title>MediaAsset&#45;&gt;TranscodeJob</title>
<path fill="none" stroke="black" d="M708.15,-147.67C708.15,-147.67 983.49,-147.67 983.49,-147.67"/> <path fill="none" stroke="black" d="M708.33,-192C708.33,-192 900.24,-192 900.24,-192"/>
<polygon fill="black" stroke="black" points="983.49,-151.17 993.49,-147.67 983.49,-144.17 983.49,-151.17"/> <polygon fill="black" stroke="black" points="900.24,-195.5 910.24,-192 900.24,-188.5 900.24,-195.5"/>
<text xml:space="preserve" text-anchor="middle" x="910.62" y="-195.25" font-family="Helvetica,sans-Serif" font-size="10.00">1:N source_asset</text> <text xml:space="preserve" text-anchor="middle" x="762.66" y="-182.5" font-family="Helvetica,sans-Serif" font-size="10.00">1:N source_asset</text>
</g> </g>
<!-- TranscodePreset --> <!-- TranscodePreset -->
<g id="node2" class="node"> <g id="node2" class="node">
@@ -112,16 +113,16 @@
<!-- TranscodePreset&#45;&gt;TranscodeJob --> <!-- TranscodePreset&#45;&gt;TranscodeJob -->
<g id="edge2" class="edge"> <g id="edge2" class="edge">
<title>TranscodePreset&#45;&gt;TranscodeJob</title> <title>TranscodePreset&#45;&gt;TranscodeJob</title>
<path fill="none" stroke="black" d="M766.5,-89.89C766.5,-101.97 766.5,-111.75 766.5,-111.75 766.5,-111.75 983.39,-111.75 983.39,-111.75"/> <path fill="none" stroke="black" d="M767.25,-89.95C767.25,-125.61 767.25,-169.5 767.25,-169.5 767.25,-169.5 900.26,-169.5 900.26,-169.5"/>
<polygon fill="black" stroke="black" points="983.39,-115.25 993.39,-111.75 983.39,-108.25 983.39,-115.25"/> <polygon fill="black" stroke="black" points="900.26,-173 910.26,-169.5 900.26,-166 900.26,-173"/>
<text xml:space="preserve" text-anchor="middle" x="910.62" y="-48.25" font-family="Helvetica,sans-Serif" font-size="10.00">1:N preset</text> <text xml:space="preserve" text-anchor="middle" x="768.85" y="-160" font-family="Helvetica,sans-Serif" font-size="10.00">1:N preset</text>
</g> </g>
<!-- TranscodeJob&#45;&gt;MediaAsset --> <!-- TranscodeJob&#45;&gt;MediaAsset -->
<g id="edge3" class="edge"> <g id="edge3" class="edge">
<title>TranscodeJob&#45;&gt;MediaAsset</title> <title>TranscodeJob&#45;&gt;MediaAsset</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M995.06,-161.83C995.06,-161.83 719.99,-161.83 719.99,-161.83"/> <path fill="none" stroke="black" stroke-dasharray="5,2" d="M911.86,-214.5C911.86,-214.5 719.76,-214.5 719.76,-214.5"/>
<polygon fill="black" stroke="black" points="719.99,-158.33 709.99,-161.83 719.99,-165.33 719.99,-158.33"/> <polygon fill="black" stroke="black" points="719.76,-211 709.76,-214.5 719.76,-218 719.76,-211"/>
<text xml:space="preserve" text-anchor="middle" x="910.62" y="-134.25" font-family="Helvetica,sans-Serif" font-size="10.00">1:1 output_asset</text> <text xml:space="preserve" text-anchor="middle" x="775.31" y="-205" font-family="Helvetica,sans-Serif" font-size="10.00">1:1 output_asset</text>
</g> </g>
</g> </g>
</svg> </svg>

Before

Width:  |  Height:  |  Size: 12 KiB

After

Width:  |  Height:  |  Size: 13 KiB

View File

@@ -3,7 +3,6 @@ digraph job_flow {
node [shape=box, style=rounded, fontname="Helvetica"] node [shape=box, style=rounded, fontname="Helvetica"]
edge [fontname="Helvetica", fontsize=10] edge [fontname="Helvetica", fontsize=10]
// Title
labelloc="t" labelloc="t"
label="MPR - Job Flow" label="MPR - Job Flow"
fontsize=16 fontsize=16
@@ -11,7 +10,19 @@ digraph job_flow {
graph [splines=ortho, nodesep=0.6, ranksep=0.6] graph [splines=ortho, nodesep=0.6, ranksep=0.6]
// States // API entry points
subgraph cluster_api {
label="API Entry Points"
style=dashed
color=gray
rest_create [label="POST /api/jobs/", shape=ellipse]
gql_create [label="mutation createJob", shape=ellipse]
rest_cancel [label="POST /api/jobs/{id}/cancel", shape=ellipse]
rest_callback [label="POST /api/jobs/{id}/callback", shape=ellipse]
}
// Job states
subgraph cluster_states { subgraph cluster_states {
label="Job States" label="Job States"
style=filled style=filled
@@ -24,78 +35,70 @@ digraph job_flow {
cancelled [label="CANCELLED", fillcolor="#6c757d", style="filled,rounded", fontcolor=white] cancelled [label="CANCELLED", fillcolor="#6c757d", style="filled,rounded", fontcolor=white]
} }
// Transitions // State transitions
pending -> processing [label="worker picks up"] pending -> processing [xlabel="worker picks up"]
processing -> completed [label="success"] processing -> completed [xlabel="success"]
processing -> failed [label="error"] processing -> failed [xlabel="error"]
pending -> cancelled [label="user cancels"] pending -> cancelled [xlabel="user cancels"]
processing -> cancelled [label="user cancels"] processing -> cancelled [xlabel="user cancels"]
failed -> pending [label="retry"] failed -> pending [xlabel="retry"]
// API actions rest_create -> pending
subgraph cluster_api { gql_create -> pending
label="API Actions" rest_cancel -> cancelled [style=dashed]
style=dashed
color=gray
create_job [label="POST /jobs/", shape=ellipse] // Executor dispatch
cancel_job [label="POST /jobs/{id}/cancel", shape=ellipse] subgraph cluster_dispatch {
retry_job [label="POST /jobs/{id}/retry", shape=ellipse] label="Executor Dispatch"
}
create_job -> pending
cancel_job -> cancelled [style=dashed]
retry_job -> pending [style=dashed]
// Executor layer
subgraph cluster_executor {
label="Executor Layer"
style=filled style=filled
fillcolor="#fff8e8" fillcolor="#fff8e8"
executor [label="Executor\n(abstract)", shape=diamond] dispatch [label="MPR_EXECUTOR", shape=diamond]
local [label="LocalExecutor\nCelery + FFmpeg"]
lambda_exec [label="LambdaExecutor\nSQS + Lambda"]
} }
processing -> executor pending -> dispatch
executor -> local [label="MPR_EXECUTOR=local"]
executor -> lambda_exec [label="MPR_EXECUTOR=lambda", style=dashed]
// FFmpeg operations // Local path
subgraph cluster_ffmpeg { subgraph cluster_local {
label="FFmpeg Operations" label="Local Mode (Celery)"
style=filled style=filled
fillcolor="#e8f4e8" fillcolor="#e8f4e8"
transcode [label="Transcode\n(with preset)"] celery_task [label="Celery Task\n(transcode queue)"]
trim [label="Trim\n(-c:v copy -c:a copy)"] s3_download [label="S3 Download\n(MinIO)"]
ffmpeg_local [label="FFmpeg\ntranscode/trim"]
s3_upload [label="S3 Upload\n(MinIO)"]
db_update [label="DB Update\n(update_job_progress)"]
} }
local -> transcode dispatch -> celery_task [xlabel="local"]
local -> trim celery_task -> s3_download
s3_download -> ffmpeg_local
ffmpeg_local -> s3_upload
s3_upload -> db_update
db_update -> completed [style=dotted]
// gRPC streaming // Lambda path
subgraph cluster_grpc { subgraph cluster_lambda {
label="gRPC Communication" label="Lambda Mode (AWS)"
style=filled style=filled
fillcolor="#e8e8f8" fillcolor="#fde8d0"
grpc_stream [label="StreamProgress\n(server streaming)", shape=parallelogram] sfn_start [label="Step Functions\nstart_execution"]
grpc_submit [label="SubmitJob\n(unary)", shape=parallelogram] lambda_fn [label="Lambda\nFFmpeg container"]
grpc_cancel [label="CancelJob\n(unary)", shape=parallelogram] s3_dl_aws [label="S3 Download\n(AWS)"]
ffmpeg_aws [label="FFmpeg\ntranscode/trim"]
s3_ul_aws [label="S3 Upload\n(AWS)"]
callback [label="HTTP Callback\nPOST /jobs/{id}/callback"]
} }
// Progress tracking via gRPC dispatch -> sfn_start [xlabel="lambda"]
progress [label="Progress Updates\n(gRPC → Redis → DB)", shape=note] sfn_start -> lambda_fn
transcode -> progress [style=dotted] lambda_fn -> s3_dl_aws
trim -> progress [style=dotted] s3_dl_aws -> ffmpeg_aws
progress -> grpc_stream [style=dotted, label="stream to client"] ffmpeg_aws -> s3_ul_aws
grpc_stream -> processing [style=dotted, label="update status"] s3_ul_aws -> callback
callback -> completed [style=dotted]
// gRPC job control rest_callback -> completed [style=dashed, xlabel="Lambda reports"]
create_job -> grpc_submit [label="via gRPC"]
grpc_submit -> pending [style=dashed]
cancel_job -> grpc_cancel [label="via gRPC"]
grpc_cancel -> cancelled [style=dashed]
} }

View File

@@ -1,296 +1,329 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?> <?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" <!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"> "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<!-- Generated by graphviz version 14.1.1 (0) <!-- Generated by graphviz version 14.1.2 (0)
--> -->
<!-- Title: job_flow Pages: 1 --> <!-- Title: job_flow Pages: 1 -->
<svg width="1398pt" height="843pt" <svg width="1621pt" height="655pt"
viewBox="0.00 0.00 1398.00 843.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> viewBox="0.00 0.00 1621.00 655.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 838.75)"> <g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 650.5)">
<title>job_flow</title> <title>job_flow</title>
<polygon fill="white" stroke="none" points="-4,4 -4,-838.75 1394,-838.75 1394,4 -4,4"/> <polygon fill="white" stroke="none" points="-4,4 -4,-650.5 1617,-650.5 1617,4 -4,4"/>
<text xml:space="preserve" text-anchor="middle" x="695" y="-815.55" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">MPR &#45; Job Flow</text> <text xml:space="preserve" text-anchor="middle" x="806.5" y="-627.3" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">MPR &#45; Job Flow</text>
<g id="clust1" class="cluster"> <g id="clust1" class="cluster">
<title>cluster_states</title> <title>cluster_api</title>
<polygon fill="#f8f8f8" stroke="black" points="774,-8 774,-297.5 1154,-297.5 1154,-8 774,-8"/> <polygon fill="none" stroke="gray" stroke-dasharray="5,2" points="297,-269.75 297,-349.25 1395,-349.25 1395,-269.75 297,-269.75"/>
<text xml:space="preserve" text-anchor="middle" x="964" y="-278.3" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Job States</text> <text xml:space="preserve" text-anchor="middle" x="846" y="-330.05" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">API Entry Points</text>
</g> </g>
<g id="clust2" class="cluster"> <g id="clust2" class="cluster">
<title>cluster_api</title> <title>cluster_states</title>
<polygon fill="none" stroke="gray" stroke-dasharray="5,2" points="674,-360 674,-439.5 1382,-439.5 1382,-360 674,-360"/> <polygon fill="#f8f8f8" stroke="black" points="572,-11.25 572,-261.75 939,-261.75 939,-11.25 572,-11.25"/>
<text xml:space="preserve" text-anchor="middle" x="1028" y="-420.3" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">API Actions</text> <text xml:space="preserve" text-anchor="middle" x="755.5" y="-242.55" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Job States</text>
</g> </g>
<g id="clust3" class="cluster"> <g id="clust3" class="cluster">
<title>cluster_executor</title> <title>cluster_dispatch</title>
<polygon fill="#fff8e8" stroke="black" points="8,-571.5 8,-799.25 352,-799.25 352,-571.5 8,-571.5"/> <polygon fill="#fff8e8" stroke="black" points="103,-531.5 103,-611 377,-611 377,-531.5 103,-531.5"/>
<text xml:space="preserve" text-anchor="middle" x="180" y="-780.05" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Executor Layer</text> <text xml:space="preserve" text-anchor="middle" x="240" y="-591.8" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Executor Dispatch</text>
</g> </g>
<g id="clust4" class="cluster"> <g id="clust4" class="cluster">
<title>cluster_ffmpeg</title> <title>cluster_local</title>
<polygon fill="#e8f4e8" stroke="black" points="73,-462.5 73,-548.5 393,-548.5 393,-462.5 73,-462.5"/> <polygon fill="#e8f4e8" stroke="black" points="8,-93.5 8,-523.5 203,-523.5 203,-93.5 8,-93.5"/>
<text xml:space="preserve" text-anchor="middle" x="233" y="-529.3" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">FFmpeg Operations</text> <text xml:space="preserve" text-anchor="middle" x="105.5" y="-504.3" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Local Mode (Celery)</text>
</g> </g>
<g id="clust5" class="cluster"> <g id="clust5" class="cluster">
<title>cluster_grpc</title> <title>cluster_lambda</title>
<polygon fill="#e8e8f8" stroke="black" points="8,-193.5 8,-322 766,-322 766,-193.5 8,-193.5"/> <polygon fill="#fde8d0" stroke="black" points="1403,-8 1403,-523.5 1605,-523.5 1605,-8 1403,-8"/>
<text xml:space="preserve" text-anchor="middle" x="387" y="-302.8" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">gRPC Communication</text> <text xml:space="preserve" text-anchor="middle" x="1504" y="-504.3" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Lambda Mode (AWS)</text>
</g>
<!-- rest_create -->
<g id="node1" class="node">
<title>rest_create</title>
<ellipse fill="none" stroke="black" cx="389" cy="-295.75" rx="84.35" ry="18"/>
<text xml:space="preserve" text-anchor="middle" x="389" y="-291.07" font-family="Helvetica,sans-Serif" font-size="14.00">POST /api/jobs/</text>
</g> </g>
<!-- pending --> <!-- pending -->
<g id="node1" class="node"> <g id="node5" class="node">
<title>pending</title> <title>pending</title>
<path fill="#ffc107" stroke="black" d="M971.88,-262C971.88,-262 916.12,-262 916.12,-262 910.12,-262 904.12,-256 904.12,-250 904.12,-250 904.12,-238 904.12,-238 904.12,-232 910.12,-226 916.12,-226 916.12,-226 971.88,-226 971.88,-226 977.88,-226 983.88,-232 983.88,-238 983.88,-238 983.88,-250 983.88,-250 983.88,-256 977.88,-262 971.88,-262"/> <path fill="#ffc107" stroke="black" d="M647.88,-226.25C647.88,-226.25 592.12,-226.25 592.12,-226.25 586.12,-226.25 580.12,-220.25 580.12,-214.25 580.12,-214.25 580.12,-202.25 580.12,-202.25 580.12,-196.25 586.12,-190.25 592.12,-190.25 592.12,-190.25 647.88,-190.25 647.88,-190.25 653.88,-190.25 659.88,-196.25 659.88,-202.25 659.88,-202.25 659.88,-214.25 659.88,-214.25 659.88,-220.25 653.88,-226.25 647.88,-226.25"/>
<text xml:space="preserve" text-anchor="middle" x="944" y="-239.32" font-family="Helvetica,sans-Serif" font-size="14.00">PENDING</text> <text xml:space="preserve" text-anchor="middle" x="620" y="-203.57" font-family="Helvetica,sans-Serif" font-size="14.00">PENDING</text>
</g>
<!-- rest_create&#45;&gt;pending -->
<g id="edge7" class="edge">
<title>rest_create&#45;&gt;pending</title>
<path fill="none" stroke="black" d="M389,-277.61C389,-253.52 389,-214 389,-214 389,-214 568.25,-214 568.25,-214"/>
<polygon fill="black" stroke="black" points="568.25,-217.5 578.25,-214 568.25,-210.5 568.25,-217.5"/>
</g>
<!-- gql_create -->
<g id="node2" class="node">
<title>gql_create</title>
<ellipse fill="none" stroke="black" cx="620" cy="-295.75" rx="103.29" ry="18"/>
<text xml:space="preserve" text-anchor="middle" x="620" y="-291.07" font-family="Helvetica,sans-Serif" font-size="14.00">mutation createJob</text>
</g>
<!-- gql_create&#45;&gt;pending -->
<g id="edge8" class="edge">
<title>gql_create&#45;&gt;pending</title>
<path fill="none" stroke="black" d="M620,-277.62C620,-277.62 620,-238.17 620,-238.17"/>
<polygon fill="black" stroke="black" points="623.5,-238.17 620,-228.17 616.5,-238.17 623.5,-238.17"/>
</g>
<!-- rest_cancel -->
<g id="node3" class="node">
<title>rest_cancel</title>
<ellipse fill="none" stroke="black" cx="1247" cy="-295.75" rx="140.12" ry="18"/>
<text xml:space="preserve" text-anchor="middle" x="1247" y="-291.07" font-family="Helvetica,sans-Serif" font-size="14.00">POST /api/jobs/{id}/cancel</text>
</g>
<!-- cancelled -->
<g id="node9" class="node">
<title>cancelled</title>
<path fill="#6c757d" stroke="black" d="M918.62,-55.25C918.62,-55.25 843.38,-55.25 843.38,-55.25 837.38,-55.25 831.38,-49.25 831.38,-43.25 831.38,-43.25 831.38,-31.25 831.38,-31.25 831.38,-25.25 837.38,-19.25 843.38,-19.25 843.38,-19.25 918.62,-19.25 918.62,-19.25 924.62,-19.25 930.62,-25.25 930.62,-31.25 930.62,-31.25 930.62,-43.25 930.62,-43.25 930.62,-49.25 924.62,-55.25 918.62,-55.25"/>
<text xml:space="preserve" text-anchor="middle" x="881" y="-32.58" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">CANCELLED</text>
</g>
<!-- rest_cancel&#45;&gt;cancelled -->
<g id="edge9" class="edge">
<title>rest_cancel&#45;&gt;cancelled</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M1247,-277.56C1247,-218.66 1247,-37 1247,-37 1247,-37 942.64,-37 942.64,-37"/>
<polygon fill="black" stroke="black" points="942.64,-33.5 932.64,-37 942.64,-40.5 942.64,-33.5"/>
</g>
<!-- rest_callback -->
<g id="node4" class="node">
<title>rest_callback</title>
<ellipse fill="none" stroke="black" cx="915" cy="-295.75" rx="148.54" ry="18"/>
<text xml:space="preserve" text-anchor="middle" x="915" y="-291.07" font-family="Helvetica,sans-Serif" font-size="14.00">POST /api/jobs/{id}/callback</text>
</g>
<!-- completed -->
<g id="node7" class="node">
<title>completed</title>
<path fill="#28a745" stroke="black" d="M776.75,-55.25C776.75,-55.25 699.25,-55.25 699.25,-55.25 693.25,-55.25 687.25,-49.25 687.25,-43.25 687.25,-43.25 687.25,-31.25 687.25,-31.25 687.25,-25.25 693.25,-19.25 699.25,-19.25 699.25,-19.25 776.75,-19.25 776.75,-19.25 782.75,-19.25 788.75,-25.25 788.75,-31.25 788.75,-31.25 788.75,-43.25 788.75,-43.25 788.75,-49.25 782.75,-55.25 776.75,-55.25"/>
<text xml:space="preserve" text-anchor="middle" x="738" y="-32.58" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">COMPLETED</text>
</g>
<!-- rest_callback&#45;&gt;completed -->
<g id="edge24" class="edge">
<title>rest_callback&#45;&gt;completed</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M783.42,-287.15C783.42,-287.15 783.42,-67.24 783.42,-67.24"/>
<polygon fill="black" stroke="black" points="786.92,-67.24 783.42,-57.24 779.92,-67.24 786.92,-67.24"/>
<text xml:space="preserve" text-anchor="middle" x="745.17" y="-180.44" font-family="Helvetica,sans-Serif" font-size="10.00">Lambda reports</text>
</g> </g>
<!-- processing --> <!-- processing -->
<g id="node2" class="node"> <g id="node6" class="node">
<title>processing</title> <title>processing</title>
<path fill="#17a2b8" stroke="black" d="M877.75,-144.75C877.75,-144.75 794.25,-144.75 794.25,-144.75 788.25,-144.75 782.25,-138.75 782.25,-132.75 782.25,-132.75 782.25,-120.75 782.25,-120.75 782.25,-114.75 788.25,-108.75 794.25,-108.75 794.25,-108.75 877.75,-108.75 877.75,-108.75 883.75,-108.75 889.75,-114.75 889.75,-120.75 889.75,-120.75 889.75,-132.75 889.75,-132.75 889.75,-138.75 883.75,-144.75 877.75,-144.75"/> <path fill="#17a2b8" stroke="black" d="M768.75,-140.75C768.75,-140.75 685.25,-140.75 685.25,-140.75 679.25,-140.75 673.25,-134.75 673.25,-128.75 673.25,-128.75 673.25,-116.75 673.25,-116.75 673.25,-110.75 679.25,-104.75 685.25,-104.75 685.25,-104.75 768.75,-104.75 768.75,-104.75 774.75,-104.75 780.75,-110.75 780.75,-116.75 780.75,-116.75 780.75,-128.75 780.75,-128.75 780.75,-134.75 774.75,-140.75 768.75,-140.75"/>
<text xml:space="preserve" text-anchor="middle" x="836" y="-122.08" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">PROCESSING</text> <text xml:space="preserve" text-anchor="middle" x="727" y="-118.08" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">PROCESSING</text>
</g> </g>
<!-- pending&#45;&gt;processing --> <!-- pending&#45;&gt;processing -->
<g id="edge1" class="edge"> <g id="edge1" class="edge">
<title>pending&#45;&gt;processing</title> <title>pending&#45;&gt;processing</title>
<path fill="none" stroke="black" d="M920.04,-225.68C920.04,-194.87 920.04,-136 920.04,-136 920.04,-136 901.69,-136 901.69,-136"/> <path fill="none" stroke="black" d="M654.58,-189.87C654.58,-166.46 654.58,-129 654.58,-129 654.58,-129 661.34,-129 661.34,-129"/>
<polygon fill="black" stroke="black" points="901.69,-132.5 891.69,-136 901.69,-139.5 901.69,-132.5"/> <polygon fill="black" stroke="black" points="661.34,-132.5 671.34,-129 661.34,-125.5 661.34,-132.5"/>
<text xml:space="preserve" text-anchor="middle" x="902.25" y="-170" font-family="Helvetica,sans-Serif" font-size="10.00">worker picks up</text> <text xml:space="preserve" text-anchor="middle" x="616.33" y="-159.3" font-family="Helvetica,sans-Serif" font-size="10.00">worker picks up</text>
</g>
<!-- cancelled -->
<g id="node5" class="node">
<title>cancelled</title>
<path fill="#6c757d" stroke="black" d="M1122.62,-52C1122.62,-52 1047.38,-52 1047.38,-52 1041.38,-52 1035.38,-46 1035.38,-40 1035.38,-40 1035.38,-28 1035.38,-28 1035.38,-22 1041.38,-16 1047.38,-16 1047.38,-16 1122.62,-16 1122.62,-16 1128.62,-16 1134.62,-22 1134.62,-28 1134.62,-28 1134.62,-40 1134.62,-40 1134.62,-46 1128.62,-52 1122.62,-52"/>
<text xml:space="preserve" text-anchor="middle" x="1085" y="-29.32" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">CANCELLED</text>
</g> </g>
<!-- pending&#45;&gt;cancelled --> <!-- pending&#45;&gt;cancelled -->
<g id="edge4" class="edge"> <g id="edge4" class="edge">
<title>pending&#45;&gt;cancelled</title> <title>pending&#45;&gt;cancelled</title>
<path fill="none" stroke="black" d="M984.17,-238C1022.83,-238 1075.49,-238 1075.49,-238 1075.49,-238 1075.49,-63.98 1075.49,-63.98"/> <path fill="none" stroke="black" d="M660.36,-208C737.33,-208 897.54,-208 897.54,-208 897.54,-208 897.54,-67.04 897.54,-67.04"/>
<polygon fill="black" stroke="black" points="1078.99,-63.98 1075.49,-53.98 1071.99,-63.98 1078.99,-63.98"/> <polygon fill="black" stroke="black" points="901.04,-67.04 897.54,-57.04 894.04,-67.04 901.04,-67.04"/>
<text xml:space="preserve" text-anchor="middle" x="1115.38" y="-123.62" font-family="Helvetica,sans-Serif" font-size="10.00">user cancels</text> <text xml:space="preserve" text-anchor="middle" x="819.06" y="-211.25" font-family="Helvetica,sans-Serif" font-size="10.00">user cancels</text>
</g> </g>
<!-- completed --> <!-- dispatch -->
<g id="node3" class="node"> <g id="node10" class="node">
<title>completed</title> <title>dispatch</title>
<path fill="#28a745" stroke="black" d="M871.75,-52C871.75,-52 794.25,-52 794.25,-52 788.25,-52 782.25,-46 782.25,-40 782.25,-40 782.25,-28 782.25,-28 782.25,-22 788.25,-16 794.25,-16 794.25,-16 871.75,-16 871.75,-16 877.75,-16 883.75,-22 883.75,-28 883.75,-28 883.75,-40 883.75,-40 883.75,-46 877.75,-52 871.75,-52"/> <path fill="none" stroke="black" d="M228.12,-573.84C228.12,-573.84 122.92,-559.16 122.92,-559.16 116.98,-558.33 116.98,-556.67 122.92,-555.84 122.92,-555.84 228.12,-541.16 228.12,-541.16 234.06,-540.33 245.94,-540.33 251.88,-541.16 251.88,-541.16 357.08,-555.84 357.08,-555.84 363.02,-556.67 363.02,-558.33 357.08,-559.16 357.08,-559.16 251.88,-573.84 251.88,-573.84 245.94,-574.67 234.06,-574.67 228.12,-573.84"/>
<text xml:space="preserve" text-anchor="middle" x="833" y="-29.32" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">COMPLETED</text> <text xml:space="preserve" text-anchor="middle" x="240" y="-552.83" font-family="Helvetica,sans-Serif" font-size="14.00">MPR_EXECUTOR</text>
</g>
<!-- pending&#45;&gt;dispatch -->
<g id="edge10" class="edge">
<title>pending&#45;&gt;dispatch</title>
<path fill="none" stroke="black" d="M579.92,-202C483.92,-202 248.76,-202 248.76,-202 248.76,-202 248.76,-528.84 248.76,-528.84"/>
<polygon fill="black" stroke="black" points="245.26,-528.84 248.76,-538.84 252.26,-528.84 245.26,-528.84"/>
</g> </g>
<!-- processing&#45;&gt;completed --> <!-- processing&#45;&gt;completed -->
<g id="edge2" class="edge"> <g id="edge2" class="edge">
<title>processing&#45;&gt;completed</title> <title>processing&#45;&gt;completed</title>
<path fill="none" stroke="black" d="M833,-108.43C833,-108.43 833,-63.8 833,-63.8"/> <path fill="none" stroke="black" d="M734,-104.62C734,-104.62 734,-67.16 734,-67.16"/>
<polygon fill="black" stroke="black" points="836.5,-63.8 833,-53.8 829.5,-63.8 836.5,-63.8"/> <polygon fill="black" stroke="black" points="737.5,-67.16 734,-57.16 730.5,-67.16 737.5,-67.16"/>
<text xml:space="preserve" text-anchor="middle" x="844.12" y="-77.25" font-family="Helvetica,sans-Serif" font-size="10.00">success</text> <text xml:space="preserve" text-anchor="middle" x="714.88" y="-89.14" font-family="Helvetica,sans-Serif" font-size="10.00">success</text>
</g> </g>
<!-- failed --> <!-- failed -->
<g id="node4" class="node"> <g id="node8" class="node">
<title>failed</title> <title>failed</title>
<path fill="#dc3545" stroke="black" d="M980,-52C980,-52 940,-52 940,-52 934,-52 928,-46 928,-40 928,-40 928,-28 928,-28 928,-22 934,-16 940,-16 940,-16 980,-16 980,-16 986,-16 992,-22 992,-28 992,-28 992,-40 992,-40 992,-46 986,-52 980,-52"/> <path fill="#dc3545" stroke="black" d="M632,-55.25C632,-55.25 592,-55.25 592,-55.25 586,-55.25 580,-49.25 580,-43.25 580,-43.25 580,-31.25 580,-31.25 580,-25.25 586,-19.25 592,-19.25 592,-19.25 632,-19.25 632,-19.25 638,-19.25 644,-25.25 644,-31.25 644,-31.25 644,-43.25 644,-43.25 644,-49.25 638,-55.25 632,-55.25"/>
<text xml:space="preserve" text-anchor="middle" x="960" y="-29.32" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">FAILED</text> <text xml:space="preserve" text-anchor="middle" x="612" y="-32.58" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">FAILED</text>
</g> </g>
<!-- processing&#45;&gt;failed --> <!-- processing&#45;&gt;failed -->
<g id="edge3" class="edge"> <g id="edge3" class="edge">
<title>processing&#45;&gt;failed</title> <title>processing&#45;&gt;failed</title>
<path fill="none" stroke="black" d="M890.02,-118C918.1,-118 946.62,-118 946.62,-118 946.62,-118 946.62,-63.74 946.62,-63.74"/> <path fill="none" stroke="black" d="M680.25,-104.62C680.25,-77.88 680.25,-31 680.25,-31 680.25,-31 655.64,-31 655.64,-31"/>
<polygon fill="black" stroke="black" points="950.13,-63.74 946.63,-53.74 943.13,-63.74 950.13,-63.74"/> <polygon fill="black" stroke="black" points="655.64,-27.5 645.64,-31 655.64,-34.5 655.64,-27.5"/>
<text xml:space="preserve" text-anchor="middle" x="922.62" y="-77.25" font-family="Helvetica,sans-Serif" font-size="10.00">error</text> <text xml:space="preserve" text-anchor="middle" x="668.62" y="-58.76" font-family="Helvetica,sans-Serif" font-size="10.00">error</text>
</g> </g>
<!-- processing&#45;&gt;cancelled --> <!-- processing&#45;&gt;cancelled -->
<g id="edge5" class="edge"> <g id="edge5" class="edge">
<title>processing&#45;&gt;cancelled</title> <title>processing&#45;&gt;cancelled</title>
<path fill="none" stroke="black" d="M890.24,-127C953.27,-127 1048.75,-127 1048.75,-127 1048.75,-127 1048.75,-63.89 1048.75,-63.89"/> <path fill="none" stroke="black" d="M780.93,-123C819.44,-123 864.46,-123 864.46,-123 864.46,-123 864.46,-66.95 864.46,-66.95"/>
<polygon fill="black" stroke="black" points="1052.25,-63.89 1048.75,-53.89 1045.25,-63.89 1052.25,-63.89"/> <polygon fill="black" stroke="black" points="867.96,-66.95 864.46,-56.95 860.96,-66.95 867.96,-66.95"/>
<text xml:space="preserve" text-anchor="middle" x="1012.38" y="-77.25" font-family="Helvetica,sans-Serif" font-size="10.00">user cancels</text> <text xml:space="preserve" text-anchor="middle" x="820.35" y="-126.25" font-family="Helvetica,sans-Serif" font-size="10.00">user cancels</text>
</g>
<!-- executor -->
<g id="node9" class="node">
<title>executor</title>
<path fill="none" stroke="black" d="M89.31,-758.31C89.31,-758.31 27.19,-726.69 27.19,-726.69 21.85,-723.97 21.85,-718.53 27.19,-715.81 27.19,-715.81 89.31,-684.19 89.31,-684.19 94.65,-681.47 105.35,-681.47 110.69,-684.19 110.69,-684.19 172.81,-715.81 172.81,-715.81 178.15,-718.53 178.15,-723.97 172.81,-726.69 172.81,-726.69 110.69,-758.31 110.69,-758.31 105.35,-761.03 94.65,-761.03 89.31,-758.31"/>
<text xml:space="preserve" text-anchor="middle" x="100" y="-725.2" font-family="Helvetica,sans-Serif" font-size="14.00">Executor</text>
<text xml:space="preserve" text-anchor="middle" x="100" y="-707.95" font-family="Helvetica,sans-Serif" font-size="14.00">(abstract)</text>
</g>
<!-- processing&#45;&gt;executor -->
<g id="edge10" class="edge">
<title>processing&#45;&gt;executor</title>
<path fill="none" stroke="black" d="M836.12,-145.19C836.12,-245.49 836.12,-721 836.12,-721 836.12,-721 195.6,-721 195.6,-721"/>
<polygon fill="black" stroke="black" points="195.6,-717.5 185.6,-721 195.6,-724.5 195.6,-717.5"/>
</g> </g>
<!-- failed&#45;&gt;pending --> <!-- failed&#45;&gt;pending -->
<g id="edge6" class="edge"> <g id="edge6" class="edge">
<title>failed&#45;&gt;pending</title> <title>failed&#45;&gt;pending</title>
<path fill="none" stroke="black" d="M965.25,-52.27C965.25,-52.27 965.25,-214.11 965.25,-214.11"/> <path fill="none" stroke="black" d="M612.06,-55.55C612.06,-55.55 612.06,-178.31 612.06,-178.31"/>
<polygon fill="black" stroke="black" points="961.75,-214.11 965.25,-224.11 968.75,-214.11 961.75,-214.11"/> <polygon fill="black" stroke="black" points="608.56,-178.31 612.06,-188.31 615.56,-178.31 608.56,-178.31"/>
<text xml:space="preserve" text-anchor="middle" x="987.62" y="-123.62" font-family="Helvetica,sans-Serif" font-size="10.00">retry</text> <text xml:space="preserve" text-anchor="middle" x="600.44" y="-120.18" font-family="Helvetica,sans-Serif" font-size="10.00">retry</text>
</g> </g>
<!-- create_job --> <!-- celery_task -->
<g id="node6" class="node">
<title>create_job</title>
<ellipse fill="none" stroke="black" cx="748" cy="-386" rx="66.47" ry="18"/>
<text xml:space="preserve" text-anchor="middle" x="748" y="-381.32" font-family="Helvetica,sans-Serif" font-size="14.00">POST /jobs/</text>
</g>
<!-- create_job&#45;&gt;pending -->
<g id="edge7" class="edge">
<title>create_job&#45;&gt;pending</title>
<path fill="none" stroke="black" d="M798.36,-373.89C798.36,-339.55 798.36,-244 798.36,-244 798.36,-244 892.3,-244 892.3,-244"/>
<polygon fill="black" stroke="black" points="892.3,-247.5 902.3,-244 892.3,-240.5 892.3,-247.5"/>
</g>
<!-- grpc_submit -->
<g id="node15" class="node">
<title>grpc_submit</title>
<path fill="none" stroke="black" d="M528.46,-286.5C528.46,-286.5 408.56,-286.5 408.56,-286.5 402.56,-286.5 394.16,-281 391.77,-275.5 391.77,-275.5 364.33,-212.5 364.33,-212.5 361.94,-207 365.54,-201.5 371.54,-201.5 371.54,-201.5 491.44,-201.5 491.44,-201.5 497.44,-201.5 505.84,-207 508.23,-212.5 508.23,-212.5 535.67,-275.5 535.67,-275.5 538.06,-281 534.46,-286.5 528.46,-286.5"/>
<text xml:space="preserve" text-anchor="middle" x="450" y="-247.95" font-family="Helvetica,sans-Serif" font-size="14.00">SubmitJob</text>
<text xml:space="preserve" text-anchor="middle" x="450" y="-230.7" font-family="Helvetica,sans-Serif" font-size="14.00">(unary)</text>
</g>
<!-- create_job&#45;&gt;grpc_submit -->
<g id="edge19" class="edge">
<title>create_job&#45;&gt;grpc_submit</title>
<path fill="none" stroke="black" d="M681.06,-386C596.67,-386 462.48,-386 462.48,-386 462.48,-386 462.48,-298.5 462.48,-298.5"/>
<polygon fill="black" stroke="black" points="465.98,-298.5 462.48,-288.5 458.98,-298.5 465.98,-298.5"/>
<text xml:space="preserve" text-anchor="middle" x="620.75" y="-333.25" font-family="Helvetica,sans-Serif" font-size="10.00">via gRPC</text>
</g>
<!-- cancel_job -->
<g id="node7" class="node">
<title>cancel_job</title>
<ellipse fill="none" stroke="black" cx="980" cy="-386" rx="122.23" ry="18"/>
<text xml:space="preserve" text-anchor="middle" x="980" y="-381.32" font-family="Helvetica,sans-Serif" font-size="14.00">POST /jobs/{id}/cancel</text>
</g>
<!-- cancel_job&#45;&gt;cancelled -->
<g id="edge8" class="edge">
<title>cancel_job&#45;&gt;cancelled</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M1088.86,-377.65C1088.86,-377.65 1088.86,-63.86 1088.86,-63.86"/>
<polygon fill="black" stroke="black" points="1092.36,-63.86 1088.86,-53.86 1085.36,-63.86 1092.36,-63.86"/>
</g>
<!-- grpc_cancel -->
<g id="node16" class="node">
<title>grpc_cancel</title>
<path fill="none" stroke="black" d="M746.35,-286.5C746.35,-286.5 631.4,-286.5 631.4,-286.5 625.4,-286.5 617.07,-280.97 614.75,-275.44 614.75,-275.44 588.31,-212.56 588.31,-212.56 585.98,-207.03 589.65,-201.5 595.65,-201.5 595.65,-201.5 710.6,-201.5 710.6,-201.5 716.6,-201.5 724.93,-207.03 727.25,-212.56 727.25,-212.56 753.69,-275.44 753.69,-275.44 756.02,-280.97 752.35,-286.5 746.35,-286.5"/>
<text xml:space="preserve" text-anchor="middle" x="671" y="-247.95" font-family="Helvetica,sans-Serif" font-size="14.00">CancelJob</text>
<text xml:space="preserve" text-anchor="middle" x="671" y="-230.7" font-family="Helvetica,sans-Serif" font-size="14.00">(unary)</text>
</g>
<!-- cancel_job&#45;&gt;grpc_cancel -->
<g id="edge21" class="edge">
<title>cancel_job&#45;&gt;grpc_cancel</title>
<path fill="none" stroke="black" d="M873.76,-376.83C873.76,-350.09 873.76,-274 873.76,-274 873.76,-274 764.98,-274 764.98,-274"/>
<polygon fill="black" stroke="black" points="764.98,-270.5 754.98,-274 764.98,-277.5 764.98,-270.5"/>
<text xml:space="preserve" text-anchor="middle" x="870.75" y="-333.25" font-family="Helvetica,sans-Serif" font-size="10.00">via gRPC</text>
</g>
<!-- retry_job -->
<g id="node8" class="node">
<title>retry_job</title>
<ellipse fill="none" stroke="black" cx="1260" cy="-386" rx="114.34" ry="18"/>
<text xml:space="preserve" text-anchor="middle" x="1260" y="-381.32" font-family="Helvetica,sans-Serif" font-size="14.00">POST /jobs/{id}/retry</text>
</g>
<!-- retry_job&#45;&gt;pending -->
<g id="edge9" class="edge">
<title>retry_job&#45;&gt;pending</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M1260,-367.66C1260,-330.54 1260,-250 1260,-250 1260,-250 995.86,-250 995.86,-250"/>
<polygon fill="black" stroke="black" points="995.86,-246.5 985.86,-250 995.86,-253.5 995.86,-246.5"/>
</g>
<!-- local -->
<g id="node10" class="node">
<title>local</title>
<path fill="none" stroke="black" d="M316.75,-622C316.75,-622 203.25,-622 203.25,-622 197.25,-622 191.25,-616 191.25,-610 191.25,-610 191.25,-591.5 191.25,-591.5 191.25,-585.5 197.25,-579.5 203.25,-579.5 203.25,-579.5 316.75,-579.5 316.75,-579.5 322.75,-579.5 328.75,-585.5 328.75,-591.5 328.75,-591.5 328.75,-610 328.75,-610 328.75,-616 322.75,-622 316.75,-622"/>
<text xml:space="preserve" text-anchor="middle" x="260" y="-604.7" font-family="Helvetica,sans-Serif" font-size="14.00">LocalExecutor</text>
<text xml:space="preserve" text-anchor="middle" x="260" y="-587.45" font-family="Helvetica,sans-Serif" font-size="14.00">Celery + FFmpeg</text>
</g>
<!-- executor&#45;&gt;local -->
<g id="edge11" class="edge">
<title>executor&#45;&gt;local</title>
<path fill="none" stroke="black" d="M165.81,-711.81C165.81,-683.47 165.81,-601 165.81,-601 165.81,-601 179.54,-601 179.54,-601"/>
<polygon fill="black" stroke="black" points="179.54,-604.5 189.54,-601 179.54,-597.5 179.54,-604.5"/>
<text xml:space="preserve" text-anchor="middle" x="287.88" y="-647.25" font-family="Helvetica,sans-Serif" font-size="10.00">MPR_EXECUTOR=local</text>
</g>
<!-- lambda_exec -->
<g id="node11" class="node"> <g id="node11" class="node">
<title>lambda_exec</title> <title>celery_task</title>
<path fill="none" stroke="black" d="M136.12,-622C136.12,-622 27.88,-622 27.88,-622 21.88,-622 15.88,-616 15.88,-610 15.88,-610 15.88,-591.5 15.88,-591.5 15.88,-585.5 21.88,-579.5 27.88,-579.5 27.88,-579.5 136.12,-579.5 136.12,-579.5 142.12,-579.5 148.12,-585.5 148.12,-591.5 148.12,-591.5 148.12,-610 148.12,-610 148.12,-616 142.12,-622 136.12,-622"/> <path fill="none" stroke="black" d="M162.75,-488C162.75,-488 43.25,-488 43.25,-488 37.25,-488 31.25,-482 31.25,-476 31.25,-476 31.25,-457.5 31.25,-457.5 31.25,-451.5 37.25,-445.5 43.25,-445.5 43.25,-445.5 162.75,-445.5 162.75,-445.5 168.75,-445.5 174.75,-451.5 174.75,-457.5 174.75,-457.5 174.75,-476 174.75,-476 174.75,-482 168.75,-488 162.75,-488"/>
<text xml:space="preserve" text-anchor="middle" x="82" y="-604.7" font-family="Helvetica,sans-Serif" font-size="14.00">LambdaExecutor</text> <text xml:space="preserve" text-anchor="middle" x="103" y="-470.7" font-family="Helvetica,sans-Serif" font-size="14.00">Celery Task</text>
<text xml:space="preserve" text-anchor="middle" x="82" y="-587.45" font-family="Helvetica,sans-Serif" font-size="14.00">SQS + Lambda</text> <text xml:space="preserve" text-anchor="middle" x="103" y="-453.45" font-family="Helvetica,sans-Serif" font-size="14.00">(transcode queue)</text>
</g> </g>
<!-- executor&#45;&gt;lambda_exec --> <!-- dispatch&#45;&gt;celery_task -->
<g id="edge12" class="edge"> <g id="edge11" class="edge">
<title>executor&#45;&gt;lambda_exec</title> <title>dispatch&#45;&gt;celery_task</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M82.31,-687.36C82.31,-687.36 82.31,-633.77 82.31,-633.77"/> <path fill="none" stroke="black" d="M142.89,-552.62C142.89,-552.62 142.89,-499.67 142.89,-499.67"/>
<polygon fill="black" stroke="black" points="85.81,-633.77 82.31,-623.77 78.81,-633.77 85.81,-633.77"/> <polygon fill="black" stroke="black" points="146.39,-499.67 142.89,-489.67 139.39,-499.67 146.39,-499.67"/>
<text xml:space="preserve" text-anchor="middle" x="121.62" y="-647.25" font-family="Helvetica,sans-Serif" font-size="10.00">MPR_EXECUTOR=lambda</text> <text xml:space="preserve" text-anchor="middle" x="131.27" y="-529.4" font-family="Helvetica,sans-Serif" font-size="10.00">local</text>
</g> </g>
<!-- transcode --> <!-- sfn_start -->
<g id="node12" class="node"> <g id="node16" class="node">
<title>transcode</title> <title>sfn_start</title>
<path fill="none" stroke="black" d="M172.88,-513C172.88,-513 93.12,-513 93.12,-513 87.12,-513 81.12,-507 81.12,-501 81.12,-501 81.12,-482.5 81.12,-482.5 81.12,-476.5 87.12,-470.5 93.12,-470.5 93.12,-470.5 172.88,-470.5 172.88,-470.5 178.88,-470.5 184.88,-476.5 184.88,-482.5 184.88,-482.5 184.88,-501 184.88,-501 184.88,-507 178.88,-513 172.88,-513"/> <path fill="none" stroke="black" d="M1525.88,-488C1525.88,-488 1428.12,-488 1428.12,-488 1422.12,-488 1416.12,-482 1416.12,-476 1416.12,-476 1416.12,-457.5 1416.12,-457.5 1416.12,-451.5 1422.12,-445.5 1428.12,-445.5 1428.12,-445.5 1525.88,-445.5 1525.88,-445.5 1531.88,-445.5 1537.88,-451.5 1537.88,-457.5 1537.88,-457.5 1537.88,-476 1537.88,-476 1537.88,-482 1531.88,-488 1525.88,-488"/>
<text xml:space="preserve" text-anchor="middle" x="133" y="-495.7" font-family="Helvetica,sans-Serif" font-size="14.00">Transcode</text> <text xml:space="preserve" text-anchor="middle" x="1477" y="-470.7" font-family="Helvetica,sans-Serif" font-size="14.00">Step Functions</text>
<text xml:space="preserve" text-anchor="middle" x="133" y="-478.45" font-family="Helvetica,sans-Serif" font-size="14.00">(with preset)</text> <text xml:space="preserve" text-anchor="middle" x="1477" y="-453.45" font-family="Helvetica,sans-Serif" font-size="14.00">start_execution</text>
</g> </g>
<!-- local&#45;&gt;transcode --> <!-- dispatch&#45;&gt;sfn_start -->
<g id="edge13" class="edge">
<title>local&#45;&gt;transcode</title>
<path fill="none" stroke="black" d="M209.38,-579C209.38,-547.27 209.38,-492 209.38,-492 209.38,-492 196.72,-492 196.72,-492"/>
<polygon fill="black" stroke="black" points="196.72,-488.5 186.72,-492 196.72,-495.5 196.72,-488.5"/>
</g>
<!-- trim -->
<g id="node13" class="node">
<title>trim</title>
<path fill="none" stroke="black" d="M372.5,-513C372.5,-513 239.5,-513 239.5,-513 233.5,-513 227.5,-507 227.5,-501 227.5,-501 227.5,-482.5 227.5,-482.5 227.5,-476.5 233.5,-470.5 239.5,-470.5 239.5,-470.5 372.5,-470.5 372.5,-470.5 378.5,-470.5 384.5,-476.5 384.5,-482.5 384.5,-482.5 384.5,-501 384.5,-501 384.5,-507 378.5,-513 372.5,-513"/>
<text xml:space="preserve" text-anchor="middle" x="306" y="-495.7" font-family="Helvetica,sans-Serif" font-size="14.00">Trim</text>
<text xml:space="preserve" text-anchor="middle" x="306" y="-478.45" font-family="Helvetica,sans-Serif" font-size="14.00">(&#45;c:v copy &#45;c:a copy)</text>
</g>
<!-- local&#45;&gt;trim -->
<g id="edge14" class="edge">
<title>local&#45;&gt;trim</title>
<path fill="none" stroke="black" d="M278.12,-579.22C278.12,-579.22 278.12,-524.75 278.12,-524.75"/>
<polygon fill="black" stroke="black" points="281.63,-524.75 278.13,-514.75 274.63,-524.75 281.63,-524.75"/>
</g>
<!-- progress -->
<g id="node17" class="node">
<title>progress</title>
<polygon fill="none" stroke="black" points="241.5,-407.25 84.5,-407.25 84.5,-364.75 247.5,-364.75 247.5,-401.25 241.5,-407.25"/>
<polyline fill="none" stroke="black" points="241.5,-407.25 241.5,-401.25"/>
<polyline fill="none" stroke="black" points="247.5,-401.25 241.5,-401.25"/>
<text xml:space="preserve" text-anchor="middle" x="166" y="-389.95" font-family="Helvetica,sans-Serif" font-size="14.00">Progress Updates</text>
<text xml:space="preserve" text-anchor="middle" x="166" y="-372.7" font-family="Helvetica,sans-Serif" font-size="14.00">(gRPC → Redis → DB)</text>
</g>
<!-- transcode&#45;&gt;progress -->
<g id="edge15" class="edge">
<title>transcode&#45;&gt;progress</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M134.69,-470.09C134.69,-470.09 134.69,-419.14 134.69,-419.14"/>
<polygon fill="black" stroke="black" points="138.19,-419.14 134.69,-409.14 131.19,-419.14 138.19,-419.14"/>
</g>
<!-- trim&#45;&gt;progress -->
<g id="edge16" class="edge">
<title>trim&#45;&gt;progress</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M237.5,-470.09C237.5,-470.09 237.5,-419.14 237.5,-419.14"/>
<polygon fill="black" stroke="black" points="241,-419.14 237.5,-409.14 234,-419.14 241,-419.14"/>
</g>
<!-- grpc_stream -->
<g id="node14" class="node">
<title>grpc_stream</title>
<path fill="none" stroke="black" d="M304.33,-286.5C304.33,-286.5 89.19,-286.5 89.19,-286.5 83.19,-286.5 73.67,-281.64 70.15,-276.78 70.15,-276.78 22.71,-211.22 22.71,-211.22 19.19,-206.36 21.67,-201.5 27.67,-201.5 27.67,-201.5 242.81,-201.5 242.81,-201.5 248.81,-201.5 258.33,-206.36 261.85,-211.22 261.85,-211.22 309.29,-276.78 309.29,-276.78 312.81,-281.64 310.33,-286.5 304.33,-286.5"/>
<text xml:space="preserve" text-anchor="middle" x="166" y="-247.95" font-family="Helvetica,sans-Serif" font-size="14.00">StreamProgress</text>
<text xml:space="preserve" text-anchor="middle" x="166" y="-230.7" font-family="Helvetica,sans-Serif" font-size="14.00">(server streaming)</text>
</g>
<!-- grpc_stream&#45;&gt;processing -->
<g id="edge18" class="edge">
<title>grpc_stream&#45;&gt;processing</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M166,-201.1C166,-167.71 166,-127 166,-127 166,-127 770.51,-127 770.51,-127"/>
<polygon fill="black" stroke="black" points="770.51,-130.5 780.51,-127 770.51,-123.5 770.51,-130.5"/>
<text xml:space="preserve" text-anchor="middle" x="476.38" y="-170" font-family="Helvetica,sans-Serif" font-size="10.00">update status</text>
</g>
<!-- grpc_submit&#45;&gt;pending -->
<g id="edge20" class="edge">
<title>grpc_submit&#45;&gt;pending</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M450,-201.06C450,-186.11 450,-173 450,-173 450,-173 912.08,-173 912.08,-173 912.08,-173 912.08,-214.2 912.08,-214.2"/>
<polygon fill="black" stroke="black" points="908.58,-214.2 912.08,-224.2 915.58,-214.2 908.58,-214.2"/>
</g>
<!-- grpc_cancel&#45;&gt;cancelled -->
<g id="edge22" class="edge">
<title>grpc_cancel&#45;&gt;cancelled</title>
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M728.29,-214C836.93,-214 1062.12,-214 1062.12,-214 1062.12,-214 1062.12,-63.76 1062.12,-63.76"/>
<polygon fill="black" stroke="black" points="1065.62,-63.76 1062.12,-53.76 1058.62,-63.76 1065.62,-63.76"/>
</g>
<!-- progress&#45;&gt;grpc_stream -->
<g id="edge17" class="edge"> <g id="edge17" class="edge">
<title>progress&#45;&gt;grpc_stream</title> <title>dispatch&#45;&gt;sfn_start</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M166,-364.43C166,-364.43 166,-298.49 166,-298.49"/> <path fill="none" stroke="black" d="M336.81,-552.63C336.81,-533.84 336.81,-467 336.81,-467 336.81,-467 1404.18,-467 1404.18,-467"/>
<polygon fill="black" stroke="black" points="169.5,-298.49 166,-288.49 162.5,-298.49 169.5,-298.49"/> <polygon fill="black" stroke="black" points="1404.18,-470.5 1414.18,-467 1404.18,-463.5 1404.18,-470.5"/>
<text xml:space="preserve" text-anchor="middle" x="204.62" y="-333.25" font-family="Helvetica,sans-Serif" font-size="10.00">stream to client</text> <text xml:space="preserve" text-anchor="middle" x="809.3" y="-470.25" font-family="Helvetica,sans-Serif" font-size="10.00">lambda</text>
</g>
<!-- s3_download -->
<g id="node12" class="node">
<title>s3_download</title>
<path fill="none" stroke="black" d="M144.38,-402.5C144.38,-402.5 61.62,-402.5 61.62,-402.5 55.62,-402.5 49.62,-396.5 49.62,-390.5 49.62,-390.5 49.62,-372 49.62,-372 49.62,-366 55.62,-360 61.62,-360 61.62,-360 144.38,-360 144.38,-360 150.38,-360 156.38,-366 156.38,-372 156.38,-372 156.38,-390.5 156.38,-390.5 156.38,-396.5 150.38,-402.5 144.38,-402.5"/>
<text xml:space="preserve" text-anchor="middle" x="103" y="-385.2" font-family="Helvetica,sans-Serif" font-size="14.00">S3 Download</text>
<text xml:space="preserve" text-anchor="middle" x="103" y="-367.95" font-family="Helvetica,sans-Serif" font-size="14.00">(MinIO)</text>
</g>
<!-- celery_task&#45;&gt;s3_download -->
<g id="edge12" class="edge">
<title>celery_task&#45;&gt;s3_download</title>
<path fill="none" stroke="black" d="M103,-445.17C103,-445.17 103,-414.33 103,-414.33"/>
<polygon fill="black" stroke="black" points="106.5,-414.33 103,-404.33 99.5,-414.33 106.5,-414.33"/>
</g>
<!-- ffmpeg_local -->
<g id="node13" class="node">
<title>ffmpeg_local</title>
<path fill="none" stroke="black" d="M153,-317C153,-317 59,-317 59,-317 53,-317 47,-311 47,-305 47,-305 47,-286.5 47,-286.5 47,-280.5 53,-274.5 59,-274.5 59,-274.5 153,-274.5 153,-274.5 159,-274.5 165,-280.5 165,-286.5 165,-286.5 165,-305 165,-305 165,-311 159,-317 153,-317"/>
<text xml:space="preserve" text-anchor="middle" x="106" y="-299.7" font-family="Helvetica,sans-Serif" font-size="14.00">FFmpeg</text>
<text xml:space="preserve" text-anchor="middle" x="106" y="-282.45" font-family="Helvetica,sans-Serif" font-size="14.00">transcode/trim</text>
</g>
<!-- s3_download&#45;&gt;ffmpeg_local -->
<g id="edge13" class="edge">
<title>s3_download&#45;&gt;ffmpeg_local</title>
<path fill="none" stroke="black" d="M103,-359.67C103,-359.67 103,-328.83 103,-328.83"/>
<polygon fill="black" stroke="black" points="106.5,-328.83 103,-318.83 99.5,-328.83 106.5,-328.83"/>
</g>
<!-- s3_upload -->
<g id="node14" class="node">
<title>s3_upload</title>
<path fill="none" stroke="black" d="M138.62,-229.5C138.62,-229.5 75.38,-229.5 75.38,-229.5 69.38,-229.5 63.38,-223.5 63.38,-217.5 63.38,-217.5 63.38,-199 63.38,-199 63.38,-193 69.38,-187 75.38,-187 75.38,-187 138.62,-187 138.62,-187 144.62,-187 150.62,-193 150.62,-199 150.62,-199 150.62,-217.5 150.62,-217.5 150.62,-223.5 144.62,-229.5 138.62,-229.5"/>
<text xml:space="preserve" text-anchor="middle" x="107" y="-212.2" font-family="Helvetica,sans-Serif" font-size="14.00">S3 Upload</text>
<text xml:space="preserve" text-anchor="middle" x="107" y="-194.95" font-family="Helvetica,sans-Serif" font-size="14.00">(MinIO)</text>
</g>
<!-- ffmpeg_local&#45;&gt;s3_upload -->
<g id="edge14" class="edge">
<title>ffmpeg_local&#45;&gt;s3_upload</title>
<path fill="none" stroke="black" d="M107,-274.12C107,-274.12 107,-241.45 107,-241.45"/>
<polygon fill="black" stroke="black" points="110.5,-241.45 107,-231.45 103.5,-241.45 110.5,-241.45"/>
</g>
<!-- db_update -->
<g id="node15" class="node">
<title>db_update</title>
<path fill="none" stroke="black" d="M180.88,-144C180.88,-144 35.12,-144 35.12,-144 29.12,-144 23.12,-138 23.12,-132 23.12,-132 23.12,-113.5 23.12,-113.5 23.12,-107.5 29.12,-101.5 35.12,-101.5 35.12,-101.5 180.88,-101.5 180.88,-101.5 186.88,-101.5 192.88,-107.5 192.88,-113.5 192.88,-113.5 192.88,-132 192.88,-132 192.88,-138 186.88,-144 180.88,-144"/>
<text xml:space="preserve" text-anchor="middle" x="108" y="-126.7" font-family="Helvetica,sans-Serif" font-size="14.00">DB Update</text>
<text xml:space="preserve" text-anchor="middle" x="108" y="-109.45" font-family="Helvetica,sans-Serif" font-size="14.00">(update_job_progress)</text>
</g>
<!-- s3_upload&#45;&gt;db_update -->
<g id="edge15" class="edge">
<title>s3_upload&#45;&gt;db_update</title>
<path fill="none" stroke="black" d="M107,-186.67C107,-186.67 107,-155.83 107,-155.83"/>
<polygon fill="black" stroke="black" points="110.5,-155.83 107,-145.83 103.5,-155.83 110.5,-155.83"/>
</g>
<!-- db_update&#45;&gt;completed -->
<g id="edge16" class="edge">
<title>db_update&#45;&gt;completed</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M193.17,-117C345.61,-117 649.29,-117 649.29,-117 649.29,-117 649.29,-43 649.29,-43 649.29,-43 675.4,-43 675.4,-43"/>
<polygon fill="black" stroke="black" points="675.4,-46.5 685.4,-43 675.4,-39.5 675.4,-46.5"/>
</g>
<!-- lambda_fn -->
<g id="node17" class="node">
<title>lambda_fn</title>
<path fill="none" stroke="black" d="M1546,-402.5C1546,-402.5 1428,-402.5 1428,-402.5 1422,-402.5 1416,-396.5 1416,-390.5 1416,-390.5 1416,-372 1416,-372 1416,-366 1422,-360 1428,-360 1428,-360 1546,-360 1546,-360 1552,-360 1558,-366 1558,-372 1558,-372 1558,-390.5 1558,-390.5 1558,-396.5 1552,-402.5 1546,-402.5"/>
<text xml:space="preserve" text-anchor="middle" x="1487" y="-385.2" font-family="Helvetica,sans-Serif" font-size="14.00">Lambda</text>
<text xml:space="preserve" text-anchor="middle" x="1487" y="-367.95" font-family="Helvetica,sans-Serif" font-size="14.00">FFmpeg container</text>
</g>
<!-- sfn_start&#45;&gt;lambda_fn -->
<g id="edge18" class="edge">
<title>sfn_start&#45;&gt;lambda_fn</title>
<path fill="none" stroke="black" d="M1477,-445.17C1477,-445.17 1477,-414.33 1477,-414.33"/>
<polygon fill="black" stroke="black" points="1480.5,-414.33 1477,-404.33 1473.5,-414.33 1480.5,-414.33"/>
</g>
<!-- s3_dl_aws -->
<g id="node18" class="node">
<title>s3_dl_aws</title>
<path fill="none" stroke="black" d="M1534.38,-317C1534.38,-317 1451.62,-317 1451.62,-317 1445.62,-317 1439.62,-311 1439.62,-305 1439.62,-305 1439.62,-286.5 1439.62,-286.5 1439.62,-280.5 1445.62,-274.5 1451.62,-274.5 1451.62,-274.5 1534.38,-274.5 1534.38,-274.5 1540.38,-274.5 1546.38,-280.5 1546.38,-286.5 1546.38,-286.5 1546.38,-305 1546.38,-305 1546.38,-311 1540.38,-317 1534.38,-317"/>
<text xml:space="preserve" text-anchor="middle" x="1493" y="-299.7" font-family="Helvetica,sans-Serif" font-size="14.00">S3 Download</text>
<text xml:space="preserve" text-anchor="middle" x="1493" y="-282.45" font-family="Helvetica,sans-Serif" font-size="14.00">(AWS)</text>
</g>
<!-- lambda_fn&#45;&gt;s3_dl_aws -->
<g id="edge19" class="edge">
<title>lambda_fn&#45;&gt;s3_dl_aws</title>
<path fill="none" stroke="black" d="M1493,-359.67C1493,-359.67 1493,-328.83 1493,-328.83"/>
<polygon fill="black" stroke="black" points="1496.5,-328.83 1493,-318.83 1489.5,-328.83 1496.5,-328.83"/>
</g>
<!-- ffmpeg_aws -->
<g id="node19" class="node">
<title>ffmpeg_aws</title>
<path fill="none" stroke="black" d="M1545,-229.5C1545,-229.5 1451,-229.5 1451,-229.5 1445,-229.5 1439,-223.5 1439,-217.5 1439,-217.5 1439,-199 1439,-199 1439,-193 1445,-187 1451,-187 1451,-187 1545,-187 1545,-187 1551,-187 1557,-193 1557,-199 1557,-199 1557,-217.5 1557,-217.5 1557,-223.5 1551,-229.5 1545,-229.5"/>
<text xml:space="preserve" text-anchor="middle" x="1498" y="-212.2" font-family="Helvetica,sans-Serif" font-size="14.00">FFmpeg</text>
<text xml:space="preserve" text-anchor="middle" x="1498" y="-194.95" font-family="Helvetica,sans-Serif" font-size="14.00">transcode/trim</text>
</g>
<!-- s3_dl_aws&#45;&gt;ffmpeg_aws -->
<g id="edge20" class="edge">
<title>s3_dl_aws&#45;&gt;ffmpeg_aws</title>
<path fill="none" stroke="black" d="M1493,-274.12C1493,-274.12 1493,-241.45 1493,-241.45"/>
<polygon fill="black" stroke="black" points="1496.5,-241.45 1493,-231.45 1489.5,-241.45 1496.5,-241.45"/>
</g>
<!-- s3_ul_aws -->
<g id="node20" class="node">
<title>s3_ul_aws</title>
<path fill="none" stroke="black" d="M1532.62,-144C1532.62,-144 1469.38,-144 1469.38,-144 1463.38,-144 1457.38,-138 1457.38,-132 1457.38,-132 1457.38,-113.5 1457.38,-113.5 1457.38,-107.5 1463.38,-101.5 1469.38,-101.5 1469.38,-101.5 1532.62,-101.5 1532.62,-101.5 1538.62,-101.5 1544.62,-107.5 1544.62,-113.5 1544.62,-113.5 1544.62,-132 1544.62,-132 1544.62,-138 1538.62,-144 1532.62,-144"/>
<text xml:space="preserve" text-anchor="middle" x="1501" y="-126.7" font-family="Helvetica,sans-Serif" font-size="14.00">S3 Upload</text>
<text xml:space="preserve" text-anchor="middle" x="1501" y="-109.45" font-family="Helvetica,sans-Serif" font-size="14.00">(AWS)</text>
</g>
<!-- ffmpeg_aws&#45;&gt;s3_ul_aws -->
<g id="edge21" class="edge">
<title>ffmpeg_aws&#45;&gt;s3_ul_aws</title>
<path fill="none" stroke="black" d="M1501,-186.67C1501,-186.67 1501,-155.83 1501,-155.83"/>
<polygon fill="black" stroke="black" points="1504.5,-155.83 1501,-145.83 1497.5,-155.83 1504.5,-155.83"/>
</g>
<!-- callback -->
<g id="node21" class="node">
<title>callback</title>
<path fill="none" stroke="black" d="M1585.12,-58.5C1585.12,-58.5 1422.88,-58.5 1422.88,-58.5 1416.88,-58.5 1410.88,-52.5 1410.88,-46.5 1410.88,-46.5 1410.88,-28 1410.88,-28 1410.88,-22 1416.88,-16 1422.88,-16 1422.88,-16 1585.12,-16 1585.12,-16 1591.12,-16 1597.12,-22 1597.12,-28 1597.12,-28 1597.12,-46.5 1597.12,-46.5 1597.12,-52.5 1591.12,-58.5 1585.12,-58.5"/>
<text xml:space="preserve" text-anchor="middle" x="1504" y="-41.2" font-family="Helvetica,sans-Serif" font-size="14.00">HTTP Callback</text>
<text xml:space="preserve" text-anchor="middle" x="1504" y="-23.95" font-family="Helvetica,sans-Serif" font-size="14.00">POST /jobs/{id}/callback</text>
</g>
<!-- s3_ul_aws&#45;&gt;callback -->
<g id="edge22" class="edge">
<title>s3_ul_aws&#45;&gt;callback</title>
<path fill="none" stroke="black" d="M1501,-101.17C1501,-101.17 1501,-70.33 1501,-70.33"/>
<polygon fill="black" stroke="black" points="1504.5,-70.33 1501,-60.33 1497.5,-70.33 1504.5,-70.33"/>
</g>
<!-- callback&#45;&gt;completed -->
<g id="edge23" class="edge">
<title>callback&#45;&gt;completed</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M1427.5,-58.88C1427.5,-69.48 1427.5,-80 1427.5,-80 1427.5,-80 786.08,-80 786.08,-80 786.08,-80 786.08,-67.14 786.08,-67.14"/>
<polygon fill="black" stroke="black" points="789.58,-67.14 786.08,-57.14 782.58,-67.14 789.58,-67.14"/>
</g> </g>
</g> </g>
</svg> </svg>

Before

Width:  |  Height:  |  Size: 21 KiB

After

Width:  |  Height:  |  Size: 24 KiB

View File

@@ -1,14 +1,17 @@
<!DOCTYPE html> <!doctype html>
<html lang="en"> <html lang="en">
<head> <head>
<meta charset="UTF-8"> <meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>MPR - Architecture</title> <title>MPR - Architecture</title>
<link rel="stylesheet" href="styles.css"> <link rel="stylesheet" href="styles.css" />
</head> </head>
<body> <body>
<h1>MPR - Media Processor</h1> <h1>MPR - Media Processor</h1>
<p>A web-based media transcoding tool with professional architecture.</p> <p>
Media transcoding platform with dual execution modes: local (Celery
+ MinIO) and cloud (AWS Step Functions + Lambda + S3).
</p>
<nav> <nav>
<a href="#overview">System Overview</a> <a href="#overview">System Overview</a>
@@ -21,20 +24,42 @@
<div class="diagram"> <div class="diagram">
<h3>Architecture</h3> <h3>Architecture</h3>
<object type="image/svg+xml" data="01-system-overview.svg"> <object type="image/svg+xml" data="01-system-overview.svg">
<img src="01-system-overview.svg" alt="System Overview"> <img src="01-system-overview.svg" alt="System Overview" />
</object> </object>
<a href="01-system-overview.svg" target="_blank">Open full size</a> <a href="01-system-overview.svg" target="_blank"
>Open full size</a
>
</div> </div>
</div> </div>
<div class="legend"> <div class="legend">
<h3>Components</h3> <h3>Components</h3>
<ul> <ul>
<li><span class="color-box" style="background: #e8f4f8"></span> Reverse Proxy (nginx)</li> <li>
<li><span class="color-box" style="background: #f0f8e8"></span> Application Layer (Django, FastAPI, UI)</li> <span class="color-box" style="background: #e8f4f8"></span>
<li><span class="color-box" style="background: #fff8e8"></span> Worker Layer (Celery, Lambda)</li> Reverse Proxy (nginx)
<li><span class="color-box" style="background: #f8e8f0"></span> Data Layer (PostgreSQL, Redis, SQS)</li> </li>
<li><span class="color-box" style="background: #f0f0f0"></span> Storage (Local FS, S3)</li> <li>
<span class="color-box" style="background: #f0f8e8"></span>
Application Layer (Django Admin, FastAPI + GraphQL, Timeline
UI)
</li>
<li>
<span class="color-box" style="background: #fff8e8"></span>
Worker Layer (Celery local mode)
</li>
<li>
<span class="color-box" style="background: #fde8d0"></span>
AWS (Step Functions, Lambda - cloud mode)
</li>
<li>
<span class="color-box" style="background: #f8e8f0"></span>
Data Layer (PostgreSQL, Redis)
</li>
<li>
<span class="color-box" style="background: #f0f0f0"></span>
S3 Storage (MinIO local / AWS S3 cloud)
</li>
</ul> </ul>
</div> </div>
@@ -43,7 +68,7 @@
<div class="diagram"> <div class="diagram">
<h3>Entity Relationships</h3> <h3>Entity Relationships</h3>
<object type="image/svg+xml" data="02-data-model.svg"> <object type="image/svg+xml" data="02-data-model.svg">
<img src="02-data-model.svg" alt="Data Model"> <img src="02-data-model.svg" alt="Data Model" />
</object> </object>
<a href="02-data-model.svg" target="_blank">Open full size</a> <a href="02-data-model.svg" target="_blank">Open full size</a>
</div> </div>
@@ -52,9 +77,19 @@
<div class="legend"> <div class="legend">
<h3>Entities</h3> <h3>Entities</h3>
<ul> <ul>
<li><span class="color-box" style="background: #4a90d9"></span> MediaAsset - Video/audio files with metadata</li> <li>
<li><span class="color-box" style="background: #50b050"></span> TranscodePreset - Encoding configurations</li> <span class="color-box" style="background: #4a90d9"></span>
<li><span class="color-box" style="background: #d9534f"></span> TranscodeJob - Processing queue items</li> MediaAsset - Video/audio files (S3 keys as paths)
</li>
<li>
<span class="color-box" style="background: #50b050"></span>
TranscodePreset - Encoding configurations
</li>
<li>
<span class="color-box" style="background: #d9534f"></span>
TranscodeJob - Processing queue (celery_task_id or
execution_arn)
</li>
</ul> </ul>
</div> </div>
@@ -63,7 +98,7 @@
<div class="diagram"> <div class="diagram">
<h3>Job Lifecycle</h3> <h3>Job Lifecycle</h3>
<object type="image/svg+xml" data="03-job-flow.svg"> <object type="image/svg+xml" data="03-job-flow.svg">
<img src="03-job-flow.svg" alt="Job Flow"> <img src="03-job-flow.svg" alt="Job Flow" />
</object> </object>
<a href="03-job-flow.svg" target="_blank">Open full size</a> <a href="03-job-flow.svg" target="_blank">Open full size</a>
</div> </div>
@@ -72,30 +107,71 @@
<div class="legend"> <div class="legend">
<h3>Job States</h3> <h3>Job States</h3>
<ul> <ul>
<li><span class="color-box" style="background: #ffc107"></span> PENDING - Waiting in queue</li> <li>
<li><span class="color-box" style="background: #17a2b8"></span> PROCESSING - Worker executing</li> <span class="color-box" style="background: #ffc107"></span>
<li><span class="color-box" style="background: #28a745"></span> COMPLETED - Success</li> PENDING - Waiting in queue
<li><span class="color-box" style="background: #dc3545"></span> FAILED - Error occurred</li> </li>
<li><span class="color-box" style="background: #6c757d"></span> CANCELLED - User cancelled</li> <li>
<span class="color-box" style="background: #17a2b8"></span>
PROCESSING - Worker executing
</li>
<li>
<span class="color-box" style="background: #28a745"></span>
COMPLETED - Success
</li>
<li>
<span class="color-box" style="background: #dc3545"></span>
FAILED - Error occurred
</li>
<li>
<span class="color-box" style="background: #6c757d"></span>
CANCELLED - User cancelled
</li>
</ul>
<h3>Execution Modes</h3>
<ul>
<li>
<span class="color-box" style="background: #e8f4e8"></span>
Local: Celery + MinIO (S3 API) + FFmpeg
</li>
<li>
<span class="color-box" style="background: #fde8d0"></span>
Lambda: Step Functions + Lambda + AWS S3
</li>
</ul> </ul>
</div> </div>
<h2>Quick Reference</h2> <h2>API Interfaces</h2>
<pre><code># Generate SVGs from DOT files <pre><code># REST API
dot -Tsvg 01-system-overview.dot -o 01-system-overview.svg http://mpr.local.ar/api/docs - Swagger UI
dot -Tsvg 02-data-model.dot -o 02-data-model.svg POST /api/assets/scan - Scan S3 bucket for media
dot -Tsvg 03-job-flow.dot -o 03-job-flow.svg POST /api/jobs/ - Create transcode job
POST /api/jobs/{id}/callback - Lambda completion callback
# Or generate all at once # GraphQL (GraphiQL)
for f in *.dot; do dot -Tsvg "$f" -o "${f%.dot}.svg"; done</code></pre> http://mpr.local.ar/graphql - GraphiQL IDE
query { assets { id filename } }
mutation { createJob(input: {...}) { id status } }
mutation { scanMediaFolder { found registered } }</code></pre>
<h2>Access Points</h2> <h2>Access Points</h2>
<pre><code># Add to /etc/hosts <pre><code># Local development
127.0.0.1 mpr.local.ar 127.0.0.1 mpr.local.ar
# URLs
http://mpr.local.ar/admin - Django Admin http://mpr.local.ar/admin - Django Admin
http://mpr.local.ar/api - FastAPI (docs at /api/docs) http://mpr.local.ar/api/docs - FastAPI Swagger
http://mpr.local.ar/ui - Timeline UI</code></pre> http://mpr.local.ar/graphql - GraphiQL
</body> http://mpr.local.ar/ - Timeline UI
http://localhost:9001 - MinIO Console
# AWS deployment
https://mpr.mcrn.ar/ - Production</code></pre>
<h2>Quick Reference</h2>
<pre><code># Render SVGs from DOT files
for f in *.dot; do dot -Tsvg "$f" -o "${f%.dot}.svg"; done
# Switch executor mode
MPR_EXECUTOR=local # Celery + MinIO
MPR_EXECUTOR=lambda # Step Functions + Lambda + S3</code></pre>
</body>
</html> </html>

122
docs/media-storage.md Normal file
View File

@@ -0,0 +1,122 @@
# Media Storage Architecture
## Overview
MPR uses **S3-compatible storage** everywhere. Locally via MinIO, in production via AWS S3. The same boto3 code and S3 keys work in both environments - the only difference is the `S3_ENDPOINT_URL` env var.
## Storage Strategy
### S3 Buckets
| Bucket | Env Var | Purpose |
|--------|---------|---------|
| `mpr-media-in` | `S3_BUCKET_IN` | Source media files |
| `mpr-media-out` | `S3_BUCKET_OUT` | Transcoded/trimmed output |
### S3 Keys as File Paths
- **Database**: Stores S3 object keys (e.g., `video1.mp4`, `subfolder/video3.mp4`)
- **Local dev**: MinIO serves these via S3 API on port 9000
- **AWS**: Real S3, same keys, different endpoint
### Why S3 Everywhere?
1. **Identical code paths** - no branching between local and cloud
2. **Seamless executor switching** - Celery and Lambda both use boto3
3. **Cloud-native** - ready for production without refactoring
## Local Development (MinIO)
### Configuration
```bash
S3_ENDPOINT_URL=http://minio:9000
S3_BUCKET_IN=mpr-media-in
S3_BUCKET_OUT=mpr-media-out
AWS_ACCESS_KEY_ID=minioadmin
AWS_SECRET_ACCESS_KEY=minioadmin
```
### How It Works
- MinIO runs as a Docker container (port 9000 API, port 9001 console)
- `minio-init` container creates buckets and sets public read access on startup
- Nginx proxies `/media/in/` and `/media/out/` to MinIO buckets
- Upload files via MinIO Console (http://localhost:9001) or `mc` CLI
### Upload Files to MinIO
```bash
# Using mc CLI
mc alias set local http://localhost:9000 minioadmin minioadmin
mc cp video.mp4 local/mpr-media-in/
# Using aws CLI with endpoint override
aws --endpoint-url http://localhost:9000 s3 cp video.mp4 s3://mpr-media-in/
```
## AWS Production (S3)
### Configuration
```bash
# No S3_ENDPOINT_URL = uses real AWS S3
S3_BUCKET_IN=mpr-media-in
S3_BUCKET_OUT=mpr-media-out
AWS_REGION=us-east-1
AWS_ACCESS_KEY_ID=<real-key>
AWS_SECRET_ACCESS_KEY=<real-secret>
```
### Upload Files to S3
```bash
aws s3 cp video.mp4 s3://mpr-media-in/
aws s3 sync /local/media/ s3://mpr-media-in/
```
## Storage Module
`core/storage.py` provides all S3 operations:
```python
from core.storage import (
get_s3_client, # boto3 client (MinIO or AWS)
list_objects, # List bucket contents, filter by extension
download_file, # Download S3 object to local path
download_to_temp, # Download to temp file (caller cleans up)
upload_file, # Upload local file to S3
get_presigned_url, # Generate presigned URL
BUCKET_IN, # Input bucket name
BUCKET_OUT, # Output bucket name
)
```
## API Endpoints
### Scan Media (REST)
```http
POST /api/assets/scan
```
Lists objects in `S3_BUCKET_IN`, registers new media files.
### Scan Media (GraphQL)
```graphql
mutation { scanMediaFolder { found registered skipped files } }
```
## Job Flow with S3
### Local Mode (Celery)
1. Celery task receives `source_key` and `output_key`
2. Downloads source from `S3_BUCKET_IN` to temp file
3. Runs FFmpeg locally
4. Uploads result to `S3_BUCKET_OUT`
5. Cleans up temp files
### Lambda Mode (AWS)
1. Step Functions invokes Lambda with S3 keys
2. Lambda downloads source from `S3_BUCKET_IN` to `/tmp`
3. Runs FFmpeg in container
4. Uploads result to `S3_BUCKET_OUT`
5. Calls back to API with result
Both paths use the same S3 buckets and key structure.
## Supported File Types
**Video:** `.mp4`, `.mkv`, `.avi`, `.mov`, `.webm`, `.flv`, `.wmv`, `.m4v`
**Audio:** `.mp3`, `.wav`, `.flac`, `.aac`, `.ogg`, `.m4a`

0
media/out/.gitkeep Normal file
View File

View File

@@ -16,10 +16,10 @@ Output formats:
- prisma: Prisma schema - prisma: Prisma schema
Usage: Usage:
python -m modelgen from-config -c config.json -o models.py python -m soleprint.station.tools.modelgen from-config -c config.json -o models.py
python -m modelgen from-schema -o models/ --targets pydantic,typescript python -m soleprint.station.tools.modelgen from-schema -o models/ --targets pydantic,typescript
python -m modelgen extract --source /path/to/django --targets pydantic python -m soleprint.station.tools.modelgen extract --source /path/to/django --targets pydantic
python -m modelgen list-formats python -m soleprint.station.tools.modelgen list-formats
""" """
__version__ = "0.2.0" __version__ = "0.2.0"

View File

@@ -16,10 +16,11 @@ Output formats:
- prisma: Prisma schema - prisma: Prisma schema
Usage: Usage:
python -m modelgen --help python -m soleprint.station.tools.modelgen --help
python -m modelgen from-config -c config.json -o models.py python -m soleprint.station.tools.modelgen from-config -c config.json -o models.py
python -m modelgen from-schema -o models/ --targets pydantic,typescript python -m soleprint.station.tools.modelgen from-schema -o models/ --targets pydantic,typescript
python -m modelgen extract --source /path/to/django --targets pydantic python -m soleprint.station.tools.modelgen extract --source /path/to/django --targets pydantic
python -m soleprint.station.tools.modelgen generate --config schema/modelgen.json
""" """
import argparse import argparse
@@ -72,10 +73,24 @@ def cmd_from_schema(args):
print("that exports DATACLASSES and ENUMS lists.", file=sys.stderr) print("that exports DATACLASSES and ENUMS lists.", file=sys.stderr)
sys.exit(1) sys.exit(1)
print(f"Loading schema: {schema_path}") # Parse include groups
schema = load_schema(schema_path) include = None
if args.include:
include = {g.strip() for g in args.include.split(",")}
print(f"Found {len(schema.models)} models, {len(schema.enums)} enums") print(f"Loading schema: {schema_path}")
schema = load_schema(schema_path, include=include)
loaded = []
if schema.models:
loaded.append(f"{len(schema.models)} models")
if schema.enums:
loaded.append(f"{len(schema.enums)} enums")
if schema.api_models:
loaded.append(f"{len(schema.api_models)} api models")
if schema.grpc_messages:
loaded.append(f"{len(schema.grpc_messages)} grpc messages")
print(f"Found {', '.join(loaded)}")
# Parse targets # Parse targets
targets = [t.strip() for t in args.targets.split(",")] targets = [t.strip() for t in args.targets.split(",")]
@@ -163,6 +178,47 @@ def cmd_extract(args):
print("Done!") print("Done!")
def cmd_generate(args):
    """Generate all configured targets from a JSON config file.

    The config file has the shape::

        {
          "schema": "schema/models",
          "targets": [
            {"target": "pydantic", "output": "out/models.py",
             "include": ["dataclasses", "enums"], "name_map": {...}}
          ]
        }

    Exits with status 1 on a missing config file, invalid JSON, a config
    missing required keys, or a missing schema folder. Unknown targets are
    warned about and skipped.
    """
    import json

    config_path = Path(args.config)
    if not config_path.exists():
        print(f"Error: Config file not found: {config_path}", file=sys.stderr)
        sys.exit(1)

    try:
        with open(config_path) as f:
            config = json.load(f)
    except json.JSONDecodeError as e:
        # Keep the friendly "Error: ... / exit(1)" style used by the other
        # failure paths instead of surfacing a raw traceback.
        print(f"Error: Invalid JSON in {config_path}: {e}", file=sys.stderr)
        sys.exit(1)

    # Validate required keys up front so a malformed config fails cleanly
    # rather than with a bare KeyError mid-run.
    try:
        # Resolve paths relative to current working directory
        schema_path = Path(config["schema"])
        target_confs = config["targets"]
    except KeyError as e:
        print(f"Error: Config missing required key: {e}", file=sys.stderr)
        sys.exit(1)

    if not schema_path.exists():
        print(f"Error: Schema folder not found: {schema_path}", file=sys.stderr)
        sys.exit(1)

    # Imported lazily, after validation, so config errors are reported even
    # when the loader's own dependencies are unavailable.
    from .loader import load_schema

    print(f"Loading schema: {schema_path}")

    for target_conf in target_confs:
        target = target_conf["target"]
        output = Path(target_conf["output"])
        include = set(target_conf.get("include", []))
        name_map = target_conf.get("name_map", {})

        if target not in GENERATORS:
            print(f"Warning: Unknown target '{target}', skipping", file=sys.stderr)
            continue

        # Load schema with this target's include filter (empty list = all groups).
        schema = load_schema(schema_path, include=include or None)

        generator = GENERATORS[target](name_map=name_map)
        print(f"Generating {target} to: {output}")
        generator.generate(schema, output)

    print("Done!")
def cmd_list_formats(args): def cmd_list_formats(args):
"""List available output formats.""" """List available output formats."""
print("Available output formats:") print("Available output formats:")
@@ -237,6 +293,12 @@ def main():
default="pydantic", default="pydantic",
help=f"Comma-separated output targets ({formats_str})", help=f"Comma-separated output targets ({formats_str})",
) )
schema_parser.add_argument(
"--include",
type=str,
default=None,
help="Comma-separated model groups to include (dataclasses,enums,api,grpc). Default: all.",
)
schema_parser.set_defaults(func=cmd_from_schema) schema_parser.set_defaults(func=cmd_from_schema)
# extract command # extract command
@@ -275,6 +337,21 @@ def main():
) )
extract_parser.set_defaults(func=cmd_extract) extract_parser.set_defaults(func=cmd_extract)
# generate command (config-driven multi-target)
gen_parser = subparsers.add_parser(
"generate",
help="Generate all targets from a JSON config file",
)
gen_parser.add_argument(
"--config",
"-c",
type=str,
required=True,
help="Path to generation config file (e.g., schema/modelgen.json)",
)
gen_parser.set_defaults(func=cmd_generate)
# list-formats command # list-formats command
formats_parser = subparsers.add_parser( formats_parser = subparsers.add_parser(
"list-formats", "list-formats",

View File

@@ -7,12 +7,14 @@ Supported generators:
- TypeScriptGenerator: TypeScript interfaces - TypeScriptGenerator: TypeScript interfaces
- ProtobufGenerator: Protocol Buffer definitions - ProtobufGenerator: Protocol Buffer definitions
- PrismaGenerator: Prisma schema - PrismaGenerator: Prisma schema
- GrapheneGenerator: Graphene ObjectType/InputObjectType classes
""" """
from typing import Dict, Type from typing import Dict, Type
from .base import BaseGenerator from .base import BaseGenerator
from .django import DjangoGenerator from .django import DjangoGenerator
from .graphene import GrapheneGenerator
from .prisma import PrismaGenerator from .prisma import PrismaGenerator
from .protobuf import ProtobufGenerator from .protobuf import ProtobufGenerator
from .pydantic import PydanticGenerator from .pydantic import PydanticGenerator
@@ -27,12 +29,14 @@ GENERATORS: Dict[str, Type[BaseGenerator]] = {
"protobuf": ProtobufGenerator, "protobuf": ProtobufGenerator,
"proto": ProtobufGenerator, # Alias "proto": ProtobufGenerator, # Alias
"prisma": PrismaGenerator, "prisma": PrismaGenerator,
"graphene": GrapheneGenerator,
} }
__all__ = [ __all__ = [
"BaseGenerator", "BaseGenerator",
"PydanticGenerator", "PydanticGenerator",
"DjangoGenerator", "DjangoGenerator",
"GrapheneGenerator",
"TypeScriptGenerator", "TypeScriptGenerator",
"ProtobufGenerator", "ProtobufGenerator",
"PrismaGenerator", "PrismaGenerator",

View File

@@ -6,12 +6,19 @@ Abstract base class for all code generators.
from abc import ABC, abstractmethod from abc import ABC, abstractmethod
from pathlib import Path from pathlib import Path
from typing import Any from typing import Any, Dict
class BaseGenerator(ABC): class BaseGenerator(ABC):
"""Abstract base for code generators.""" """Abstract base for code generators."""
def __init__(self, name_map: Dict[str, str] = None):
self.name_map = name_map or {}
def map_name(self, name: str) -> str:
"""Apply name_map to a model name."""
return self.name_map.get(name, name)
@abstractmethod @abstractmethod
def generate(self, models: Any, output_path: Path) -> None: def generate(self, models: Any, output_path: Path) -> None:
"""Generate code for the given models to the specified path.""" """Generate code for the given models to the specified path."""

View File

@@ -217,12 +217,14 @@ class DjangoGenerator(BaseGenerator):
# Enum # Enum
if isinstance(base, type) and issubclass(base, Enum): if isinstance(base, type) and issubclass(base, Enum):
enum_name = base.__name__
extra = [] extra = []
if optional: if optional:
extra.append("null=True, blank=True") extra.append("null=True, blank=True")
if default is not dc.MISSING and isinstance(default, Enum): if default is not dc.MISSING and isinstance(default, Enum):
extra.append(f"default=Status.{default.name}") extra.append(f"default={enum_name}.{default.name}")
return DJANGO_TYPES["enum"].format( return DJANGO_TYPES["enum"].format(
enum_name=enum_name,
opts=", " + ", ".join(extra) if extra else "" opts=", " + ", ".join(extra) if extra else ""
) )

View File

@@ -0,0 +1,236 @@
"""
Graphene Generator
Generates graphene ObjectType and InputObjectType classes from model definitions.
Only generates type definitions — queries, mutations, and resolvers are hand-written.
"""
from enum import Enum
from pathlib import Path
from typing import Any, List, get_type_hints
from ..helpers import get_origin_name, get_type_name, unwrap_optional
from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
from ..types import GRAPHENE_RESOLVERS
from .base import BaseGenerator
class GrapheneGenerator(BaseGenerator):
    """Generates graphene type definition files.

    Emits only type declarations — ``graphene.Enum``, ``ObjectType`` and
    ``InputObjectType`` classes. Queries, mutations, and resolvers are
    hand-written elsewhere.
    """

    def file_extension(self) -> str:
        """Return the extension used for generated files."""
        return ".py"

    def generate(self, models: Any, output_path: Path) -> None:
        """Generate graphene types to ``output_path``.

        Args:
            models: One of a SchemaLoader (exposes ``.models`` and,
                optionally, ``.enums`` / ``.api_models``), a
                ``(models, enums)`` tuple from the extractor, or a list
                of Python dataclasses.
            output_path: Destination file; parent directories are created.

        Raises:
            ValueError: If ``models`` is none of the supported shapes.
        """
        output_path.parent.mkdir(parents=True, exist_ok=True)
        if hasattr(models, "models"):
            # SchemaLoader
            content = self._generate_from_definitions(
                models.models,
                getattr(models, "enums", []),
                getattr(models, "api_models", []),
            )
        elif isinstance(models, tuple):
            # (models, enums) tuple from the extractor — no API models.
            content = self._generate_from_definitions(models[0], models[1], [])
        elif isinstance(models, list):
            content = self._generate_from_dataclasses(models)
        else:
            raise ValueError(f"Unsupported input type: {type(models)}")
        output_path.write_text(content)

    def _generate_from_definitions(
        self,
        models: List[ModelDefinition],
        enums: List[EnumDefinition],
        api_models: List[ModelDefinition],
    ) -> str:
        """Generate file content from ModelDefinition objects."""
        lines = self._generate_header()
        # Enums first, so the object types below can reference them.
        for enum_def in enums:
            lines.extend(self._generate_enum(enum_def))
            lines.append("")
            lines.append("")
        # Domain models as ObjectType.
        for model_def in models:
            lines.extend(self._generate_object_type(model_def))
            lines.append("")
            lines.append("")
        # API models — *Request types become InputObjectType, the rest ObjectType.
        for model_def in api_models:
            if model_def.name.endswith("Request"):
                lines.extend(self._generate_input_type(model_def))
            else:
                lines.extend(self._generate_object_type(model_def))
            lines.append("")
            lines.append("")
        return "\n".join(lines).rstrip() + "\n"

    def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
        """Generate file content from Python dataclasses."""
        lines = self._generate_header()
        # Emit each referenced Enum exactly once, before the object types.
        enums_generated = set()
        for cls in dataclasses:
            hints = get_type_hints(cls)
            for type_hint in hints.values():
                base, _ = unwrap_optional(type_hint)
                if isinstance(base, type) and issubclass(base, Enum):
                    if base.__name__ not in enums_generated:
                        lines.extend(self._generate_enum_from_python(base))
                        lines.append("")
                        lines.append("")
                        enums_generated.add(base.__name__)
        for cls in dataclasses:
            lines.extend(self._generate_object_type_from_dataclass(cls))
            lines.append("")
            lines.append("")
        return "\n".join(lines).rstrip() + "\n"

    def _generate_header(self) -> List[str]:
        """Return the fixed file header (module docstring + imports)."""
        return [
            '"""',
            "Graphene Types - GENERATED FILE",
            "",
            "Do not edit directly. Regenerate using modelgen.",
            '"""',
            "",
            "import graphene",
            "",
            "",
        ]

    def _generate_enum(self, enum_def: EnumDefinition) -> List[str]:
        """Generate a graphene.Enum from an EnumDefinition."""
        lines = [f"class {enum_def.name}(graphene.Enum):"]
        for name, value in enum_def.values:
            lines.append(f'    {name} = "{value}"')
        return lines

    def _generate_enum_from_python(self, enum_cls: type) -> List[str]:
        """Generate a graphene.Enum from a Python Enum class."""
        lines = [f"class {enum_cls.__name__}(graphene.Enum):"]
        for member in enum_cls:
            lines.append(f'    {member.name} = "{member.value}"')
        return lines

    def _generate_object_type(self, model_def: ModelDefinition) -> List[str]:
        """Generate a graphene.ObjectType from a ModelDefinition."""
        name = model_def.name
        # Append Type suffix if not already present (Foo -> FooType).
        type_name = f"{name}Type" if not name.endswith("Type") else name
        lines = [f"class {type_name}(graphene.ObjectType):"]
        if model_def.docstring:
            # Only the first docstring line is carried into the output.
            doc = model_def.docstring.strip().split("\n")[0]
            lines.append(f'    """{doc}"""')
            lines.append("")
        if not model_def.fields:
            lines.append("    pass")
        else:
            for field in model_def.fields:
                graphene_type = self._resolve_type(field.type_hint, field.optional)
                lines.append(f"    {field.name} = {graphene_type}")
        return lines

    def _generate_input_type(self, model_def: ModelDefinition) -> List[str]:
        """Generate a graphene.InputObjectType from a *Request ModelDefinition."""
        import dataclasses as dc

        name = model_def.name
        # Convert FooRequest -> FooInput
        if name.endswith("Request"):
            input_name = name[: -len("Request")] + "Input"
        else:
            input_name = f"{name}Input"
        lines = [f"class {input_name}(graphene.InputObjectType):"]
        if model_def.docstring:
            doc = model_def.docstring.strip().split("\n")[0]
            lines.append(f'    """{doc}"""')
            lines.append("")
        if not model_def.fields:
            lines.append("    pass")
        else:
            for field in model_def.fields:
                graphene_type = self._resolve_type(field.type_hint, field.optional)
                # Required only if not optional AND no default value.
                has_default = field.default is not dc.MISSING
                if not field.optional and not has_default:
                    graphene_type = self._make_required(graphene_type)
                elif has_default and not field.optional:
                    graphene_type = self._add_default(graphene_type, field.default)
                lines.append(f"    {field.name} = {graphene_type}")
        return lines

    def _generate_object_type_from_dataclass(self, cls: type) -> List[str]:
        """Generate a graphene.ObjectType from a dataclass."""
        type_name = f"{cls.__name__}Type"
        lines = [f"class {type_name}(graphene.ObjectType):"]
        hints = get_type_hints(cls)
        for name, type_hint in hints.items():
            if name.startswith("_"):
                # Private attributes are not exposed in the GraphQL schema.
                continue
            graphene_type = self._resolve_type(type_hint, False)
            lines.append(f"    {name} = {graphene_type}")
        if len(lines) == 1:
            # Dataclass with no public fields — keep the generated class valid.
            lines.append("    pass")
        return lines

    def _resolve_type(self, type_hint: Any, optional: bool) -> str:
        """Resolve a Python type hint to a graphene field expression string.

        ``optional`` is accepted for signature parity with the other
        generators; graphene fields are nullable by default, so it does
        not affect the emitted expression.
        """
        base, _ = unwrap_optional(type_hint)
        origin = get_origin_name(base)
        type_name = get_type_name(base)
        # Resolver lookup order: typing origin, type name, raw type, enum.
        resolver = (
            GRAPHENE_RESOLVERS.get(origin)
            or GRAPHENE_RESOLVERS.get(type_name)
            or GRAPHENE_RESOLVERS.get(base)
            or (
                GRAPHENE_RESOLVERS["enum"]
                if isinstance(base, type) and issubclass(base, Enum)
                else None
            )
        )
        result = resolver(base) if resolver else "graphene.String"
        # List types already have () syntax from the resolver.
        if result.startswith("graphene.List("):
            return result
        # Scalar types: add () call.
        return f"{result}()"

    def _make_required(self, field_str: str) -> str:
        """Add ``required=True`` to a scalar graphene field expression.

        NOTE(review): only expressions ending in ``()`` are rewritten, so
        ``graphene.List(...)`` inputs are never marked required — confirm
        whether required list inputs are ever needed.
        """
        if field_str.endswith("()"):
            return field_str[:-1] + "required=True)"
        return field_str

    def _add_default(self, field_str: str, default: Any) -> str:
        """Add ``default_value=...`` to a scalar graphene field expression."""
        if callable(default):
            # default_factory — skip, graphene doesn't support factories
            return field_str
        if field_str.endswith("()"):
            return field_str[:-1] + f"default_value={default!r})"
        return field_str

View File

@@ -2,8 +2,12 @@
Pydantic Generator Pydantic Generator
Generates Pydantic BaseModel classes from model definitions. Generates Pydantic BaseModel classes from model definitions.
Supports two output modes:
- File output: flat models (backwards compatible)
- Directory output: CRUD variants (Create/Update/Response) per model
""" """
import dataclasses as dc
from enum import Enum from enum import Enum
from pathlib import Path from pathlib import Path
from typing import Any, List, get_type_hints from typing import Any, List, get_type_hints
@@ -13,6 +17,13 @@ from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
from ..types import PYDANTIC_RESOLVERS from ..types import PYDANTIC_RESOLVERS
from .base import BaseGenerator from .base import BaseGenerator
# Fields to skip per CRUD variant
SKIP_FIELDS = {
"Create": {"id", "created_at", "updated_at", "status", "error_message"},
"Update": {"id", "created_at", "updated_at"},
"Response": set(),
}
class PydanticGenerator(BaseGenerator): class PydanticGenerator(BaseGenerator):
"""Generates Pydantic model files.""" """Generates Pydantic model files."""
@@ -21,52 +32,187 @@ class PydanticGenerator(BaseGenerator):
return ".py" return ".py"
def generate(self, models, output_path: Path) -> None: def generate(self, models, output_path: Path) -> None:
"""Generate Pydantic models to output_path.""" """Generate Pydantic models to output_path.
If output_path is a directory (or doesn't end in .py), generate
multi-file CRUD variants. Otherwise, generate flat models to a
single file.
"""
output_path = Path(output_path)
if output_path.suffix != ".py":
# Directory mode: CRUD variants
self._generate_crud_directory(models, output_path)
else:
# File mode: flat models (backwards compatible)
self._generate_flat_file(models, output_path)
def _generate_flat_file(self, models, output_path: Path) -> None:
"""Generate flat models to a single file (original behavior)."""
output_path.parent.mkdir(parents=True, exist_ok=True) output_path.parent.mkdir(parents=True, exist_ok=True)
# Detect input type and generate accordingly
if hasattr(models, "get_shared_component"): if hasattr(models, "get_shared_component"):
# ConfigLoader (soleprint config)
content = self._generate_from_config(models) content = self._generate_from_config(models)
elif hasattr(models, "models"): elif hasattr(models, "models"):
# SchemaLoader
content = self._generate_from_definitions( content = self._generate_from_definitions(
models.models, getattr(models, "enums", []) models.models, getattr(models, "enums", [])
) )
elif isinstance(models, tuple): elif isinstance(models, tuple):
# (models, enums) tuple from extractor
content = self._generate_from_definitions(models[0], models[1]) content = self._generate_from_definitions(models[0], models[1])
elif isinstance(models, list): elif isinstance(models, list):
# List of dataclasses (MPR style)
content = self._generate_from_dataclasses(models) content = self._generate_from_dataclasses(models)
else: else:
raise ValueError(f"Unsupported input type: {type(models)}") raise ValueError(f"Unsupported input type: {type(models)}")
output_path.write_text(content) output_path.write_text(content)
def _generate_from_definitions( def _generate_crud_directory(self, models, output_dir: Path) -> None:
self, models: List[ModelDefinition], enums: List[EnumDefinition] """Generate CRUD variant files in a directory."""
) -> str: output_dir.mkdir(parents=True, exist_ok=True)
"""Generate from ModelDefinition objects (schema/extract mode)."""
lines = self._generate_header()
# Generate enums if hasattr(models, "models"):
for enum_def in enums: model_defs = models.models
enum_defs = getattr(models, "enums", [])
elif isinstance(models, tuple):
model_defs = models[0]
enum_defs = models[1]
else:
raise ValueError(f"Unsupported input type for CRUD mode: {type(models)}")
# base.py
base_content = "\n".join([
'"""Pydantic Base Schema - GENERATED FILE"""',
"",
"from pydantic import BaseModel, ConfigDict",
"",
"",
"class BaseSchema(BaseModel):",
' """Base schema with ORM mode."""',
" model_config = ConfigDict(from_attributes=True)",
"",
])
(output_dir / "base.py").write_text(base_content)
# Per-model files
imports = ["from .base import BaseSchema"]
all_exports = ['"BaseSchema"']
for model_def in model_defs:
mapped = self.map_name(model_def.name)
module_name = mapped.lower()
lines = [
f'"""{model_def.name} Schemas - GENERATED FILE"""',
"",
"from datetime import datetime",
"from enum import Enum",
"from typing import Any, Dict, List, Optional",
"from uuid import UUID",
"",
"from .base import BaseSchema",
"",
]
# Inline enums used by this model
model_enums = self._collect_model_enums(model_def, enum_defs)
for enum_def in model_enums:
lines.append("")
lines.extend(self._generate_enum(enum_def)) lines.extend(self._generate_enum(enum_def))
lines.append("") lines.append("")
# Generate models # CRUD variants
for suffix in ["Create", "Update", "Response"]:
lines.append("")
lines.extend(self._generate_crud_model(model_def, mapped, suffix))
lines.append("")
content = "\n".join(lines)
(output_dir / f"{module_name}.py").write_text(content)
# Track imports
imports.append(
f"from .{module_name} import {mapped}Create, {mapped}Update, {mapped}Response"
)
all_exports.extend([
f'"{mapped}Create"', f'"{mapped}Update"', f'"{mapped}Response"'
])
for enum_def in model_enums:
imports.append(f"from .{module_name} import {enum_def.name}")
all_exports.append(f'"{enum_def.name}"')
# __init__.py
init_content = "\n".join([
'"""API Schemas - GENERATED FILE"""',
"",
*imports,
"",
f"__all__ = [{', '.join(all_exports)}]",
"",
])
(output_dir / "__init__.py").write_text(init_content)
def _collect_model_enums(
self, model_def: ModelDefinition, enum_defs: List[EnumDefinition]
) -> List[EnumDefinition]:
"""Find enums referenced by a model's fields."""
enum_names = set()
for field in model_def.fields:
base, _ = unwrap_optional(field.type_hint)
if isinstance(base, type) and issubclass(base, Enum):
enum_names.add(base.__name__)
return [e for e in enum_defs if e.name in enum_names]
def _generate_crud_model(
self, model_def: ModelDefinition, mapped_name: str, suffix: str
) -> List[str]:
"""Generate a single CRUD variant (Create/Update/Response)."""
class_name = f"{mapped_name}{suffix}"
skip = SKIP_FIELDS.get(suffix, set())
lines = [
f"class {class_name}(BaseSchema):",
f' """{class_name} schema."""',
]
has_fields = False
for field in model_def.fields:
if field.name.startswith("_") or field.name in skip:
continue
has_fields = True
py_type = self._resolve_type(field.type_hint, field.optional)
# Update variant: all fields optional
if suffix == "Update" and "Optional" not in py_type:
py_type = f"Optional[{py_type}]"
default = self._format_default(field.default, "Optional" in py_type)
lines.append(f" {field.name}: {py_type}{default}")
if not has_fields:
lines.append(" pass")
return lines
# =========================================================================
# Flat file generation (original behavior)
# =========================================================================
def _generate_from_definitions(
self, models: List[ModelDefinition], enums: List[EnumDefinition]
) -> str:
lines = self._generate_header()
for enum_def in enums:
lines.extend(self._generate_enum(enum_def))
lines.append("")
for model_def in models: for model_def in models:
lines.extend(self._generate_model_from_definition(model_def)) lines.extend(self._generate_model_from_definition(model_def))
lines.append("") lines.append("")
return "\n".join(lines) return "\n".join(lines)
def _generate_from_dataclasses(self, dataclasses: List[type]) -> str: def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
"""Generate from Python dataclasses (MPR style)."""
lines = self._generate_header() lines = self._generate_header()
# Collect and generate enums first
enums_generated = set() enums_generated = set()
for cls in dataclasses: for cls in dataclasses:
hints = get_type_hints(cls) hints = get_type_hints(cls)
@@ -77,16 +223,12 @@ class PydanticGenerator(BaseGenerator):
lines.extend(self._generate_enum_from_python(base)) lines.extend(self._generate_enum_from_python(base))
lines.append("") lines.append("")
enums_generated.add(base.__name__) enums_generated.add(base.__name__)
# Generate models
for cls in dataclasses: for cls in dataclasses:
lines.extend(self._generate_model_from_dataclass(cls)) lines.extend(self._generate_model_from_dataclass(cls))
lines.append("") lines.append("")
return "\n".join(lines) return "\n".join(lines)
def _generate_header(self) -> List[str]: def _generate_header(self) -> List[str]:
"""Generate file header."""
return [ return [
'"""', '"""',
"Pydantic Models - GENERATED FILE", "Pydantic Models - GENERATED FILE",
@@ -104,27 +246,23 @@ class PydanticGenerator(BaseGenerator):
] ]
def _generate_enum(self, enum_def: EnumDefinition) -> List[str]: def _generate_enum(self, enum_def: EnumDefinition) -> List[str]:
"""Generate Pydantic enum from EnumDefinition."""
lines = [f"class {enum_def.name}(str, Enum):"] lines = [f"class {enum_def.name}(str, Enum):"]
for name, value in enum_def.values: for name, value in enum_def.values:
lines.append(f' {name} = "{value}"') lines.append(f' {name} = "{value}"')
return lines return lines
def _generate_enum_from_python(self, enum_cls: type) -> List[str]: def _generate_enum_from_python(self, enum_cls: type) -> List[str]:
"""Generate Pydantic enum from Python Enum."""
lines = [f"class {enum_cls.__name__}(str, Enum):"] lines = [f"class {enum_cls.__name__}(str, Enum):"]
for member in enum_cls: for member in enum_cls:
lines.append(f' {member.name} = "{member.value}"') lines.append(f' {member.name} = "{member.value}"')
return lines return lines
def _generate_model_from_definition(self, model_def: ModelDefinition) -> List[str]: def _generate_model_from_definition(self, model_def: ModelDefinition) -> List[str]:
"""Generate Pydantic model from ModelDefinition."""
docstring = model_def.docstring or model_def.name docstring = model_def.docstring or model_def.name
lines = [ lines = [
f"class {model_def.name}(BaseModel):", f"class {model_def.name}(BaseModel):",
f' """{docstring.strip().split(chr(10))[0]}"""', f' """{docstring.strip().split(chr(10))[0]}"""',
] ]
if not model_def.fields: if not model_def.fields:
lines.append(" pass") lines.append(" pass")
else: else:
@@ -132,46 +270,34 @@ class PydanticGenerator(BaseGenerator):
py_type = self._resolve_type(field.type_hint, field.optional) py_type = self._resolve_type(field.type_hint, field.optional)
default = self._format_default(field.default, field.optional) default = self._format_default(field.default, field.optional)
lines.append(f" {field.name}: {py_type}{default}") lines.append(f" {field.name}: {py_type}{default}")
return lines return lines
def _generate_model_from_dataclass(self, cls: type) -> List[str]: def _generate_model_from_dataclass(self, cls: type) -> List[str]:
"""Generate Pydantic model from a dataclass."""
import dataclasses as dc
docstring = cls.__doc__ or cls.__name__ docstring = cls.__doc__ or cls.__name__
lines = [ lines = [
f"class {cls.__name__}(BaseModel):", f"class {cls.__name__}(BaseModel):",
f' """{docstring.strip().split(chr(10))[0]}"""', f' """{docstring.strip().split(chr(10))[0]}"""',
] ]
hints = get_type_hints(cls) hints = get_type_hints(cls)
fields = {f.name: f for f in dc.fields(cls)} fields = {f.name: f for f in dc.fields(cls)}
for name, type_hint in hints.items(): for name, type_hint in hints.items():
if name.startswith("_"): if name.startswith("_"):
continue continue
field = fields.get(name) field = fields.get(name)
default_val = dc.MISSING default_val = dc.MISSING
if field: if field:
if field.default is not dc.MISSING: if field.default is not dc.MISSING:
default_val = field.default default_val = field.default
py_type = self._resolve_type(type_hint, False) py_type = self._resolve_type(type_hint, False)
default = self._format_default(default_val, "Optional" in py_type) default = self._format_default(default_val, "Optional" in py_type)
lines.append(f" {name}: {py_type}{default}") lines.append(f" {name}: {py_type}{default}")
return lines return lines
def _resolve_type(self, type_hint: Any, optional: bool) -> str: def _resolve_type(self, type_hint: Any, optional: bool) -> str:
"""Resolve Python type to Pydantic type string."""
base, is_optional = unwrap_optional(type_hint) base, is_optional = unwrap_optional(type_hint)
optional = optional or is_optional optional = optional or is_optional
origin = get_origin_name(base) origin = get_origin_name(base)
type_name = get_type_name(base) type_name = get_type_name(base)
# Look up resolver
resolver = ( resolver = (
PYDANTIC_RESOLVERS.get(origin) PYDANTIC_RESOLVERS.get(origin)
or PYDANTIC_RESOLVERS.get(type_name) or PYDANTIC_RESOLVERS.get(type_name)
@@ -182,14 +308,10 @@ class PydanticGenerator(BaseGenerator):
else None else None
) )
) )
result = resolver(base) if resolver else "str" result = resolver(base) if resolver else "str"
return f"Optional[{result}]" if optional else result return f"Optional[{result}]" if optional else result
def _format_default(self, default: Any, optional: bool) -> str: def _format_default(self, default: Any, optional: bool) -> str:
"""Format default value for field."""
import dataclasses as dc
if optional: if optional:
return " = None" return " = None"
if default is dc.MISSING or default is None: if default is dc.MISSING or default is None:
@@ -204,7 +326,6 @@ class PydanticGenerator(BaseGenerator):
def _generate_from_config(self, config) -> str: def _generate_from_config(self, config) -> str:
"""Generate from ConfigLoader (soleprint config.json mode).""" """Generate from ConfigLoader (soleprint config.json mode)."""
# Get component names from config
config_comp = config.get_shared_component("config") config_comp = config.get_shared_component("config")
data_comp = config.get_shared_component("data") data_comp = config.get_shared_component("data")

View File

@@ -26,9 +26,10 @@ class TypeScriptGenerator(BaseGenerator):
# Handle different input types # Handle different input types
if hasattr(models, "models"): if hasattr(models, "models"):
# SchemaLoader # SchemaLoader — include api_models if present
all_models = models.models + getattr(models, "api_models", [])
content = self._generate_from_definitions( content = self._generate_from_definitions(
models.models, getattr(models, "enums", []) all_models, getattr(models, "enums", [])
) )
elif isinstance(models, tuple): elif isinstance(models, tuple):
# (models, enums) tuple # (models, enums) tuple

View File

@@ -5,6 +5,7 @@ Loads Python dataclasses from a schema/ folder.
Expects the folder to have an __init__.py that exports: Expects the folder to have an __init__.py that exports:
- DATACLASSES: List of dataclass types to generate - DATACLASSES: List of dataclass types to generate
- ENUMS: List of Enum types to include - ENUMS: List of Enum types to include
- API_MODELS: (optional) List of API request/response types
- GRPC_MESSAGES: (optional) List of gRPC message types - GRPC_MESSAGES: (optional) List of gRPC message types
- GRPC_SERVICE: (optional) gRPC service definition dict - GRPC_SERVICE: (optional) gRPC service definition dict
""" """
@@ -60,12 +61,18 @@ class SchemaLoader:
def __init__(self, schema_path: Path): def __init__(self, schema_path: Path):
self.schema_path = Path(schema_path) self.schema_path = Path(schema_path)
self.models: List[ModelDefinition] = [] self.models: List[ModelDefinition] = []
self.api_models: List[ModelDefinition] = []
self.enums: List[EnumDefinition] = [] self.enums: List[EnumDefinition] = []
self.grpc_messages: List[ModelDefinition] = [] self.grpc_messages: List[ModelDefinition] = []
self.grpc_service: Optional[GrpcServiceDefinition] = None self.grpc_service: Optional[GrpcServiceDefinition] = None
def load(self) -> "SchemaLoader": def load(self, include: Optional[set] = None) -> "SchemaLoader":
"""Load schema definitions from the schema folder.""" """Load schema definitions from the schema folder.
Args:
include: Set of groups to load (dataclasses, enums, api, grpc).
None means load all groups.
"""
init_path = self.schema_path / "__init__.py" init_path = self.schema_path / "__init__.py"
if not init_path.exists(): if not init_path.exists():
@@ -74,22 +81,34 @@ class SchemaLoader:
# Import the schema module # Import the schema module
module = self._import_module(init_path) module = self._import_module(init_path)
load_all = include is None
# Extract DATACLASSES # Extract DATACLASSES
if load_all or "dataclasses" in include:
dataclasses = getattr(module, "DATACLASSES", []) dataclasses = getattr(module, "DATACLASSES", [])
for cls in dataclasses: for cls in dataclasses:
self.models.append(self._parse_dataclass(cls)) self.models.append(self._parse_dataclass(cls))
# Extract API_MODELS (request/response types)
if load_all or "api" in include:
api_models = getattr(module, "API_MODELS", [])
for cls in api_models:
self.api_models.append(self._parse_dataclass(cls))
# Extract ENUMS # Extract ENUMS
if load_all or "enums" in include:
enums = getattr(module, "ENUMS", []) enums = getattr(module, "ENUMS", [])
for enum_cls in enums: for enum_cls in enums:
self.enums.append(self._parse_enum(enum_cls)) self.enums.append(self._parse_enum(enum_cls))
# Extract GRPC_MESSAGES (optional) # Extract GRPC_MESSAGES (optional)
if load_all or "grpc" in include:
grpc_messages = getattr(module, "GRPC_MESSAGES", []) grpc_messages = getattr(module, "GRPC_MESSAGES", [])
for cls in grpc_messages: for cls in grpc_messages:
self.grpc_messages.append(self._parse_dataclass(cls)) self.grpc_messages.append(self._parse_dataclass(cls))
# Extract GRPC_SERVICE (optional) # Extract GRPC_SERVICE (optional)
if load_all or "grpc" in include:
grpc_service = getattr(module, "GRPC_SERVICE", None) grpc_service = getattr(module, "GRPC_SERVICE", None)
if grpc_service: if grpc_service:
self.grpc_service = GrpcServiceDefinition( self.grpc_service = GrpcServiceDefinition(
@@ -163,7 +182,7 @@ class SchemaLoader:
return False return False
def load_schema(schema_path: str | Path) -> SchemaLoader: def load_schema(schema_path: str | Path, include: Optional[set] = None) -> SchemaLoader:
"""Load schema definitions from folder.""" """Load schema definitions from folder."""
loader = SchemaLoader(schema_path) loader = SchemaLoader(schema_path)
return loader.load() return loader.load(include=include)

View File

@@ -22,7 +22,7 @@ DJANGO_TYPES: dict[Any, str] = {
"list": "models.JSONField(default=list, blank=True)", "list": "models.JSONField(default=list, blank=True)",
"text": "models.TextField(blank=True, default='')", "text": "models.TextField(blank=True, default='')",
"bigint": "models.BigIntegerField({opts})", "bigint": "models.BigIntegerField({opts})",
"enum": "models.CharField(max_length=20, choices=Status.choices{opts})", "enum": "models.CharField(max_length=20, choices={enum_name}.choices{opts})",
} }
DJANGO_SPECIAL: dict[str, str] = { DJANGO_SPECIAL: dict[str, str] = {
@@ -137,3 +137,36 @@ PRISMA_SPECIAL: dict[str, str] = {
"created_at": "DateTime @default(now())", "created_at": "DateTime @default(now())",
"updated_at": "DateTime @updatedAt", "updated_at": "DateTime @updatedAt",
} }
# =============================================================================
# Graphene Type Resolvers
# =============================================================================
def _resolve_graphene_list(base: Any) -> str:
"""Resolve graphene List type."""
args = get_args(base)
if args:
inner = args[0]
if inner is str:
return "graphene.List(graphene.String)"
elif inner is int:
return "graphene.List(graphene.Int)"
elif inner is float:
return "graphene.List(graphene.Float)"
elif inner is bool:
return "graphene.List(graphene.Boolean)"
return "graphene.List(graphene.String)"
GRAPHENE_RESOLVERS: dict[Any, Callable[[Any], str]] = {
str: lambda _: "graphene.String",
int: lambda _: "graphene.Int",
float: lambda _: "graphene.Float",
bool: lambda _: "graphene.Boolean",
"UUID": lambda _: "graphene.UUID",
"datetime": lambda _: "graphene.DateTime",
"dict": lambda _: "graphene.JSONString",
"list": _resolve_graphene_list,
"enum": lambda base: f"graphene.String", # Enums exposed as strings in GQL
}

View File

@@ -7,3 +7,4 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mpr.settings")
app = Celery("mpr") app = Celery("mpr")
app.config_from_object("django.conf:settings", namespace="CELERY") app.config_from_object("django.conf:settings", namespace="CELERY")
app.autodiscover_tasks() app.autodiscover_tasks()
app.autodiscover_tasks(["task"])

View File

@@ -25,7 +25,7 @@ class MediaAsset(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
filename = models.CharField(max_length=500) filename = models.CharField(max_length=500)
file_path = models.CharField(max_length=1000) file_path = models.CharField(max_length=1000)
status = models.CharField(max_length=20, choices=Status.choices, default=Status.PENDING) status = models.CharField(max_length=20, choices=AssetStatus.choices, default=AssetStatus.PENDING)
error_message = models.TextField(blank=True, default='') error_message = models.TextField(blank=True, default='')
file_size = models.BigIntegerField(null=True, blank=True) file_size = models.BigIntegerField(null=True, blank=True)
duration = models.FloatField(null=True, blank=True, default=None) duration = models.FloatField(null=True, blank=True, default=None)
@@ -89,13 +89,14 @@ class TranscodeJob(models.Model):
output_filename = models.CharField(max_length=500) output_filename = models.CharField(max_length=500)
output_path = models.CharField(max_length=1000, null=True, blank=True) output_path = models.CharField(max_length=1000, null=True, blank=True)
output_asset_id = models.UUIDField(null=True, blank=True) output_asset_id = models.UUIDField(null=True, blank=True)
status = models.CharField(max_length=20, choices=Status.choices, default=Status.PENDING) status = models.CharField(max_length=20, choices=JobStatus.choices, default=JobStatus.PENDING)
progress = models.FloatField(default=0.0) progress = models.FloatField(default=0.0)
current_frame = models.IntegerField(null=True, blank=True, default=None) current_frame = models.IntegerField(null=True, blank=True, default=None)
current_time = models.FloatField(null=True, blank=True, default=None) current_time = models.FloatField(null=True, blank=True, default=None)
speed = models.CharField(max_length=255, null=True, blank=True) speed = models.CharField(max_length=255, null=True, blank=True)
error_message = models.TextField(blank=True, default='') error_message = models.TextField(blank=True, default='')
celery_task_id = models.CharField(max_length=255, null=True, blank=True) celery_task_id = models.CharField(max_length=255, null=True, blank=True)
execution_arn = models.CharField(max_length=255, null=True, blank=True)
priority = models.IntegerField(default=0) priority = models.IntegerField(default=0)
created_at = models.DateTimeField(auto_now_add=True) created_at = models.DateTimeField(auto_now_add=True)
started_at = models.DateTimeField(null=True, blank=True) started_at = models.DateTimeField(null=True, blank=True)

View File

@@ -19,6 +19,13 @@ ffmpeg-python>=0.2.0
grpcio>=1.60.0 grpcio>=1.60.0
grpcio-tools>=1.60.0 grpcio-tools>=1.60.0
# AWS
boto3>=1.34.0
# GraphQL
graphene>=3.3
starlette-graphene3>=0.6.0
# Testing # Testing
pytest>=7.4.0 pytest>=7.4.0
pytest-django>=4.7.0 pytest-django>=4.7.0

View File

@@ -201,7 +201,7 @@ def update_job_progress(
""" """
Update job progress (called from worker tasks). Update job progress (called from worker tasks).
This updates the in-memory state that StreamProgress reads from. Updates both the in-memory gRPC state and the Django database.
""" """
if job_id in _active_jobs: if job_id in _active_jobs:
_active_jobs[job_id].update( _active_jobs[job_id].update(
@@ -215,6 +215,36 @@ def update_job_progress(
} }
) )
# Update Django database
try:
from django.utils import timezone
from mpr.media_assets.models import TranscodeJob
update_fields = ["progress", "current_frame", "current_time", "speed", "status"]
updates = {
"progress": progress,
"current_frame": current_frame,
"current_time": current_time,
"speed": str(speed),
"status": status,
}
if error:
updates["error_message"] = error
update_fields.append("error_message")
if status == "processing":
updates["started_at"] = timezone.now()
update_fields.append("started_at")
elif status in ("completed", "failed"):
updates["completed_at"] = timezone.now()
update_fields.append("completed_at")
TranscodeJob.objects.filter(id=job_id).update(**updates)
except Exception as e:
logger.warning(f"Failed to update job {job_id} in DB: {e}")
def serve(port: int = None, celery_app=None) -> grpc.Server: def serve(port: int = None, celery_app=None) -> grpc.Server:
""" """

View File

@@ -4,7 +4,7 @@ MPR Schema Definitions - Source of Truth
This package defines the core data models as Python dataclasses. This package defines the core data models as Python dataclasses.
These definitions are used to generate: These definitions are used to generate:
- Django ORM models (mpr/media_assets/models.py) - Django ORM models (mpr/media_assets/models.py)
- Pydantic schemas (api/schemas/*.py) - Pydantic schemas (api/schema/*.py)
- TypeScript types (ui/timeline/src/types.ts) - TypeScript types (ui/timeline/src/types.ts)
- Protobuf definitions (grpc/protos/worker.proto) - Protobuf definitions (grpc/protos/worker.proto)

View File

@@ -1,718 +0,0 @@
#!/usr/bin/env python3
"""
MPR Model Generator
Generates framework-specific models from schema/models/:
- Django ORM models -> mpr/media_assets/models.py
- Pydantic schemas -> api/schemas/*.py
- TypeScript types -> ui/timeline/src/types.ts
- Protobuf -> grpc/protos/worker.proto
Usage:
python schema/generate.py [--django] [--pydantic] [--typescript] [--proto] [--all]
"""
import argparse
import dataclasses as dc
import subprocess
import sys
from enum import Enum
from pathlib import Path
from typing import Any, Callable, Union, get_args, get_origin, get_type_hints
PROJECT_ROOT = Path(__file__).parent.parent
sys.path.insert(0, str(PROJECT_ROOT))
from schema.models import API_MODELS, DATACLASSES, ENUMS, GRPC_MESSAGES, GRPC_SERVICE
# =============================================================================
# Type Dispatch Tables
# =============================================================================
# Python type / sentinel-string -> Django field template.
# String keys ("UUID", "datetime", "dict", "list", "text", "bigint", "enum")
# are sentinels produced by the resolve_* helpers; templates carry
# {max_length} / {opts} / {default} / {enum_name} placeholders.
DJANGO_TYPES: dict[Any, str] = {
    str: "models.CharField(max_length={max_length}{opts})",
    int: "models.IntegerField({opts})",
    float: "models.FloatField({opts})",
    bool: "models.BooleanField(default={default})",
    "UUID": "models.UUIDField({opts})",
    "datetime": "models.DateTimeField({opts})",
    "dict": "models.JSONField(default=dict, blank=True)",
    "list": "models.JSONField(default=list, blank=True)",
    "text": "models.TextField(blank=True, default='')",
    "bigint": "models.BigIntegerField({opts})",
    "enum": "models.CharField(max_length=20, choices={enum_name}.choices{opts})",
}

# Field names with fixed, convention-based Django definitions; these take
# priority over type-based resolution in resolve_django_type.
DJANGO_SPECIAL: dict[str, str] = {
    "id": "models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)",
    "created_at": "models.DateTimeField(auto_now_add=True)",
    "updated_at": "models.DateTimeField(auto_now=True)",
}

# Python type -> Pydantic annotation string. Values are callables receiving
# the base type so containers and enums can inspect it.
PYDANTIC_RESOLVERS: dict[Any, Callable[[Any], str]] = {
    str: lambda _: "str",
    int: lambda _: "int",
    float: lambda _: "float",
    bool: lambda _: "bool",
    "UUID": lambda _: "UUID",
    "datetime": lambda _: "datetime",
    "dict": lambda _: "Dict[str, Any]",
    "list": lambda base: f"List[{get_list_inner(base)}]",
    "enum": lambda base: base.__name__,
}

# Python type -> TypeScript type string. UUID/datetime serialize as strings.
TS_RESOLVERS: dict[Any, Callable[[Any], str]] = {
    str: lambda _: "string",
    int: lambda _: "number",
    float: lambda _: "number",
    bool: lambda _: "boolean",
    "UUID": lambda _: "string",
    "datetime": lambda _: "string",
    "dict": lambda _: "Record<string, unknown>",
    "list": lambda base: (
        f"{TS_RESOLVERS.get(get_args(base)[0], lambda _: 'string')(None)}[]"
        if get_args(base)
        else "string[]"
    ),
    "enum": lambda base: base.__name__,
}

# Python type -> protobuf scalar. Note: no UUID/datetime/enum entries here;
# those fall back to "string" in resolve_proto_type.
PROTO_RESOLVERS: dict[Any, Callable[[Any], str]] = {
    str: lambda _: "string",
    int: lambda _: "int32",
    float: lambda _: "float",
    bool: lambda _: "bool",
    "list": lambda base: (
        f"repeated {PROTO_RESOLVERS.get(get_args(base)[0], lambda _: 'string')(None)}"
        if get_args(base)
        else "repeated string"
    ),
}
# =============================================================================
# Type Helpers
# =============================================================================
def unwrap_optional(type_hint: Any) -> tuple[Any, bool]:
    """Unwrap Optional[T] into (T, True); non-optional hints give (hint, False).

    Only unions that actually contain ``None`` are treated as optional, so a
    plain ``Union[int, str]`` is reported as non-optional (the previous
    implementation flagged every Union as optional). For multi-member unions
    the first non-None member is the base type; a degenerate all-None union
    falls back to ``str``.
    """
    if get_origin(type_hint) is Union:
        members = get_args(type_hint)
        non_none = [m for m in members if m is not type(None)]
        # Optional only when a None member was stripped out.
        is_optional = len(non_none) != len(members)
        return (non_none[0] if non_none else str, is_optional)
    return (type_hint, False)
def get_origin_name(type_hint: Any) -> str | None:
"""Get origin type name: 'dict', 'list', or None."""
origin = get_origin(type_hint)
if origin is dict:
return "dict"
if origin is list:
return "list"
return None
def get_type_name(type_hint: Any) -> str | None:
"""Get type name for special types like UUID, datetime."""
if hasattr(type_hint, "__name__"):
return type_hint.__name__
return None
def get_list_inner(type_hint: Any) -> str:
    """Name the element type of List[T] for primitives; everything else -> 'str'."""
    primitive_names = {str: "str", int: "int", float: "float", bool: "bool"}
    type_args = get_args(type_hint)
    if not type_args:
        return "str"
    return primitive_names.get(type_args[0], "str")
def get_field_default(field: dc.Field) -> Any:
    """Return the field's declared default, or dc.MISSING when none was given."""
    # default_factory defaults are intentionally NOT resolved here; callers
    # filter callables out themselves.
    return dc.MISSING if field.default is dc.MISSING else field.default
def format_opts(optional: bool, extra: list[str] | None = None) -> str:
"""Format field options string."""
parts = []
if optional:
parts.append("null=True, blank=True")
if extra:
parts.extend(extra)
return ", ".join(parts)
# =============================================================================
# Django Generator
# =============================================================================
def resolve_django_type(name: str, type_hint: Any, default: Any) -> str:
    """Resolve Python type to Django field.

    Resolution order: special field names (id/created_at/updated_at),
    container types, UUID/datetime, Enum choices, name-based text/bigint
    overrides, then primitives; unknown types fall back to CharField(255).
    ``default`` is the dataclass default value or dc.MISSING.
    """
    # Special fields
    if name in DJANGO_SPECIAL:
        return DJANGO_SPECIAL[name]
    base, optional = unwrap_optional(type_hint)
    origin = get_origin_name(base)
    type_name = get_type_name(base)
    opts = format_opts(optional)
    # Container types (JSONField; optionality is ignored for these)
    if origin == "dict":
        return DJANGO_TYPES["dict"]
    if origin == "list":
        return DJANGO_TYPES["list"]
    # UUID / datetime
    if type_name == "UUID":
        return DJANGO_TYPES["UUID"].format(opts=opts)
    if type_name == "datetime":
        return DJANGO_TYPES["datetime"].format(opts=opts)
    # Enum -> CharField with choices; enum defaults become EnumName.MEMBER
    if isinstance(base, type) and issubclass(base, Enum):
        enum_name = base.__name__
        extra = []
        if optional:
            extra.append("null=True, blank=True")
        if default is not dc.MISSING and isinstance(default, Enum):
            extra.append(f"default={enum_name}.{default.name}")
        return DJANGO_TYPES["enum"].format(
            enum_name=enum_name, opts=", " + ", ".join(extra) if extra else ""
        )
    # Text fields: long-form str fields, selected by field-name convention
    if base is str and any(x in name for x in ("message", "comments", "description")):
        return DJANGO_TYPES["text"]
    # BigInt fields: byte/bitrate counters may exceed 32-bit range
    if base is int and name in ("file_size", "bitrate"):
        return DJANGO_TYPES["bigint"].format(opts=opts)
    # Basic types; str max_length is widened for path/filename fields
    if base is str:
        max_length = 1000 if "path" in name else 500 if "filename" in name else 255
        return DJANGO_TYPES[str].format(
            max_length=max_length, opts=", " + opts if opts else ""
        )
    if base is int:
        extra = [opts] if opts else []
        # callable defaults are default_factory sentinels -> not literal values
        if default is not dc.MISSING and not callable(default):
            extra.append(f"default={default}")
        return DJANGO_TYPES[int].format(opts=", ".join(extra))
    if base is float:
        extra = [opts] if opts else []
        if default is not dc.MISSING and not callable(default):
            extra.append(f"default={default}")
        return DJANGO_TYPES[float].format(opts=", ".join(extra))
    if base is bool:
        default_val = default if default is not dc.MISSING else False
        return DJANGO_TYPES[bool].format(default=default_val)
    # Fallback
    return DJANGO_TYPES[str].format(max_length=255, opts=", " + opts if opts else "")
def generate_django_enum(enum_cls: type) -> list[str]:
    """Render an Enum as Django TextChoices source lines.

    Each member becomes ``NAME = "value", "Human Label"`` where the label is
    the member name title-cased with underscores replaced by spaces.
    """
    header = f"class {enum_cls.__name__}(models.TextChoices):"
    members = [
        f'    {m.name} = "{m.value}", "{m.name.replace("_", " ").title()}"'
        for m in enum_cls
    ]
    return [header, *members]
def generate_django_model(cls: type) -> list[str]:
    """Generate Django model lines from dataclass.

    Emits a ``models.Model`` subclass: docstring taken from the first line of
    the dataclass docstring, one field per annotated attribute (resolved via
    ``resolve_django_type``), a Meta ordering on ``-created_at``, and a
    ``__str__`` based on the best available display field.
    """
    lines = [
        f"class {cls.__name__}(models.Model):",
        # chr(10) == "\n": keep only the first docstring line.
        f'    """{(cls.__doc__ or cls.__name__).strip().split(chr(10))[0]}"""',
        "",
    ]
    hints = get_type_hints(cls)
    fields = {f.name: f for f in dc.fields(cls)}
    # Fields (underscore-prefixed attributes are private -> skipped)
    for name, type_hint in hints.items():
        if name.startswith("_"):
            continue
        field = fields.get(name)
        default = get_field_default(field) if field else dc.MISSING
        django_field = resolve_django_type(name, type_hint, default)
        lines.append(f"    {name} = {django_field}")
    # Meta and __str__
    lines.extend(
        [
            "",
            "    class Meta:",
            '        ordering = ["-created_at"]',
            "",
            "    def __str__(self):",
        ]
    )
    # Display preference: filename, then name, then primary key.
    if "filename" in hints:
        lines.append("        return self.filename")
    elif "name" in hints:
        lines.append("        return self.name")
    else:
        lines.append("        return str(self.id)")
    return lines
def generate_django() -> str:
    """Generate complete Django models file.

    Layout: generated-file header with imports, then all TextChoices enums
    (emitted first so models can reference their choices), then one model
    per schema dataclass.
    """
    header = [
        '"""',
        "Django ORM Models - GENERATED FILE",
        "",
        "Do not edit directly. Modify schema/models/*.py and run:",
        "    python schema/generate.py --django",
        '"""',
        "",
        "import uuid",
        "from django.db import models",
        "",
    ]
    # Generate enums first
    body = []
    for enum_cls in ENUMS:
        body.extend(generate_django_enum(enum_cls))
        body.extend(["", ""])
    # Generate models
    for cls in DATACLASSES:
        body.extend(generate_django_model(cls))
        body.extend(["", ""])
    return "\n".join(header + body)
# =============================================================================
# Pydantic Generator
# =============================================================================
def resolve_pydantic_type(type_hint: Any) -> str:
    """Resolve Python type to Pydantic type string.

    Resolver lookup order: container origin ('dict'/'list'), special type
    name ('UUID'/'datetime'), plain base type, then Enum. Unknown types fall
    back to 'str'; optional hints are wrapped in Optional[...].
    """
    base, optional = unwrap_optional(type_hint)
    origin = get_origin_name(base)
    type_name = get_type_name(base)
    # Look up resolver by origin, type name, base type, or enum
    resolver = (
        PYDANTIC_RESOLVERS.get(origin)
        or PYDANTIC_RESOLVERS.get(type_name)
        or PYDANTIC_RESOLVERS.get(base)
        or (
            PYDANTIC_RESOLVERS["enum"]
            if isinstance(base, type) and issubclass(base, Enum)
            else None
        )
    )
    result = resolver(base) if resolver else "str"
    return f"Optional[{result}]" if optional else result
def generate_pydantic_schema(cls: type, suffix: str) -> list[str]:
    """Generate Pydantic schema lines from dataclass.

    ``suffix`` is 'Create', 'Update', or 'Response'. Each variant skips a
    different set of server-managed fields; 'Update' additionally forces
    every field to be Optional with a None default.
    """
    # Strip domain prefixes: TranscodeJob -> Job, MediaAsset -> Asset.
    name = cls.__name__.replace("Transcode", "").replace("Media", "")
    class_name = f"{name}{suffix}"
    skip_fields = {
        "Create": {"id", "created_at", "updated_at", "status", "error_message"},
        "Update": {"id", "created_at", "updated_at"},
        "Response": set(),
    }
    lines = [
        f"class {class_name}(BaseSchema):",
        f'    """{class_name} schema."""',
    ]
    hints = get_type_hints(cls)
    fields = {f.name: f for f in dc.fields(cls)}
    # NOTE: the loop variable shadows ``name`` above; class_name is already
    # computed, so the shadowing is harmless.
    for name, type_hint in hints.items():
        if name.startswith("_") or name in skip_fields.get(suffix, set()):
            continue
        py_type = resolve_pydantic_type(type_hint)
        # Update schemas: all fields optional
        if suffix == "Update" and "Optional" not in py_type:
            py_type = f"Optional[{py_type}]"
        field = fields.get(name)
        default = get_field_default(field) if field else dc.MISSING
        # Optional fields always default to None; otherwise render the
        # dataclass default (callables are default_factory -> skipped).
        if "Optional" in py_type:
            lines.append(f"    {name}: {py_type} = None")
        elif default is not dc.MISSING and not callable(default):
            if isinstance(default, str):
                lines.append(f'    {name}: {py_type} = "{default}"')
            elif isinstance(default, Enum):
                lines.append(
                    f"    {name}: {py_type} = {default.__class__.__name__}.{default.name}"
                )
            else:
                lines.append(f"    {name}: {py_type} = {default!r}")
        else:
            lines.append(f"    {name}: {py_type}")
    return lines
def generate_pydantic() -> dict[str, str]:
    """Generate all Pydantic schema files.

    Returns a mapping of filename -> file content: ``base.py`` (shared
    BaseSchema with ORM mode), one module per schema dataclass containing
    its enum plus Create/Update/Response schemas, and an ``__init__.py``
    re-exporting everything.
    """
    files = {}
    # base.py
    files["base.py"] = "\n".join(
        [
            '"""Pydantic Base Schema - GENERATED FILE"""',
            "",
            "from pydantic import BaseModel, ConfigDict",
            "",
            "",
            "class BaseSchema(BaseModel):",
            '    """Base schema with ORM mode."""',
            "    model_config = ConfigDict(from_attributes=True)",
            "",
        ]
    )
    # Schema files per model
    for cls in DATACLASSES:
        # TranscodeJob -> job.py, MediaAsset -> asset.py, etc.
        module_name = cls.__name__.replace("Transcode", "").replace("Media", "").lower()
        lines = [
            f'"""{cls.__name__} Schemas - GENERATED FILE"""',
            "",
            "from datetime import datetime",
            "from enum import Enum",
            "from typing import Any, Dict, List, Optional",
            "from uuid import UUID",
            "",
            "from .base import BaseSchema",
            "",
        ]
        # Add enum if present (only the first enum-typed field is emitted)
        hints = get_type_hints(cls)
        for type_hint in hints.values():
            base, _ = unwrap_optional(type_hint)
            if isinstance(base, type) and issubclass(base, Enum):
                lines.extend(
                    [
                        "",
                        f"class {base.__name__}(str, Enum):",
                    ]
                )
                for m in base:
                    lines.append(f'    {m.name} = "{m.value}"')
                lines.append("")
                break
        # Schemas
        for suffix in ["Create", "Update", "Response"]:
            lines.append("")
            lines.extend(generate_pydantic_schema(cls, suffix))
            lines.append("")
        files[f"{module_name}.py"] = "\n".join(lines)
    # __init__.py
    imports = ["from .base import BaseSchema"]
    all_exports = ['"BaseSchema"']
    for cls in DATACLASSES:
        name = cls.__name__.replace("Transcode", "").replace("Media", "")
        module = name.lower()
        imports.append(
            f"from .{module} import {name}Create, {name}Update, {name}Response"
        )
        all_exports.extend([f'"{name}Create"', f'"{name}Update"', f'"{name}Response"'])
        # Add enum export (mirrors the single-enum rule above)
        hints = get_type_hints(cls)
        for type_hint in hints.values():
            base, _ = unwrap_optional(type_hint)
            if isinstance(base, type) and issubclass(base, Enum):
                imports.append(f"from .{module} import {base.__name__}")
                all_exports.append(f'"{base.__name__}"')
                break
    files["__init__.py"] = "\n".join(
        [
            '"""API Schemas - GENERATED FILE"""',
            "",
            *imports,
            "",
            f"__all__ = [{', '.join(all_exports)}]",
            "",
        ]
    )
    return files
# =============================================================================
# TypeScript Generator
# =============================================================================
def resolve_ts_type(type_hint: Any) -> str:
    """Resolve Python type to TypeScript type string.

    Resolver lookup order mirrors resolve_pydantic_type: container origin,
    special type name, base type, then Enum. Unknown types become 'string';
    optional hints become a ``T | null`` union.
    """
    base, optional = unwrap_optional(type_hint)
    origin = get_origin_name(base)
    type_name = get_type_name(base)
    # Look up resolver by origin, type name, base type, or enum
    resolver = (
        TS_RESOLVERS.get(origin)
        or TS_RESOLVERS.get(type_name)
        or TS_RESOLVERS.get(base)
        or (
            TS_RESOLVERS["enum"]
            if isinstance(base, type) and issubclass(base, Enum)
            else None
        )
    )
    result = resolver(base) if resolver else "string"
    return f"{result} | null" if optional else result
def generate_ts_interface(cls: type) -> list[str]:
    """Generate TypeScript interface lines from dataclass.

    Underscore-prefixed attributes are treated as private and skipped.
    """
    lines = [f"export interface {cls.__name__} {{"]
    for name, type_hint in get_type_hints(cls).items():
        if name.startswith("_"):
            continue
        ts_type = resolve_ts_type(type_hint)
        lines.append(f"  {name}: {ts_type};")
    lines.append("}")
    return lines
def generate_typescript() -> str:
    """Generate complete TypeScript file.

    Layout: generated-file banner, enums as string-literal union types,
    domain-model interfaces, then API request/response interfaces.
    """
    lines = [
        "/**",
        " * MPR TypeScript Types - GENERATED FILE",
        " *",
        " * Do not edit directly. Modify schema/models/*.py and run:",
        " *   python schema/generate.py --typescript",
        " */",
        "",
    ]
    # Enums as union types
    for enum in ENUMS:
        values = " | ".join(f'"{m.value}"' for m in enum)
        lines.append(f"export type {enum.__name__} = {values};")
        lines.append("")
    # Interfaces - domain models
    for cls in DATACLASSES:
        lines.extend(generate_ts_interface(cls))
        lines.append("")
    # Interfaces - API request/response models
    lines.append("// API Request/Response Types")
    lines.append("")
    for cls in API_MODELS:
        lines.extend(generate_ts_interface(cls))
        lines.append("")
    return "\n".join(lines)
# =============================================================================
# Proto Generator
# =============================================================================
def resolve_proto_type(type_hint: Any) -> tuple[str, bool]:
    """Resolve Python type to proto type. Returns (type, is_optional).

    'repeated ...' results are never marked optional (a proto3 repeated
    field is already optional on the wire). Unknown types, including
    UUID/datetime/enums, fall back to 'string'.
    """
    base, optional = unwrap_optional(type_hint)
    origin = get_origin_name(base)
    # Look up resolver by origin or base type
    resolver = PROTO_RESOLVERS.get(origin) or PROTO_RESOLVERS.get(base)
    if resolver:
        result = resolver(base)
        is_repeated = result.startswith("repeated")
        return result, optional and not is_repeated
    return "string", optional
def generate_proto_message(cls: type) -> list[str]:
    """Generate proto message lines from dataclass.

    Field numbers are assigned sequentially from attribute order, so
    reordering schema fields changes the wire format.
    """
    lines = [f"message {cls.__name__} {{"]
    hints = get_type_hints(cls)
    if not hints:
        lines.append("  // Empty")
    else:
        for i, (name, type_hint) in enumerate(hints.items(), 1):
            proto_type, optional = resolve_proto_type(type_hint)
            # proto3 forbids "optional repeated"; only scalar fields get it.
            prefix = (
                "optional "
                if optional and not proto_type.startswith("repeated")
                else ""
            )
            lines.append(f"  {prefix}{proto_type} {name} = {i};")
    lines.append("}")
    return lines
def generate_proto() -> str:
    """Generate complete proto file: banner, service definition, messages."""
    lines = [
        "// MPR Worker Service - GENERATED FILE",
        "//",
        "// Do not edit directly. Modify schema/models/grpc.py and run:",
        "//   python schema/generate.py --proto",
        "",
        'syntax = "proto3";',
        "",
        f"package {GRPC_SERVICE['package']};",
        "",
        f"service {GRPC_SERVICE['name']} {{",
    ]
    # Methods (server-streaming RPCs wrap the response type in "stream")
    for m in GRPC_SERVICE["methods"]:
        req = m["request"].__name__
        resp = m["response"].__name__
        returns = f"stream {resp}" if m["stream_response"] else resp
        lines.append(f"  rpc {m['name']}({req}) returns ({returns});")
    lines.extend(["}", ""])
    # Messages
    for cls in GRPC_MESSAGES:
        lines.extend(generate_proto_message(cls))
        lines.append("")
    return "\n".join(lines)
# =============================================================================
# Writers
# =============================================================================
def write_file(path: Path, content: str) -> None:
    """Write content to file, creating any missing parent directories."""
    parent = path.parent
    parent.mkdir(parents=True, exist_ok=True)
    path.write_text(content)
    # Echo the written path for CLI feedback.
    print(f"  {path}")
def write_django(output_dir: Path) -> None:
    """Write generated Django models to mpr/media_assets/models.py."""
    write_file(output_dir / "mpr" / "media_assets" / "models.py", generate_django())
def write_pydantic(output_dir: Path) -> None:
    """Write all generated Pydantic schema modules under api/schemas/."""
    schemas_dir = output_dir / "api" / "schemas"
    for filename, content in generate_pydantic().items():
        write_file(schemas_dir / filename, content)
def write_typescript(output_dir: Path) -> None:
    """Write generated TypeScript types to ui/timeline/src/types.ts."""
    write_file(
        output_dir / "ui" / "timeline" / "src" / "types.ts", generate_typescript()
    )
def write_proto(output_dir: Path) -> None:
    """Write proto and generate stubs.

    Stub generation is best-effort: if grpc_tools.protoc is unavailable or
    fails, only a warning is printed — the .proto file is still written.
    """
    proto_dir = output_dir / "grpc" / "protos"
    proto_path = proto_dir / "worker.proto"
    write_file(proto_path, generate_proto())
    # Generate Python stubs
    grpc_dir = output_dir / "grpc"
    result = subprocess.run(
        [
            sys.executable,
            "-m",
            "grpc_tools.protoc",
            f"-I{proto_dir}",
            f"--python_out={grpc_dir}",
            f"--grpc_python_out={grpc_dir}",
            str(proto_path),
        ],
        capture_output=True,
        text=True,
    )
    if result.returncode == 0:
        print(f"  {grpc_dir}/worker_pb2.py")
        print(f"  {grpc_dir}/worker_pb2_grpc.py")
    else:
        print("  Warning: grpc_tools failed - pip install grpcio-tools")
# =============================================================================
# Main
# =============================================================================
def main() -> None:
    """CLI entry point: parse target flags and run the selected generators."""
    parser = argparse.ArgumentParser(description="Generate from schema")
    parser.add_argument("--django", action="store_true")
    parser.add_argument("--pydantic", action="store_true")
    parser.add_argument("--typescript", action="store_true")
    parser.add_argument("--proto", action="store_true")
    parser.add_argument("--all", action="store_true")
    parser.add_argument("--output", type=Path, default=PROJECT_ROOT)
    args = parser.parse_args()
    # No target flags given -> generate everything.
    if not any([args.django, args.pydantic, args.typescript, args.proto, args.all]):
        args.all = True
    print(f"Generating to {args.output}\n")
    # (enabled, display name, writer) for each generation target.
    targets: list[tuple[bool, str, Callable]] = [
        (args.django or args.all, "Django", write_django),
        (args.pydantic or args.all, "Pydantic", write_pydantic),
        (args.typescript or args.all, "TypeScript", write_typescript),
        (args.proto or args.all, "Proto", write_proto),
    ]
    for enabled, name, writer in targets:
        if enabled:
            print(f"{name}:")
            writer(args.output)
            print()
    print("Done!")
if __name__ == "__main__":
main()

35
schema/modelgen.json Normal file
View File

@@ -0,0 +1,35 @@
{
"schema": "schema/models",
"targets": [
{
"target": "django",
"output": "mpr/media_assets/models.py",
"include": ["dataclasses", "enums"]
},
{
"target": "pydantic",
"output": "api/schema/",
"include": ["dataclasses", "enums"],
"name_map": {
"TranscodeJob": "Job",
"MediaAsset": "Asset",
"TranscodePreset": "Preset"
}
},
{
"target": "graphene",
"output": "api/schema/graphql.py",
"include": ["dataclasses", "enums", "api"]
},
{
"target": "typescript",
"output": "ui/timeline/src/types.ts",
"include": ["dataclasses", "enums", "api"]
},
{
"target": "protobuf",
"output": "rpc/protos/worker.proto",
"include": ["grpc"]
}
]
}

View File

@@ -5,7 +5,7 @@ This module exports all dataclasses, enums, and constants that the generator
should process. Add new models here to have them included in generation. should process. Add new models here to have them included in generation.
""" """
from .api import CreateJobRequest, SystemStatus from .api import CreateJobRequest, ScanResult, SystemStatus
from .grpc import ( from .grpc import (
GRPC_SERVICE, GRPC_SERVICE,
CancelRequest, CancelRequest,
@@ -26,7 +26,7 @@ DATACLASSES = [MediaAsset, TranscodePreset, TranscodeJob]
# API request/response models - generates TypeScript only (no Django) # API request/response models - generates TypeScript only (no Django)
# WorkerStatus from grpc.py is reused here # WorkerStatus from grpc.py is reused here
API_MODELS = [CreateJobRequest, SystemStatus, WorkerStatus] API_MODELS = [CreateJobRequest, SystemStatus, ScanResult, WorkerStatus]
# Status enums - included in generated code # Status enums - included in generated code
ENUMS = [AssetStatus, JobStatus] ENUMS = [AssetStatus, JobStatus]
@@ -50,6 +50,7 @@ __all__ = [
"TranscodeJob", "TranscodeJob",
# API Models # API Models
"CreateJobRequest", "CreateJobRequest",
"ScanResult",
"SystemStatus", "SystemStatus",
# Enums # Enums
"AssetStatus", "AssetStatus",

View File

@@ -5,8 +5,8 @@ These are separate from the main domain models and represent
the shape of data sent to/from the API endpoints. the shape of data sent to/from the API endpoints.
""" """
from dataclasses import dataclass from dataclasses import dataclass, field
from typing import Optional from typing import List, Optional
from uuid import UUID from uuid import UUID
@@ -19,6 +19,7 @@ class CreateJobRequest:
trim_start: Optional[float] = None # seconds trim_start: Optional[float] = None # seconds
trim_end: Optional[float] = None # seconds trim_end: Optional[float] = None # seconds
output_filename: Optional[str] = None output_filename: Optional[str] = None
priority: int = 0
@dataclass @dataclass
@@ -29,4 +30,14 @@ class SystemStatus:
version: str version: str
@dataclass
class ScanResult:
"""Result of scanning the media input bucket."""
found: int = 0
registered: int = 0
skipped: int = 0
files: List[str] = field(default_factory=list)
# Note: WorkerStatus is defined in grpc.py and reused here # Note: WorkerStatus is defined in grpc.py and reused here

View File

@@ -63,6 +63,7 @@ class TranscodeJob:
# Worker tracking # Worker tracking
celery_task_id: Optional[str] = None celery_task_id: Optional[str] = None
execution_arn: Optional[str] = None # AWS Step Functions execution ARN
priority: int = 0 # Lower = higher priority priority: int = 0 # Lower = higher priority
# Timestamps # Timestamps

View File

@@ -110,7 +110,16 @@ class LocalExecutor(Executor):
class LambdaExecutor(Executor): class LambdaExecutor(Executor):
"""Execute jobs via AWS Lambda (future implementation).""" """Execute jobs via AWS Step Functions + Lambda."""
def __init__(self):
import boto3
region = os.environ.get("AWS_REGION", "us-east-1")
self.sfn = boto3.client("stepfunctions", region_name=region)
self.state_machine_arn = os.environ["STEP_FUNCTION_ARN"]
self.callback_url = os.environ.get("CALLBACK_URL", "")
self.callback_api_key = os.environ.get("CALLBACK_API_KEY", "")
def run( def run(
self, self,
@@ -123,8 +132,36 @@ class LambdaExecutor(Executor):
duration: Optional[float] = None, duration: Optional[float] = None,
progress_callback: Optional[Callable[[int, Dict[str, Any]], None]] = None, progress_callback: Optional[Callable[[int, Dict[str, Any]], None]] = None,
) -> bool: ) -> bool:
"""Execute job via AWS Lambda.""" """Start a Step Functions execution for this job."""
raise NotImplementedError("LambdaExecutor not yet implemented") import json
payload = {
"job_id": job_id,
"source_key": source_path,
"output_key": output_path,
"preset": preset,
"trim_start": trim_start,
"trim_end": trim_end,
"duration": duration,
"callback_url": self.callback_url,
"api_key": self.callback_api_key,
}
response = self.sfn.start_execution(
stateMachineArn=self.state_machine_arn,
name=f"mpr-{job_id}",
input=json.dumps(payload),
)
# Store execution ARN on the job
execution_arn = response["executionArn"]
try:
from mpr.media_assets.models import TranscodeJob
TranscodeJob.objects.filter(id=job_id).update(execution_arn=execution_arn)
except Exception:
pass
return True
# Executor registry # Executor registry

148
task/lambda_handler.py Normal file
View File

@@ -0,0 +1,148 @@
"""
AWS Lambda handler for media transcoding.
Receives a job payload from Step Functions, downloads source from S3,
runs FFmpeg, uploads result to S3, and calls back to the API.
Uses the same core/ffmpeg module as the local Celery worker.
"""
import json
import logging
import os
import tempfile
from pathlib import Path
import boto3
import requests
logger = logging.getLogger()
logger.setLevel(logging.INFO)
# S3 config
S3_BUCKET_IN = os.environ.get("S3_BUCKET_IN", "mpr-media-in")
S3_BUCKET_OUT = os.environ.get("S3_BUCKET_OUT", "mpr-media-out")
AWS_REGION = os.environ.get("AWS_REGION", "us-east-1")
s3 = boto3.client("s3", region_name=AWS_REGION)
def handler(event, context):
    """
    Lambda entry point.

    Event payload (from Step Functions):
    {
        "job_id": "uuid",
        "source_key": "path/to/source.mp4",
        "output_key": "output_filename.mp4",
        "preset": {...} or null,
        "trim_start": float or null,
        "trim_end": float or null,
        "duration": float or null,
        "callback_url": "https://mpr.mcrn.ar/api",
        "api_key": "secret"
    }

    Downloads the source object from S3, transcodes it with the bundled
    core/ffmpeg module, uploads the result to the output bucket, and reports
    the outcome to the API via ``_callback``. Temp files are always removed
    so /tmp stays clean across warm invocations.
    """
    job_id = event["job_id"]
    source_key = event["source_key"]
    output_key = event["output_key"]
    preset = event.get("preset")
    trim_start = event.get("trim_start")
    trim_end = event.get("trim_end")
    duration = event.get("duration")
    callback_url = event.get("callback_url", "")
    api_key = event.get("api_key", "")
    logger.info(f"Starting job {job_id}: {source_key} -> {output_key}")
    # Download source from S3. mkstemp (not the deprecated, race-prone
    # mktemp) creates the file atomically; we only need its path, so the
    # descriptor is closed immediately. This matches the Celery worker task.
    ext_in = Path(source_key).suffix or ".mp4"
    fd_in, tmp_source = tempfile.mkstemp(suffix=ext_in, dir="/tmp")
    os.close(fd_in)
    logger.info(f"Downloading s3://{S3_BUCKET_IN}/{source_key}")
    s3.download_file(S3_BUCKET_IN, source_key, tmp_source)
    # Prepare output temp file (path only; ffmpeg overwrites it)
    ext_out = Path(output_key).suffix or ".mp4"
    fd_out, tmp_output = tempfile.mkstemp(suffix=ext_out, dir="/tmp")
    os.close(fd_out)
    try:
        # Import ffmpeg module (bundled in container)
        from core.ffmpeg.transcode import TranscodeConfig, transcode
        if preset:
            config = TranscodeConfig(
                input_path=tmp_source,
                output_path=tmp_output,
                video_codec=preset.get("video_codec", "libx264"),
                video_bitrate=preset.get("video_bitrate"),
                video_crf=preset.get("video_crf"),
                video_preset=preset.get("video_preset"),
                resolution=preset.get("resolution"),
                framerate=preset.get("framerate"),
                audio_codec=preset.get("audio_codec", "aac"),
                audio_bitrate=preset.get("audio_bitrate"),
                audio_channels=preset.get("audio_channels"),
                audio_samplerate=preset.get("audio_samplerate"),
                container=preset.get("container", "mp4"),
                extra_args=preset.get("extra_args", []),
                trim_start=trim_start,
                trim_end=trim_end,
            )
        else:
            # No preset: stream-copy both tracks (remux / trim only).
            config = TranscodeConfig(
                input_path=tmp_source,
                output_path=tmp_output,
                video_codec="copy",
                audio_codec="copy",
                trim_start=trim_start,
                trim_end=trim_end,
            )
        success = transcode(config, duration=duration)
        if not success:
            raise RuntimeError("Transcode returned False")
        # Upload result to S3
        logger.info(f"Uploading s3://{S3_BUCKET_OUT}/{output_key}")
        s3.upload_file(tmp_output, S3_BUCKET_OUT, output_key)
        result = {"status": "completed", "job_id": job_id, "output_key": output_key}
        # Callback to API (best-effort; see _callback)
        _callback(callback_url, job_id, api_key, {"status": "completed"})
        return result
    except Exception as e:
        logger.exception(f"Job {job_id} failed: {e}")
        _callback(callback_url, job_id, api_key, {
            "status": "failed",
            "error": str(e),
        })
        return {"status": "failed", "job_id": job_id, "error": str(e)}
    finally:
        # Always remove temp files, regardless of outcome.
        for f in [tmp_source, tmp_output]:
            try:
                os.unlink(f)
            except OSError:
                pass
def _callback(callback_url, job_id, api_key, payload):
    """POST the job result back to the API; failures are logged, never raised."""
    if not callback_url:
        return
    try:
        endpoint = f"{callback_url}/jobs/{job_id}/callback"
        request_headers = {"X-API-Key": api_key} if api_key else {}
        response = requests.post(
            endpoint, json=payload, headers=request_headers, timeout=10
        )
        logger.info(f"Callback response: {response.status_code}")
    except Exception as e:
        logger.warning(f"Callback failed: {e}")

View File

@@ -8,21 +8,19 @@ from typing import Any, Dict, Optional
from celery import shared_task from celery import shared_task
from core.storage import BUCKET_IN, BUCKET_OUT, download_to_temp, upload_file
from rpc.server import update_job_progress from rpc.server import update_job_progress
from task.executor import get_executor from task.executor import get_executor
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
# Media paths from environment
MEDIA_ROOT = os.environ.get("MEDIA_ROOT", "/app/media")
@shared_task(bind=True, queue="transcode", max_retries=3, default_retry_delay=60)
@shared_task(bind=True, max_retries=3, default_retry_delay=60)
def run_transcode_job( def run_transcode_job(
self, self,
job_id: str, job_id: str,
source_path: str, source_key: str,
output_path: str, output_key: str,
preset: Optional[Dict[str, Any]] = None, preset: Optional[Dict[str, Any]] = None,
trim_start: Optional[float] = None, trim_start: Optional[float] = None,
trim_end: Optional[float] = None, trim_end: Optional[float] = None,
@@ -31,25 +29,25 @@ def run_transcode_job(
""" """
Celery task to run a transcode/trim job. Celery task to run a transcode/trim job.
Args: Downloads source from S3, runs FFmpeg, uploads result to S3.
job_id: Unique job identifier
source_path: Path to source file
output_path: Path for output file
preset: Transcode preset dict (optional)
trim_start: Trim start time in seconds (optional)
trim_end: Trim end time in seconds (optional)
duration: Source duration for progress calculation
Returns:
Result dict with status and output_path
""" """
logger.info(f"Starting job {job_id}: {source_path} -> {output_path}") logger.info(f"Starting job {job_id}: {source_key} -> {output_key}")
# Update status to processing
update_job_progress(job_id, progress=0, status="processing") update_job_progress(job_id, progress=0, status="processing")
# Download source from S3 to temp file
logger.info(f"Downloading {source_key} from {BUCKET_IN}")
tmp_source = download_to_temp(BUCKET_IN, source_key)
# Create temp output path with same extension
import tempfile
from pathlib import Path
ext = Path(output_key).suffix or ".mp4"
fd, tmp_output = tempfile.mkstemp(suffix=ext)
os.close(fd)
def progress_callback(percent: int, details: Dict[str, Any]) -> None: def progress_callback(percent: int, details: Dict[str, Any]) -> None:
"""Update gRPC progress state."""
update_job_progress( update_job_progress(
job_id, job_id,
progress=percent, progress=percent,
@@ -61,8 +59,8 @@ def run_transcode_job(
executor = get_executor() executor = get_executor()
success = executor.run( success = executor.run(
job_id=job_id, job_id=job_id,
source_path=source_path, source_path=tmp_source,
output_path=output_path, output_path=tmp_output,
preset=preset, preset=preset,
trim_start=trim_start, trim_start=trim_start,
trim_end=trim_end, trim_end=trim_end,
@@ -71,12 +69,16 @@ def run_transcode_job(
) )
if success: if success:
# Upload result to S3
logger.info(f"Uploading {output_key} to {BUCKET_OUT}")
upload_file(tmp_output, BUCKET_OUT, output_key)
logger.info(f"Job {job_id} completed successfully") logger.info(f"Job {job_id} completed successfully")
update_job_progress(job_id, progress=100, status="completed") update_job_progress(job_id, progress=100, status="completed")
return { return {
"status": "completed", "status": "completed",
"job_id": job_id, "job_id": job_id,
"output_path": output_path, "output_key": output_key,
} }
else: else:
raise RuntimeError("Executor returned False") raise RuntimeError("Executor returned False")
@@ -85,7 +87,6 @@ def run_transcode_job(
logger.exception(f"Job {job_id} failed: {e}") logger.exception(f"Job {job_id} failed: {e}")
update_job_progress(job_id, progress=0, status="failed", error=str(e)) update_job_progress(job_id, progress=0, status="failed", error=str(e))
# Retry on transient errors
if self.request.retries < self.max_retries: if self.request.retries < self.max_retries:
raise self.retry(exc=e) raise self.retry(exc=e)
@@ -94,3 +95,11 @@ def run_transcode_job(
"job_id": job_id, "job_id": job_id,
"error": str(e), "error": str(e),
} }
finally:
# Clean up temp files
for f in [tmp_source, tmp_output]:
try:
os.unlink(f)
except OSError:
pass

View File

@@ -5,7 +5,8 @@
} }
body { body {
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; font-family:
-apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
background: #1a1a1a; background: #1a1a1a;
color: #e0e0e0; color: #e0e0e0;
} }
@@ -46,16 +47,91 @@ body {
background: #202020; background: #202020;
border-right: 1px solid #333; border-right: 1px solid #333;
overflow-y: auto; overflow-y: auto;
display: flex;
flex-direction: column;
} }
.sidebar h2 { .sidebar-section {
border-bottom: 1px solid #333;
}
.sidebar-section:first-child {
flex: 1;
min-height: 0;
overflow-y: auto;
}
.sidebar-count {
font-size: 0.7rem;
background: #333;
color: #888;
padding: 0.125rem 0.375rem;
border-radius: 8px;
}
.sidebar-list {
max-height: 200px;
overflow-y: auto;
}
.sidebar-empty {
padding: 0.5rem 1rem;
font-size: 0.8rem;
color: #555;
}
.output-item {
display: block;
padding: 0.5rem 1rem;
font-size: 0.8rem;
color: #10b981;
text-decoration: none;
border-bottom: 1px solid #2a2a2a;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
.output-item:hover {
background: #2a2a2a;
}
.sidebar-header {
padding: 1rem; padding: 1rem;
display: flex;
justify-content: space-between;
align-items: center;
gap: 0.5rem;
}
.sidebar-header h2 {
font-size: 0.875rem; font-size: 0.875rem;
text-transform: uppercase; text-transform: uppercase;
letter-spacing: 0.05em; letter-spacing: 0.05em;
color: #888; color: #888;
} }
.scan-button {
padding: 0.375rem 0.75rem;
font-size: 0.75rem;
background: #3b82f6;
color: white;
border: none;
border-radius: 4px;
cursor: pointer;
transition: background 0.2s;
}
.scan-button:hover:not(:disabled) {
background: #2563eb;
}
.scan-button:disabled {
background: #4b5563;
cursor: not-allowed;
opacity: 0.6;
}
.asset-list { .asset-list {
list-style: none; list-style: none;
} }
@@ -148,41 +224,221 @@ body {
} }
.timeline-container { .timeline-container {
height: 120px;
background: #252525; background: #252525;
border-top: 1px solid #333; border-top: 1px solid #333;
padding: 0.75rem 1rem;
} }
.timeline-placeholder { /* Timeline component */
.timeline {
user-select: none;
}
.timeline-times {
display: flex; display: flex;
align-items: center; justify-content: space-between;
justify-content: center; font-size: 0.75rem;
height: 100%; color: #aaa;
color: #666; margin-bottom: 0.5rem;
font-variant-numeric: tabular-nums;
} }
.info { .timeline-track {
padding: 1rem; position: relative;
height: 40px;
background: #333;
border-radius: 4px;
cursor: pointer;
overflow: hidden;
}
.timeline-dim {
position: absolute;
top: 0;
height: 100%;
background: rgba(0, 0, 0, 0.5);
pointer-events: none;
}
.timeline-selection {
position: absolute;
top: 0;
height: 100%;
background: rgba(59, 130, 246, 0.15);
pointer-events: none;
}
.timeline-playhead {
position: absolute;
top: 0;
width: 2px;
height: 100%;
background: #fff;
pointer-events: none;
transform: translateX(-1px);
z-index: 2;
}
.timeline-handle {
position: absolute;
top: 0;
width: 12px;
height: 100%;
cursor: ew-resize;
transform: translateX(-6px);
z-index: 3;
border-radius: 2px;
transition: background 0.1s;
}
.timeline-handle::after {
content: "";
position: absolute;
top: 0;
left: 5px;
width: 2px;
height: 100%;
background: #3b82f6;
}
.timeline-handle:hover,
.timeline-handle.dragging {
background: rgba(59, 130, 246, 0.3);
}
.timeline-handle.dragging {
cursor: grabbing;
}
.timeline-duration {
display: flex;
justify-content: space-between;
font-size: 0.625rem;
color: #666;
margin-top: 0.25rem;
}
/* Job panel */
.job-panel {
padding: 0.75rem 1rem;
background: #202020; background: #202020;
border-top: 1px solid #333; border-top: 1px solid #333;
} }
.info h3 { .job-controls {
margin-bottom: 0.5rem; display: flex;
font-size: 1rem; gap: 0.5rem;
align-items: center;
} }
.info dl { .preset-select {
display: grid; flex: 1;
grid-template-columns: auto 1fr; padding: 0.375rem 0.5rem;
gap: 0.25rem 1rem; font-size: 0.8rem;
font-size: 0.875rem; background: #333;
}
.info dt {
color: #888;
}
.info dd {
color: #e0e0e0; color: #e0e0e0;
border: 1px solid #444;
border-radius: 4px;
cursor: pointer;
}
.preset-select:focus {
outline: none;
border-color: #3b82f6;
}
.enqueue-button {
padding: 0.375rem 1rem;
font-size: 0.8rem;
background: #10b981;
color: #000;
border: none;
border-radius: 4px;
cursor: pointer;
font-weight: 500;
white-space: nowrap;
transition: background 0.2s;
}
.enqueue-button:hover:not(:disabled) {
background: #059669;
}
.enqueue-button:disabled {
background: #4b5563;
color: #888;
cursor: not-allowed;
}
/* Job items */
.job-item {
padding: 0.5rem 1rem;
border-bottom: 1px solid #2a2a2a;
font-size: 0.8rem;
}
.job-item-header {
display: flex;
justify-content: space-between;
align-items: center;
}
.job-filename {
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
color: #ccc;
}
.job-status {
font-size: 0.7rem;
padding: 0.125rem 0.375rem;
border-radius: 3px;
text-transform: uppercase;
font-weight: 500;
flex-shrink: 0;
margin-left: 0.5rem;
}
.job-status.pending {
background: #f59e0b;
color: #000;
}
.job-status.processing {
background: #3b82f6;
color: #fff;
}
.job-status.completed {
background: #10b981;
color: #000;
}
.job-status.failed {
background: #ef4444;
color: #fff;
}
.job-status.cancelled {
background: #6b7280;
color: #fff;
}
.job-progress-bar {
height: 4px;
background: #444;
border-radius: 2px;
margin-top: 0.375rem;
overflow: hidden;
}
.job-progress-fill {
height: 100%;
background: #3b82f6;
border-radius: 2px;
transition: width 0.3s;
} }

View File

@@ -1,14 +1,25 @@
import { useState, useEffect } from 'react' import { useState, useEffect, useRef, useCallback } from "react";
import { getAssets, getSystemStatus } from './api' import { getAssets, getJobs, getSystemStatus, scanMediaFolder } from "./api";
import type { MediaAsset, SystemStatus } from './types' import type { MediaAsset, TranscodeJob, SystemStatus } from "./types";
import './App.css' import Timeline from "./Timeline";
import JobPanel from "./JobPanel";
import "./App.css";
function App() { function App() {
const [assets, setAssets] = useState<MediaAsset[]>([]) const [assets, setAssets] = useState<MediaAsset[]>([]);
const [status, setStatus] = useState<SystemStatus | null>(null) const [jobs, setJobs] = useState<TranscodeJob[]>([]);
const [selectedAsset, setSelectedAsset] = useState<MediaAsset | null>(null) const [status, setStatus] = useState<SystemStatus | null>(null);
const [loading, setLoading] = useState(true) const [selectedAsset, setSelectedAsset] = useState<MediaAsset | null>(null);
const [error, setError] = useState<string | null>(null) const [loading, setLoading] = useState(true);
const [error, setError] = useState<string | null>(null);
const [scanning, setScanning] = useState(false);
// Video sync state
const videoRef = useRef<HTMLVideoElement>(null);
const [currentTime, setCurrentTime] = useState(0);
const [duration, setDuration] = useState(0);
const [trimStart, setTrimStart] = useState(0);
const [trimEnd, setTrimEnd] = useState(0);
useEffect(() => { useEffect(() => {
async function load() { async function load() {
@@ -16,25 +27,99 @@ function App() {
const [assetsData, statusData] = await Promise.all([ const [assetsData, statusData] = await Promise.all([
getAssets(), getAssets(),
getSystemStatus(), getSystemStatus(),
]) ]);
setAssets(assetsData) setAssets(
setStatus(statusData) assetsData.sort((a, b) => a.filename.localeCompare(b.filename)),
);
setStatus(statusData);
} catch (e) { } catch (e) {
setError(e instanceof Error ? e.message : 'Failed to load') setError(e instanceof Error ? e.message : "Failed to load");
} finally { } finally {
setLoading(false) setLoading(false);
} }
} }
load() load();
}, []) }, []);
if (loading) { // Poll jobs
return <div className="loading">Loading...</div> useEffect(() => {
let active = true;
const fetchJobs = () => {
getJobs()
.then((data) => {
if (active) setJobs(data);
})
.catch(console.error);
};
fetchJobs();
const interval = setInterval(fetchJobs, 3000);
return () => {
active = false;
clearInterval(interval);
};
}, []);
// Reset trim state when asset changes
useEffect(() => {
setTrimStart(0);
setTrimEnd(0);
setCurrentTime(0);
setDuration(0);
}, [selectedAsset?.id]);
const handleTimeUpdate = useCallback(() => {
if (videoRef.current) setCurrentTime(videoRef.current.currentTime);
}, []);
const handleLoadedMetadata = useCallback(() => {
if (videoRef.current) {
const dur = videoRef.current.duration;
setDuration(dur);
setTrimEnd(dur);
}
}, []);
const handleSeek = useCallback((time: number) => {
if (videoRef.current) {
videoRef.current.currentTime = time;
setCurrentTime(time);
}
}, []);
const handleTrimChange = useCallback((start: number, end: number) => {
setTrimStart(start);
setTrimEnd(end);
}, []);
async function handleScan() {
setScanning(true);
setError(null);
try {
const result = await scanMediaFolder();
alert(
`Scan complete!\nFound: ${result.found}\nRegistered: ${result.registered}\nSkipped: ${result.skipped}`,
);
const assetsData = await getAssets();
setAssets(
assetsData.sort((a, b) => a.filename.localeCompare(b.filename)),
);
} catch (e) {
setError(e instanceof Error ? e.message : "Scan failed");
} finally {
setScanning(false);
}
} }
if (error) { const refreshJobs = async () => {
return <div className="error">Error: {error}</div> const data = await getJobs();
} setJobs(data);
};
const assetJobs = jobs.filter((j) => j.source_asset_id === selectedAsset?.id);
const completedJobs = jobs.filter((j) => j.status === "completed");
if (loading) return <div className="loading">Loading...</div>;
if (error) return <div className="error">Error: {error}</div>;
return ( return (
<div className="app"> <div className="app">
@@ -49,21 +134,88 @@ function App() {
<div className="layout"> <div className="layout">
<aside className="sidebar"> <aside className="sidebar">
<div className="sidebar-section">
<div className="sidebar-header">
<h2>Assets</h2> <h2>Assets</h2>
<button
onClick={handleScan}
disabled={scanning}
className="scan-button"
>
{scanning ? "Scanning..." : "Scan Folder"}
</button>
</div>
<ul className="asset-list"> <ul className="asset-list">
{assets.map((asset) => ( {assets.map((asset) => (
<li <li
key={asset.id} key={asset.id}
className={selectedAsset?.id === asset.id ? 'selected' : ''} className={selectedAsset?.id === asset.id ? "selected" : ""}
onClick={() => setSelectedAsset(asset)} onClick={() => setSelectedAsset(asset)}
title={asset.filename}
> >
<span className="filename">{asset.filename}</span> <span className="filename">{asset.filename}</span>
<span className={`status-badge ${asset.status}`}>
{asset.status}
</span>
</li> </li>
))} ))}
</ul> </ul>
</div>
<div className="sidebar-section">
<div className="sidebar-header">
<h2>Jobs</h2>
<span className="sidebar-count">{jobs.length}</span>
</div>
<div className="sidebar-list">
{jobs.length === 0 ? (
<div className="sidebar-empty">No jobs</div>
) : (
jobs.map((job) => (
<div key={job.id} className="job-item">
<div className="job-item-header">
<span className="job-filename">
{job.output_filename}
</span>
<span className={`job-status ${job.status}`}>
{job.status}
</span>
</div>
{job.status === "processing" && (
<div className="job-progress-bar">
<div
className="job-progress-fill"
style={{ width: `${job.progress}%` }}
/>
</div>
)}
</div>
))
)}
</div>
</div>
<div className="sidebar-section">
<div className="sidebar-header">
<h2>Output</h2>
<span className="sidebar-count">{completedJobs.length}</span>
</div>
<div className="sidebar-list">
{completedJobs.length === 0 ? (
<div className="sidebar-empty">No output files</div>
) : (
completedJobs.map((job) => (
<a
key={job.id}
className="output-item"
href={`/media/out/${job.output_filename}`}
target="_blank"
rel="noreferrer"
title={job.output_filename}
>
<span className="filename">{job.output_filename}</span>
</a>
))
)}
</div>
</div>
</aside> </aside>
<main className="main"> <main className="main">
@@ -71,29 +223,29 @@ function App() {
<div className="editor"> <div className="editor">
<div className="video-container"> <div className="video-container">
<video <video
ref={videoRef}
controls controls
src={`/media/${selectedAsset.file_path}`} src={`/media/in/${selectedAsset.file_path}`}
onTimeUpdate={handleTimeUpdate}
onLoadedMetadata={handleLoadedMetadata}
/> />
</div> </div>
<div className="timeline-container"> <div className="timeline-container">
{/* Timeline component will go here */} <Timeline
<div className="timeline-placeholder"> duration={duration}
Timeline: {selectedAsset.duration?.toFixed(1)}s currentTime={currentTime}
</div> trimStart={trimStart}
</div> trimEnd={trimEnd}
<div className="info"> onTrimChange={handleTrimChange}
<h3>{selectedAsset.filename}</h3> onSeek={handleSeek}
<dl> />
<dt>Duration</dt>
<dd>{selectedAsset.duration?.toFixed(2)}s</dd>
<dt>Resolution</dt>
<dd>{selectedAsset.width}x{selectedAsset.height}</dd>
<dt>Video</dt>
<dd>{selectedAsset.video_codec}</dd>
<dt>Audio</dt>
<dd>{selectedAsset.audio_codec}</dd>
</dl>
</div> </div>
<JobPanel
asset={selectedAsset}
trimStart={trimStart}
trimEnd={trimEnd}
onJobCreated={refreshJobs}
/>
</div> </div>
) : ( ) : (
<div className="empty">Select an asset to begin</div> <div className="empty">Select an asset to begin</div>
@@ -101,7 +253,7 @@ function App() {
</main> </main>
</div> </div>
</div> </div>
) );
} }
export default App export default App;

View File

@@ -0,0 +1,79 @@
import { useState, useEffect } from "react";
import { getPresets, createJob } from "./api";
import type { MediaAsset, TranscodePreset } from "./types";
interface JobPanelProps {
asset: MediaAsset;
trimStart: number;
trimEnd: number;
onJobCreated: () => void;
}
export default function JobPanel({
asset,
trimStart,
trimEnd,
onJobCreated,
}: JobPanelProps) {
const [presets, setPresets] = useState<TranscodePreset[]>([]);
const [selectedPresetId, setSelectedPresetId] = useState<string>("");
const [submitting, setSubmitting] = useState(false);
useEffect(() => {
getPresets().then(setPresets).catch(console.error);
}, []);
const hasTrim =
trimStart > 0 || (asset.duration != null && trimEnd < asset.duration);
const hasPreset = selectedPresetId !== "";
const canSubmit = hasTrim || hasPreset;
const buttonLabel = hasPreset
? "Transcode"
: hasTrim
? "Trim (Copy)"
: "Select trim or preset";
async function handleSubmit() {
setSubmitting(true);
try {
await createJob({
source_asset_id: asset.id,
preset_id: selectedPresetId || null,
trim_start: hasTrim ? trimStart : null,
trim_end: hasTrim ? trimEnd : null,
});
onJobCreated();
} catch (e) {
alert(e instanceof Error ? e.message : "Failed to create job");
} finally {
setSubmitting(false);
}
}
return (
<div className="job-panel">
<div className="job-controls">
<select
value={selectedPresetId}
onChange={(e) => setSelectedPresetId(e.target.value)}
className="preset-select"
>
<option value="">No preset (trim only)</option>
{presets.map((p) => (
<option key={p.id} value={p.id}>
{p.name}
</option>
))}
</select>
<button
onClick={handleSubmit}
disabled={!canSubmit || submitting}
className="enqueue-button"
>
{submitting ? "Submitting..." : buttonLabel}
</button>
</div>
</div>
);
}

View File

@@ -0,0 +1,121 @@
import { useRef, useCallback, useState, useEffect } from "react";
interface TimelineProps {
duration: number;
currentTime: number;
trimStart: number;
trimEnd: number;
onTrimChange: (start: number, end: number) => void;
onSeek: (time: number) => void;
}
function formatTime(seconds: number): string {
const m = Math.floor(seconds / 60);
const s = Math.floor(seconds % 60);
const ms = Math.floor((seconds % 1) * 10);
return `${m}:${s.toString().padStart(2, "0")}.${ms}`;
}
export default function Timeline({
duration,
currentTime,
trimStart,
trimEnd,
onTrimChange,
onSeek,
}: TimelineProps) {
const trackRef = useRef<HTMLDivElement>(null);
const [dragging, setDragging] = useState<"in" | "out" | null>(null);
const timeToPercent = (t: number) => (duration > 0 ? (t / duration) * 100 : 0);
const positionToTime = useCallback(
(clientX: number) => {
const track = trackRef.current;
if (!track || duration <= 0) return 0;
const rect = track.getBoundingClientRect();
const ratio = Math.max(0, Math.min(1, (clientX - rect.left) / rect.width));
return ratio * duration;
},
[duration],
);
const handleTrackClick = (e: React.MouseEvent) => {
if (dragging) return;
onSeek(positionToTime(e.clientX));
};
const handleMouseDown = (handle: "in" | "out") => (e: React.MouseEvent) => {
e.stopPropagation();
setDragging(handle);
};
useEffect(() => {
if (!dragging) return;
const minGap = 0.1;
const handleMove = (e: MouseEvent) => {
const time = positionToTime(e.clientX);
if (dragging === "in") {
onTrimChange(Math.min(time, trimEnd - minGap), trimEnd);
} else {
onTrimChange(trimStart, Math.max(time, trimStart + minGap));
}
};
const handleUp = () => setDragging(null);
document.addEventListener("mousemove", handleMove);
document.addEventListener("mouseup", handleUp);
return () => {
document.removeEventListener("mousemove", handleMove);
document.removeEventListener("mouseup", handleUp);
};
}, [dragging, trimStart, trimEnd, positionToTime, onTrimChange]);
const inPct = timeToPercent(trimStart);
const outPct = timeToPercent(trimEnd);
const playheadPct = timeToPercent(currentTime);
const selectionDuration = trimEnd - trimStart;
return (
<div className="timeline">
<div className="timeline-times">
<span>In: {formatTime(trimStart)}</span>
<span>Selection: {formatTime(selectionDuration)}</span>
<span>Out: {formatTime(trimEnd)}</span>
</div>
<div className="timeline-track" ref={trackRef} onClick={handleTrackClick}>
{/* Dimmed regions */}
<div className="timeline-dim" style={{ left: 0, width: `${inPct}%` }} />
<div className="timeline-dim" style={{ left: `${outPct}%`, width: `${100 - outPct}%` }} />
{/* Selection highlight */}
<div
className="timeline-selection"
style={{ left: `${inPct}%`, width: `${outPct - inPct}%` }}
/>
{/* Playhead */}
<div className="timeline-playhead" style={{ left: `${playheadPct}%` }} />
{/* Handles */}
<div
className={`timeline-handle timeline-handle-in ${dragging === "in" ? "dragging" : ""}`}
style={{ left: `${inPct}%` }}
onMouseDown={handleMouseDown("in")}
/>
<div
className={`timeline-handle timeline-handle-out ${dragging === "out" ? "dragging" : ""}`}
style={{ left: `${outPct}%` }}
onMouseDown={handleMouseDown("out")}
/>
</div>
<div className="timeline-duration">
<span>0:00</span>
<span>{formatTime(duration)}</span>
</div>
</div>
);
}

View File

@@ -38,6 +38,17 @@ export async function getAsset(id: string): Promise<MediaAsset> {
return request(`/assets/${id}`); return request(`/assets/${id}`);
} }
export async function scanMediaFolder(): Promise<{
found: number;
registered: number;
skipped: number;
files: string[];
}> {
return request("/assets/scan", {
method: "POST",
});
}
// Presets // Presets
export async function getPresets(): Promise<TranscodePreset[]> { export async function getPresets(): Promise<TranscodePreset[]> {
return request("/presets/"); return request("/presets/");

View File

@@ -66,8 +66,37 @@ export interface TranscodeJob {
speed: string | null; speed: string | null;
error_message: string | null; error_message: string | null;
celery_task_id: string | null; celery_task_id: string | null;
execution_arn: string | null;
priority: number; priority: number;
created_at: string | null; created_at: string | null;
started_at: string | null; started_at: string | null;
completed_at: string | null; completed_at: string | null;
} }
export interface CreateJobRequest {
source_asset_id: string;
preset_id: string | null;
trim_start: number | null;
trim_end: number | null;
output_filename: string | null;
priority: number;
}
export interface SystemStatus {
status: string;
version: string;
}
export interface ScanResult {
found: number;
registered: number;
skipped: number;
files: string[];
}
export interface WorkerStatus {
available: boolean;
active_jobs: number;
supported_codecs: string[];
gpu_available: boolean;
}