Compare commits
16 Commits
3db8c0c453
...
aws-int
| Author | SHA1 | Date | |
|---|---|---|---|
| 72e4113529 | |||
| 8f5d407e0e | |||
| e642908abb | |||
| 013587d108 | |||
| 2cf6c89fbb | |||
| daabd15c19 | |||
| 2e6ed4e37a | |||
| 68622bd6b1 | |||
| 65c3055de6 | |||
| c0a3901951 | |||
| 318741d8ca | |||
| 022baa407f | |||
| 26bd158c47 | |||
| 30b2e1cf44 | |||
| b88f75fce0 | |||
| ffbbf87873 |
5
.gitignore
vendored
5
.gitignore
vendored
@@ -17,7 +17,10 @@ env/
|
||||
*.pot
|
||||
*.pyc
|
||||
db.sqlite3
|
||||
media/
|
||||
media/in/*
|
||||
!media/in/.gitkeep
|
||||
media/out/*
|
||||
!media/out/.gitkeep
|
||||
|
||||
# Node
|
||||
node_modules/
|
||||
|
||||
62
README.md
62
README.md
@@ -76,23 +76,39 @@ docker compose exec django python manage.py createsuperuser
|
||||
|
||||
## Code Generation
|
||||
|
||||
Models are defined in `schema/models/` and generate:
|
||||
- Django ORM models
|
||||
- Pydantic schemas
|
||||
- TypeScript types
|
||||
- Protobuf definitions
|
||||
Models are defined as dataclasses in `schema/models/` and generated via `modelgen`:
|
||||
- **Django ORM** models (`--include dataclasses,enums`)
|
||||
- **Pydantic** schemas (`--include dataclasses,enums`)
|
||||
- **TypeScript** types (`--include dataclasses,enums,api`)
|
||||
- **Protobuf** definitions (`--include grpc`)
|
||||
|
||||
Each target only gets the model groups it needs via the `--include` flag.
|
||||
|
||||
```bash
|
||||
# Regenerate all
|
||||
python schema/generate.py --all
|
||||
|
||||
# Or specific targets
|
||||
python schema/generate.py --django
|
||||
python schema/generate.py --pydantic
|
||||
python schema/generate.py --typescript
|
||||
python schema/generate.py --proto
|
||||
# Regenerate all targets
|
||||
bash ctrl/generate.sh
|
||||
```
|
||||
|
||||
## Media Storage
|
||||
|
||||
MPR separates media into **input** (`MEDIA_IN`) and **output** (`MEDIA_OUT`) paths, each independently configurable. File paths are stored relative for cloud portability.
|
||||
|
||||
### Local Development
|
||||
- Source files: `/app/media/in/video.mp4`
|
||||
- Output files: `/app/media/out/video_h264.mp4`
|
||||
- Served via: `http://mpr.local.ar/media/in/video.mp4` (nginx alias)
|
||||
|
||||
### AWS/Cloud Deployment
|
||||
Input and output can be different buckets/locations:
|
||||
```bash
|
||||
MEDIA_IN=s3://source-bucket/media/
|
||||
MEDIA_OUT=s3://output-bucket/transcoded/
|
||||
```
|
||||
|
||||
**Scan Endpoint**: `POST /api/assets/scan` recursively scans `MEDIA_IN` and registers new files with relative paths.
|
||||
|
||||
See [docs/media-storage.md](docs/media-storage.md) for full details.
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
@@ -105,18 +121,20 @@ mpr/
|
||||
├── ctrl/ # Docker & deployment
|
||||
│ ├── docker-compose.yml
|
||||
│ └── nginx.conf
|
||||
├── docs/ # Architecture diagrams
|
||||
├── grpc/ # gRPC server & client
|
||||
├── media/
|
||||
│ ├── in/ # Source media files
|
||||
│ └── out/ # Transcoded output
|
||||
├── rpc/ # gRPC server & client
|
||||
│ └── protos/ # Protobuf definitions (generated)
|
||||
├── mpr/ # Django project
|
||||
│ └── media_assets/ # Django app
|
||||
├── schema/ # Source of truth
|
||||
│ └── models/ # Dataclass definitions
|
||||
├── ui/ # Frontend
|
||||
│ └── timeline/ # React app
|
||||
└── worker/ # Job execution
|
||||
├── executor.py # Executor abstraction
|
||||
└── tasks.py # Celery tasks
|
||||
├── task/ # Celery job execution
|
||||
│ ├── executor.py # Executor abstraction
|
||||
│ └── tasks.py # Celery tasks
|
||||
└── ui/ # Frontend
|
||||
└── timeline/ # React app
|
||||
```
|
||||
|
||||
## Environment Variables
|
||||
@@ -130,6 +148,10 @@ See `ctrl/.env.template` for all configuration options.
|
||||
| `GRPC_HOST` | grpc | gRPC server hostname |
|
||||
| `GRPC_PORT` | 50051 | gRPC server port |
|
||||
| `MPR_EXECUTOR` | local | Executor type (local/lambda) |
|
||||
| `MEDIA_IN` | /app/media/in | Source media files directory |
|
||||
| `MEDIA_OUT` | /app/media/out | Transcoded output directory |
|
||||
| `MEDIA_BASE_URL` | /media/ | Base URL for serving media (use S3 URL for cloud) |
|
||||
| `VITE_ALLOWED_HOSTS` | - | Comma-separated allowed hosts for Vite dev server |
|
||||
|
||||
## License
|
||||
|
||||
|
||||
251
api/graphql.py
Normal file
251
api/graphql.py
Normal file
@@ -0,0 +1,251 @@
|
||||
"""
|
||||
GraphQL API using graphene, mounted on FastAPI/Starlette.
|
||||
|
||||
Provides the same data as the REST API but via GraphQL queries and mutations.
|
||||
Uses Django ORM directly for data access.
|
||||
Types are generated from schema/ via modelgen — see api/schema/graphql.py.
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
import graphene
|
||||
|
||||
from api.schema.graphql import (
|
||||
CreateJobInput,
|
||||
MediaAssetType,
|
||||
ScanResultType,
|
||||
SystemStatusType,
|
||||
TranscodeJobType,
|
||||
TranscodePresetType,
|
||||
)
|
||||
from core.storage import BUCKET_IN, list_objects
|
||||
|
||||
# Media extensions (same as assets route)
VIDEO_EXTS = {".mp4", ".mkv", ".avi", ".mov", ".webm", ".flv", ".wmv", ".m4v"}
AUDIO_EXTS = {".mp3", ".wav", ".flac", ".aac", ".ogg", ".m4a"}
# Union used when scanning the input bucket for registrable media files.
MEDIA_EXTS = VIDEO_EXTS | AUDIO_EXTS
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Queries
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class Query(graphene.ObjectType):
    """Root GraphQL query: read access to assets, jobs, presets and status.

    Mirrors the REST list/detail endpoints; resolvers hit the Django ORM
    directly. Model imports are deferred into each resolver so this module
    can be imported before django.setup() has run.
    """

    assets = graphene.List(
        MediaAssetType,
        status=graphene.String(),
        search=graphene.String(),
    )
    asset = graphene.Field(MediaAssetType, id=graphene.UUID(required=True))
    jobs = graphene.List(
        TranscodeJobType,
        status=graphene.String(),
        source_asset_id=graphene.UUID(),
    )
    job = graphene.Field(TranscodeJobType, id=graphene.UUID(required=True))
    presets = graphene.List(TranscodePresetType)
    system_status = graphene.Field(SystemStatusType)

    def resolve_assets(self, info, status=None, search=None):
        """List assets, optionally filtered by status and filename substring."""
        from mpr.media_assets.models import MediaAsset

        filters = {}
        if status:
            filters["status"] = status
        if search:
            filters["filename__icontains"] = search
        return MediaAsset.objects.filter(**filters)

    def resolve_asset(self, info, id):
        """Fetch one asset by id, or None if it does not exist."""
        from mpr.media_assets.models import MediaAsset

        return MediaAsset.objects.filter(id=id).first()

    def resolve_jobs(self, info, status=None, source_asset_id=None):
        """List jobs, optionally filtered by status and/or source asset."""
        from mpr.media_assets.models import TranscodeJob

        filters = {}
        if status:
            filters["status"] = status
        if source_asset_id:
            filters["source_asset_id"] = source_asset_id
        return TranscodeJob.objects.filter(**filters)

    def resolve_job(self, info, id):
        """Fetch one job by id, or None if it does not exist."""
        from mpr.media_assets.models import TranscodeJob

        return TranscodeJob.objects.filter(id=id).first()

    def resolve_presets(self, info):
        """All transcode presets."""
        from mpr.media_assets.models import TranscodePreset

        return TranscodePreset.objects.all()

    def resolve_system_status(self, info):
        """Static health payload (same shape as GET /api/system/status)."""
        return {"status": "ok", "version": "0.1.0"}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Mutations
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class ScanMediaFolder(graphene.Mutation):
    """Scan the S3 media-in bucket and register new media files as assets.

    GraphQL counterpart of ``POST /api/assets/scan``: lists objects in
    BUCKET_IN filtered by MEDIA_EXTS, skips filenames already registered,
    and creates a MediaAsset row for each new file.
    """

    class Arguments:
        # No arguments: the scan always targets the configured input bucket.
        pass

    Output = ScanResultType

    def mutate(self, info):
        import logging

        from mpr.media_assets.models import MediaAsset

        logger = logging.getLogger(__name__)

        objects = list_objects(BUCKET_IN, extensions=MEDIA_EXTS)
        # Duplicate detection is by filename only (not the full S3 key) —
        # matches the REST scan endpoint's behavior.
        existing = set(MediaAsset.objects.values_list("filename", flat=True))

        registered = []
        skipped = []

        for obj in objects:
            if obj["filename"] in existing:
                skipped.append(obj["filename"])
                continue
            try:
                MediaAsset.objects.create(
                    filename=obj["filename"],
                    file_path=obj["key"],
                    file_size=obj["size"],
                )
                registered.append(obj["filename"])
            except Exception:
                # Best-effort: keep scanning the remaining files, but log the
                # failure instead of silently swallowing it (the REST route
                # reports these errors too).
                logger.exception("Failed to register %s", obj["filename"])

        return ScanResultType(
            found=len(objects),
            registered=len(registered),
            skipped=len(skipped),
            files=registered,
        )
|
||||
|
||||
|
||||
class CreateJob(graphene.Mutation):
    """Create a transcode/trim job and dispatch it to the configured executor.

    GraphQL counterpart of ``POST /api/jobs/``. Validates the source asset
    and optional preset, snapshots preset settings onto the job, then
    dispatches to AWS (MPR_EXECUTOR=lambda) or Celery (default).
    """

    class Arguments:
        input = CreateJobInput(required=True)

    Output = TranscodeJobType

    def mutate(self, info, input):
        from pathlib import Path

        from mpr.media_assets.models import MediaAsset, TranscodeJob, TranscodePreset

        try:
            source = MediaAsset.objects.get(id=input.source_asset_id)
        except MediaAsset.DoesNotExist:
            raise Exception("Source asset not found")

        # Consistency with the REST create_job endpoint: refuse assets that
        # have not finished probing.
        if source.status != "ready":
            raise Exception("Source asset is not ready")

        preset = None
        preset_snapshot = {}
        if input.preset_id:
            try:
                preset = TranscodePreset.objects.get(id=input.preset_id)
                # Snapshot the preset so later edits don't affect this job.
                preset_snapshot = {
                    "name": preset.name,
                    "container": preset.container,
                    "video_codec": preset.video_codec,
                    "audio_codec": preset.audio_codec,
                }
            except TranscodePreset.DoesNotExist:
                raise Exception("Preset not found")

        # A job must either apply a preset or perform a trim.
        if not preset and not input.trim_start and not input.trim_end:
            raise Exception("Must specify preset_id or trim_start/trim_end")

        # Default output filename: "<stem>_output.<container>".
        output_filename = input.output_filename
        if not output_filename:
            stem = Path(source.filename).stem
            ext = preset_snapshot.get("container", "mp4") if preset else "mp4"
            output_filename = f"{stem}_output.{ext}"

        job = TranscodeJob.objects.create(
            source_asset_id=source.id,
            preset_id=preset.id if preset else None,
            preset_snapshot=preset_snapshot,
            trim_start=input.trim_start,
            trim_end=input.trim_end,
            output_filename=output_filename,
            output_path=output_filename,  # S3 key in the output bucket
            priority=input.priority or 0,
        )

        # Dispatch based on executor mode (same switch as the REST route).
        executor_mode = os.environ.get("MPR_EXECUTOR", "local")
        if executor_mode == "lambda":
            from task.executor import get_executor

            get_executor().run(
                job_id=str(job.id),
                source_path=source.file_path,
                output_path=output_filename,
                preset=preset_snapshot or None,
                trim_start=input.trim_start,
                trim_end=input.trim_end,
                duration=source.duration,
            )
        else:
            from task.tasks import run_transcode_job

            result = run_transcode_job.delay(
                job_id=str(job.id),
                source_key=source.file_path,
                output_key=output_filename,
                preset=preset_snapshot or None,
                trim_start=input.trim_start,
                trim_end=input.trim_end,
                duration=source.duration,
            )
            # Remember the Celery task id so the job can be tracked/cancelled.
            job.celery_task_id = result.id
            job.save(update_fields=["celery_task_id"])

        return job
|
||||
|
||||
|
||||
class CancelJob(graphene.Mutation):
    """Cancel a pending or in-progress transcode job."""

    class Arguments:
        id = graphene.UUID(required=True)

    Output = TranscodeJobType

    def mutate(self, info, id):
        from mpr.media_assets.models import TranscodeJob

        job = TranscodeJob.objects.filter(id=id).first()
        if job is None:
            raise Exception("Job not found")

        # Only jobs that have not finished may be cancelled.
        cancellable_states = ("pending", "processing")
        if job.status not in cancellable_states:
            raise Exception(f"Cannot cancel job with status: {job.status}")

        job.status = "cancelled"
        job.save(update_fields=["status"])
        return job
|
||||
|
||||
|
||||
class Mutation(graphene.ObjectType):
    """Root GraphQL mutation: bucket scanning and job lifecycle."""

    scan_media_folder = ScanMediaFolder.Field()
    create_job = CreateJob.Field()
    cancel_job = CancelJob.Field()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Schema
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Root executable schema; mounted at /graphql by the FastAPI app.
schema = graphene.Schema(query=Query, mutation=Mutation)
|
||||
15
api/main.py
15
api/main.py
@@ -20,7 +20,9 @@ django.setup()
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
|
||||
from api.graphql import schema as graphql_schema
|
||||
from api.routes import assets_router, jobs_router, presets_router, system_router
|
||||
from starlette_graphene3 import GraphQLApp, make_graphiql_handler
|
||||
|
||||
app = FastAPI(
|
||||
title="MPR API",
|
||||
@@ -39,11 +41,14 @@ app.add_middleware(
|
||||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
# Routes
|
||||
app.include_router(system_router)
|
||||
app.include_router(assets_router)
|
||||
app.include_router(presets_router)
|
||||
app.include_router(jobs_router)
|
||||
# Routes - all under /api prefix
|
||||
app.include_router(system_router, prefix="/api")
|
||||
app.include_router(assets_router, prefix="/api")
|
||||
app.include_router(presets_router, prefix="/api")
|
||||
app.include_router(jobs_router, prefix="/api")
|
||||
|
||||
# GraphQL
|
||||
app.mount("/graphql", GraphQLApp(schema=graphql_schema, on_get=make_graphiql_handler()))
|
||||
|
||||
|
||||
@app.get("/")
|
||||
|
||||
@@ -8,37 +8,27 @@ from uuid import UUID
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
|
||||
from api.deps import get_asset
|
||||
from api.schemas import AssetCreate, AssetResponse, AssetUpdate
|
||||
from api.schema import AssetCreate, AssetResponse, AssetUpdate
|
||||
from core.storage import BUCKET_IN, list_objects
|
||||
|
||||
router = APIRouter(prefix="/assets", tags=["assets"])
|
||||
|
||||
# Supported media extensions
|
||||
VIDEO_EXTS = {".mp4", ".mkv", ".avi", ".mov", ".webm", ".flv", ".wmv", ".m4v"}
|
||||
AUDIO_EXTS = {".mp3", ".wav", ".flac", ".aac", ".ogg", ".m4a"}
|
||||
MEDIA_EXTS = VIDEO_EXTS | AUDIO_EXTS
|
||||
|
||||
|
||||
@router.post("/", response_model=AssetResponse, status_code=201)
|
||||
def create_asset(data: AssetCreate):
|
||||
"""
|
||||
Register a media file as an asset.
|
||||
|
||||
The file must exist on disk. A probe task will be queued
|
||||
to extract metadata asynchronously.
|
||||
"""
|
||||
from pathlib import Path
|
||||
|
||||
"""Register a media file as an asset."""
|
||||
from mpr.media_assets.models import MediaAsset
|
||||
|
||||
# Validate file exists
|
||||
path = Path(data.file_path)
|
||||
if not path.exists():
|
||||
raise HTTPException(status_code=400, detail="File not found")
|
||||
|
||||
# Create asset
|
||||
asset = MediaAsset.objects.create(
|
||||
filename=data.filename or path.name,
|
||||
file_path=str(path.absolute()),
|
||||
file_size=path.stat().st_size,
|
||||
filename=data.filename or data.file_path.split("/")[-1],
|
||||
file_path=data.file_path,
|
||||
file_size=data.file_size,
|
||||
)
|
||||
|
||||
# TODO: Queue probe task via gRPC/Celery
|
||||
|
||||
return asset
|
||||
|
||||
|
||||
@@ -52,10 +42,8 @@ def list_assets(
|
||||
from mpr.media_assets.models import MediaAsset
|
||||
|
||||
qs = MediaAsset.objects.all()
|
||||
|
||||
if status:
|
||||
qs = qs.filter(status=status)
|
||||
|
||||
return list(qs[offset : offset + limit])
|
||||
|
||||
|
||||
@@ -88,3 +76,42 @@ def update_asset(asset_id: UUID, data: AssetUpdate, asset=Depends(get_asset)):
|
||||
def delete_asset(asset_id: UUID, asset=Depends(get_asset)):
|
||||
"""Delete an asset."""
|
||||
asset.delete()
|
||||
|
||||
|
||||
@router.post("/scan", response_model=dict)
|
||||
def scan_media_folder():
|
||||
"""
|
||||
Scan the S3 media-in bucket for new video/audio files and register them as assets.
|
||||
"""
|
||||
from mpr.media_assets.models import MediaAsset
|
||||
|
||||
# List objects from S3 bucket
|
||||
objects = list_objects(BUCKET_IN, extensions=MEDIA_EXTS)
|
||||
|
||||
# Get existing filenames to avoid duplicates
|
||||
existing_filenames = set(MediaAsset.objects.values_list("filename", flat=True))
|
||||
|
||||
registered_files = []
|
||||
skipped_files = []
|
||||
|
||||
for obj in objects:
|
||||
if obj["filename"] in existing_filenames:
|
||||
skipped_files.append(obj["filename"])
|
||||
continue
|
||||
|
||||
try:
|
||||
MediaAsset.objects.create(
|
||||
filename=obj["filename"],
|
||||
file_path=obj["key"],
|
||||
file_size=obj["size"],
|
||||
)
|
||||
registered_files.append(obj["filename"])
|
||||
except Exception as e:
|
||||
print(f"Error registering {obj['filename']}: {e}")
|
||||
|
||||
return {
|
||||
"found": len(objects),
|
||||
"registered": len(registered_files),
|
||||
"skipped": len(skipped_files),
|
||||
"files": registered_files,
|
||||
}
|
||||
|
||||
@@ -2,17 +2,20 @@
|
||||
Job endpoints - transcode/trim job management.
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from fastapi import APIRouter, Depends, Header, HTTPException, Query
|
||||
|
||||
from api.deps import get_asset, get_job, get_preset
|
||||
from api.schemas import JobCreate, JobResponse
|
||||
from api.schema import JobCreate, JobResponse
|
||||
|
||||
router = APIRouter(prefix="/jobs", tags=["jobs"])
|
||||
|
||||
CALLBACK_API_KEY = os.environ.get("CALLBACK_API_KEY", "")
|
||||
|
||||
|
||||
@router.post("/", response_model=JobResponse, status_code=201)
|
||||
def create_job(data: JobCreate):
|
||||
@@ -30,16 +33,12 @@ def create_job(data: JobCreate):
|
||||
except MediaAsset.DoesNotExist:
|
||||
raise HTTPException(status_code=404, detail="Source asset not found")
|
||||
|
||||
if source.status != "ready":
|
||||
raise HTTPException(status_code=400, detail="Source asset is not ready")
|
||||
|
||||
# Get preset if specified
|
||||
preset = None
|
||||
preset_snapshot = {}
|
||||
if data.preset_id:
|
||||
try:
|
||||
preset = TranscodePreset.objects.get(id=data.preset_id)
|
||||
# Snapshot preset at job creation time
|
||||
preset_snapshot = {
|
||||
"name": preset.name,
|
||||
"container": preset.container,
|
||||
@@ -64,31 +63,112 @@ def create_job(data: JobCreate):
|
||||
status_code=400, detail="Must specify preset_id or trim_start/trim_end"
|
||||
)
|
||||
|
||||
# Generate output filename
|
||||
# Generate output filename - stored as S3 key in output bucket
|
||||
output_filename = data.output_filename
|
||||
if not output_filename:
|
||||
from pathlib import Path
|
||||
|
||||
stem = Path(source.filename).stem
|
||||
ext = preset_snapshot.get("container", "mp4") if preset else "mp4"
|
||||
output_filename = f"{stem}_output.{ext}"
|
||||
|
||||
# Create job
|
||||
job = TranscodeJob.objects.create(
|
||||
source_asset=source,
|
||||
preset=preset,
|
||||
source_asset_id=source.id,
|
||||
preset_id=preset.id if preset else None,
|
||||
preset_snapshot=preset_snapshot,
|
||||
trim_start=data.trim_start,
|
||||
trim_end=data.trim_end,
|
||||
output_filename=output_filename,
|
||||
output_path=output_filename, # S3 key in output bucket
|
||||
priority=data.priority or 0,
|
||||
)
|
||||
|
||||
# TODO: Submit job via gRPC
|
||||
# Dispatch based on executor mode
|
||||
executor_mode = os.environ.get("MPR_EXECUTOR", "local")
|
||||
|
||||
if executor_mode == "lambda":
|
||||
_dispatch_lambda(job, source, preset_snapshot)
|
||||
else:
|
||||
_dispatch_celery(job, source, preset_snapshot)
|
||||
|
||||
return job
|
||||
|
||||
|
||||
def _dispatch_celery(job, source, preset_snapshot):
|
||||
"""Dispatch job to Celery worker."""
|
||||
from task.tasks import run_transcode_job
|
||||
|
||||
result = run_transcode_job.delay(
|
||||
job_id=str(job.id),
|
||||
source_key=source.file_path,
|
||||
output_key=job.output_filename,
|
||||
preset=preset_snapshot or None,
|
||||
trim_start=job.trim_start,
|
||||
trim_end=job.trim_end,
|
||||
duration=source.duration,
|
||||
)
|
||||
job.celery_task_id = result.id
|
||||
job.save(update_fields=["celery_task_id"])
|
||||
|
||||
|
||||
def _dispatch_lambda(job, source, preset_snapshot):
|
||||
"""Dispatch job to AWS Step Functions."""
|
||||
from task.executor import get_executor
|
||||
|
||||
executor = get_executor()
|
||||
executor.run(
|
||||
job_id=str(job.id),
|
||||
source_path=source.file_path,
|
||||
output_path=job.output_filename,
|
||||
preset=preset_snapshot or None,
|
||||
trim_start=job.trim_start,
|
||||
trim_end=job.trim_end,
|
||||
duration=source.duration,
|
||||
)
|
||||
|
||||
|
||||
@router.post("/{job_id}/callback")
|
||||
def job_callback(
|
||||
job_id: UUID,
|
||||
payload: dict,
|
||||
x_api_key: Optional[str] = Header(None),
|
||||
):
|
||||
"""
|
||||
Callback endpoint for Lambda to report job completion.
|
||||
Protected by API key.
|
||||
"""
|
||||
if CALLBACK_API_KEY and x_api_key != CALLBACK_API_KEY:
|
||||
raise HTTPException(status_code=403, detail="Invalid API key")
|
||||
|
||||
from django.utils import timezone
|
||||
|
||||
from mpr.media_assets.models import TranscodeJob
|
||||
|
||||
try:
|
||||
job = TranscodeJob.objects.get(id=job_id)
|
||||
except TranscodeJob.DoesNotExist:
|
||||
raise HTTPException(status_code=404, detail="Job not found")
|
||||
|
||||
status = payload.get("status", "failed")
|
||||
job.status = status
|
||||
job.progress = 100.0 if status == "completed" else job.progress
|
||||
update_fields = ["status", "progress"]
|
||||
|
||||
if payload.get("error"):
|
||||
job.error_message = payload["error"]
|
||||
update_fields.append("error_message")
|
||||
|
||||
if status == "completed":
|
||||
job.completed_at = timezone.now()
|
||||
update_fields.append("completed_at")
|
||||
elif status == "failed":
|
||||
job.completed_at = timezone.now()
|
||||
update_fields.append("completed_at")
|
||||
|
||||
job.save(update_fields=update_fields)
|
||||
|
||||
return {"ok": True}
|
||||
|
||||
|
||||
@router.get("/", response_model=list[JobResponse])
|
||||
def list_jobs(
|
||||
status: Optional[str] = Query(None, description="Filter by status"),
|
||||
@@ -100,12 +180,10 @@ def list_jobs(
|
||||
from mpr.media_assets.models import TranscodeJob
|
||||
|
||||
qs = TranscodeJob.objects.all()
|
||||
|
||||
if status:
|
||||
qs = qs.filter(status=status)
|
||||
if source_asset_id:
|
||||
qs = qs.filter(source_asset_id=source_asset_id)
|
||||
|
||||
return list(qs[offset : offset + limit])
|
||||
|
||||
|
||||
@@ -136,11 +214,8 @@ def cancel_job(job_id: UUID, job=Depends(get_job)):
|
||||
status_code=400, detail=f"Cannot cancel job with status: {job.status}"
|
||||
)
|
||||
|
||||
# TODO: Cancel via gRPC
|
||||
|
||||
job.status = "cancelled"
|
||||
job.save(update_fields=["status"])
|
||||
|
||||
return job
|
||||
|
||||
|
||||
@@ -155,6 +230,4 @@ def retry_job(job_id: UUID, job=Depends(get_job)):
|
||||
job.error_message = None
|
||||
job.save(update_fields=["status", "progress", "error_message"])
|
||||
|
||||
# TODO: Resubmit via gRPC
|
||||
|
||||
return job
|
||||
|
||||
@@ -7,7 +7,7 @@ from uuid import UUID
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from api.deps import get_preset
|
||||
from api.schemas import PresetCreate, PresetResponse, PresetUpdate
|
||||
from api.schema import PresetCreate, PresetResponse, PresetUpdate
|
||||
|
||||
router = APIRouter(prefix="/presets", tags=["presets"])
|
||||
|
||||
|
||||
@@ -15,6 +15,27 @@ def health_check():
|
||||
return {"status": "healthy"}
|
||||
|
||||
|
||||
@router.get("/status")
|
||||
def system_status():
|
||||
"""System status for UI."""
|
||||
return {"status": "ok", "version": "0.1.0"}
|
||||
|
||||
|
||||
@router.get("/worker")
|
||||
def worker_status():
|
||||
"""Worker status from gRPC."""
|
||||
try:
|
||||
from rpc.client import get_client
|
||||
|
||||
client = get_client()
|
||||
status = client.get_worker_status()
|
||||
if status:
|
||||
return status
|
||||
return {"available": False, "error": "No response from worker"}
|
||||
except Exception as e:
|
||||
return {"available": False, "error": str(e)}
|
||||
|
||||
|
||||
@router.get("/ffmpeg/codecs")
|
||||
def ffmpeg_codecs():
|
||||
"""Get available FFmpeg encoders and decoders."""
|
||||
|
||||
@@ -28,7 +28,7 @@ class AssetCreate(BaseSchema):
|
||||
bitrate: Optional[int] = None
|
||||
properties: Dict[str, Any]
|
||||
comments: str = ""
|
||||
tags: List[str]
|
||||
tags: List[str] = Field(default_factory=list)
|
||||
|
||||
class AssetUpdate(BaseSchema):
|
||||
"""AssetUpdate schema."""
|
||||
@@ -65,6 +65,6 @@ class AssetResponse(BaseSchema):
|
||||
bitrate: Optional[int] = None
|
||||
properties: Dict[str, Any]
|
||||
comments: str = ""
|
||||
tags: List[str]
|
||||
tags: List[str] = Field(default_factory=list)
|
||||
created_at: Optional[datetime] = None
|
||||
updated_at: Optional[datetime] = None
|
||||
129
api/schema/graphql.py
Normal file
129
api/schema/graphql.py
Normal file
@@ -0,0 +1,129 @@
|
||||
"""
|
||||
Graphene Types - GENERATED FILE
|
||||
|
||||
Do not edit directly. Regenerate using modelgen.
|
||||
"""
|
||||
|
||||
import graphene
|
||||
|
||||
|
||||
class AssetStatus(graphene.Enum):
    # Lifecycle states of a MediaAsset (mirrors the Django model's status field).
    PENDING = "pending"
    READY = "ready"
    ERROR = "error"
|
||||
|
||||
|
||||
class JobStatus(graphene.Enum):
    # Lifecycle states of a TranscodeJob, from creation through terminal states.
    PENDING = "pending"
    PROCESSING = "processing"
    COMPLETED = "completed"
    FAILED = "failed"
    CANCELLED = "cancelled"
|
||||
|
||||
|
||||
class MediaAssetType(graphene.ObjectType):
    """A video/audio file registered in the system."""

    # Identity and S3 location
    id = graphene.UUID()
    filename = graphene.String()
    file_path = graphene.String()
    # Lifecycle
    status = graphene.String()
    error_message = graphene.String()
    # Media properties (presumably populated by the probe task — TODO confirm)
    file_size = graphene.Int()
    duration = graphene.Float()
    video_codec = graphene.String()
    audio_codec = graphene.String()
    width = graphene.Int()
    height = graphene.Int()
    framerate = graphene.Float()
    bitrate = graphene.Int()
    properties = graphene.JSONString()
    # User-supplied metadata
    comments = graphene.String()
    tags = graphene.List(graphene.String)
    # Timestamps
    created_at = graphene.DateTime()
    updated_at = graphene.DateTime()
|
||||
|
||||
|
||||
class TranscodePresetType(graphene.ObjectType):
    """A reusable transcoding configuration (like Handbrake presets)."""

    # Identity
    id = graphene.UUID()
    name = graphene.String()
    description = graphene.String()
    is_builtin = graphene.Boolean()
    # Container / video settings
    container = graphene.String()
    video_codec = graphene.String()
    video_bitrate = graphene.String()
    video_crf = graphene.Int()
    video_preset = graphene.String()
    resolution = graphene.String()
    framerate = graphene.Float()
    # Audio settings
    audio_codec = graphene.String()
    audio_bitrate = graphene.String()
    audio_channels = graphene.Int()
    audio_samplerate = graphene.Int()
    # Extra CLI arguments appended to the encode command — TODO confirm usage
    extra_args = graphene.List(graphene.String)
    # Timestamps
    created_at = graphene.DateTime()
    updated_at = graphene.DateTime()
|
||||
|
||||
|
||||
class TranscodeJobType(graphene.ObjectType):
    """A transcoding or trimming job in the queue."""

    # Identity and inputs
    id = graphene.UUID()
    source_asset_id = graphene.UUID()
    preset_id = graphene.UUID()
    # Preset settings copied at creation time so later preset edits don't
    # affect this job
    preset_snapshot = graphene.JSONString()
    trim_start = graphene.Float()
    trim_end = graphene.Float()
    # Output
    output_filename = graphene.String()
    output_path = graphene.String()
    output_asset_id = graphene.UUID()
    # Progress / status reporting
    status = graphene.String()
    progress = graphene.Float()
    current_frame = graphene.Int()
    current_time = graphene.Float()
    speed = graphene.String()
    error_message = graphene.String()
    # Executor bookkeeping: Celery task id (local) or Step Functions ARN (AWS)
    celery_task_id = graphene.String()
    execution_arn = graphene.String()
    priority = graphene.Int()
    # Timestamps
    created_at = graphene.DateTime()
    started_at = graphene.DateTime()
    completed_at = graphene.DateTime()
|
||||
|
||||
|
||||
class CreateJobInput(graphene.InputObjectType):
    """Request body for creating a transcode/trim job."""

    source_asset_id = graphene.UUID(required=True)
    # At least one of preset_id or trim_start/trim_end must be provided
    # (enforced by the CreateJob mutation).
    preset_id = graphene.UUID()
    trim_start = graphene.Float()
    trim_end = graphene.Float()
    output_filename = graphene.String()
    priority = graphene.Int(default_value=0)
|
||||
|
||||
|
||||
class SystemStatusType(graphene.ObjectType):
    """System status response."""

    status = graphene.String()
    version = graphene.String()
|
||||
|
||||
|
||||
class ScanResultType(graphene.ObjectType):
    """Result of scanning the media input bucket."""

    # Counts: objects seen, newly registered, skipped as already known
    found = graphene.Int()
    registered = graphene.Int()
    skipped = graphene.Int()
    # Filenames of the newly registered assets
    files = graphene.List(graphene.String)
|
||||
|
||||
|
||||
class WorkerStatusType(graphene.ObjectType):
    """Worker health and capabilities."""

    available = graphene.Boolean()
    active_jobs = graphene.Int()
    supported_codecs = graphene.List(graphene.String)
    gpu_available = graphene.Boolean()
|
||||
@@ -31,6 +31,7 @@ class JobCreate(BaseSchema):
|
||||
current_time: Optional[float] = None
|
||||
speed: Optional[str] = None
|
||||
celery_task_id: Optional[str] = None
|
||||
execution_arn: Optional[str] = None
|
||||
priority: int = 0
|
||||
started_at: Optional[datetime] = None
|
||||
completed_at: Optional[datetime] = None
|
||||
@@ -52,6 +53,7 @@ class JobUpdate(BaseSchema):
|
||||
speed: Optional[str] = None
|
||||
error_message: Optional[str] = None
|
||||
celery_task_id: Optional[str] = None
|
||||
execution_arn: Optional[str] = None
|
||||
priority: Optional[int] = None
|
||||
started_at: Optional[datetime] = None
|
||||
completed_at: Optional[datetime] = None
|
||||
@@ -74,6 +76,7 @@ class JobResponse(BaseSchema):
|
||||
speed: Optional[str] = None
|
||||
error_message: Optional[str] = None
|
||||
celery_task_id: Optional[str] = None
|
||||
execution_arn: Optional[str] = None
|
||||
priority: int = 0
|
||||
created_at: Optional[datetime] = None
|
||||
started_at: Optional[datetime] = None
|
||||
@@ -24,7 +24,7 @@ class PresetCreate(BaseSchema):
|
||||
audio_bitrate: Optional[str] = None
|
||||
audio_channels: Optional[int] = None
|
||||
audio_samplerate: Optional[int] = None
|
||||
extra_args: List[str]
|
||||
extra_args: List[str] = Field(default_factory=list)
|
||||
|
||||
class PresetUpdate(BaseSchema):
|
||||
"""PresetUpdate schema."""
|
||||
@@ -61,6 +61,6 @@ class PresetResponse(BaseSchema):
|
||||
audio_bitrate: Optional[str] = None
|
||||
audio_channels: Optional[int] = None
|
||||
audio_samplerate: Optional[int] = None
|
||||
extra_args: List[str]
|
||||
extra_args: List[str] = Field(default_factory=list)
|
||||
created_at: Optional[datetime] = None
|
||||
updated_at: Optional[datetime] = None
|
||||
90
core/storage.py
Normal file
90
core/storage.py
Normal file
@@ -0,0 +1,90 @@
|
||||
"""
|
||||
S3 storage layer.
|
||||
|
||||
Uses MinIO locally (S3-compatible) and real AWS S3 in production.
|
||||
The only difference is S3_ENDPOINT_URL: set for MinIO, omit for AWS.
|
||||
"""
|
||||
|
||||
import os
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
import boto3
|
||||
from botocore.config import Config
|
||||
|
||||
# Input/output bucket names; override via env for AWS deployments.
BUCKET_IN = os.environ.get("S3_BUCKET_IN", "mpr-media-in")
BUCKET_OUT = os.environ.get("S3_BUCKET_OUT", "mpr-media-out")
|
||||
|
||||
|
||||
def get_s3_client():
    """Build a boto3 S3 client for either MinIO (local) or real AWS.

    When S3_ENDPOINT_URL is set we are talking to a custom endpoint (MinIO),
    so explicit credentials are passed (defaulting to "minioadmin");
    otherwise boto3's normal AWS credential chain applies.
    """
    client_kwargs = {
        "region_name": os.environ.get("AWS_REGION", "us-east-1"),
        "config": Config(signature_version="s3v4"),
    }

    endpoint = os.environ.get("S3_ENDPOINT_URL")
    if endpoint:
        client_kwargs["endpoint_url"] = endpoint
        client_kwargs["aws_access_key_id"] = os.environ.get(
            "AWS_ACCESS_KEY_ID", "minioadmin"
        )
        client_kwargs["aws_secret_access_key"] = os.environ.get(
            "AWS_SECRET_ACCESS_KEY", "minioadmin"
        )

    return boto3.client("s3", **client_kwargs)
|
||||
|
||||
|
||||
def list_objects(bucket: str, prefix: str = "", extensions: Optional[set] = None) -> list[dict]:
    """List objects in an S3 bucket, optionally filtered by file extension.

    Args:
        bucket: Bucket name.
        prefix: Key prefix to restrict the listing (empty lists everything).
        extensions: Optional set of lowercase suffixes (e.g. {".mp4"}); when
            given, only keys whose suffix is in the set are returned.

    Returns:
        A list of dicts with "key", "size" and "filename" per object.
    """
    s3 = get_s3_client()
    objects = []
    kwargs = {"Bucket": bucket, "Prefix": prefix}

    while True:
        response = s3.list_objects_v2(**kwargs)
        for obj in response.get("Contents", []):
            key = obj["Key"]
            # Skip zero-byte "directory" placeholder keys (e.g. "media/in/"):
            # they are folder markers, not real files, and previously leaked
            # into the listing as bogus size-0 entries.
            if key.endswith("/"):
                continue
            if extensions:
                ext = Path(key).suffix.lower()
                if ext not in extensions:
                    continue
            objects.append({
                "key": key,
                "size": obj["Size"],
                "filename": Path(key).name,
            })
        # list_objects_v2 pages at 1000 keys; follow the continuation token
        # until the listing is no longer truncated.
        if not response.get("IsTruncated"):
            break
        kwargs["ContinuationToken"] = response["NextContinuationToken"]

    return objects
|
||||
|
||||
|
||||
def download_file(bucket: str, key: str, local_path: str) -> str:
    """Download a file from S3 to a local path.

    Creates any missing parent directories, then returns ``local_path``.
    """
    destination = Path(local_path)
    destination.parent.mkdir(parents=True, exist_ok=True)
    get_s3_client().download_file(bucket, key, local_path)
    return local_path
|
||||
|
||||
|
||||
def download_to_temp(bucket: str, key: str) -> str:
    """Download a file from S3 to a temp file. Caller must clean up.

    The temp file keeps the object's extension so downstream tooling can
    infer the container format from the filename.

    Returns:
        Path to the downloaded temp file.
    """
    ext = Path(key).suffix
    fd, tmp_path = tempfile.mkstemp(suffix=ext)
    # mkstemp opens the file; close the descriptor so download_file can
    # rewrite it (Windows would otherwise refuse, and we'd leak the fd).
    os.close(fd)
    try:
        download_file(bucket, key, tmp_path)
    except Exception:
        # Don't leak the temp file when the download fails.
        try:
            os.unlink(tmp_path)
        except OSError:
            pass
        raise
    return tmp_path
|
||||
|
||||
|
||||
def upload_file(local_path: str, bucket: str, key: str) -> None:
    """Upload a local file to S3 under the given bucket/key."""
    client = get_s3_client()
    client.upload_file(local_path, bucket, key)
|
||||
|
||||
|
||||
def get_presigned_url(bucket: str, key: str, expires: int = 3600) -> str:
    """Generate a presigned GET URL for an S3 object.

    Args:
        bucket: Bucket name.
        key: Object key.
        expires: URL lifetime in seconds (default one hour).
    """
    params = {"Bucket": bucket, "Key": key}
    client = get_s3_client()
    return client.generate_presigned_url("get_object", Params=params, ExpiresIn=expires)
|
||||
@@ -7,12 +7,12 @@ POSTGRES_USER=mpr_user
|
||||
POSTGRES_PASSWORD=mpr_pass
|
||||
POSTGRES_HOST=postgres
|
||||
POSTGRES_PORT=5432
|
||||
DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
|
||||
DATABASE_URL=postgresql://mpr_user:mpr_pass@postgres:5432/mpr
|
||||
|
||||
# Redis
|
||||
REDIS_HOST=redis
|
||||
REDIS_PORT=6379
|
||||
REDIS_URL=redis://${REDIS_HOST}:${REDIS_PORT}/0
|
||||
REDIS_URL=redis://redis:6379/0
|
||||
|
||||
# Django
|
||||
DEBUG=1
|
||||
@@ -26,3 +26,14 @@ MPR_EXECUTOR=local
|
||||
GRPC_HOST=grpc
|
||||
GRPC_PORT=50051
|
||||
GRPC_MAX_WORKERS=10
|
||||
|
||||
# S3 Storage (MinIO locally, real S3 on AWS)
|
||||
S3_ENDPOINT_URL=http://minio:9000
|
||||
S3_BUCKET_IN=mpr-media-in
|
||||
S3_BUCKET_OUT=mpr-media-out
|
||||
AWS_REGION=us-east-1
|
||||
AWS_ACCESS_KEY_ID=minioadmin
|
||||
AWS_SECRET_ACCESS_KEY=minioadmin
|
||||
|
||||
# Vite
|
||||
VITE_ALLOWED_HOSTS=your-domain.local
|
||||
|
||||
257
ctrl/deploy.sh
257
ctrl/deploy.sh
@@ -1,18 +1,17 @@
|
||||
#!/bin/bash
|
||||
# Deploy MPR to remote server via rsync
|
||||
# Uses project .gitignore for excludes
|
||||
# MPR Deploy Script
|
||||
#
|
||||
# Usage: ./ctrl/deploy.sh [--restart] [--dry-run]
|
||||
# Usage: ./ctrl/deploy.sh <command> [options]
|
||||
#
|
||||
# Examples:
|
||||
# ./ctrl/deploy.sh # Sync files only
|
||||
# ./ctrl/deploy.sh --restart # Sync and restart services
|
||||
# ./ctrl/deploy.sh --dry-run # Preview sync
|
||||
# Commands:
|
||||
# rsync [--restart] [--dry-run] Sync to remote server via rsync
|
||||
# aws Deploy AWS infrastructure (Lambda, Step Functions, S3)
|
||||
|
||||
set -e
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
|
||||
cd "$PROJECT_ROOT"
|
||||
|
||||
source "$SCRIPT_DIR/.env" 2>/dev/null || true
|
||||
|
||||
@@ -21,7 +20,10 @@ GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
NC='\033[0m'
|
||||
|
||||
if [ -z "$SERVER" ] || [ -z "$REMOTE_PATH" ]; then
|
||||
# ─── Rsync Deploy ─────────────────────────────────────────────────────────────
|
||||
|
||||
deploy_rsync() {
|
||||
if [ -z "${SERVER:-}" ] || [ -z "${REMOTE_PATH:-}" ]; then
|
||||
echo -e "${RED}Error: SERVER and REMOTE_PATH must be set in ctrl/.env${NC}"
|
||||
echo "Example:"
|
||||
echo " SERVER=user@host"
|
||||
@@ -34,24 +36,14 @@ DRY_RUN=""
|
||||
|
||||
while [ $# -gt 0 ]; do
|
||||
case "$1" in
|
||||
--restart)
|
||||
RESTART=true
|
||||
shift
|
||||
;;
|
||||
--dry-run)
|
||||
DRY_RUN="--dry-run"
|
||||
shift
|
||||
;;
|
||||
*)
|
||||
echo "Unknown option: $1"
|
||||
exit 1
|
||||
;;
|
||||
--restart) RESTART=true; shift ;;
|
||||
--dry-run) DRY_RUN="--dry-run"; shift ;;
|
||||
*) echo "Unknown option: $1"; exit 1 ;;
|
||||
esac
|
||||
done
|
||||
|
||||
echo -e "${GREEN}=== Deploying MPR to $SERVER:$REMOTE_PATH ===${NC}"
|
||||
|
||||
# Sync files using .gitignore for excludes
|
||||
echo -e "${YELLOW}Syncing files...${NC}"
|
||||
rsync -avz --delete $DRY_RUN \
|
||||
--filter=':- .gitignore' \
|
||||
@@ -65,7 +57,6 @@ if [ -n "$DRY_RUN" ]; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Copy env template if .env doesn't exist on remote
|
||||
ssh "$SERVER" "[ -f $REMOTE_PATH/ctrl/.env ] || cp $REMOTE_PATH/ctrl/.env.template $REMOTE_PATH/ctrl/.env"
|
||||
|
||||
if [ "$RESTART" = true ]; then
|
||||
@@ -74,3 +65,223 @@ if [ "$RESTART" = true ]; then
|
||||
fi
|
||||
|
||||
echo -e "${GREEN}Done!${NC}"
|
||||
}
|
||||
|
||||
# ─── AWS Deploy ────────────────────────────────────────────────────────────────
|
||||
|
||||
deploy_aws() {
    # Provision the full cloud transcode stack: S3 buckets, IAM roles, ECR
    # repo, the Lambda container image, and the Step Functions state machine.
    # Every step is idempotent: check for the resource, create only if missing.
    REGION="${AWS_REGION:-us-east-1}"
    ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text)
    PROJECT="mpr"

    # S3
    BUCKET_IN="${S3_BUCKET_IN:-mpr-media-in}"
    BUCKET_OUT="${S3_BUCKET_OUT:-mpr-media-out}"

    # ECR
    ECR_REPO="${PROJECT}-transcode"
    ECR_URI="${ACCOUNT_ID}.dkr.ecr.${REGION}.amazonaws.com/${ECR_REPO}"

    # Lambda
    LAMBDA_NAME="${PROJECT}-transcode"
    LAMBDA_TIMEOUT=900     # seconds -- the Lambda maximum
    LAMBDA_MEMORY=2048     # MB

    # Step Functions
    SFN_NAME="${PROJECT}-transcode"

    # IAM
    LAMBDA_ROLE_NAME="${PROJECT}-lambda-role"
    SFN_ROLE_NAME="${PROJECT}-sfn-role"

    # Callback (printed in the summary for the host .env)
    CALLBACK_URL="${CALLBACK_URL:-https://mpr.mcrn.ar/api}"
    CALLBACK_API_KEY="${CALLBACK_API_KEY:-changeme}"

    echo -e "${GREEN}=== Deploying MPR to AWS ($REGION, account $ACCOUNT_ID) ===${NC}"

    # ─── S3 Buckets ───────────────────────────────────────────────────────

    echo -e "${YELLOW}Creating S3 buckets...${NC}"
    for bucket in "$BUCKET_IN" "$BUCKET_OUT"; do
        if ! aws s3api head-bucket --bucket "$bucket" 2>/dev/null; then
            # us-east-1 is special: CreateBucket REJECTS an explicit
            # LocationConstraint there, while every other region REQUIRES one.
            if [ "$REGION" = "us-east-1" ]; then
                aws s3api create-bucket \
                    --bucket "$bucket" \
                    --region "$REGION"
            else
                aws s3api create-bucket \
                    --bucket "$bucket" \
                    --region "$REGION" \
                    --create-bucket-configuration LocationConstraint="$REGION"
            fi
            echo "  Created $bucket"
        else
            echo "  $bucket already exists"
        fi
    done

    # ─── IAM Roles ────────────────────────────────────────────────────────

    echo -e "${YELLOW}Creating IAM roles...${NC}"

    if ! aws iam get-role --role-name "$LAMBDA_ROLE_NAME" 2>/dev/null; then
        aws iam create-role \
            --role-name "$LAMBDA_ROLE_NAME" \
            --assume-role-policy-document '{
                "Version": "2012-10-17",
                "Statement": [{
                    "Effect": "Allow",
                    "Principal": {"Service": "lambda.amazonaws.com"},
                    "Action": "sts:AssumeRole"
                }]
            }'
        aws iam attach-role-policy \
            --role-name "$LAMBDA_ROLE_NAME" \
            --policy-arn arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole
        # Inline policy: the transcoder only ever reads input and writes output.
        aws iam put-role-policy \
            --role-name "$LAMBDA_ROLE_NAME" \
            --policy-name "${PROJECT}-s3-access" \
            --policy-document '{
                "Version": "2012-10-17",
                "Statement": [{
                    "Effect": "Allow",
                    "Action": ["s3:GetObject", "s3:PutObject"],
                    "Resource": [
                        "arn:aws:s3:::'"$BUCKET_IN"'/*",
                        "arn:aws:s3:::'"$BUCKET_OUT"'/*"
                    ]
                }]
            }'
        echo "  Created $LAMBDA_ROLE_NAME"
        # New IAM roles are eventually consistent; give them time before use.
        echo "  Waiting for role to propagate..."
        sleep 10
    else
        echo "  $LAMBDA_ROLE_NAME already exists"
    fi
    LAMBDA_ROLE_ARN=$(aws iam get-role --role-name "$LAMBDA_ROLE_NAME" --query Role.Arn --output text)

    if ! aws iam get-role --role-name "$SFN_ROLE_NAME" 2>/dev/null; then
        aws iam create-role \
            --role-name "$SFN_ROLE_NAME" \
            --assume-role-policy-document '{
                "Version": "2012-10-17",
                "Statement": [{
                    "Effect": "Allow",
                    "Principal": {"Service": "states.amazonaws.com"},
                    "Action": "sts:AssumeRole"
                }]
            }'
        # The state machine only needs to invoke the transcode Lambda.
        aws iam put-role-policy \
            --role-name "$SFN_ROLE_NAME" \
            --policy-name "${PROJECT}-sfn-invoke-lambda" \
            --policy-document '{
                "Version": "2012-10-17",
                "Statement": [{
                    "Effect": "Allow",
                    "Action": "lambda:InvokeFunction",
                    "Resource": "arn:aws:lambda:'"$REGION"':'"$ACCOUNT_ID"':function:'"$LAMBDA_NAME"'"
                }]
            }'
        echo "  Created $SFN_ROLE_NAME"
        sleep 10
    else
        echo "  $SFN_ROLE_NAME already exists"
    fi
    SFN_ROLE_ARN=$(aws iam get-role --role-name "$SFN_ROLE_NAME" --query Role.Arn --output text)

    # ─── ECR Repository ──────────────────────────────────────────────────

    echo -e "${YELLOW}Setting up ECR...${NC}"
    if ! aws ecr describe-repositories --repository-names "$ECR_REPO" --region "$REGION" 2>/dev/null; then
        aws ecr create-repository --repository-name "$ECR_REPO" --region "$REGION"
        echo "  Created ECR repo $ECR_REPO"
    else
        echo "  ECR repo $ECR_REPO already exists"
    fi

    # ─── Build & Push Lambda Image ───────────────────────────────────────

    echo -e "${YELLOW}Building Lambda container image...${NC}"

    docker build -f ctrl/lambda/Dockerfile -t "${ECR_REPO}:latest" .

    echo -e "${YELLOW}Pushing to ECR...${NC}"
    aws ecr get-login-password --region "$REGION" | \
        docker login --username AWS --password-stdin "${ACCOUNT_ID}.dkr.ecr.${REGION}.amazonaws.com"

    docker tag "${ECR_REPO}:latest" "${ECR_URI}:latest"
    docker push "${ECR_URI}:latest"

    # ─── Lambda Function ─────────────────────────────────────────────────

    echo -e "${YELLOW}Deploying Lambda function...${NC}"
    LAMBDA_ARN="arn:aws:lambda:${REGION}:${ACCOUNT_ID}:function:${LAMBDA_NAME}"

    if aws lambda get-function --function-name "$LAMBDA_NAME" --region "$REGION" 2>/dev/null; then
        aws lambda update-function-code \
            --function-name "$LAMBDA_NAME" \
            --image-uri "${ECR_URI}:latest" \
            --region "$REGION"
        echo "  Updated $LAMBDA_NAME"
    else
        # NOTE: AWS_REGION is a *reserved* Lambda environment key (the runtime
        # sets it automatically), so it must NOT be passed in Variables --
        # create-function rejects it with a validation error.
        aws lambda create-function \
            --function-name "$LAMBDA_NAME" \
            --package-type Image \
            --code ImageUri="${ECR_URI}:latest" \
            --role "$LAMBDA_ROLE_ARN" \
            --timeout "$LAMBDA_TIMEOUT" \
            --memory-size "$LAMBDA_MEMORY" \
            --environment "Variables={S3_BUCKET_IN=${BUCKET_IN},S3_BUCKET_OUT=${BUCKET_OUT}}" \
            --region "$REGION"
        echo "  Created $LAMBDA_NAME"
    fi

    # ─── Step Functions ───────────────────────────────────────────────────

    echo -e "${YELLOW}Deploying Step Functions state machine...${NC}"

    # Substitute the concrete Lambda ARN into the state machine template.
    SFN_DEFINITION=$(sed "s|\${TranscodeLambdaArn}|${LAMBDA_ARN}|g" ctrl/state-machine.json)

    SFN_ARN="arn:aws:states:${REGION}:${ACCOUNT_ID}:stateMachine:${SFN_NAME}"
    if aws stepfunctions describe-state-machine --state-machine-arn "$SFN_ARN" --region "$REGION" 2>/dev/null; then
        aws stepfunctions update-state-machine \
            --state-machine-arn "$SFN_ARN" \
            --definition "$SFN_DEFINITION" \
            --region "$REGION"
        echo "  Updated $SFN_NAME"
    else
        aws stepfunctions create-state-machine \
            --name "$SFN_NAME" \
            --definition "$SFN_DEFINITION" \
            --role-arn "$SFN_ROLE_ARN" \
            --region "$REGION"
        echo "  Created $SFN_NAME"
    fi

    # ─── Summary ──────────────────────────────────────────────────────────

    echo ""
    echo -e "${GREEN}Deployment complete!${NC}"
    echo ""
    echo "Add these to your .env:"
    echo "  MPR_EXECUTOR=lambda"
    echo "  STEP_FUNCTION_ARN=${SFN_ARN}"
    echo "  LAMBDA_FUNCTION_ARN=${LAMBDA_ARN}"
    echo "  S3_BUCKET_IN=${BUCKET_IN}"
    echo "  S3_BUCKET_OUT=${BUCKET_OUT}"
    echo "  CALLBACK_URL=${CALLBACK_URL}"
    echo "  CALLBACK_API_KEY=${CALLBACK_API_KEY}"
}
|
||||
|
||||
# ─── Main ──────────────────────────────────────────────────────────────────────
|
||||
|
||||
# Dispatch on the first positional argument; everything after it is
# forwarded untouched to the selected deploy function.
COMMAND="${1:-}"
shift || true  # `shift` fails when there are no args; `|| true` keeps `set -euo pipefail` from aborting

case "$COMMAND" in
    rsync) deploy_rsync "$@" ;;
    aws) deploy_aws "$@" ;;
    *)
        # Unknown or missing command: print usage and exit non-zero.
        echo "Usage: ./ctrl/deploy.sh <command> [options]"
        echo ""
        echo "Commands:"
        echo "  rsync [--restart] [--dry-run]  Sync to remote server"
        echo "  aws                            Deploy AWS infrastructure"
        exit 1
        ;;
esac
|
||||
|
||||
@@ -5,6 +5,12 @@ x-common-env: &common-env
|
||||
DEBUG: 1
|
||||
GRPC_HOST: grpc
|
||||
GRPC_PORT: 50051
|
||||
S3_ENDPOINT_URL: http://minio:9000
|
||||
S3_BUCKET_IN: mpr-media-in
|
||||
S3_BUCKET_OUT: mpr-media-out
|
||||
AWS_ACCESS_KEY_ID: minioadmin
|
||||
AWS_SECRET_ACCESS_KEY: minioadmin
|
||||
AWS_REGION: us-east-1
|
||||
|
||||
x-healthcheck-defaults: &healthcheck-defaults
|
||||
interval: 5s
|
||||
@@ -23,7 +29,7 @@ services:
|
||||
POSTGRES_USER: mpr_user
|
||||
POSTGRES_PASSWORD: mpr_pass
|
||||
ports:
|
||||
- "5433:5432"
|
||||
- "5436:5432"
|
||||
volumes:
|
||||
- postgres-data:/var/lib/postgresql/data
|
||||
healthcheck:
|
||||
@@ -33,24 +39,53 @@ services:
|
||||
redis:
|
||||
image: redis:7-alpine
|
||||
ports:
|
||||
- "6380:6379"
|
||||
- "6381:6379"
|
||||
volumes:
|
||||
- redis-data:/data
|
||||
healthcheck:
|
||||
<<: *healthcheck-defaults
|
||||
test: ["CMD", "redis-cli", "ping"]
|
||||
|
||||
minio:
|
||||
image: minio/minio
|
||||
command: ["server", "/data", "--console-address", ":9001"]
|
||||
ports:
|
||||
- "9000:9000"
|
||||
- "9001:9001"
|
||||
environment:
|
||||
MINIO_ROOT_USER: minioadmin
|
||||
MINIO_ROOT_PASSWORD: minioadmin
|
||||
volumes:
|
||||
- minio-data:/data
|
||||
healthcheck:
|
||||
<<: *healthcheck-defaults
|
||||
test: ["CMD", "mc", "ready", "local"]
|
||||
|
||||
minio-init:
|
||||
image: minio/mc
|
||||
depends_on:
|
||||
minio:
|
||||
condition: service_healthy
|
||||
entrypoint: ["/bin/sh", "-c"]
|
||||
command:
|
||||
- |
|
||||
mc alias set local http://minio:9000 minioadmin minioadmin
|
||||
mc mb --ignore-existing local/mpr-media-in
|
||||
mc mb --ignore-existing local/mpr-media-out
|
||||
mc anonymous set download local/mpr-media-in
|
||||
mc anonymous set download local/mpr-media-out
|
||||
|
||||
nginx:
|
||||
image: nginx:alpine
|
||||
ports:
|
||||
- "80:80"
|
||||
volumes:
|
||||
- ./nginx.conf:/etc/nginx/nginx.conf:ro
|
||||
- ../media:/app/media:ro
|
||||
depends_on:
|
||||
- django
|
||||
- fastapi
|
||||
- timeline
|
||||
- minio
|
||||
|
||||
# =============================================================================
|
||||
# Application Services
|
||||
@@ -70,7 +105,6 @@ services:
|
||||
<<: *common-env
|
||||
volumes:
|
||||
- ..:/app
|
||||
- ../media:/app/media
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
@@ -88,7 +122,6 @@ services:
|
||||
<<: *common-env
|
||||
volumes:
|
||||
- ..:/app
|
||||
- ../media:/app/media
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
@@ -99,16 +132,15 @@ services:
|
||||
build:
|
||||
context: ..
|
||||
dockerfile: ctrl/Dockerfile
|
||||
command: python -m grpc.server
|
||||
command: python -m rpc.server
|
||||
ports:
|
||||
- "50051:50051"
|
||||
- "50052:50051"
|
||||
environment:
|
||||
<<: *common-env
|
||||
GRPC_PORT: 50051
|
||||
GRPC_MAX_WORKERS: 10
|
||||
volumes:
|
||||
- ..:/app
|
||||
- ../media:/app/media
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
@@ -119,13 +151,12 @@ services:
|
||||
build:
|
||||
context: ..
|
||||
dockerfile: ctrl/Dockerfile
|
||||
command: celery -A mpr worker -l info -Q default -c 2
|
||||
command: celery -A mpr worker -l info -Q transcode -c 2
|
||||
environment:
|
||||
<<: *common-env
|
||||
MPR_EXECUTOR: local
|
||||
volumes:
|
||||
- ..:/app
|
||||
- ../media:/app/media
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
@@ -140,13 +171,18 @@ services:
|
||||
dockerfile: Dockerfile
|
||||
ports:
|
||||
- "5173:5173"
|
||||
environment:
|
||||
VITE_ALLOWED_HOSTS: ${VITE_ALLOWED_HOSTS:-}
|
||||
volumes:
|
||||
- ../ui/timeline/src:/app/src
|
||||
|
||||
volumes:
|
||||
postgres-data:
|
||||
redis-data:
|
||||
minio-data:
|
||||
|
||||
networks:
|
||||
default:
|
||||
name: mpr
|
||||
|
||||
name: mpr
|
||||
|
||||
22
ctrl/generate.sh
Executable file
22
ctrl/generate.sh
Executable file
@@ -0,0 +1,22 @@
|
||||
#!/bin/bash
# Model generation script for MPR
# Generates all targets from schema/modelgen.json config

set -e
cd "$(dirname "$0")/.."  # run from the project root so relative paths below resolve

# Regenerate all modelgen targets (Django/Pydantic/TypeScript/proto) from the
# dataclass schema, as configured in schema/modelgen.json.
echo "Generating models from schema/models..."
python -m modelgen generate --config schema/modelgen.json

# Generate gRPC stubs from proto
echo "Generating gRPC stubs..."
python -m grpc_tools.protoc \
    -I rpc/protos \
    --python_out=rpc \
    --grpc_python_out=rpc \
    rpc/protos/worker.proto

# Fix relative import in generated grpc stub
# (grpc_tools emits a top-level `import worker_pb2`, which breaks once the
# stub lives inside the `rpc` package; rewrite it to a relative import.)
sed -i 's/^import worker_pb2/from . import worker_pb2/' rpc/worker_pb2_grpc.py

echo "Done!"
|
||||
21
ctrl/lambda/Dockerfile
Normal file
21
ctrl/lambda/Dockerfile
Normal file
@@ -0,0 +1,21 @@
|
||||
FROM public.ecr.aws/lambda/python:3.11

# Install ffmpeg static binary (the Lambda base image ships without it).
# `-f` makes curl fail on an HTTP error instead of saving the error page,
# which would otherwise surface later as a confusing tar failure.
RUN yum install -y tar xz && \
    curl -fL https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz -o /tmp/ffmpeg.tar.xz && \
    tar -xf /tmp/ffmpeg.tar.xz -C /tmp && \
    cp /tmp/ffmpeg-*-amd64-static/ffmpeg /usr/local/bin/ffmpeg && \
    cp /tmp/ffmpeg-*-amd64-static/ffprobe /usr/local/bin/ffprobe && \
    rm -rf /tmp/ffmpeg* && \
    yum clean all

# Install Python dependencies
COPY ctrl/lambda/requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy only the modules the handler needs, keeping the image small.
COPY task/lambda_handler.py ${LAMBDA_TASK_ROOT}/task/lambda_handler.py
COPY task/__init__.py ${LAMBDA_TASK_ROOT}/task/__init__.py
COPY core/ ${LAMBDA_TASK_ROOT}/core/

# Lambda entry point: dotted module path to the handler function.
CMD ["task.lambda_handler.handler"]
|
||||
2
ctrl/lambda/requirements.txt
Normal file
2
ctrl/lambda/requirements.txt
Normal file
@@ -0,0 +1,2 @@
|
||||
ffmpeg-python>=0.2.0
|
||||
requests>=2.31.0
|
||||
@@ -21,6 +21,10 @@ http {
|
||||
server timeline:5173;
|
||||
}
|
||||
|
||||
upstream minio {
|
||||
server minio:9000;
|
||||
}
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
server_name mpr.local.ar;
|
||||
@@ -41,7 +45,7 @@ http {
|
||||
}
|
||||
|
||||
# FastAPI
|
||||
location /api {
|
||||
location /api/ {
|
||||
proxy_pass http://fastapi;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
@@ -67,10 +71,15 @@ http {
|
||||
proxy_set_header Host $host;
|
||||
}
|
||||
|
||||
# Media files
|
||||
location /media {
|
||||
alias /app/media;
|
||||
autoindex on;
|
||||
# Media files - proxied from MinIO (local) or S3 (AWS)
|
||||
location /media/in/ {
|
||||
proxy_pass http://minio/mpr-media-in/;
|
||||
proxy_set_header Host $http_host;
|
||||
}
|
||||
|
||||
location /media/out/ {
|
||||
proxy_pass http://minio/mpr-media-out/;
|
||||
proxy_set_header Host $http_host;
|
||||
}
|
||||
|
||||
# Default to Timeline UI
|
||||
|
||||
37
ctrl/run.sh
37
ctrl/run.sh
@@ -1,12 +1,16 @@
|
||||
#!/bin/bash
|
||||
# Run MPR stack locally
|
||||
# Usage: ./ctrl/run.sh [docker-compose args]
|
||||
# Usage: ./run.sh [OPTIONS] [docker-compose args]
|
||||
#
|
||||
# Options:
|
||||
# -f, --foreground Run in foreground (don't detach)
|
||||
# --build Rebuild images before starting
|
||||
#
|
||||
# Examples:
|
||||
# ./ctrl/run.sh # Start all services
|
||||
# ./ctrl/run.sh --build # Rebuild and start
|
||||
# ./ctrl/run.sh -d # Detached mode
|
||||
# ./ctrl/run.sh down # Stop all
|
||||
# ./run.sh # Start detached
|
||||
# ./run.sh -f # Start in foreground (see logs)
|
||||
# ./run.sh --build # Rebuild and start
|
||||
# ./run.sh logs -f # Follow logs
|
||||
|
||||
set -e
|
||||
|
||||
@@ -30,4 +34,27 @@ if ! grep -q "mpr.local.ar" /etc/hosts 2>/dev/null; then
|
||||
echo ""
|
||||
fi
|
||||
|
||||
# Parse options
DETACH="-d"  # default: run the stack detached
BUILD=""

while [[ $# -gt 0 ]]; do
    case $1 in
        -f|--foreground)
            DETACH=""
            shift
            ;;
        --build)
            BUILD="--build"
            shift
            ;;
        *)
            # Pass remaining args to docker compose
            # (e.g. `./run.sh logs -f`, `./run.sh down`) and exit with its status.
            docker compose "$@"
            exit $?
            ;;
    esac
done

# Default: up with options
# $DETACH/$BUILD are intentionally unquoted so empty values expand to nothing.
docker compose up $DETACH $BUILD
|
||||
|
||||
39
ctrl/state-machine.json
Normal file
39
ctrl/state-machine.json
Normal file
@@ -0,0 +1,39 @@
|
||||
{
|
||||
"Comment": "MPR Transcode Job - orchestrates Lambda-based media transcoding",
|
||||
"StartAt": "Transcode",
|
||||
"States": {
|
||||
"Transcode": {
|
||||
"Type": "Task",
|
||||
"Resource": "${TranscodeLambdaArn}",
|
||||
"TimeoutSeconds": 900,
|
||||
"Retry": [
|
||||
{
|
||||
"ErrorEquals": ["States.TaskFailed", "Lambda.ServiceException"],
|
||||
"IntervalSeconds": 10,
|
||||
"MaxAttempts": 2,
|
||||
"BackoffRate": 2.0
|
||||
}
|
||||
],
|
||||
"Catch": [
|
||||
{
|
||||
"ErrorEquals": ["States.ALL"],
|
||||
"Next": "HandleError",
|
||||
"ResultPath": "$.error"
|
||||
}
|
||||
],
|
||||
"Next": "Done"
|
||||
},
|
||||
"HandleError": {
|
||||
"Type": "Pass",
|
||||
"Parameters": {
|
||||
"status": "failed",
|
||||
"job_id.$": "$.job_id",
|
||||
"error.$": "$.error.Cause"
|
||||
},
|
||||
"Next": "Done"
|
||||
},
|
||||
"Done": {
|
||||
"Type": "Succeed"
|
||||
}
|
||||
}
|
||||
}
|
||||
31
ctrl/stop.sh
Executable file
31
ctrl/stop.sh
Executable file
@@ -0,0 +1,31 @@
|
||||
#!/bin/bash
# Stop MPR stack
# Usage: ./stop.sh [OPTIONS]
#
# Options:
#   -v, --volumes    Also remove volumes (database data)
#
# Examples:
#   ./stop.sh        # Stop containers
#   ./stop.sh -v     # Stop and remove volumes

set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"

# Translate our flags into the matching `docker compose down` argument;
# anything unrecognized is ignored, same as before.
VOLUMES=""
for arg in "$@"; do
    case "$arg" in
        -v|--volumes) VOLUMES="-v" ;;
    esac
done

docker compose down $VOLUMES
|
||||
@@ -3,13 +3,11 @@ digraph system_overview {
|
||||
node [shape=box, style=rounded, fontname="Helvetica"]
|
||||
edge [fontname="Helvetica", fontsize=10]
|
||||
|
||||
// Title
|
||||
labelloc="t"
|
||||
label="MPR - System Overview"
|
||||
fontsize=16
|
||||
fontname="Helvetica-Bold"
|
||||
|
||||
// Styling
|
||||
graph [splines=ortho, nodesep=0.8, ranksep=0.8]
|
||||
|
||||
// External
|
||||
@@ -18,7 +16,7 @@ digraph system_overview {
|
||||
style=dashed
|
||||
color=gray
|
||||
|
||||
browser [label="Browser\nmpr.local.ar", shape=ellipse]
|
||||
browser [label="Browser\nmpr.local.ar / mpr.mcrn.ar", shape=ellipse]
|
||||
}
|
||||
|
||||
// Nginx reverse proxy
|
||||
@@ -37,7 +35,7 @@ digraph system_overview {
|
||||
fillcolor="#f0f8e8"
|
||||
|
||||
django [label="Django\n/admin\nport 8701"]
|
||||
fastapi [label="FastAPI\n/api\nport 8702"]
|
||||
fastapi [label="FastAPI\n/api + /graphql\nport 8702"]
|
||||
timeline [label="Timeline UI\n/ui\nport 5173"]
|
||||
}
|
||||
|
||||
@@ -48,8 +46,17 @@ digraph system_overview {
|
||||
fillcolor="#fff8e8"
|
||||
|
||||
grpc_server [label="gRPC Server\nport 50051"]
|
||||
celery [label="Celery Worker\n(local)"]
|
||||
lambda [label="Lambda\n(cloud)", style="dashed,rounded"]
|
||||
celery [label="Celery Worker\n(local mode)"]
|
||||
}
|
||||
|
||||
// AWS layer
|
||||
subgraph cluster_aws {
|
||||
label="AWS (lambda mode)"
|
||||
style=filled
|
||||
fillcolor="#fde8d0"
|
||||
|
||||
step_functions [label="Step Functions\nstate machine"]
|
||||
lambda [label="Lambda\nFFmpeg container"]
|
||||
}
|
||||
|
||||
// Data layer
|
||||
@@ -58,48 +65,50 @@ digraph system_overview {
|
||||
style=filled
|
||||
fillcolor="#f8e8f0"
|
||||
|
||||
postgres [label="PostgreSQL\nport 5433", shape=cylinder]
|
||||
redis [label="Redis\nport 6380", shape=cylinder]
|
||||
sqs [label="SQS\n(cloud)", shape=cylinder, style=dashed]
|
||||
postgres [label="PostgreSQL\nport 5436", shape=cylinder]
|
||||
redis [label="Redis\nport 6381", shape=cylinder]
|
||||
}
|
||||
|
||||
// Storage
|
||||
subgraph cluster_storage {
|
||||
label="File Storage"
|
||||
label="S3 Storage"
|
||||
style=filled
|
||||
fillcolor="#f0f0f0"
|
||||
|
||||
local_fs [label="Local FS\n/media", shape=folder]
|
||||
s3 [label="S3\n(cloud)", shape=folder, style=dashed]
|
||||
minio [label="MinIO (local)\nport 9000", shape=folder]
|
||||
s3 [label="AWS S3 (cloud)", shape=folder, style="dashed,rounded"]
|
||||
bucket_in [label="mpr-media-in", shape=note]
|
||||
bucket_out [label="mpr-media-out", shape=note]
|
||||
}
|
||||
|
||||
// Connections
|
||||
browser -> nginx
|
||||
|
||||
nginx -> django [label="/admin"]
|
||||
nginx -> fastapi [label="/api"]
|
||||
nginx -> timeline [label="/ui"]
|
||||
nginx -> django [xlabel="/admin"]
|
||||
nginx -> fastapi [xlabel="/api, /graphql"]
|
||||
nginx -> timeline [xlabel="/ui"]
|
||||
nginx -> minio [xlabel="/media/*"]
|
||||
|
||||
// Django uses FastAPI for operations (single API gateway)
|
||||
django -> fastapi [label="job operations"]
|
||||
django -> postgres [label="CRUD only"]
|
||||
timeline -> fastapi [xlabel="REST API"]
|
||||
|
||||
// Timeline UI uses FastAPI
|
||||
timeline -> fastapi [label="REST API"]
|
||||
|
||||
// FastAPI is the single API gateway
|
||||
fastapi -> postgres
|
||||
fastapi -> redis [label="job status"]
|
||||
fastapi -> grpc_server [label="gRPC\nprogress streaming"]
|
||||
fastapi -> grpc_server [xlabel="gRPC\nprogress"]
|
||||
|
||||
// Worker layer
|
||||
grpc_server -> celery [label="task dispatch"]
|
||||
celery -> redis [label="queue"]
|
||||
celery -> postgres [label="job updates"]
|
||||
celery -> grpc_server [label="progress\ncallbacks", style=dotted]
|
||||
celery -> local_fs [label="read/write"]
|
||||
// Local mode
|
||||
grpc_server -> celery [xlabel="task dispatch"]
|
||||
celery -> redis [xlabel="queue"]
|
||||
celery -> postgres [xlabel="job updates"]
|
||||
celery -> minio [xlabel="S3 API\ndownload/upload"]
|
||||
|
||||
// Cloud (future)
|
||||
lambda -> sqs [label="queue", style=dashed]
|
||||
lambda -> s3 [label="read/write", style=dashed]
|
||||
// Lambda mode
|
||||
fastapi -> step_functions [xlabel="boto3\nstart_execution", style=dashed]
|
||||
step_functions -> lambda [style=dashed]
|
||||
lambda -> s3 [xlabel="download/upload", style=dashed]
|
||||
lambda -> fastapi [xlabel="callback\nPOST /jobs/{id}/callback", style=dashed]
|
||||
|
||||
// Storage details
|
||||
minio -> bucket_in [style=dotted, arrowhead=none]
|
||||
minio -> bucket_out [style=dotted, arrowhead=none]
|
||||
s3 -> bucket_in [style=dotted, arrowhead=none]
|
||||
s3 -> bucket_out [style=dotted, arrowhead=none]
|
||||
}
|
||||
|
||||
@@ -1,260 +1,293 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
|
||||
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<!-- Generated by graphviz version 14.1.1 (0)
|
||||
<!-- Generated by graphviz version 14.1.2 (0)
|
||||
-->
|
||||
<!-- Title: system_overview Pages: 1 -->
|
||||
<svg width="843pt" height="957pt"
|
||||
viewBox="0.00 0.00 843.00 957.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 952.79)">
|
||||
<svg width="620pt" height="903pt"
|
||||
viewBox="0.00 0.00 620.00 903.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 898.54)">
|
||||
<title>system_overview</title>
|
||||
<polygon fill="white" stroke="none" points="-4,4 -4,-952.79 838.5,-952.79 838.5,4 -4,4"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="417.25" y="-929.59" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">MPR - System Overview</text>
|
||||
<polygon fill="white" stroke="none" points="-4,4 -4,-898.54 616,-898.54 616,4 -4,4"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="306" y="-875.34" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">MPR - System Overview</text>
|
||||
<g id="clust1" class="cluster">
|
||||
<title>cluster_external</title>
|
||||
<polygon fill="none" stroke="gray" stroke-dasharray="5,2" points="478,-809.69 478,-913.29 632,-913.29 632,-809.69 478,-809.69"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="555" y="-894.09" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">External</text>
|
||||
<polygon fill="none" stroke="gray" stroke-dasharray="5,2" points="246,-755.44 246,-859.04 540,-859.04 540,-755.44 246,-755.44"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="393" y="-839.84" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">External</text>
|
||||
</g>
|
||||
<g id="clust2" class="cluster">
|
||||
<title>cluster_proxy</title>
|
||||
<polygon fill="#e8f4f8" stroke="black" points="482,-693.69 482,-779.69 628,-779.69 628,-693.69 482,-693.69"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="555" y="-760.49" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Reverse Proxy</text>
|
||||
<polygon fill="#e8f4f8" stroke="black" points="320,-654.94 320,-740.94 466,-740.94 466,-654.94 320,-654.94"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="393" y="-721.74" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Reverse Proxy</text>
|
||||
</g>
|
||||
<g id="clust3" class="cluster">
|
||||
<title>cluster_apps</title>
|
||||
<polygon fill="#f0f8e8" stroke="black" points="352,-418.19 352,-651.94 606,-651.94 606,-418.19 352,-418.19"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="479" y="-632.74" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Application Layer</text>
|
||||
<polygon fill="#f0f8e8" stroke="black" points="278,-419.44 278,-640.44 532,-640.44 532,-419.44 278,-419.44"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="405" y="-621.24" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Application Layer</text>
|
||||
</g>
|
||||
<g id="clust4" class="cluster">
|
||||
<title>cluster_workers</title>
|
||||
<polygon fill="#fff8e8" stroke="black" points="125,-151.69 125,-363.69 374,-363.69 374,-151.69 125,-151.69"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="249.5" y="-344.49" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Worker Layer</text>
|
||||
<polygon fill="#fff8e8" stroke="black" points="142,-218.44 142,-404.94 280,-404.94 280,-218.44 142,-218.44"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="211" y="-385.74" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Worker Layer</text>
|
||||
</g>
|
||||
<g id="clust5" class="cluster">
|
||||
<title>cluster_data</title>
|
||||
<polygon fill="#f8e8f0" stroke="black" points="322,-8 322,-109.94 700,-109.94 700,-8 322,-8"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="511" y="-90.74" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Data Layer</text>
|
||||
<title>cluster_aws</title>
|
||||
<polygon fill="#fde8d0" stroke="black" points="383,-218.44 383,-404.94 581,-404.94 581,-218.44 383,-218.44"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="482" y="-385.74" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">AWS (lambda mode)</text>
|
||||
</g>
|
||||
<g id="clust6" class="cluster">
|
||||
<title>cluster_data</title>
|
||||
<polygon fill="#f8e8f0" stroke="black" points="8,-102 8,-203.94 263,-203.94 263,-102 8,-102"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="135.5" y="-184.74" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Data Layer</text>
|
||||
</g>
|
||||
<g id="clust7" class="cluster">
|
||||
<title>cluster_storage</title>
|
||||
<polygon fill="#f0f0f0" stroke="black" points="8,-15.97 8,-101.97 218,-101.97 218,-15.97 8,-15.97"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="113" y="-82.77" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">File Storage</text>
|
||||
<polygon fill="#f0f0f0" stroke="black" points="302,-8 302,-195.97 604,-195.97 604,-8 302,-8"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="453" y="-176.77" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">S3 Storage</text>
|
||||
</g>
|
||||
<!-- browser -->
|
||||
<g id="node1" class="node">
|
||||
<title>browser</title>
|
||||
<ellipse fill="none" stroke="black" cx="555" cy="-847.74" rx="69.12" ry="30.05"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="555" y="-851.69" font-family="Helvetica,sans-Serif" font-size="14.00">Browser</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="555" y="-834.44" font-family="Helvetica,sans-Serif" font-size="14.00">mpr.local.ar</text>
|
||||
<ellipse fill="none" stroke="black" cx="393" cy="-793.49" rx="139.12" ry="30.05"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="393" y="-797.44" font-family="Helvetica,sans-Serif" font-size="14.00">Browser</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="393" y="-780.19" font-family="Helvetica,sans-Serif" font-size="14.00">mpr.local.ar / mpr.mcrn.ar</text>
|
||||
</g>
|
||||
<!-- nginx -->
|
||||
<g id="node2" class="node">
|
||||
<title>nginx</title>
|
||||
<path fill="none" stroke="black" d="M576.5,-744.19C576.5,-744.19 533.5,-744.19 533.5,-744.19 527.5,-744.19 521.5,-738.19 521.5,-732.19 521.5,-732.19 521.5,-713.69 521.5,-713.69 521.5,-707.69 527.5,-701.69 533.5,-701.69 533.5,-701.69 576.5,-701.69 576.5,-701.69 582.5,-701.69 588.5,-707.69 588.5,-713.69 588.5,-713.69 588.5,-732.19 588.5,-732.19 588.5,-738.19 582.5,-744.19 576.5,-744.19"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="555" y="-726.89" font-family="Helvetica,sans-Serif" font-size="14.00">nginx</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="555" y="-709.64" font-family="Helvetica,sans-Serif" font-size="14.00">port 80</text>
|
||||
<path fill="none" stroke="black" d="M414.5,-705.44C414.5,-705.44 371.5,-705.44 371.5,-705.44 365.5,-705.44 359.5,-699.44 359.5,-693.44 359.5,-693.44 359.5,-674.94 359.5,-674.94 359.5,-668.94 365.5,-662.94 371.5,-662.94 371.5,-662.94 414.5,-662.94 414.5,-662.94 420.5,-662.94 426.5,-668.94 426.5,-674.94 426.5,-674.94 426.5,-693.44 426.5,-693.44 426.5,-699.44 420.5,-705.44 414.5,-705.44"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="393" y="-688.14" font-family="Helvetica,sans-Serif" font-size="14.00">nginx</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="393" y="-670.89" font-family="Helvetica,sans-Serif" font-size="14.00">port 80</text>
|
||||
</g>
|
||||
<!-- browser->nginx -->
|
||||
<g id="edge1" class="edge">
|
||||
<title>browser->nginx</title>
|
||||
<path fill="none" stroke="black" d="M555,-817.21C555,-817.21 555,-756.06 555,-756.06"/>
|
||||
<polygon fill="black" stroke="black" points="558.5,-756.06 555,-746.06 551.5,-756.06 558.5,-756.06"/>
|
||||
<path fill="none" stroke="black" d="M393,-763.04C393,-763.04 393,-717.33 393,-717.33"/>
|
||||
<polygon fill="black" stroke="black" points="396.5,-717.33 393,-707.33 389.5,-717.33 396.5,-717.33"/>
|
||||
</g>
|
||||
<!-- django -->
|
||||
<g id="node3" class="node">
|
||||
<title>django</title>
|
||||
<path fill="none" stroke="black" d="M585.5,-616.44C585.5,-616.44 524.5,-616.44 524.5,-616.44 518.5,-616.44 512.5,-610.44 512.5,-604.44 512.5,-604.44 512.5,-568.69 512.5,-568.69 512.5,-562.69 518.5,-556.69 524.5,-556.69 524.5,-556.69 585.5,-556.69 585.5,-556.69 591.5,-556.69 597.5,-562.69 597.5,-568.69 597.5,-568.69 597.5,-604.44 597.5,-604.44 597.5,-610.44 591.5,-616.44 585.5,-616.44"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="555" y="-599.14" font-family="Helvetica,sans-Serif" font-size="14.00">Django</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="555" y="-581.89" font-family="Helvetica,sans-Serif" font-size="14.00">/admin</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="555" y="-564.64" font-family="Helvetica,sans-Serif" font-size="14.00">port 8701</text>
|
||||
<path fill="none" stroke="black" d="M359.5,-604.94C359.5,-604.94 298.5,-604.94 298.5,-604.94 292.5,-604.94 286.5,-598.94 286.5,-592.94 286.5,-592.94 286.5,-557.19 286.5,-557.19 286.5,-551.19 292.5,-545.19 298.5,-545.19 298.5,-545.19 359.5,-545.19 359.5,-545.19 365.5,-545.19 371.5,-551.19 371.5,-557.19 371.5,-557.19 371.5,-592.94 371.5,-592.94 371.5,-598.94 365.5,-604.94 359.5,-604.94"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="329" y="-587.64" font-family="Helvetica,sans-Serif" font-size="14.00">Django</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="329" y="-570.39" font-family="Helvetica,sans-Serif" font-size="14.00">/admin</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="329" y="-553.14" font-family="Helvetica,sans-Serif" font-size="14.00">port 8701</text>
|
||||
</g>
|
||||
<!-- nginx->django -->
|
||||
<g id="edge2" class="edge">
|
||||
<title>nginx->django</title>
|
||||
<path fill="none" stroke="black" d="M555,-701.33C555,-701.33 555,-628.2 555,-628.2"/>
|
||||
<polygon fill="black" stroke="black" points="558.5,-628.2 555,-618.2 551.5,-628.2 558.5,-628.2"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="571.88" y="-663.19" font-family="Helvetica,sans-Serif" font-size="10.00">/admin</text>
|
||||
<path fill="none" stroke="black" d="M365.5,-662.63C365.5,-662.63 365.5,-616.77 365.5,-616.77"/>
|
||||
<polygon fill="black" stroke="black" points="369,-616.77 365.5,-606.77 362,-616.77 369,-616.77"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="348.62" y="-642.95" font-family="Helvetica,sans-Serif" font-size="10.00">/admin</text>
|
||||
</g>
|
||||
<!-- fastapi -->
|
||||
<g id="node4" class="node">
|
||||
<title>fastapi</title>
|
||||
<path fill="none" stroke="black" d="M554.5,-485.94C554.5,-485.94 493.5,-485.94 493.5,-485.94 487.5,-485.94 481.5,-479.94 481.5,-473.94 481.5,-473.94 481.5,-438.19 481.5,-438.19 481.5,-432.19 487.5,-426.19 493.5,-426.19 493.5,-426.19 554.5,-426.19 554.5,-426.19 560.5,-426.19 566.5,-432.19 566.5,-438.19 566.5,-438.19 566.5,-473.94 566.5,-473.94 566.5,-479.94 560.5,-485.94 554.5,-485.94"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="524" y="-468.64" font-family="Helvetica,sans-Serif" font-size="14.00">FastAPI</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="524" y="-451.39" font-family="Helvetica,sans-Serif" font-size="14.00">/api</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="524" y="-434.14" font-family="Helvetica,sans-Serif" font-size="14.00">port 8702</text>
|
||||
<path fill="none" stroke="black" d="M395.5,-487.19C395.5,-487.19 298.5,-487.19 298.5,-487.19 292.5,-487.19 286.5,-481.19 286.5,-475.19 286.5,-475.19 286.5,-439.44 286.5,-439.44 286.5,-433.44 292.5,-427.44 298.5,-427.44 298.5,-427.44 395.5,-427.44 395.5,-427.44 401.5,-427.44 407.5,-433.44 407.5,-439.44 407.5,-439.44 407.5,-475.19 407.5,-475.19 407.5,-481.19 401.5,-487.19 395.5,-487.19"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="347" y="-469.89" font-family="Helvetica,sans-Serif" font-size="14.00">FastAPI</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="347" y="-452.64" font-family="Helvetica,sans-Serif" font-size="14.00">/api + /graphql</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="347" y="-435.39" font-family="Helvetica,sans-Serif" font-size="14.00">port 8702</text>
|
||||
</g>
|
||||
<!-- nginx->fastapi -->
|
||||
<g id="edge3" class="edge">
|
||||
<title>nginx->fastapi</title>
|
||||
<path fill="none" stroke="black" d="M521.02,-716C511.47,-716 503.63,-716 503.63,-716 503.63,-716 503.63,-497.9 503.63,-497.9"/>
|
||||
<polygon fill="black" stroke="black" points="507.13,-497.9 503.63,-487.9 500.13,-497.9 507.13,-497.9"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="723" y="-583.44" font-family="Helvetica,sans-Serif" font-size="10.00">/api</text>
|
||||
<path fill="none" stroke="black" d="M383.5,-662.84C383.5,-662.84 383.5,-498.82 383.5,-498.82"/>
|
||||
<polygon fill="black" stroke="black" points="387,-498.82 383.5,-488.82 380,-498.82 387,-498.82"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="399.44" y="-571.33" font-family="Helvetica,sans-Serif" font-size="10.00">/api, /graphql</text>
|
||||
</g>
|
||||
<!-- timeline -->
|
||||
<g id="node5" class="node">
|
||||
<title>timeline</title>
|
||||
<path fill="none" stroke="black" d="M442,-616.44C442,-616.44 372,-616.44 372,-616.44 366,-616.44 360,-610.44 360,-604.44 360,-604.44 360,-568.69 360,-568.69 360,-562.69 366,-556.69 372,-556.69 372,-556.69 442,-556.69 442,-556.69 448,-556.69 454,-562.69 454,-568.69 454,-568.69 454,-604.44 454,-604.44 454,-610.44 448,-616.44 442,-616.44"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="407" y="-599.14" font-family="Helvetica,sans-Serif" font-size="14.00">Timeline UI</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="407" y="-581.89" font-family="Helvetica,sans-Serif" font-size="14.00">/ui</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="407" y="-564.64" font-family="Helvetica,sans-Serif" font-size="14.00">port 5173</text>
|
||||
<path fill="none" stroke="black" d="M512,-604.94C512,-604.94 442,-604.94 442,-604.94 436,-604.94 430,-598.94 430,-592.94 430,-592.94 430,-557.19 430,-557.19 430,-551.19 436,-545.19 442,-545.19 442,-545.19 512,-545.19 512,-545.19 518,-545.19 524,-551.19 524,-557.19 524,-557.19 524,-592.94 524,-592.94 524,-598.94 518,-604.94 512,-604.94"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="477" y="-587.64" font-family="Helvetica,sans-Serif" font-size="14.00">Timeline UI</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="477" y="-570.39" font-family="Helvetica,sans-Serif" font-size="14.00">/ui</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="477" y="-553.14" font-family="Helvetica,sans-Serif" font-size="14.00">port 5173</text>
|
||||
</g>
|
||||
<!-- nginx->timeline -->
|
||||
<g id="edge4" class="edge">
|
||||
<title>nginx->timeline</title>
|
||||
<path fill="none" stroke="black" d="M521.05,-730C477.35,-730 407,-730 407,-730 407,-730 407,-628.15 407,-628.15"/>
|
||||
<polygon fill="black" stroke="black" points="410.5,-628.15 407,-618.15 403.5,-628.15 410.5,-628.15"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="450" y="-663.19" font-family="Helvetica,sans-Serif" font-size="10.00">/ui</text>
|
||||
<path fill="none" stroke="black" d="M422.62,-662.67C422.62,-633.49 422.62,-585 422.62,-585 422.62,-585 423.34,-585 423.34,-585"/>
|
||||
<polygon fill="black" stroke="black" points="418.22,-588.5 428.22,-585 418.22,-581.5 418.22,-588.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="416.62" y="-613.98" font-family="Helvetica,sans-Serif" font-size="10.00">/ui</text>
|
||||
</g>
|
||||
<!-- django->fastapi -->
|
||||
<!-- minio -->
|
||||
<g id="node12" class="node">
|
||||
<title>minio</title>
|
||||
<polygon fill="none" stroke="black" points="415.5,-160.47 412.5,-164.47 391.5,-164.47 388.5,-160.47 312.5,-160.47 312.5,-117.97 415.5,-117.97 415.5,-160.47"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="364" y="-143.17" font-family="Helvetica,sans-Serif" font-size="14.00">MinIO (local)</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="364" y="-125.92" font-family="Helvetica,sans-Serif" font-size="14.00">port 9000</text>
|
||||
</g>
|
||||
<!-- nginx->minio -->
|
||||
<g id="edge5" class="edge">
|
||||
<title>django->fastapi</title>
|
||||
<path fill="none" stroke="black" d="M539.5,-556.3C539.5,-556.3 539.5,-497.68 539.5,-497.68"/>
|
||||
<polygon fill="black" stroke="black" points="543,-497.68 539.5,-487.68 536,-497.68 543,-497.68"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="561.88" y="-518.19" font-family="Helvetica,sans-Serif" font-size="10.00">job operations</text>
|
||||
</g>
|
||||
<!-- postgres -->
|
||||
<g id="node9" class="node">
|
||||
<title>postgres</title>
|
||||
<path fill="none" stroke="black" d="M691.75,-69.12C691.75,-72.06 670.35,-74.44 644,-74.44 617.65,-74.44 596.25,-72.06 596.25,-69.12 596.25,-69.12 596.25,-21.31 596.25,-21.31 596.25,-18.38 617.65,-16 644,-16 670.35,-16 691.75,-18.38 691.75,-21.31 691.75,-21.31 691.75,-69.12 691.75,-69.12"/>
|
||||
<path fill="none" stroke="black" d="M691.75,-69.12C691.75,-66.19 670.35,-63.81 644,-63.81 617.65,-63.81 596.25,-66.19 596.25,-69.12"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="644" y="-49.17" font-family="Helvetica,sans-Serif" font-size="14.00">PostgreSQL</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="644" y="-31.92" font-family="Helvetica,sans-Serif" font-size="14.00">port 5433</text>
|
||||
</g>
|
||||
<!-- django->postgres -->
|
||||
<g id="edge6" class="edge">
|
||||
<title>django->postgres</title>
|
||||
<path fill="none" stroke="black" d="M597.82,-587C607.63,-587 615.25,-587 615.25,-587 615.25,-587 615.25,-85.86 615.25,-85.86"/>
|
||||
<polygon fill="black" stroke="black" points="618.75,-85.86 615.25,-75.86 611.75,-85.86 618.75,-85.86"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="808.25" y="-303.81" font-family="Helvetica,sans-Serif" font-size="10.00">CRUD only</text>
|
||||
<title>nginx->minio</title>
|
||||
<path fill="none" stroke="black" d="M414.88,-662.68C414.88,-596.12 414.88,-398 414.88,-398 414.88,-398 344.17,-398 344.17,-398 344.17,-398 344.17,-172.35 344.17,-172.35"/>
|
||||
<polygon fill="black" stroke="black" points="347.67,-172.35 344.17,-162.35 340.67,-172.35 347.67,-172.35"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="378.03" y="-401.25" font-family="Helvetica,sans-Serif" font-size="10.00">/media/*</text>
|
||||
</g>
|
||||
<!-- grpc_server -->
|
||||
<g id="node6" class="node">
|
||||
<title>grpc_server</title>
|
||||
<path fill="none" stroke="black" d="M353.5,-328.19C353.5,-328.19 274.5,-328.19 274.5,-328.19 268.5,-328.19 262.5,-322.19 262.5,-316.19 262.5,-316.19 262.5,-297.69 262.5,-297.69 262.5,-291.69 268.5,-285.69 274.5,-285.69 274.5,-285.69 353.5,-285.69 353.5,-285.69 359.5,-285.69 365.5,-291.69 365.5,-297.69 365.5,-297.69 365.5,-316.19 365.5,-316.19 365.5,-322.19 359.5,-328.19 353.5,-328.19"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="314" y="-310.89" font-family="Helvetica,sans-Serif" font-size="14.00">gRPC Server</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="314" y="-293.64" font-family="Helvetica,sans-Serif" font-size="14.00">port 50051</text>
|
||||
<path fill="none" stroke="black" d="M246.5,-369.44C246.5,-369.44 167.5,-369.44 167.5,-369.44 161.5,-369.44 155.5,-363.44 155.5,-357.44 155.5,-357.44 155.5,-338.94 155.5,-338.94 155.5,-332.94 161.5,-326.94 167.5,-326.94 167.5,-326.94 246.5,-326.94 246.5,-326.94 252.5,-326.94 258.5,-332.94 258.5,-338.94 258.5,-338.94 258.5,-357.44 258.5,-357.44 258.5,-363.44 252.5,-369.44 246.5,-369.44"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="207" y="-352.14" font-family="Helvetica,sans-Serif" font-size="14.00">gRPC Server</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="207" y="-334.89" font-family="Helvetica,sans-Serif" font-size="14.00">port 50051</text>
|
||||
</g>
|
||||
<!-- fastapi->grpc_server -->
|
||||
<g id="edge10" class="edge">
|
||||
<g id="edge8" class="edge">
|
||||
<title>fastapi->grpc_server</title>
|
||||
<path fill="none" stroke="black" d="M509.75,-425.9C509.75,-382.34 509.75,-307 509.75,-307 509.75,-307 377.46,-307 377.46,-307"/>
|
||||
<polygon fill="black" stroke="black" points="377.46,-303.5 367.46,-307 377.46,-310.5 377.46,-303.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="398.25" y="-387.69" font-family="Helvetica,sans-Serif" font-size="10.00">gRPC</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="398.25" y="-374.94" font-family="Helvetica,sans-Serif" font-size="10.00">progress streaming</text>
|
||||
<path fill="none" stroke="black" d="M298.5,-427.06C298.5,-392.59 298.5,-341 298.5,-341 298.5,-341 270.41,-341 270.41,-341"/>
|
||||
<polygon fill="black" stroke="black" points="270.41,-337.5 260.41,-341 270.41,-344.5 270.41,-337.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="319.5" y="-385.98" font-family="Helvetica,sans-Serif" font-size="10.00">gRPC</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="319.5" y="-373.23" font-family="Helvetica,sans-Serif" font-size="10.00">progress</text>
|
||||
</g>
|
||||
<!-- step_functions -->
|
||||
<g id="node8" class="node">
|
||||
<title>step_functions</title>
|
||||
<path fill="none" stroke="black" d="M541.38,-369.44C541.38,-369.44 446.62,-369.44 446.62,-369.44 440.62,-369.44 434.62,-363.44 434.62,-357.44 434.62,-357.44 434.62,-338.94 434.62,-338.94 434.62,-332.94 440.62,-326.94 446.62,-326.94 446.62,-326.94 541.38,-326.94 541.38,-326.94 547.38,-326.94 553.38,-332.94 553.38,-338.94 553.38,-338.94 553.38,-357.44 553.38,-357.44 553.38,-363.44 547.38,-369.44 541.38,-369.44"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="494" y="-352.14" font-family="Helvetica,sans-Serif" font-size="14.00">Step Functions</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="494" y="-334.89" font-family="Helvetica,sans-Serif" font-size="14.00">state machine</text>
|
||||
</g>
|
||||
<!-- fastapi->step_functions -->
|
||||
<g id="edge13" class="edge">
|
||||
<title>fastapi->step_functions</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M375.83,-427.17C375.83,-396.99 375.83,-355 375.83,-355 375.83,-355 422.71,-355 422.71,-355"/>
|
||||
<polygon fill="black" stroke="black" points="422.71,-358.5 432.71,-355 422.71,-351.5 422.71,-358.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="338.33" y="-358.15" font-family="Helvetica,sans-Serif" font-size="10.00">boto3</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="338.33" y="-345.4" font-family="Helvetica,sans-Serif" font-size="10.00">start_execution</text>
|
||||
</g>
|
||||
<!-- postgres -->
|
||||
<g id="node10" class="node">
|
||||
<title>postgres</title>
|
||||
<path fill="none" stroke="black" d="M111.75,-163.12C111.75,-166.06 90.35,-168.44 64,-168.44 37.65,-168.44 16.25,-166.06 16.25,-163.12 16.25,-163.12 16.25,-115.31 16.25,-115.31 16.25,-112.38 37.65,-110 64,-110 90.35,-110 111.75,-112.38 111.75,-115.31 111.75,-115.31 111.75,-163.12 111.75,-163.12"/>
|
||||
<path fill="none" stroke="black" d="M111.75,-163.12C111.75,-160.19 90.35,-157.81 64,-157.81 37.65,-157.81 16.25,-160.19 16.25,-163.12"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="64" y="-143.17" font-family="Helvetica,sans-Serif" font-size="14.00">PostgreSQL</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="64" y="-125.92" font-family="Helvetica,sans-Serif" font-size="14.00">port 5436</text>
|
||||
</g>
|
||||
<!-- fastapi->postgres -->
|
||||
<g id="edge8" class="edge">
|
||||
<g id="edge7" class="edge">
|
||||
<title>fastapi->postgres</title>
|
||||
<path fill="none" stroke="black" d="M552.25,-425.84C552.25,-330.91 552.25,-45 552.25,-45 552.25,-45 584.46,-45 584.46,-45"/>
|
||||
<polygon fill="black" stroke="black" points="584.46,-48.5 594.46,-45 584.46,-41.5 584.46,-48.5"/>
|
||||
</g>
|
||||
<!-- redis -->
|
||||
<g id="node10" class="node">
|
||||
<title>redis</title>
|
||||
<path fill="none" stroke="black" d="M415.5,-69.12C415.5,-72.06 396.45,-74.44 373,-74.44 349.55,-74.44 330.5,-72.06 330.5,-69.12 330.5,-69.12 330.5,-21.31 330.5,-21.31 330.5,-18.38 349.55,-16 373,-16 396.45,-16 415.5,-18.38 415.5,-21.31 415.5,-21.31 415.5,-69.12 415.5,-69.12"/>
|
||||
<path fill="none" stroke="black" d="M415.5,-69.12C415.5,-66.19 396.45,-63.81 373,-63.81 349.55,-63.81 330.5,-66.19 330.5,-69.12"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="373" y="-49.17" font-family="Helvetica,sans-Serif" font-size="14.00">Redis</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="373" y="-31.92" font-family="Helvetica,sans-Serif" font-size="14.00">port 6380</text>
|
||||
</g>
|
||||
<!-- fastapi->redis -->
|
||||
<g id="edge9" class="edge">
|
||||
<title>fastapi->redis</title>
|
||||
<path fill="none" stroke="black" d="M481.02,-456C442,-456 390.5,-456 390.5,-456 390.5,-456 390.5,-86.27 390.5,-86.27"/>
|
||||
<polygon fill="black" stroke="black" points="394,-86.27 390.5,-76.27 387,-86.27 394,-86.27"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="542" y="-240.81" font-family="Helvetica,sans-Serif" font-size="10.00">job status</text>
|
||||
<path fill="none" stroke="black" d="M286.29,-457C203.13,-457 64,-457 64,-457 64,-457 64,-180.34 64,-180.34"/>
|
||||
<polygon fill="black" stroke="black" points="67.5,-180.34 64,-170.34 60.5,-180.34 67.5,-180.34"/>
|
||||
</g>
|
||||
<!-- timeline->fastapi -->
|
||||
<g id="edge7" class="edge">
|
||||
<g id="edge6" class="edge">
|
||||
<title>timeline->fastapi</title>
|
||||
<path fill="none" stroke="black" d="M454.47,-587C475.15,-587 494.75,-587 494.75,-587 494.75,-587 494.75,-497.94 494.75,-497.94"/>
|
||||
<polygon fill="black" stroke="black" points="498.25,-497.94 494.75,-487.94 491.25,-497.94 498.25,-497.94"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="440.75" y="-518.19" font-family="Helvetica,sans-Serif" font-size="10.00">REST API</text>
|
||||
<path fill="none" stroke="black" d="M429.59,-565C411.66,-565 395.5,-565 395.5,-565 395.5,-565 395.5,-499.11 395.5,-499.11"/>
|
||||
<polygon fill="black" stroke="black" points="399,-499.11 395.5,-489.11 392,-499.11 399,-499.11"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="406.38" y="-539.6" font-family="Helvetica,sans-Serif" font-size="10.00">REST API</text>
|
||||
</g>
|
||||
<!-- celery -->
|
||||
<g id="node7" class="node">
|
||||
<title>celery</title>
|
||||
<path fill="none" stroke="black" d="M271.75,-202.19C271.75,-202.19 182.25,-202.19 182.25,-202.19 176.25,-202.19 170.25,-196.19 170.25,-190.19 170.25,-190.19 170.25,-171.69 170.25,-171.69 170.25,-165.69 176.25,-159.69 182.25,-159.69 182.25,-159.69 271.75,-159.69 271.75,-159.69 277.75,-159.69 283.75,-165.69 283.75,-171.69 283.75,-171.69 283.75,-190.19 283.75,-190.19 283.75,-196.19 277.75,-202.19 271.75,-202.19"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="227" y="-184.89" font-family="Helvetica,sans-Serif" font-size="14.00">Celery Worker</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="227" y="-167.64" font-family="Helvetica,sans-Serif" font-size="14.00">(local)</text>
|
||||
<path fill="none" stroke="black" d="M255.75,-268.94C255.75,-268.94 166.25,-268.94 166.25,-268.94 160.25,-268.94 154.25,-262.94 154.25,-256.94 154.25,-256.94 154.25,-238.44 154.25,-238.44 154.25,-232.44 160.25,-226.44 166.25,-226.44 166.25,-226.44 255.75,-226.44 255.75,-226.44 261.75,-226.44 267.75,-232.44 267.75,-238.44 267.75,-238.44 267.75,-256.94 267.75,-256.94 267.75,-262.94 261.75,-268.94 255.75,-268.94"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="211" y="-251.64" font-family="Helvetica,sans-Serif" font-size="14.00">Celery Worker</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="211" y="-234.39" font-family="Helvetica,sans-Serif" font-size="14.00">(local mode)</text>
|
||||
</g>
|
||||
<!-- grpc_server->celery -->
|
||||
<g id="edge11" class="edge">
|
||||
<g id="edge9" class="edge">
|
||||
<title>grpc_server->celery</title>
|
||||
<path fill="none" stroke="black" d="M269.58,-285.28C269.58,-285.28 269.58,-213.83 269.58,-213.83"/>
|
||||
<polygon fill="black" stroke="black" points="273.08,-213.83 269.58,-203.83 266.08,-213.83 273.08,-213.83"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="223.62" y="-240.81" font-family="Helvetica,sans-Serif" font-size="10.00">task dispatch</text>
|
||||
</g>
|
||||
<!-- celery->grpc_server -->
|
||||
<g id="edge14" class="edge">
|
||||
<title>celery->grpc_server</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M276.67,-202.6C276.67,-202.6 276.67,-274.05 276.67,-274.05"/>
|
||||
<polygon fill="black" stroke="black" points="273.17,-274.05 276.67,-284.05 280.17,-274.05 273.17,-274.05"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="341.88" y="-247.19" font-family="Helvetica,sans-Serif" font-size="10.00">progress</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="341.88" y="-234.44" font-family="Helvetica,sans-Serif" font-size="10.00">callbacks</text>
|
||||
<path fill="none" stroke="black" d="M207,-326.87C207,-326.87 207,-280.83 207,-280.83"/>
|
||||
<polygon fill="black" stroke="black" points="210.5,-280.83 207,-270.83 203.5,-280.83 210.5,-280.83"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="174.38" y="-307.1" font-family="Helvetica,sans-Serif" font-size="10.00">task dispatch</text>
|
||||
</g>
|
||||
<!-- celery->postgres -->
|
||||
<g id="edge13" class="edge">
|
||||
<g id="edge11" class="edge">
|
||||
<title>celery->postgres</title>
|
||||
<path fill="none" stroke="black" d="M284.21,-188C390.19,-188 606.37,-188 606.37,-188 606.37,-188 606.37,-84.94 606.37,-84.94"/>
|
||||
<polygon fill="black" stroke="black" points="609.87,-84.94 606.37,-74.94 602.87,-84.94 609.87,-84.94"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="392.5" y="-121.19" font-family="Helvetica,sans-Serif" font-size="10.00">job updates</text>
|
||||
<path fill="none" stroke="black" d="M161.88,-225.95C161.88,-194.24 161.88,-139 161.88,-139 161.88,-139 123.59,-139 123.59,-139"/>
|
||||
<polygon fill="black" stroke="black" points="123.59,-135.5 113.59,-139 123.59,-142.5 123.59,-135.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="133.38" y="-166.59" font-family="Helvetica,sans-Serif" font-size="10.00">job updates</text>
|
||||
</g>
|
||||
<!-- redis -->
|
||||
<g id="node11" class="node">
|
||||
<title>redis</title>
|
||||
<path fill="none" stroke="black" d="M254.5,-163.12C254.5,-166.06 235.45,-168.44 212,-168.44 188.55,-168.44 169.5,-166.06 169.5,-163.12 169.5,-163.12 169.5,-115.31 169.5,-115.31 169.5,-112.38 188.55,-110 212,-110 235.45,-110 254.5,-112.38 254.5,-115.31 254.5,-115.31 254.5,-163.12 254.5,-163.12"/>
|
||||
<path fill="none" stroke="black" d="M254.5,-163.12C254.5,-160.19 235.45,-157.81 212,-157.81 188.55,-157.81 169.5,-160.19 169.5,-163.12"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="212" y="-143.17" font-family="Helvetica,sans-Serif" font-size="14.00">Redis</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="212" y="-125.92" font-family="Helvetica,sans-Serif" font-size="14.00">port 6381</text>
|
||||
</g>
|
||||
<!-- celery->redis -->
|
||||
<g id="edge12" class="edge">
|
||||
<g id="edge10" class="edge">
|
||||
<title>celery->redis</title>
|
||||
<path fill="none" stroke="black" d="M283.96,-174C315.34,-174 348,-174 348,-174 348,-174 348,-85.95 348,-85.95"/>
|
||||
<polygon fill="black" stroke="black" points="351.5,-85.95 348,-75.95 344.5,-85.95 351.5,-85.95"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="286" y="-121.19" font-family="Helvetica,sans-Serif" font-size="10.00">queue</text>
|
||||
<path fill="none" stroke="black" d="M212,-226C212,-226 212,-180.19 212,-180.19"/>
|
||||
<polygon fill="black" stroke="black" points="215.5,-180.19 212,-170.19 208.5,-180.19 215.5,-180.19"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="197" y="-206.34" font-family="Helvetica,sans-Serif" font-size="10.00">queue</text>
|
||||
</g>
|
||||
<!-- local_fs -->
|
||||
<g id="node12" class="node">
|
||||
<title>local_fs</title>
|
||||
<polygon fill="none" stroke="black" points="210.12,-66.47 207.12,-70.47 186.12,-70.47 183.12,-66.47 137.88,-66.47 137.88,-23.97 210.12,-23.97 210.12,-66.47"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="174" y="-49.17" font-family="Helvetica,sans-Serif" font-size="14.00">Local FS</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="174" y="-31.92" font-family="Helvetica,sans-Serif" font-size="14.00">/media</text>
|
||||
</g>
|
||||
<!-- celery->local_fs -->
|
||||
<g id="edge15" class="edge">
|
||||
<title>celery->local_fs</title>
|
||||
<path fill="none" stroke="black" d="M190.19,-159.43C190.19,-159.43 190.19,-78.14 190.19,-78.14"/>
|
||||
<polygon fill="black" stroke="black" points="193.69,-78.14 190.19,-68.14 186.69,-78.14 193.69,-78.14"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="182.75" y="-121.19" font-family="Helvetica,sans-Serif" font-size="10.00">read/write</text>
|
||||
<!-- celery->minio -->
|
||||
<g id="edge12" class="edge">
|
||||
<title>celery->minio</title>
|
||||
<path fill="none" stroke="black" d="M261.12,-225.95C261.12,-194.24 261.12,-139 261.12,-139 261.12,-139 300.75,-139 300.75,-139"/>
|
||||
<polygon fill="black" stroke="black" points="300.75,-142.5 310.75,-139 300.75,-135.5 300.75,-142.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="302.75" y="-178.67" font-family="Helvetica,sans-Serif" font-size="10.00">S3 API</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="302.75" y="-165.92" font-family="Helvetica,sans-Serif" font-size="10.00">download/upload</text>
|
||||
</g>
|
||||
<!-- lambda -->
|
||||
<g id="node8" class="node">
|
||||
<g id="node9" class="node">
|
||||
<title>lambda</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M192.75,-328.19C192.75,-328.19 145.25,-328.19 145.25,-328.19 139.25,-328.19 133.25,-322.19 133.25,-316.19 133.25,-316.19 133.25,-297.69 133.25,-297.69 133.25,-291.69 139.25,-285.69 145.25,-285.69 145.25,-285.69 192.75,-285.69 192.75,-285.69 198.75,-285.69 204.75,-291.69 204.75,-297.69 204.75,-297.69 204.75,-316.19 204.75,-316.19 204.75,-322.19 198.75,-328.19 192.75,-328.19"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="169" y="-310.89" font-family="Helvetica,sans-Serif" font-size="14.00">Lambda</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="169" y="-293.64" font-family="Helvetica,sans-Serif" font-size="14.00">(cloud)</text>
|
||||
<path fill="none" stroke="black" d="M541,-268.94C541,-268.94 423,-268.94 423,-268.94 417,-268.94 411,-262.94 411,-256.94 411,-256.94 411,-238.44 411,-238.44 411,-232.44 417,-226.44 423,-226.44 423,-226.44 541,-226.44 541,-226.44 547,-226.44 553,-232.44 553,-238.44 553,-238.44 553,-256.94 553,-256.94 553,-262.94 547,-268.94 541,-268.94"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="482" y="-251.64" font-family="Helvetica,sans-Serif" font-size="14.00">Lambda</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="482" y="-234.39" font-family="Helvetica,sans-Serif" font-size="14.00">FFmpeg container</text>
|
||||
</g>
|
||||
<!-- sqs -->
|
||||
<g id="node11" class="node">
|
||||
<title>sqs</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M538,-69.12C538,-72.06 523.66,-74.44 506,-74.44 488.34,-74.44 474,-72.06 474,-69.12 474,-69.12 474,-21.31 474,-21.31 474,-18.38 488.34,-16 506,-16 523.66,-16 538,-18.38 538,-21.31 538,-21.31 538,-69.12 538,-69.12"/>
|
||||
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M538,-69.12C538,-66.19 523.66,-63.81 506,-63.81 488.34,-63.81 474,-66.19 474,-69.12"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="506" y="-49.17" font-family="Helvetica,sans-Serif" font-size="14.00">SQS</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="506" y="-31.92" font-family="Helvetica,sans-Serif" font-size="14.00">(cloud)</text>
|
||||
<!-- step_functions->lambda -->
|
||||
<g id="edge14" class="edge">
|
||||
<title>step_functions->lambda</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M493.81,-326.87C493.81,-326.87 493.81,-280.83 493.81,-280.83"/>
|
||||
<polygon fill="black" stroke="black" points="497.31,-280.83 493.81,-270.83 490.31,-280.83 497.31,-280.83"/>
|
||||
</g>
|
||||
<!-- lambda->sqs -->
|
||||
<!-- lambda->fastapi -->
|
||||
<g id="edge16" class="edge">
|
||||
<title>lambda->sqs</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M187.5,-285.28C187.5,-267.07 187.5,-244 187.5,-244 187.5,-244 477.75,-244 477.75,-244 477.75,-244 477.75,-84.37 477.75,-84.37"/>
|
||||
<polygon fill="black" stroke="black" points="481.25,-84.37 477.75,-74.37 474.25,-84.37 481.25,-84.37"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="415" y="-177.81" font-family="Helvetica,sans-Serif" font-size="10.00">queue</text>
|
||||
<title>lambda->fastapi</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M418.75,-269.3C418.75,-322.78 418.75,-457 418.75,-457 418.75,-457 417.66,-457 417.66,-457"/>
|
||||
<polygon fill="black" stroke="black" points="419.37,-453.5 409.37,-457 419.37,-460.5 419.37,-453.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="359.12" y="-379.69" font-family="Helvetica,sans-Serif" font-size="10.00">callback</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="359.12" y="-366.94" font-family="Helvetica,sans-Serif" font-size="10.00">POST /jobs/{id}/callback</text>
|
||||
</g>
|
||||
<!-- s3 -->
|
||||
<g id="node13" class="node">
|
||||
<title>s3</title>
|
||||
<polygon fill="none" stroke="black" stroke-dasharray="5,2" points="80,-66.47 77,-70.47 56,-70.47 53,-66.47 16,-66.47 16,-23.97 80,-23.97 80,-66.47"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="48" y="-49.17" font-family="Helvetica,sans-Serif" font-size="14.00">S3</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="48" y="-31.92" font-family="Helvetica,sans-Serif" font-size="14.00">(cloud)</text>
|
||||
<polygon fill="none" stroke="black" stroke-dasharray="5,2" points="596.25,-157.22 593.25,-161.22 572.25,-161.22 569.25,-157.22 473.75,-157.22 473.75,-121.22 596.25,-121.22 596.25,-157.22"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="535" y="-134.54" font-family="Helvetica,sans-Serif" font-size="14.00">AWS S3 (cloud)</text>
|
||||
</g>
|
||||
<!-- lambda->s3 -->
|
||||
<g id="edge17" class="edge">
|
||||
<g id="edge15" class="edge">
|
||||
<title>lambda->s3</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M133.02,-307C97.36,-307 48,-307 48,-307 48,-307 48,-78.15 48,-78.15"/>
|
||||
<polygon fill="black" stroke="black" points="51.5,-78.15 48,-68.15 44.5,-78.15 51.5,-78.15"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="80.75" y="-177.81" font-family="Helvetica,sans-Serif" font-size="10.00">read/write</text>
|
||||
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M513.38,-226C513.38,-226 513.38,-169.14 513.38,-169.14"/>
|
||||
<polygon fill="black" stroke="black" points="516.88,-169.14 513.38,-159.14 509.88,-169.14 516.88,-169.14"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="471.75" y="-200.82" font-family="Helvetica,sans-Serif" font-size="10.00">download/upload</text>
|
||||
</g>
|
||||
<!-- bucket_in -->
|
||||
<g id="node14" class="node">
|
||||
<title>bucket_in</title>
|
||||
<polygon fill="none" stroke="black" points="413.5,-52 310.5,-52 310.5,-16 419.5,-16 419.5,-46 413.5,-52"/>
|
||||
<polyline fill="none" stroke="black" points="413.5,-52 413.5,-46"/>
|
||||
<polyline fill="none" stroke="black" points="419.5,-46 413.5,-46"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="365" y="-29.32" font-family="Helvetica,sans-Serif" font-size="14.00">mpr-media-in</text>
|
||||
</g>
|
||||
<!-- minio->bucket_in -->
|
||||
<g id="edge17" class="edge">
|
||||
<title>minio->bucket_in</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M364,-117.67C364,-98.43 364,-70.56 364,-52.36"/>
|
||||
</g>
|
||||
<!-- bucket_out -->
|
||||
<g id="node15" class="node">
|
||||
<title>bucket_out</title>
|
||||
<polygon fill="none" stroke="black" points="590.38,-52 477.62,-52 477.62,-16 596.38,-16 596.38,-46 590.38,-52"/>
|
||||
<polyline fill="none" stroke="black" points="590.38,-52 590.38,-46"/>
|
||||
<polyline fill="none" stroke="black" points="596.38,-46 590.38,-46"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="537" y="-29.32" font-family="Helvetica,sans-Serif" font-size="14.00">mpr-media-out</text>
|
||||
</g>
|
||||
<!-- minio->bucket_out -->
|
||||
<g id="edge18" class="edge">
|
||||
<title>minio->bucket_out</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M415.9,-145C428.08,-145 437.58,-145 437.58,-145 437.58,-145 437.58,-40 437.58,-40 437.58,-40 456.11,-40 477.16,-40"/>
|
||||
</g>
|
||||
<!-- s3->bucket_in -->
|
||||
<g id="edge19" class="edge">
|
||||
<title>s3->bucket_in</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M473.27,-133C463.03,-133 455.67,-133 455.67,-133 455.67,-133 455.67,-28 455.67,-28 455.67,-28 438.93,-28 419.83,-28"/>
|
||||
</g>
|
||||
<!-- s3->bucket_out -->
|
||||
<g id="edge20" class="edge">
|
||||
<title>s3->bucket_out</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M536.94,-120.89C536.94,-101.7 536.94,-71.72 536.94,-52.47"/>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 20 KiB After Width: | Height: | Size: 21 KiB |
@@ -10,13 +10,13 @@ digraph data_model {
|
||||
|
||||
graph [splines=ortho, nodesep=0.6, ranksep=1.2]
|
||||
|
||||
MediaAsset [label="{MediaAsset|id: UUID (PK)\lfilename: str\lfile_path: str\lfile_size: int?\lstatus: pending/ready/error\lerror_message: str?\l|duration: float?\lvideo_codec: str?\laudio_codec: str?\lwidth: int?\lheight: int?\lframerate: float?\lbitrate: int?\lproperties: JSON\l|comments: str\ltags: JSON[]\l|created_at: datetime\lupdated_at: datetime\l}"]
|
||||
MediaAsset [label="{MediaAsset|id: UUID (PK)\lfilename: str\lfile_path: str (S3 key)\lfile_size: int?\lstatus: pending/ready/error\lerror_message: str?\l|duration: float?\lvideo_codec: str?\laudio_codec: str?\lwidth: int?\lheight: int?\lframerate: float?\lbitrate: int?\lproperties: JSON\l|comments: str\ltags: JSON[]\l|created_at: datetime\lupdated_at: datetime\l}"]
|
||||
|
||||
TranscodePreset [label="{TranscodePreset|id: UUID (PK)\lname: str (unique)\ldescription: str\lis_builtin: bool\l|container: str\l|video_codec: str\lvideo_bitrate: str?\lvideo_crf: int?\lvideo_preset: str?\lresolution: str?\lframerate: float?\l|audio_codec: str\laudio_bitrate: str?\laudio_channels: int?\laudio_samplerate: int?\l|extra_args: JSON[]\l|created_at: datetime\lupdated_at: datetime\l}"]
|
||||
|
||||
TranscodeJob [label="{TranscodeJob|id: UUID (PK)\l|source_asset_id: UUID (FK)\l|preset_id: UUID? (FK)\lpreset_snapshot: JSON\l|trim_start: float?\ltrim_end: float?\l|output_filename: str\loutput_path: str?\loutput_asset_id: UUID? (FK)\l|status: pending/processing/...\lprogress: float (0-100)\lcurrent_frame: int?\lcurrent_time: float?\lspeed: str?\lerror_message: str?\l|celery_task_id: str?\lpriority: int\l|created_at: datetime\lstarted_at: datetime?\lcompleted_at: datetime?\l}"]
|
||||
TranscodeJob [label="{TranscodeJob|id: UUID (PK)\l|source_asset_id: UUID (FK)\l|preset_id: UUID? (FK)\lpreset_snapshot: JSON\l|trim_start: float?\ltrim_end: float?\l|output_filename: str\loutput_path: str? (S3 key)\loutput_asset_id: UUID? (FK)\l|status: pending/processing/...\lprogress: float (0-100)\lcurrent_frame: int?\lcurrent_time: float?\lspeed: str?\lerror_message: str?\l|celery_task_id: str?\lexecution_arn: str?\lpriority: int\l|created_at: datetime\lstarted_at: datetime?\lcompleted_at: datetime?\l}"]
|
||||
|
||||
MediaAsset -> TranscodeJob [label="1:N source_asset"]
|
||||
TranscodePreset -> TranscodeJob [label="1:N preset"]
|
||||
TranscodeJob -> MediaAsset [label="1:1 output_asset", style=dashed]
|
||||
MediaAsset -> TranscodeJob [xlabel="1:N source_asset"]
|
||||
TranscodePreset -> TranscodeJob [xlabel="1:N preset"]
|
||||
TranscodeJob -> MediaAsset [xlabel="1:1 output_asset", style=dashed]
|
||||
}
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
|
||||
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<!-- Generated by graphviz version 14.1.1 (0)
|
||||
<!-- Generated by graphviz version 14.1.2 (0)
|
||||
-->
|
||||
<!-- Title: data_model Pages: 1 -->
|
||||
<svg width="2218pt" height="286pt"
|
||||
viewBox="0.00 0.00 2218.00 286.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<svg width="2134pt" height="286pt"
|
||||
viewBox="0.00 0.00 2134.00 286.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 282)">
|
||||
<title>data_model</title>
|
||||
<polygon fill="white" stroke="none" points="-4,4 -4,-282 2213.5,-282 2213.5,4 -4,4"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="1104.75" y="-258.8" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">MPR - Data Model</text>
|
||||
<polygon fill="white" stroke="none" points="-4,4 -4,-282 2130.25,-282 2130.25,4 -4,4"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="1063.12" y="-258.8" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">MPR - Data Model</text>
|
||||
<!-- MediaAsset -->
|
||||
<g id="node1" class="node">
|
||||
<title>MediaAsset</title>
|
||||
@@ -18,7 +18,7 @@
|
||||
<polyline fill="none" stroke="black" points="197.75,-134 197.75,-250"/>
|
||||
<text xml:space="preserve" text-anchor="start" x="205.75" y="-222.05" font-family="Helvetica,sans-Serif" font-size="11.00">id: UUID (PK)</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="205.75" y="-208.55" font-family="Helvetica,sans-Serif" font-size="11.00">filename: str</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="205.75" y="-195.05" font-family="Helvetica,sans-Serif" font-size="11.00">file_path: str</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="205.75" y="-195.05" font-family="Helvetica,sans-Serif" font-size="11.00">file_path: str (S3 key)</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="205.75" y="-181.55" font-family="Helvetica,sans-Serif" font-size="11.00">file_size: int?</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="205.75" y="-168.05" font-family="Helvetica,sans-Serif" font-size="11.00">status: pending/ready/error</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="205.75" y="-154.55" font-family="Helvetica,sans-Serif" font-size="11.00">error_message: str?</text>
|
||||
@@ -41,43 +41,44 @@
|
||||
<!-- TranscodeJob -->
|
||||
<g id="node3" class="node">
|
||||
<title>TranscodeJob</title>
|
||||
<polygon fill="none" stroke="black" points="995.25,-86.5 995.25,-175.5 2209.5,-175.5 2209.5,-86.5 995.25,-86.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="1039.25" y="-127.3" font-family="Helvetica,sans-Serif" font-size="11.00">TranscodeJob</text>
|
||||
<polyline fill="none" stroke="black" points="1083.25,-86.5 1083.25,-175.5"/>
|
||||
<text xml:space="preserve" text-anchor="start" x="1091.25" y="-127.3" font-family="Helvetica,sans-Serif" font-size="11.00">id: UUID (PK)</text>
|
||||
<polyline fill="none" stroke="black" points="1171.25,-86.5 1171.25,-175.5"/>
|
||||
<text xml:space="preserve" text-anchor="start" x="1179.25" y="-127.3" font-family="Helvetica,sans-Serif" font-size="11.00">source_asset_id: UUID (FK)</text>
|
||||
<polyline fill="none" stroke="black" points="1335.75,-86.5 1335.75,-175.5"/>
|
||||
<text xml:space="preserve" text-anchor="start" x="1343.75" y="-134.05" font-family="Helvetica,sans-Serif" font-size="11.00">preset_id: UUID? (FK)</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1343.75" y="-120.55" font-family="Helvetica,sans-Serif" font-size="11.00">preset_snapshot: JSON</text>
|
||||
<polyline fill="none" stroke="black" points="1477,-86.5 1477,-175.5"/>
|
||||
<text xml:space="preserve" text-anchor="start" x="1485" y="-134.05" font-family="Helvetica,sans-Serif" font-size="11.00">trim_start: float?</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1485" y="-120.55" font-family="Helvetica,sans-Serif" font-size="11.00">trim_end: float?</text>
|
||||
<polyline fill="none" stroke="black" points="1585.25,-86.5 1585.25,-175.5"/>
|
||||
<text xml:space="preserve" text-anchor="start" x="1593.25" y="-140.8" font-family="Helvetica,sans-Serif" font-size="11.00">output_filename: str</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1593.25" y="-127.3" font-family="Helvetica,sans-Serif" font-size="11.00">output_path: str?</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1593.25" y="-113.8" font-family="Helvetica,sans-Serif" font-size="11.00">output_asset_id: UUID? (FK)</text>
|
||||
<polyline fill="none" stroke="black" points="1755,-86.5 1755,-175.5"/>
|
||||
<text xml:space="preserve" text-anchor="start" x="1763" y="-161.05" font-family="Helvetica,sans-Serif" font-size="11.00">status: pending/processing/...</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1763" y="-147.55" font-family="Helvetica,sans-Serif" font-size="11.00">progress: float (0-100)</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1763" y="-134.05" font-family="Helvetica,sans-Serif" font-size="11.00">current_frame: int?</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1763" y="-120.55" font-family="Helvetica,sans-Serif" font-size="11.00">current_time: float?</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1763" y="-107.05" font-family="Helvetica,sans-Serif" font-size="11.00">speed: str?</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1763" y="-93.55" font-family="Helvetica,sans-Serif" font-size="11.00">error_message: str?</text>
|
||||
<polyline fill="none" stroke="black" points="1934.5,-86.5 1934.5,-175.5"/>
|
||||
<text xml:space="preserve" text-anchor="start" x="1942.5" y="-134.05" font-family="Helvetica,sans-Serif" font-size="11.00">celery_task_id: str?</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1942.5" y="-120.55" font-family="Helvetica,sans-Serif" font-size="11.00">priority: int</text>
|
||||
<polyline fill="none" stroke="black" points="2056.25,-86.5 2056.25,-175.5"/>
|
||||
<text xml:space="preserve" text-anchor="start" x="2064.25" y="-140.8" font-family="Helvetica,sans-Serif" font-size="11.00">created_at: datetime</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="2064.25" y="-127.3" font-family="Helvetica,sans-Serif" font-size="11.00">started_at: datetime?</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="2064.25" y="-113.8" font-family="Helvetica,sans-Serif" font-size="11.00">completed_at: datetime?</text>
|
||||
<polygon fill="none" stroke="black" points="912,-147.5 912,-236.5 2126.25,-236.5 2126.25,-147.5 912,-147.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="956" y="-188.3" font-family="Helvetica,sans-Serif" font-size="11.00">TranscodeJob</text>
|
||||
<polyline fill="none" stroke="black" points="1000,-147.5 1000,-236.5"/>
|
||||
<text xml:space="preserve" text-anchor="start" x="1008" y="-188.3" font-family="Helvetica,sans-Serif" font-size="11.00">id: UUID (PK)</text>
|
||||
<polyline fill="none" stroke="black" points="1088,-147.5 1088,-236.5"/>
|
||||
<text xml:space="preserve" text-anchor="start" x="1096" y="-188.3" font-family="Helvetica,sans-Serif" font-size="11.00">source_asset_id: UUID (FK)</text>
|
||||
<polyline fill="none" stroke="black" points="1252.5,-147.5 1252.5,-236.5"/>
|
||||
<text xml:space="preserve" text-anchor="start" x="1260.5" y="-195.05" font-family="Helvetica,sans-Serif" font-size="11.00">preset_id: UUID? (FK)</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1260.5" y="-181.55" font-family="Helvetica,sans-Serif" font-size="11.00">preset_snapshot: JSON</text>
|
||||
<polyline fill="none" stroke="black" points="1393.75,-147.5 1393.75,-236.5"/>
|
||||
<text xml:space="preserve" text-anchor="start" x="1401.75" y="-195.05" font-family="Helvetica,sans-Serif" font-size="11.00">trim_start: float?</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1401.75" y="-181.55" font-family="Helvetica,sans-Serif" font-size="11.00">trim_end: float?</text>
|
||||
<polyline fill="none" stroke="black" points="1502,-147.5 1502,-236.5"/>
|
||||
<text xml:space="preserve" text-anchor="start" x="1510" y="-201.8" font-family="Helvetica,sans-Serif" font-size="11.00">output_filename: str</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1510" y="-188.3" font-family="Helvetica,sans-Serif" font-size="11.00">output_path: str? (S3 key)</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1510" y="-174.8" font-family="Helvetica,sans-Serif" font-size="11.00">output_asset_id: UUID? (FK)</text>
|
||||
<polyline fill="none" stroke="black" points="1671.75,-147.5 1671.75,-236.5"/>
|
||||
<text xml:space="preserve" text-anchor="start" x="1679.75" y="-222.05" font-family="Helvetica,sans-Serif" font-size="11.00">status: pending/processing/...</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1679.75" y="-208.55" font-family="Helvetica,sans-Serif" font-size="11.00">progress: float (0-100)</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1679.75" y="-195.05" font-family="Helvetica,sans-Serif" font-size="11.00">current_frame: int?</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1679.75" y="-181.55" font-family="Helvetica,sans-Serif" font-size="11.00">current_time: float?</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1679.75" y="-168.05" font-family="Helvetica,sans-Serif" font-size="11.00">speed: str?</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1679.75" y="-154.55" font-family="Helvetica,sans-Serif" font-size="11.00">error_message: str?</text>
|
||||
<polyline fill="none" stroke="black" points="1851.25,-147.5 1851.25,-236.5"/>
|
||||
<text xml:space="preserve" text-anchor="start" x="1859.25" y="-201.8" font-family="Helvetica,sans-Serif" font-size="11.00">celery_task_id: str?</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1859.25" y="-188.3" font-family="Helvetica,sans-Serif" font-size="11.00">execution_arn: str?</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1859.25" y="-174.8" font-family="Helvetica,sans-Serif" font-size="11.00">priority: int</text>
|
||||
<polyline fill="none" stroke="black" points="1973,-147.5 1973,-236.5"/>
|
||||
<text xml:space="preserve" text-anchor="start" x="1981" y="-201.8" font-family="Helvetica,sans-Serif" font-size="11.00">created_at: datetime</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1981" y="-188.3" font-family="Helvetica,sans-Serif" font-size="11.00">started_at: datetime?</text>
|
||||
<text xml:space="preserve" text-anchor="start" x="1981" y="-174.8" font-family="Helvetica,sans-Serif" font-size="11.00">completed_at: datetime?</text>
|
||||
</g>
|
||||
<!-- MediaAsset->TranscodeJob -->
|
||||
<g id="edge1" class="edge">
|
||||
<title>MediaAsset->TranscodeJob</title>
|
||||
<path fill="none" stroke="black" d="M708.15,-147.67C708.15,-147.67 983.49,-147.67 983.49,-147.67"/>
|
||||
<polygon fill="black" stroke="black" points="983.49,-151.17 993.49,-147.67 983.49,-144.17 983.49,-151.17"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="910.62" y="-195.25" font-family="Helvetica,sans-Serif" font-size="10.00">1:N source_asset</text>
|
||||
<path fill="none" stroke="black" d="M708.33,-192C708.33,-192 900.24,-192 900.24,-192"/>
|
||||
<polygon fill="black" stroke="black" points="900.24,-195.5 910.24,-192 900.24,-188.5 900.24,-195.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="762.66" y="-182.5" font-family="Helvetica,sans-Serif" font-size="10.00">1:N source_asset</text>
|
||||
</g>
|
||||
<!-- TranscodePreset -->
|
||||
<g id="node2" class="node">
|
||||
@@ -112,16 +113,16 @@
|
||||
<!-- TranscodePreset->TranscodeJob -->
|
||||
<g id="edge2" class="edge">
|
||||
<title>TranscodePreset->TranscodeJob</title>
|
||||
<path fill="none" stroke="black" d="M766.5,-89.89C766.5,-101.97 766.5,-111.75 766.5,-111.75 766.5,-111.75 983.39,-111.75 983.39,-111.75"/>
|
||||
<polygon fill="black" stroke="black" points="983.39,-115.25 993.39,-111.75 983.39,-108.25 983.39,-115.25"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="910.62" y="-48.25" font-family="Helvetica,sans-Serif" font-size="10.00">1:N preset</text>
|
||||
<path fill="none" stroke="black" d="M767.25,-89.95C767.25,-125.61 767.25,-169.5 767.25,-169.5 767.25,-169.5 900.26,-169.5 900.26,-169.5"/>
|
||||
<polygon fill="black" stroke="black" points="900.26,-173 910.26,-169.5 900.26,-166 900.26,-173"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="768.85" y="-160" font-family="Helvetica,sans-Serif" font-size="10.00">1:N preset</text>
|
||||
</g>
|
||||
<!-- TranscodeJob->MediaAsset -->
|
||||
<g id="edge3" class="edge">
|
||||
<title>TranscodeJob->MediaAsset</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M995.06,-161.83C995.06,-161.83 719.99,-161.83 719.99,-161.83"/>
|
||||
<polygon fill="black" stroke="black" points="719.99,-158.33 709.99,-161.83 719.99,-165.33 719.99,-158.33"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="910.62" y="-134.25" font-family="Helvetica,sans-Serif" font-size="10.00">1:1 output_asset</text>
|
||||
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M911.86,-214.5C911.86,-214.5 719.76,-214.5 719.76,-214.5"/>
|
||||
<polygon fill="black" stroke="black" points="719.76,-211 709.76,-214.5 719.76,-218 719.76,-211"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="775.31" y="-205" font-family="Helvetica,sans-Serif" font-size="10.00">1:1 output_asset</text>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 12 KiB After Width: | Height: | Size: 13 KiB |
@@ -3,7 +3,6 @@ digraph job_flow {
|
||||
node [shape=box, style=rounded, fontname="Helvetica"]
|
||||
edge [fontname="Helvetica", fontsize=10]
|
||||
|
||||
// Title
|
||||
labelloc="t"
|
||||
label="MPR - Job Flow"
|
||||
fontsize=16
|
||||
@@ -11,7 +10,19 @@ digraph job_flow {
|
||||
|
||||
graph [splines=ortho, nodesep=0.6, ranksep=0.6]
|
||||
|
||||
// States
|
||||
// API entry points
|
||||
subgraph cluster_api {
|
||||
label="API Entry Points"
|
||||
style=dashed
|
||||
color=gray
|
||||
|
||||
rest_create [label="POST /api/jobs/", shape=ellipse]
|
||||
gql_create [label="mutation createJob", shape=ellipse]
|
||||
rest_cancel [label="POST /api/jobs/{id}/cancel", shape=ellipse]
|
||||
rest_callback [label="POST /api/jobs/{id}/callback", shape=ellipse]
|
||||
}
|
||||
|
||||
// Job states
|
||||
subgraph cluster_states {
|
||||
label="Job States"
|
||||
style=filled
|
||||
@@ -24,78 +35,70 @@ digraph job_flow {
|
||||
cancelled [label="CANCELLED", fillcolor="#6c757d", style="filled,rounded", fontcolor=white]
|
||||
}
|
||||
|
||||
// Transitions
|
||||
pending -> processing [label="worker picks up"]
|
||||
processing -> completed [label="success"]
|
||||
processing -> failed [label="error"]
|
||||
pending -> cancelled [label="user cancels"]
|
||||
processing -> cancelled [label="user cancels"]
|
||||
failed -> pending [label="retry"]
|
||||
// State transitions
|
||||
pending -> processing [xlabel="worker picks up"]
|
||||
processing -> completed [xlabel="success"]
|
||||
processing -> failed [xlabel="error"]
|
||||
pending -> cancelled [xlabel="user cancels"]
|
||||
processing -> cancelled [xlabel="user cancels"]
|
||||
failed -> pending [xlabel="retry"]
|
||||
|
||||
// API actions
|
||||
subgraph cluster_api {
|
||||
label="API Actions"
|
||||
style=dashed
|
||||
color=gray
|
||||
rest_create -> pending
|
||||
gql_create -> pending
|
||||
rest_cancel -> cancelled [style=dashed]
|
||||
|
||||
create_job [label="POST /jobs/", shape=ellipse]
|
||||
cancel_job [label="POST /jobs/{id}/cancel", shape=ellipse]
|
||||
retry_job [label="POST /jobs/{id}/retry", shape=ellipse]
|
||||
}
|
||||
|
||||
create_job -> pending
|
||||
cancel_job -> cancelled [style=dashed]
|
||||
retry_job -> pending [style=dashed]
|
||||
|
||||
// Executor layer
|
||||
subgraph cluster_executor {
|
||||
label="Executor Layer"
|
||||
// Executor dispatch
|
||||
subgraph cluster_dispatch {
|
||||
label="Executor Dispatch"
|
||||
style=filled
|
||||
fillcolor="#fff8e8"
|
||||
|
||||
executor [label="Executor\n(abstract)", shape=diamond]
|
||||
local [label="LocalExecutor\nCelery + FFmpeg"]
|
||||
lambda_exec [label="LambdaExecutor\nSQS + Lambda"]
|
||||
dispatch [label="MPR_EXECUTOR", shape=diamond]
|
||||
}
|
||||
|
||||
processing -> executor
|
||||
executor -> local [label="MPR_EXECUTOR=local"]
|
||||
executor -> lambda_exec [label="MPR_EXECUTOR=lambda", style=dashed]
|
||||
pending -> dispatch
|
||||
|
||||
// FFmpeg operations
|
||||
subgraph cluster_ffmpeg {
|
||||
label="FFmpeg Operations"
|
||||
// Local path
|
||||
subgraph cluster_local {
|
||||
label="Local Mode (Celery)"
|
||||
style=filled
|
||||
fillcolor="#e8f4e8"
|
||||
|
||||
transcode [label="Transcode\n(with preset)"]
|
||||
trim [label="Trim\n(-c:v copy -c:a copy)"]
|
||||
celery_task [label="Celery Task\n(transcode queue)"]
|
||||
s3_download [label="S3 Download\n(MinIO)"]
|
||||
ffmpeg_local [label="FFmpeg\ntranscode/trim"]
|
||||
s3_upload [label="S3 Upload\n(MinIO)"]
|
||||
db_update [label="DB Update\n(update_job_progress)"]
|
||||
}
|
||||
|
||||
local -> transcode
|
||||
local -> trim
|
||||
dispatch -> celery_task [xlabel="local"]
|
||||
celery_task -> s3_download
|
||||
s3_download -> ffmpeg_local
|
||||
ffmpeg_local -> s3_upload
|
||||
s3_upload -> db_update
|
||||
db_update -> completed [style=dotted]
|
||||
|
||||
// gRPC streaming
|
||||
subgraph cluster_grpc {
|
||||
label="gRPC Communication"
|
||||
// Lambda path
|
||||
subgraph cluster_lambda {
|
||||
label="Lambda Mode (AWS)"
|
||||
style=filled
|
||||
fillcolor="#e8e8f8"
|
||||
fillcolor="#fde8d0"
|
||||
|
||||
grpc_stream [label="StreamProgress\n(server streaming)", shape=parallelogram]
|
||||
grpc_submit [label="SubmitJob\n(unary)", shape=parallelogram]
|
||||
grpc_cancel [label="CancelJob\n(unary)", shape=parallelogram]
|
||||
sfn_start [label="Step Functions\nstart_execution"]
|
||||
lambda_fn [label="Lambda\nFFmpeg container"]
|
||||
s3_dl_aws [label="S3 Download\n(AWS)"]
|
||||
ffmpeg_aws [label="FFmpeg\ntranscode/trim"]
|
||||
s3_ul_aws [label="S3 Upload\n(AWS)"]
|
||||
callback [label="HTTP Callback\nPOST /jobs/{id}/callback"]
|
||||
}
|
||||
|
||||
// Progress tracking via gRPC
|
||||
progress [label="Progress Updates\n(gRPC → Redis → DB)", shape=note]
|
||||
transcode -> progress [style=dotted]
|
||||
trim -> progress [style=dotted]
|
||||
progress -> grpc_stream [style=dotted, label="stream to client"]
|
||||
grpc_stream -> processing [style=dotted, label="update status"]
|
||||
dispatch -> sfn_start [xlabel="lambda"]
|
||||
sfn_start -> lambda_fn
|
||||
lambda_fn -> s3_dl_aws
|
||||
s3_dl_aws -> ffmpeg_aws
|
||||
ffmpeg_aws -> s3_ul_aws
|
||||
s3_ul_aws -> callback
|
||||
callback -> completed [style=dotted]
|
||||
|
||||
// gRPC job control
|
||||
create_job -> grpc_submit [label="via gRPC"]
|
||||
grpc_submit -> pending [style=dashed]
|
||||
cancel_job -> grpc_cancel [label="via gRPC"]
|
||||
grpc_cancel -> cancelled [style=dashed]
|
||||
rest_callback -> completed [style=dashed, xlabel="Lambda reports"]
|
||||
}
|
||||
|
||||
@@ -1,296 +1,329 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
|
||||
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<!-- Generated by graphviz version 14.1.1 (0)
|
||||
<!-- Generated by graphviz version 14.1.2 (0)
|
||||
-->
|
||||
<!-- Title: job_flow Pages: 1 -->
|
||||
<svg width="1398pt" height="843pt"
|
||||
viewBox="0.00 0.00 1398.00 843.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 838.75)">
|
||||
<svg width="1621pt" height="655pt"
|
||||
viewBox="0.00 0.00 1621.00 655.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 650.5)">
|
||||
<title>job_flow</title>
|
||||
<polygon fill="white" stroke="none" points="-4,4 -4,-838.75 1394,-838.75 1394,4 -4,4"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="695" y="-815.55" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">MPR - Job Flow</text>
|
||||
<polygon fill="white" stroke="none" points="-4,4 -4,-650.5 1617,-650.5 1617,4 -4,4"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="806.5" y="-627.3" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">MPR - Job Flow</text>
|
||||
<g id="clust1" class="cluster">
|
||||
<title>cluster_states</title>
|
||||
<polygon fill="#f8f8f8" stroke="black" points="774,-8 774,-297.5 1154,-297.5 1154,-8 774,-8"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="964" y="-278.3" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Job States</text>
|
||||
<title>cluster_api</title>
|
||||
<polygon fill="none" stroke="gray" stroke-dasharray="5,2" points="297,-269.75 297,-349.25 1395,-349.25 1395,-269.75 297,-269.75"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="846" y="-330.05" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">API Entry Points</text>
|
||||
</g>
|
||||
<g id="clust2" class="cluster">
|
||||
<title>cluster_api</title>
|
||||
<polygon fill="none" stroke="gray" stroke-dasharray="5,2" points="674,-360 674,-439.5 1382,-439.5 1382,-360 674,-360"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="1028" y="-420.3" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">API Actions</text>
|
||||
<title>cluster_states</title>
|
||||
<polygon fill="#f8f8f8" stroke="black" points="572,-11.25 572,-261.75 939,-261.75 939,-11.25 572,-11.25"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="755.5" y="-242.55" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Job States</text>
|
||||
</g>
|
||||
<g id="clust3" class="cluster">
|
||||
<title>cluster_executor</title>
|
||||
<polygon fill="#fff8e8" stroke="black" points="8,-571.5 8,-799.25 352,-799.25 352,-571.5 8,-571.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="180" y="-780.05" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Executor Layer</text>
|
||||
<title>cluster_dispatch</title>
|
||||
<polygon fill="#fff8e8" stroke="black" points="103,-531.5 103,-611 377,-611 377,-531.5 103,-531.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="240" y="-591.8" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Executor Dispatch</text>
|
||||
</g>
|
||||
<g id="clust4" class="cluster">
|
||||
<title>cluster_ffmpeg</title>
|
||||
<polygon fill="#e8f4e8" stroke="black" points="73,-462.5 73,-548.5 393,-548.5 393,-462.5 73,-462.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="233" y="-529.3" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">FFmpeg Operations</text>
|
||||
<title>cluster_local</title>
|
||||
<polygon fill="#e8f4e8" stroke="black" points="8,-93.5 8,-523.5 203,-523.5 203,-93.5 8,-93.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="105.5" y="-504.3" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Local Mode (Celery)</text>
|
||||
</g>
|
||||
<g id="clust5" class="cluster">
|
||||
<title>cluster_grpc</title>
|
||||
<polygon fill="#e8e8f8" stroke="black" points="8,-193.5 8,-322 766,-322 766,-193.5 8,-193.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="387" y="-302.8" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">gRPC Communication</text>
|
||||
<title>cluster_lambda</title>
|
||||
<polygon fill="#fde8d0" stroke="black" points="1403,-8 1403,-523.5 1605,-523.5 1605,-8 1403,-8"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="1504" y="-504.3" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Lambda Mode (AWS)</text>
|
||||
</g>
|
||||
<!-- rest_create -->
|
||||
<g id="node1" class="node">
|
||||
<title>rest_create</title>
|
||||
<ellipse fill="none" stroke="black" cx="389" cy="-295.75" rx="84.35" ry="18"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="389" y="-291.07" font-family="Helvetica,sans-Serif" font-size="14.00">POST /api/jobs/</text>
|
||||
</g>
|
||||
<!-- pending -->
|
||||
<g id="node1" class="node">
|
||||
<g id="node5" class="node">
|
||||
<title>pending</title>
|
||||
<path fill="#ffc107" stroke="black" d="M971.88,-262C971.88,-262 916.12,-262 916.12,-262 910.12,-262 904.12,-256 904.12,-250 904.12,-250 904.12,-238 904.12,-238 904.12,-232 910.12,-226 916.12,-226 916.12,-226 971.88,-226 971.88,-226 977.88,-226 983.88,-232 983.88,-238 983.88,-238 983.88,-250 983.88,-250 983.88,-256 977.88,-262 971.88,-262"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="944" y="-239.32" font-family="Helvetica,sans-Serif" font-size="14.00">PENDING</text>
|
||||
<path fill="#ffc107" stroke="black" d="M647.88,-226.25C647.88,-226.25 592.12,-226.25 592.12,-226.25 586.12,-226.25 580.12,-220.25 580.12,-214.25 580.12,-214.25 580.12,-202.25 580.12,-202.25 580.12,-196.25 586.12,-190.25 592.12,-190.25 592.12,-190.25 647.88,-190.25 647.88,-190.25 653.88,-190.25 659.88,-196.25 659.88,-202.25 659.88,-202.25 659.88,-214.25 659.88,-214.25 659.88,-220.25 653.88,-226.25 647.88,-226.25"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="620" y="-203.57" font-family="Helvetica,sans-Serif" font-size="14.00">PENDING</text>
|
||||
</g>
|
||||
<!-- rest_create->pending -->
|
||||
<g id="edge7" class="edge">
|
||||
<title>rest_create->pending</title>
|
||||
<path fill="none" stroke="black" d="M389,-277.61C389,-253.52 389,-214 389,-214 389,-214 568.25,-214 568.25,-214"/>
|
||||
<polygon fill="black" stroke="black" points="568.25,-217.5 578.25,-214 568.25,-210.5 568.25,-217.5"/>
|
||||
</g>
|
||||
<!-- gql_create -->
|
||||
<g id="node2" class="node">
|
||||
<title>gql_create</title>
|
||||
<ellipse fill="none" stroke="black" cx="620" cy="-295.75" rx="103.29" ry="18"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="620" y="-291.07" font-family="Helvetica,sans-Serif" font-size="14.00">mutation createJob</text>
|
||||
</g>
|
||||
<!-- gql_create->pending -->
|
||||
<g id="edge8" class="edge">
|
||||
<title>gql_create->pending</title>
|
||||
<path fill="none" stroke="black" d="M620,-277.62C620,-277.62 620,-238.17 620,-238.17"/>
|
||||
<polygon fill="black" stroke="black" points="623.5,-238.17 620,-228.17 616.5,-238.17 623.5,-238.17"/>
|
||||
</g>
|
||||
<!-- rest_cancel -->
|
||||
<g id="node3" class="node">
|
||||
<title>rest_cancel</title>
|
||||
<ellipse fill="none" stroke="black" cx="1247" cy="-295.75" rx="140.12" ry="18"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="1247" y="-291.07" font-family="Helvetica,sans-Serif" font-size="14.00">POST /api/jobs/{id}/cancel</text>
|
||||
</g>
|
||||
<!-- cancelled -->
|
||||
<g id="node9" class="node">
|
||||
<title>cancelled</title>
|
||||
<path fill="#6c757d" stroke="black" d="M918.62,-55.25C918.62,-55.25 843.38,-55.25 843.38,-55.25 837.38,-55.25 831.38,-49.25 831.38,-43.25 831.38,-43.25 831.38,-31.25 831.38,-31.25 831.38,-25.25 837.38,-19.25 843.38,-19.25 843.38,-19.25 918.62,-19.25 918.62,-19.25 924.62,-19.25 930.62,-25.25 930.62,-31.25 930.62,-31.25 930.62,-43.25 930.62,-43.25 930.62,-49.25 924.62,-55.25 918.62,-55.25"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="881" y="-32.58" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">CANCELLED</text>
|
||||
</g>
|
||||
<!-- rest_cancel->cancelled -->
|
||||
<g id="edge9" class="edge">
|
||||
<title>rest_cancel->cancelled</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M1247,-277.56C1247,-218.66 1247,-37 1247,-37 1247,-37 942.64,-37 942.64,-37"/>
|
||||
<polygon fill="black" stroke="black" points="942.64,-33.5 932.64,-37 942.64,-40.5 942.64,-33.5"/>
|
||||
</g>
|
||||
<!-- rest_callback -->
|
||||
<g id="node4" class="node">
|
||||
<title>rest_callback</title>
|
||||
<ellipse fill="none" stroke="black" cx="915" cy="-295.75" rx="148.54" ry="18"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="915" y="-291.07" font-family="Helvetica,sans-Serif" font-size="14.00">POST /api/jobs/{id}/callback</text>
|
||||
</g>
|
||||
<!-- completed -->
|
||||
<g id="node7" class="node">
|
||||
<title>completed</title>
|
||||
<path fill="#28a745" stroke="black" d="M776.75,-55.25C776.75,-55.25 699.25,-55.25 699.25,-55.25 693.25,-55.25 687.25,-49.25 687.25,-43.25 687.25,-43.25 687.25,-31.25 687.25,-31.25 687.25,-25.25 693.25,-19.25 699.25,-19.25 699.25,-19.25 776.75,-19.25 776.75,-19.25 782.75,-19.25 788.75,-25.25 788.75,-31.25 788.75,-31.25 788.75,-43.25 788.75,-43.25 788.75,-49.25 782.75,-55.25 776.75,-55.25"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="738" y="-32.58" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">COMPLETED</text>
|
||||
</g>
|
||||
<!-- rest_callback->completed -->
|
||||
<g id="edge24" class="edge">
|
||||
<title>rest_callback->completed</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M783.42,-287.15C783.42,-287.15 783.42,-67.24 783.42,-67.24"/>
|
||||
<polygon fill="black" stroke="black" points="786.92,-67.24 783.42,-57.24 779.92,-67.24 786.92,-67.24"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="745.17" y="-180.44" font-family="Helvetica,sans-Serif" font-size="10.00">Lambda reports</text>
|
||||
</g>
|
||||
<!-- processing -->
|
||||
<g id="node2" class="node">
|
||||
<g id="node6" class="node">
|
||||
<title>processing</title>
|
||||
<path fill="#17a2b8" stroke="black" d="M877.75,-144.75C877.75,-144.75 794.25,-144.75 794.25,-144.75 788.25,-144.75 782.25,-138.75 782.25,-132.75 782.25,-132.75 782.25,-120.75 782.25,-120.75 782.25,-114.75 788.25,-108.75 794.25,-108.75 794.25,-108.75 877.75,-108.75 877.75,-108.75 883.75,-108.75 889.75,-114.75 889.75,-120.75 889.75,-120.75 889.75,-132.75 889.75,-132.75 889.75,-138.75 883.75,-144.75 877.75,-144.75"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="836" y="-122.08" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">PROCESSING</text>
|
||||
<path fill="#17a2b8" stroke="black" d="M768.75,-140.75C768.75,-140.75 685.25,-140.75 685.25,-140.75 679.25,-140.75 673.25,-134.75 673.25,-128.75 673.25,-128.75 673.25,-116.75 673.25,-116.75 673.25,-110.75 679.25,-104.75 685.25,-104.75 685.25,-104.75 768.75,-104.75 768.75,-104.75 774.75,-104.75 780.75,-110.75 780.75,-116.75 780.75,-116.75 780.75,-128.75 780.75,-128.75 780.75,-134.75 774.75,-140.75 768.75,-140.75"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="727" y="-118.08" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">PROCESSING</text>
|
||||
</g>
|
||||
<!-- pending->processing -->
|
||||
<g id="edge1" class="edge">
|
||||
<title>pending->processing</title>
|
||||
<path fill="none" stroke="black" d="M920.04,-225.68C920.04,-194.87 920.04,-136 920.04,-136 920.04,-136 901.69,-136 901.69,-136"/>
|
||||
<polygon fill="black" stroke="black" points="901.69,-132.5 891.69,-136 901.69,-139.5 901.69,-132.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="902.25" y="-170" font-family="Helvetica,sans-Serif" font-size="10.00">worker picks up</text>
|
||||
</g>
|
||||
<!-- cancelled -->
|
||||
<g id="node5" class="node">
|
||||
<title>cancelled</title>
|
||||
<path fill="#6c757d" stroke="black" d="M1122.62,-52C1122.62,-52 1047.38,-52 1047.38,-52 1041.38,-52 1035.38,-46 1035.38,-40 1035.38,-40 1035.38,-28 1035.38,-28 1035.38,-22 1041.38,-16 1047.38,-16 1047.38,-16 1122.62,-16 1122.62,-16 1128.62,-16 1134.62,-22 1134.62,-28 1134.62,-28 1134.62,-40 1134.62,-40 1134.62,-46 1128.62,-52 1122.62,-52"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="1085" y="-29.32" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">CANCELLED</text>
|
||||
<path fill="none" stroke="black" d="M654.58,-189.87C654.58,-166.46 654.58,-129 654.58,-129 654.58,-129 661.34,-129 661.34,-129"/>
|
||||
<polygon fill="black" stroke="black" points="661.34,-132.5 671.34,-129 661.34,-125.5 661.34,-132.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="616.33" y="-159.3" font-family="Helvetica,sans-Serif" font-size="10.00">worker picks up</text>
|
||||
</g>
|
||||
<!-- pending->cancelled -->
|
||||
<g id="edge4" class="edge">
|
||||
<title>pending->cancelled</title>
|
||||
<path fill="none" stroke="black" d="M984.17,-238C1022.83,-238 1075.49,-238 1075.49,-238 1075.49,-238 1075.49,-63.98 1075.49,-63.98"/>
|
||||
<polygon fill="black" stroke="black" points="1078.99,-63.98 1075.49,-53.98 1071.99,-63.98 1078.99,-63.98"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="1115.38" y="-123.62" font-family="Helvetica,sans-Serif" font-size="10.00">user cancels</text>
|
||||
<path fill="none" stroke="black" d="M660.36,-208C737.33,-208 897.54,-208 897.54,-208 897.54,-208 897.54,-67.04 897.54,-67.04"/>
|
||||
<polygon fill="black" stroke="black" points="901.04,-67.04 897.54,-57.04 894.04,-67.04 901.04,-67.04"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="819.06" y="-211.25" font-family="Helvetica,sans-Serif" font-size="10.00">user cancels</text>
|
||||
</g>
|
||||
<!-- completed -->
|
||||
<g id="node3" class="node">
|
||||
<title>completed</title>
|
||||
<path fill="#28a745" stroke="black" d="M871.75,-52C871.75,-52 794.25,-52 794.25,-52 788.25,-52 782.25,-46 782.25,-40 782.25,-40 782.25,-28 782.25,-28 782.25,-22 788.25,-16 794.25,-16 794.25,-16 871.75,-16 871.75,-16 877.75,-16 883.75,-22 883.75,-28 883.75,-28 883.75,-40 883.75,-40 883.75,-46 877.75,-52 871.75,-52"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="833" y="-29.32" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">COMPLETED</text>
|
||||
<!-- dispatch -->
|
||||
<g id="node10" class="node">
|
||||
<title>dispatch</title>
|
||||
<path fill="none" stroke="black" d="M228.12,-573.84C228.12,-573.84 122.92,-559.16 122.92,-559.16 116.98,-558.33 116.98,-556.67 122.92,-555.84 122.92,-555.84 228.12,-541.16 228.12,-541.16 234.06,-540.33 245.94,-540.33 251.88,-541.16 251.88,-541.16 357.08,-555.84 357.08,-555.84 363.02,-556.67 363.02,-558.33 357.08,-559.16 357.08,-559.16 251.88,-573.84 251.88,-573.84 245.94,-574.67 234.06,-574.67 228.12,-573.84"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="240" y="-552.83" font-family="Helvetica,sans-Serif" font-size="14.00">MPR_EXECUTOR</text>
|
||||
</g>
|
||||
<!-- pending->dispatch -->
|
||||
<g id="edge10" class="edge">
|
||||
<title>pending->dispatch</title>
|
||||
<path fill="none" stroke="black" d="M579.92,-202C483.92,-202 248.76,-202 248.76,-202 248.76,-202 248.76,-528.84 248.76,-528.84"/>
|
||||
<polygon fill="black" stroke="black" points="245.26,-528.84 248.76,-538.84 252.26,-528.84 245.26,-528.84"/>
|
||||
</g>
|
||||
<!-- processing->completed -->
|
||||
<g id="edge2" class="edge">
|
||||
<title>processing->completed</title>
|
||||
<path fill="none" stroke="black" d="M833,-108.43C833,-108.43 833,-63.8 833,-63.8"/>
|
||||
<polygon fill="black" stroke="black" points="836.5,-63.8 833,-53.8 829.5,-63.8 836.5,-63.8"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="844.12" y="-77.25" font-family="Helvetica,sans-Serif" font-size="10.00">success</text>
|
||||
<path fill="none" stroke="black" d="M734,-104.62C734,-104.62 734,-67.16 734,-67.16"/>
|
||||
<polygon fill="black" stroke="black" points="737.5,-67.16 734,-57.16 730.5,-67.16 737.5,-67.16"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="714.88" y="-89.14" font-family="Helvetica,sans-Serif" font-size="10.00">success</text>
|
||||
</g>
|
||||
<!-- failed -->
|
||||
<g id="node4" class="node">
|
||||
<g id="node8" class="node">
|
||||
<title>failed</title>
|
||||
<path fill="#dc3545" stroke="black" d="M980,-52C980,-52 940,-52 940,-52 934,-52 928,-46 928,-40 928,-40 928,-28 928,-28 928,-22 934,-16 940,-16 940,-16 980,-16 980,-16 986,-16 992,-22 992,-28 992,-28 992,-40 992,-40 992,-46 986,-52 980,-52"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="960" y="-29.32" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">FAILED</text>
|
||||
<path fill="#dc3545" stroke="black" d="M632,-55.25C632,-55.25 592,-55.25 592,-55.25 586,-55.25 580,-49.25 580,-43.25 580,-43.25 580,-31.25 580,-31.25 580,-25.25 586,-19.25 592,-19.25 592,-19.25 632,-19.25 632,-19.25 638,-19.25 644,-25.25 644,-31.25 644,-31.25 644,-43.25 644,-43.25 644,-49.25 638,-55.25 632,-55.25"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="612" y="-32.58" font-family="Helvetica,sans-Serif" font-size="14.00" fill="white">FAILED</text>
|
||||
</g>
|
||||
<!-- processing->failed -->
|
||||
<g id="edge3" class="edge">
|
||||
<title>processing->failed</title>
|
||||
<path fill="none" stroke="black" d="M890.02,-118C918.1,-118 946.62,-118 946.62,-118 946.62,-118 946.62,-63.74 946.62,-63.74"/>
|
||||
<polygon fill="black" stroke="black" points="950.13,-63.74 946.63,-53.74 943.13,-63.74 950.13,-63.74"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="922.62" y="-77.25" font-family="Helvetica,sans-Serif" font-size="10.00">error</text>
|
||||
<path fill="none" stroke="black" d="M680.25,-104.62C680.25,-77.88 680.25,-31 680.25,-31 680.25,-31 655.64,-31 655.64,-31"/>
|
||||
<polygon fill="black" stroke="black" points="655.64,-27.5 645.64,-31 655.64,-34.5 655.64,-27.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="668.62" y="-58.76" font-family="Helvetica,sans-Serif" font-size="10.00">error</text>
|
||||
</g>
|
||||
<!-- processing->cancelled -->
|
||||
<g id="edge5" class="edge">
|
||||
<title>processing->cancelled</title>
|
||||
<path fill="none" stroke="black" d="M890.24,-127C953.27,-127 1048.75,-127 1048.75,-127 1048.75,-127 1048.75,-63.89 1048.75,-63.89"/>
|
||||
<polygon fill="black" stroke="black" points="1052.25,-63.89 1048.75,-53.89 1045.25,-63.89 1052.25,-63.89"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="1012.38" y="-77.25" font-family="Helvetica,sans-Serif" font-size="10.00">user cancels</text>
|
||||
</g>
|
||||
<!-- executor -->
|
||||
<g id="node9" class="node">
|
||||
<title>executor</title>
|
||||
<path fill="none" stroke="black" d="M89.31,-758.31C89.31,-758.31 27.19,-726.69 27.19,-726.69 21.85,-723.97 21.85,-718.53 27.19,-715.81 27.19,-715.81 89.31,-684.19 89.31,-684.19 94.65,-681.47 105.35,-681.47 110.69,-684.19 110.69,-684.19 172.81,-715.81 172.81,-715.81 178.15,-718.53 178.15,-723.97 172.81,-726.69 172.81,-726.69 110.69,-758.31 110.69,-758.31 105.35,-761.03 94.65,-761.03 89.31,-758.31"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="100" y="-725.2" font-family="Helvetica,sans-Serif" font-size="14.00">Executor</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="100" y="-707.95" font-family="Helvetica,sans-Serif" font-size="14.00">(abstract)</text>
|
||||
</g>
|
||||
<!-- processing->executor -->
|
||||
<g id="edge10" class="edge">
|
||||
<title>processing->executor</title>
|
||||
<path fill="none" stroke="black" d="M836.12,-145.19C836.12,-245.49 836.12,-721 836.12,-721 836.12,-721 195.6,-721 195.6,-721"/>
|
||||
<polygon fill="black" stroke="black" points="195.6,-717.5 185.6,-721 195.6,-724.5 195.6,-717.5"/>
|
||||
<path fill="none" stroke="black" d="M780.93,-123C819.44,-123 864.46,-123 864.46,-123 864.46,-123 864.46,-66.95 864.46,-66.95"/>
|
||||
<polygon fill="black" stroke="black" points="867.96,-66.95 864.46,-56.95 860.96,-66.95 867.96,-66.95"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="820.35" y="-126.25" font-family="Helvetica,sans-Serif" font-size="10.00">user cancels</text>
|
||||
</g>
|
||||
<!-- failed->pending -->
|
||||
<g id="edge6" class="edge">
|
||||
<title>failed->pending</title>
|
||||
<path fill="none" stroke="black" d="M965.25,-52.27C965.25,-52.27 965.25,-214.11 965.25,-214.11"/>
|
||||
<polygon fill="black" stroke="black" points="961.75,-214.11 965.25,-224.11 968.75,-214.11 961.75,-214.11"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="987.62" y="-123.62" font-family="Helvetica,sans-Serif" font-size="10.00">retry</text>
|
||||
<path fill="none" stroke="black" d="M612.06,-55.55C612.06,-55.55 612.06,-178.31 612.06,-178.31"/>
|
||||
<polygon fill="black" stroke="black" points="608.56,-178.31 612.06,-188.31 615.56,-178.31 608.56,-178.31"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="600.44" y="-120.18" font-family="Helvetica,sans-Serif" font-size="10.00">retry</text>
|
||||
</g>
|
||||
<!-- create_job -->
|
||||
<g id="node6" class="node">
|
||||
<title>create_job</title>
|
||||
<ellipse fill="none" stroke="black" cx="748" cy="-386" rx="66.47" ry="18"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="748" y="-381.32" font-family="Helvetica,sans-Serif" font-size="14.00">POST /jobs/</text>
|
||||
</g>
|
||||
<!-- create_job->pending -->
|
||||
<g id="edge7" class="edge">
|
||||
<title>create_job->pending</title>
|
||||
<path fill="none" stroke="black" d="M798.36,-373.89C798.36,-339.55 798.36,-244 798.36,-244 798.36,-244 892.3,-244 892.3,-244"/>
|
||||
<polygon fill="black" stroke="black" points="892.3,-247.5 902.3,-244 892.3,-240.5 892.3,-247.5"/>
|
||||
</g>
|
||||
<!-- grpc_submit -->
|
||||
<g id="node15" class="node">
|
||||
<title>grpc_submit</title>
|
||||
<path fill="none" stroke="black" d="M528.46,-286.5C528.46,-286.5 408.56,-286.5 408.56,-286.5 402.56,-286.5 394.16,-281 391.77,-275.5 391.77,-275.5 364.33,-212.5 364.33,-212.5 361.94,-207 365.54,-201.5 371.54,-201.5 371.54,-201.5 491.44,-201.5 491.44,-201.5 497.44,-201.5 505.84,-207 508.23,-212.5 508.23,-212.5 535.67,-275.5 535.67,-275.5 538.06,-281 534.46,-286.5 528.46,-286.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="450" y="-247.95" font-family="Helvetica,sans-Serif" font-size="14.00">SubmitJob</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="450" y="-230.7" font-family="Helvetica,sans-Serif" font-size="14.00">(unary)</text>
|
||||
</g>
|
||||
<!-- create_job->grpc_submit -->
|
||||
<g id="edge19" class="edge">
|
||||
<title>create_job->grpc_submit</title>
|
||||
<path fill="none" stroke="black" d="M681.06,-386C596.67,-386 462.48,-386 462.48,-386 462.48,-386 462.48,-298.5 462.48,-298.5"/>
|
||||
<polygon fill="black" stroke="black" points="465.98,-298.5 462.48,-288.5 458.98,-298.5 465.98,-298.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="620.75" y="-333.25" font-family="Helvetica,sans-Serif" font-size="10.00">via gRPC</text>
|
||||
</g>
|
||||
<!-- cancel_job -->
|
||||
<g id="node7" class="node">
|
||||
<title>cancel_job</title>
|
||||
<ellipse fill="none" stroke="black" cx="980" cy="-386" rx="122.23" ry="18"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="980" y="-381.32" font-family="Helvetica,sans-Serif" font-size="14.00">POST /jobs/{id}/cancel</text>
|
||||
</g>
|
||||
<!-- cancel_job->cancelled -->
|
||||
<g id="edge8" class="edge">
|
||||
<title>cancel_job->cancelled</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M1088.86,-377.65C1088.86,-377.65 1088.86,-63.86 1088.86,-63.86"/>
|
||||
<polygon fill="black" stroke="black" points="1092.36,-63.86 1088.86,-53.86 1085.36,-63.86 1092.36,-63.86"/>
|
||||
</g>
|
||||
<!-- grpc_cancel -->
|
||||
<g id="node16" class="node">
|
||||
<title>grpc_cancel</title>
|
||||
<path fill="none" stroke="black" d="M746.35,-286.5C746.35,-286.5 631.4,-286.5 631.4,-286.5 625.4,-286.5 617.07,-280.97 614.75,-275.44 614.75,-275.44 588.31,-212.56 588.31,-212.56 585.98,-207.03 589.65,-201.5 595.65,-201.5 595.65,-201.5 710.6,-201.5 710.6,-201.5 716.6,-201.5 724.93,-207.03 727.25,-212.56 727.25,-212.56 753.69,-275.44 753.69,-275.44 756.02,-280.97 752.35,-286.5 746.35,-286.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="671" y="-247.95" font-family="Helvetica,sans-Serif" font-size="14.00">CancelJob</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="671" y="-230.7" font-family="Helvetica,sans-Serif" font-size="14.00">(unary)</text>
|
||||
</g>
|
||||
<!-- cancel_job->grpc_cancel -->
|
||||
<g id="edge21" class="edge">
|
||||
<title>cancel_job->grpc_cancel</title>
|
||||
<path fill="none" stroke="black" d="M873.76,-376.83C873.76,-350.09 873.76,-274 873.76,-274 873.76,-274 764.98,-274 764.98,-274"/>
|
||||
<polygon fill="black" stroke="black" points="764.98,-270.5 754.98,-274 764.98,-277.5 764.98,-270.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="870.75" y="-333.25" font-family="Helvetica,sans-Serif" font-size="10.00">via gRPC</text>
|
||||
</g>
|
||||
<!-- retry_job -->
|
||||
<g id="node8" class="node">
|
||||
<title>retry_job</title>
|
||||
<ellipse fill="none" stroke="black" cx="1260" cy="-386" rx="114.34" ry="18"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="1260" y="-381.32" font-family="Helvetica,sans-Serif" font-size="14.00">POST /jobs/{id}/retry</text>
|
||||
</g>
|
||||
<!-- retry_job->pending -->
|
||||
<g id="edge9" class="edge">
|
||||
<title>retry_job->pending</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M1260,-367.66C1260,-330.54 1260,-250 1260,-250 1260,-250 995.86,-250 995.86,-250"/>
|
||||
<polygon fill="black" stroke="black" points="995.86,-246.5 985.86,-250 995.86,-253.5 995.86,-246.5"/>
|
||||
</g>
|
||||
<!-- local -->
|
||||
<g id="node10" class="node">
|
||||
<title>local</title>
|
||||
<path fill="none" stroke="black" d="M316.75,-622C316.75,-622 203.25,-622 203.25,-622 197.25,-622 191.25,-616 191.25,-610 191.25,-610 191.25,-591.5 191.25,-591.5 191.25,-585.5 197.25,-579.5 203.25,-579.5 203.25,-579.5 316.75,-579.5 316.75,-579.5 322.75,-579.5 328.75,-585.5 328.75,-591.5 328.75,-591.5 328.75,-610 328.75,-610 328.75,-616 322.75,-622 316.75,-622"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="260" y="-604.7" font-family="Helvetica,sans-Serif" font-size="14.00">LocalExecutor</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="260" y="-587.45" font-family="Helvetica,sans-Serif" font-size="14.00">Celery + FFmpeg</text>
|
||||
</g>
|
||||
<!-- executor->local -->
|
||||
<g id="edge11" class="edge">
|
||||
<title>executor->local</title>
|
||||
<path fill="none" stroke="black" d="M165.81,-711.81C165.81,-683.47 165.81,-601 165.81,-601 165.81,-601 179.54,-601 179.54,-601"/>
|
||||
<polygon fill="black" stroke="black" points="179.54,-604.5 189.54,-601 179.54,-597.5 179.54,-604.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="287.88" y="-647.25" font-family="Helvetica,sans-Serif" font-size="10.00">MPR_EXECUTOR=local</text>
|
||||
</g>
|
||||
<!-- lambda_exec -->
|
||||
<!-- celery_task -->
|
||||
<g id="node11" class="node">
|
||||
<title>lambda_exec</title>
|
||||
<path fill="none" stroke="black" d="M136.12,-622C136.12,-622 27.88,-622 27.88,-622 21.88,-622 15.88,-616 15.88,-610 15.88,-610 15.88,-591.5 15.88,-591.5 15.88,-585.5 21.88,-579.5 27.88,-579.5 27.88,-579.5 136.12,-579.5 136.12,-579.5 142.12,-579.5 148.12,-585.5 148.12,-591.5 148.12,-591.5 148.12,-610 148.12,-610 148.12,-616 142.12,-622 136.12,-622"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="82" y="-604.7" font-family="Helvetica,sans-Serif" font-size="14.00">LambdaExecutor</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="82" y="-587.45" font-family="Helvetica,sans-Serif" font-size="14.00">SQS + Lambda</text>
|
||||
<title>celery_task</title>
|
||||
<path fill="none" stroke="black" d="M162.75,-488C162.75,-488 43.25,-488 43.25,-488 37.25,-488 31.25,-482 31.25,-476 31.25,-476 31.25,-457.5 31.25,-457.5 31.25,-451.5 37.25,-445.5 43.25,-445.5 43.25,-445.5 162.75,-445.5 162.75,-445.5 168.75,-445.5 174.75,-451.5 174.75,-457.5 174.75,-457.5 174.75,-476 174.75,-476 174.75,-482 168.75,-488 162.75,-488"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="103" y="-470.7" font-family="Helvetica,sans-Serif" font-size="14.00">Celery Task</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="103" y="-453.45" font-family="Helvetica,sans-Serif" font-size="14.00">(transcode queue)</text>
|
||||
</g>
|
||||
<!-- executor->lambda_exec -->
|
||||
<g id="edge12" class="edge">
|
||||
<title>executor->lambda_exec</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M82.31,-687.36C82.31,-687.36 82.31,-633.77 82.31,-633.77"/>
|
||||
<polygon fill="black" stroke="black" points="85.81,-633.77 82.31,-623.77 78.81,-633.77 85.81,-633.77"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="121.62" y="-647.25" font-family="Helvetica,sans-Serif" font-size="10.00">MPR_EXECUTOR=lambda</text>
|
||||
<!-- dispatch->celery_task -->
|
||||
<g id="edge11" class="edge">
|
||||
<title>dispatch->celery_task</title>
|
||||
<path fill="none" stroke="black" d="M142.89,-552.62C142.89,-552.62 142.89,-499.67 142.89,-499.67"/>
|
||||
<polygon fill="black" stroke="black" points="146.39,-499.67 142.89,-489.67 139.39,-499.67 146.39,-499.67"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="131.27" y="-529.4" font-family="Helvetica,sans-Serif" font-size="10.00">local</text>
|
||||
</g>
|
||||
<!-- transcode -->
|
||||
<g id="node12" class="node">
|
||||
<title>transcode</title>
|
||||
<path fill="none" stroke="black" d="M172.88,-513C172.88,-513 93.12,-513 93.12,-513 87.12,-513 81.12,-507 81.12,-501 81.12,-501 81.12,-482.5 81.12,-482.5 81.12,-476.5 87.12,-470.5 93.12,-470.5 93.12,-470.5 172.88,-470.5 172.88,-470.5 178.88,-470.5 184.88,-476.5 184.88,-482.5 184.88,-482.5 184.88,-501 184.88,-501 184.88,-507 178.88,-513 172.88,-513"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="133" y="-495.7" font-family="Helvetica,sans-Serif" font-size="14.00">Transcode</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="133" y="-478.45" font-family="Helvetica,sans-Serif" font-size="14.00">(with preset)</text>
|
||||
<!-- sfn_start -->
|
||||
<g id="node16" class="node">
|
||||
<title>sfn_start</title>
|
||||
<path fill="none" stroke="black" d="M1525.88,-488C1525.88,-488 1428.12,-488 1428.12,-488 1422.12,-488 1416.12,-482 1416.12,-476 1416.12,-476 1416.12,-457.5 1416.12,-457.5 1416.12,-451.5 1422.12,-445.5 1428.12,-445.5 1428.12,-445.5 1525.88,-445.5 1525.88,-445.5 1531.88,-445.5 1537.88,-451.5 1537.88,-457.5 1537.88,-457.5 1537.88,-476 1537.88,-476 1537.88,-482 1531.88,-488 1525.88,-488"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="1477" y="-470.7" font-family="Helvetica,sans-Serif" font-size="14.00">Step Functions</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="1477" y="-453.45" font-family="Helvetica,sans-Serif" font-size="14.00">start_execution</text>
|
||||
</g>
|
||||
<!-- local->transcode -->
|
||||
<g id="edge13" class="edge">
|
||||
<title>local->transcode</title>
|
||||
<path fill="none" stroke="black" d="M209.38,-579C209.38,-547.27 209.38,-492 209.38,-492 209.38,-492 196.72,-492 196.72,-492"/>
|
||||
<polygon fill="black" stroke="black" points="196.72,-488.5 186.72,-492 196.72,-495.5 196.72,-488.5"/>
|
||||
</g>
|
||||
<!-- trim -->
|
||||
<g id="node13" class="node">
|
||||
<title>trim</title>
|
||||
<path fill="none" stroke="black" d="M372.5,-513C372.5,-513 239.5,-513 239.5,-513 233.5,-513 227.5,-507 227.5,-501 227.5,-501 227.5,-482.5 227.5,-482.5 227.5,-476.5 233.5,-470.5 239.5,-470.5 239.5,-470.5 372.5,-470.5 372.5,-470.5 378.5,-470.5 384.5,-476.5 384.5,-482.5 384.5,-482.5 384.5,-501 384.5,-501 384.5,-507 378.5,-513 372.5,-513"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="306" y="-495.7" font-family="Helvetica,sans-Serif" font-size="14.00">Trim</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="306" y="-478.45" font-family="Helvetica,sans-Serif" font-size="14.00">(-c:v copy -c:a copy)</text>
|
||||
</g>
|
||||
<!-- local->trim -->
|
||||
<g id="edge14" class="edge">
|
||||
<title>local->trim</title>
|
||||
<path fill="none" stroke="black" d="M278.12,-579.22C278.12,-579.22 278.12,-524.75 278.12,-524.75"/>
|
||||
<polygon fill="black" stroke="black" points="281.63,-524.75 278.13,-514.75 274.63,-524.75 281.63,-524.75"/>
|
||||
</g>
|
||||
<!-- progress -->
|
||||
<g id="node17" class="node">
|
||||
<title>progress</title>
|
||||
<polygon fill="none" stroke="black" points="241.5,-407.25 84.5,-407.25 84.5,-364.75 247.5,-364.75 247.5,-401.25 241.5,-407.25"/>
|
||||
<polyline fill="none" stroke="black" points="241.5,-407.25 241.5,-401.25"/>
|
||||
<polyline fill="none" stroke="black" points="247.5,-401.25 241.5,-401.25"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="166" y="-389.95" font-family="Helvetica,sans-Serif" font-size="14.00">Progress Updates</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="166" y="-372.7" font-family="Helvetica,sans-Serif" font-size="14.00">(gRPC → Redis → DB)</text>
|
||||
</g>
|
||||
<!-- transcode->progress -->
|
||||
<g id="edge15" class="edge">
|
||||
<title>transcode->progress</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M134.69,-470.09C134.69,-470.09 134.69,-419.14 134.69,-419.14"/>
|
||||
<polygon fill="black" stroke="black" points="138.19,-419.14 134.69,-409.14 131.19,-419.14 138.19,-419.14"/>
|
||||
</g>
|
||||
<!-- trim->progress -->
|
||||
<g id="edge16" class="edge">
|
||||
<title>trim->progress</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M237.5,-470.09C237.5,-470.09 237.5,-419.14 237.5,-419.14"/>
|
||||
<polygon fill="black" stroke="black" points="241,-419.14 237.5,-409.14 234,-419.14 241,-419.14"/>
|
||||
</g>
|
||||
<!-- grpc_stream -->
|
||||
<g id="node14" class="node">
|
||||
<title>grpc_stream</title>
|
||||
<path fill="none" stroke="black" d="M304.33,-286.5C304.33,-286.5 89.19,-286.5 89.19,-286.5 83.19,-286.5 73.67,-281.64 70.15,-276.78 70.15,-276.78 22.71,-211.22 22.71,-211.22 19.19,-206.36 21.67,-201.5 27.67,-201.5 27.67,-201.5 242.81,-201.5 242.81,-201.5 248.81,-201.5 258.33,-206.36 261.85,-211.22 261.85,-211.22 309.29,-276.78 309.29,-276.78 312.81,-281.64 310.33,-286.5 304.33,-286.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="166" y="-247.95" font-family="Helvetica,sans-Serif" font-size="14.00">StreamProgress</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="166" y="-230.7" font-family="Helvetica,sans-Serif" font-size="14.00">(server streaming)</text>
|
||||
</g>
|
||||
<!-- grpc_stream->processing -->
|
||||
<g id="edge18" class="edge">
|
||||
<title>grpc_stream->processing</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M166,-201.1C166,-167.71 166,-127 166,-127 166,-127 770.51,-127 770.51,-127"/>
|
||||
<polygon fill="black" stroke="black" points="770.51,-130.5 780.51,-127 770.51,-123.5 770.51,-130.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="476.38" y="-170" font-family="Helvetica,sans-Serif" font-size="10.00">update status</text>
|
||||
</g>
|
||||
<!-- grpc_submit->pending -->
|
||||
<g id="edge20" class="edge">
|
||||
<title>grpc_submit->pending</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M450,-201.06C450,-186.11 450,-173 450,-173 450,-173 912.08,-173 912.08,-173 912.08,-173 912.08,-214.2 912.08,-214.2"/>
|
||||
<polygon fill="black" stroke="black" points="908.58,-214.2 912.08,-224.2 915.58,-214.2 908.58,-214.2"/>
|
||||
</g>
|
||||
<!-- grpc_cancel->cancelled -->
|
||||
<g id="edge22" class="edge">
|
||||
<title>grpc_cancel->cancelled</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="5,2" d="M728.29,-214C836.93,-214 1062.12,-214 1062.12,-214 1062.12,-214 1062.12,-63.76 1062.12,-63.76"/>
|
||||
<polygon fill="black" stroke="black" points="1065.62,-63.76 1062.12,-53.76 1058.62,-63.76 1065.62,-63.76"/>
|
||||
</g>
|
||||
<!-- progress->grpc_stream -->
|
||||
<!-- dispatch->sfn_start -->
|
||||
<g id="edge17" class="edge">
|
||||
<title>progress->grpc_stream</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M166,-364.43C166,-364.43 166,-298.49 166,-298.49"/>
|
||||
<polygon fill="black" stroke="black" points="169.5,-298.49 166,-288.49 162.5,-298.49 169.5,-298.49"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="204.62" y="-333.25" font-family="Helvetica,sans-Serif" font-size="10.00">stream to client</text>
|
||||
<title>dispatch->sfn_start</title>
|
||||
<path fill="none" stroke="black" d="M336.81,-552.63C336.81,-533.84 336.81,-467 336.81,-467 336.81,-467 1404.18,-467 1404.18,-467"/>
|
||||
<polygon fill="black" stroke="black" points="1404.18,-470.5 1414.18,-467 1404.18,-463.5 1404.18,-470.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="809.3" y="-470.25" font-family="Helvetica,sans-Serif" font-size="10.00">lambda</text>
|
||||
</g>
|
||||
<!-- s3_download -->
|
||||
<g id="node12" class="node">
|
||||
<title>s3_download</title>
|
||||
<path fill="none" stroke="black" d="M144.38,-402.5C144.38,-402.5 61.62,-402.5 61.62,-402.5 55.62,-402.5 49.62,-396.5 49.62,-390.5 49.62,-390.5 49.62,-372 49.62,-372 49.62,-366 55.62,-360 61.62,-360 61.62,-360 144.38,-360 144.38,-360 150.38,-360 156.38,-366 156.38,-372 156.38,-372 156.38,-390.5 156.38,-390.5 156.38,-396.5 150.38,-402.5 144.38,-402.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="103" y="-385.2" font-family="Helvetica,sans-Serif" font-size="14.00">S3 Download</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="103" y="-367.95" font-family="Helvetica,sans-Serif" font-size="14.00">(MinIO)</text>
|
||||
</g>
|
||||
<!-- celery_task->s3_download -->
|
||||
<g id="edge12" class="edge">
|
||||
<title>celery_task->s3_download</title>
|
||||
<path fill="none" stroke="black" d="M103,-445.17C103,-445.17 103,-414.33 103,-414.33"/>
|
||||
<polygon fill="black" stroke="black" points="106.5,-414.33 103,-404.33 99.5,-414.33 106.5,-414.33"/>
|
||||
</g>
|
||||
<!-- ffmpeg_local -->
|
||||
<g id="node13" class="node">
|
||||
<title>ffmpeg_local</title>
|
||||
<path fill="none" stroke="black" d="M153,-317C153,-317 59,-317 59,-317 53,-317 47,-311 47,-305 47,-305 47,-286.5 47,-286.5 47,-280.5 53,-274.5 59,-274.5 59,-274.5 153,-274.5 153,-274.5 159,-274.5 165,-280.5 165,-286.5 165,-286.5 165,-305 165,-305 165,-311 159,-317 153,-317"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="106" y="-299.7" font-family="Helvetica,sans-Serif" font-size="14.00">FFmpeg</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="106" y="-282.45" font-family="Helvetica,sans-Serif" font-size="14.00">transcode/trim</text>
|
||||
</g>
|
||||
<!-- s3_download->ffmpeg_local -->
|
||||
<g id="edge13" class="edge">
|
||||
<title>s3_download->ffmpeg_local</title>
|
||||
<path fill="none" stroke="black" d="M103,-359.67C103,-359.67 103,-328.83 103,-328.83"/>
|
||||
<polygon fill="black" stroke="black" points="106.5,-328.83 103,-318.83 99.5,-328.83 106.5,-328.83"/>
|
||||
</g>
|
||||
<!-- s3_upload -->
|
||||
<g id="node14" class="node">
|
||||
<title>s3_upload</title>
|
||||
<path fill="none" stroke="black" d="M138.62,-229.5C138.62,-229.5 75.38,-229.5 75.38,-229.5 69.38,-229.5 63.38,-223.5 63.38,-217.5 63.38,-217.5 63.38,-199 63.38,-199 63.38,-193 69.38,-187 75.38,-187 75.38,-187 138.62,-187 138.62,-187 144.62,-187 150.62,-193 150.62,-199 150.62,-199 150.62,-217.5 150.62,-217.5 150.62,-223.5 144.62,-229.5 138.62,-229.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="107" y="-212.2" font-family="Helvetica,sans-Serif" font-size="14.00">S3 Upload</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="107" y="-194.95" font-family="Helvetica,sans-Serif" font-size="14.00">(MinIO)</text>
|
||||
</g>
|
||||
<!-- ffmpeg_local->s3_upload -->
|
||||
<g id="edge14" class="edge">
|
||||
<title>ffmpeg_local->s3_upload</title>
|
||||
<path fill="none" stroke="black" d="M107,-274.12C107,-274.12 107,-241.45 107,-241.45"/>
|
||||
<polygon fill="black" stroke="black" points="110.5,-241.45 107,-231.45 103.5,-241.45 110.5,-241.45"/>
|
||||
</g>
|
||||
<!-- db_update -->
|
||||
<g id="node15" class="node">
|
||||
<title>db_update</title>
|
||||
<path fill="none" stroke="black" d="M180.88,-144C180.88,-144 35.12,-144 35.12,-144 29.12,-144 23.12,-138 23.12,-132 23.12,-132 23.12,-113.5 23.12,-113.5 23.12,-107.5 29.12,-101.5 35.12,-101.5 35.12,-101.5 180.88,-101.5 180.88,-101.5 186.88,-101.5 192.88,-107.5 192.88,-113.5 192.88,-113.5 192.88,-132 192.88,-132 192.88,-138 186.88,-144 180.88,-144"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="108" y="-126.7" font-family="Helvetica,sans-Serif" font-size="14.00">DB Update</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="108" y="-109.45" font-family="Helvetica,sans-Serif" font-size="14.00">(update_job_progress)</text>
|
||||
</g>
|
||||
<!-- s3_upload->db_update -->
|
||||
<g id="edge15" class="edge">
|
||||
<title>s3_upload->db_update</title>
|
||||
<path fill="none" stroke="black" d="M107,-186.67C107,-186.67 107,-155.83 107,-155.83"/>
|
||||
<polygon fill="black" stroke="black" points="110.5,-155.83 107,-145.83 103.5,-155.83 110.5,-155.83"/>
|
||||
</g>
|
||||
<!-- db_update->completed -->
|
||||
<g id="edge16" class="edge">
|
||||
<title>db_update->completed</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M193.17,-117C345.61,-117 649.29,-117 649.29,-117 649.29,-117 649.29,-43 649.29,-43 649.29,-43 675.4,-43 675.4,-43"/>
|
||||
<polygon fill="black" stroke="black" points="675.4,-46.5 685.4,-43 675.4,-39.5 675.4,-46.5"/>
|
||||
</g>
|
||||
<!-- lambda_fn -->
|
||||
<g id="node17" class="node">
|
||||
<title>lambda_fn</title>
|
||||
<path fill="none" stroke="black" d="M1546,-402.5C1546,-402.5 1428,-402.5 1428,-402.5 1422,-402.5 1416,-396.5 1416,-390.5 1416,-390.5 1416,-372 1416,-372 1416,-366 1422,-360 1428,-360 1428,-360 1546,-360 1546,-360 1552,-360 1558,-366 1558,-372 1558,-372 1558,-390.5 1558,-390.5 1558,-396.5 1552,-402.5 1546,-402.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="1487" y="-385.2" font-family="Helvetica,sans-Serif" font-size="14.00">Lambda</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="1487" y="-367.95" font-family="Helvetica,sans-Serif" font-size="14.00">FFmpeg container</text>
|
||||
</g>
|
||||
<!-- sfn_start->lambda_fn -->
|
||||
<g id="edge18" class="edge">
|
||||
<title>sfn_start->lambda_fn</title>
|
||||
<path fill="none" stroke="black" d="M1477,-445.17C1477,-445.17 1477,-414.33 1477,-414.33"/>
|
||||
<polygon fill="black" stroke="black" points="1480.5,-414.33 1477,-404.33 1473.5,-414.33 1480.5,-414.33"/>
|
||||
</g>
|
||||
<!-- s3_dl_aws -->
|
||||
<g id="node18" class="node">
|
||||
<title>s3_dl_aws</title>
|
||||
<path fill="none" stroke="black" d="M1534.38,-317C1534.38,-317 1451.62,-317 1451.62,-317 1445.62,-317 1439.62,-311 1439.62,-305 1439.62,-305 1439.62,-286.5 1439.62,-286.5 1439.62,-280.5 1445.62,-274.5 1451.62,-274.5 1451.62,-274.5 1534.38,-274.5 1534.38,-274.5 1540.38,-274.5 1546.38,-280.5 1546.38,-286.5 1546.38,-286.5 1546.38,-305 1546.38,-305 1546.38,-311 1540.38,-317 1534.38,-317"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="1493" y="-299.7" font-family="Helvetica,sans-Serif" font-size="14.00">S3 Download</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="1493" y="-282.45" font-family="Helvetica,sans-Serif" font-size="14.00">(AWS)</text>
|
||||
</g>
|
||||
<!-- lambda_fn->s3_dl_aws -->
|
||||
<g id="edge19" class="edge">
|
||||
<title>lambda_fn->s3_dl_aws</title>
|
||||
<path fill="none" stroke="black" d="M1493,-359.67C1493,-359.67 1493,-328.83 1493,-328.83"/>
|
||||
<polygon fill="black" stroke="black" points="1496.5,-328.83 1493,-318.83 1489.5,-328.83 1496.5,-328.83"/>
|
||||
</g>
|
||||
<!-- ffmpeg_aws -->
|
||||
<g id="node19" class="node">
|
||||
<title>ffmpeg_aws</title>
|
||||
<path fill="none" stroke="black" d="M1545,-229.5C1545,-229.5 1451,-229.5 1451,-229.5 1445,-229.5 1439,-223.5 1439,-217.5 1439,-217.5 1439,-199 1439,-199 1439,-193 1445,-187 1451,-187 1451,-187 1545,-187 1545,-187 1551,-187 1557,-193 1557,-199 1557,-199 1557,-217.5 1557,-217.5 1557,-223.5 1551,-229.5 1545,-229.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="1498" y="-212.2" font-family="Helvetica,sans-Serif" font-size="14.00">FFmpeg</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="1498" y="-194.95" font-family="Helvetica,sans-Serif" font-size="14.00">transcode/trim</text>
|
||||
</g>
|
||||
<!-- s3_dl_aws->ffmpeg_aws -->
|
||||
<g id="edge20" class="edge">
|
||||
<title>s3_dl_aws->ffmpeg_aws</title>
|
||||
<path fill="none" stroke="black" d="M1493,-274.12C1493,-274.12 1493,-241.45 1493,-241.45"/>
|
||||
<polygon fill="black" stroke="black" points="1496.5,-241.45 1493,-231.45 1489.5,-241.45 1496.5,-241.45"/>
|
||||
</g>
|
||||
<!-- s3_ul_aws -->
|
||||
<g id="node20" class="node">
|
||||
<title>s3_ul_aws</title>
|
||||
<path fill="none" stroke="black" d="M1532.62,-144C1532.62,-144 1469.38,-144 1469.38,-144 1463.38,-144 1457.38,-138 1457.38,-132 1457.38,-132 1457.38,-113.5 1457.38,-113.5 1457.38,-107.5 1463.38,-101.5 1469.38,-101.5 1469.38,-101.5 1532.62,-101.5 1532.62,-101.5 1538.62,-101.5 1544.62,-107.5 1544.62,-113.5 1544.62,-113.5 1544.62,-132 1544.62,-132 1544.62,-138 1538.62,-144 1532.62,-144"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="1501" y="-126.7" font-family="Helvetica,sans-Serif" font-size="14.00">S3 Upload</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="1501" y="-109.45" font-family="Helvetica,sans-Serif" font-size="14.00">(AWS)</text>
|
||||
</g>
|
||||
<!-- ffmpeg_aws->s3_ul_aws -->
|
||||
<g id="edge21" class="edge">
|
||||
<title>ffmpeg_aws->s3_ul_aws</title>
|
||||
<path fill="none" stroke="black" d="M1501,-186.67C1501,-186.67 1501,-155.83 1501,-155.83"/>
|
||||
<polygon fill="black" stroke="black" points="1504.5,-155.83 1501,-145.83 1497.5,-155.83 1504.5,-155.83"/>
|
||||
</g>
|
||||
<!-- callback -->
|
||||
<g id="node21" class="node">
|
||||
<title>callback</title>
|
||||
<path fill="none" stroke="black" d="M1585.12,-58.5C1585.12,-58.5 1422.88,-58.5 1422.88,-58.5 1416.88,-58.5 1410.88,-52.5 1410.88,-46.5 1410.88,-46.5 1410.88,-28 1410.88,-28 1410.88,-22 1416.88,-16 1422.88,-16 1422.88,-16 1585.12,-16 1585.12,-16 1591.12,-16 1597.12,-22 1597.12,-28 1597.12,-28 1597.12,-46.5 1597.12,-46.5 1597.12,-52.5 1591.12,-58.5 1585.12,-58.5"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="1504" y="-41.2" font-family="Helvetica,sans-Serif" font-size="14.00">HTTP Callback</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="1504" y="-23.95" font-family="Helvetica,sans-Serif" font-size="14.00">POST /jobs/{id}/callback</text>
|
||||
</g>
|
||||
<!-- s3_ul_aws->callback -->
|
||||
<g id="edge22" class="edge">
|
||||
<title>s3_ul_aws->callback</title>
|
||||
<path fill="none" stroke="black" d="M1501,-101.17C1501,-101.17 1501,-70.33 1501,-70.33"/>
|
||||
<polygon fill="black" stroke="black" points="1504.5,-70.33 1501,-60.33 1497.5,-70.33 1504.5,-70.33"/>
|
||||
</g>
|
||||
<!-- callback->completed -->
|
||||
<g id="edge23" class="edge">
|
||||
<title>callback->completed</title>
|
||||
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M1427.5,-58.88C1427.5,-69.48 1427.5,-80 1427.5,-80 1427.5,-80 786.08,-80 786.08,-80 786.08,-80 786.08,-67.14 786.08,-67.14"/>
|
||||
<polygon fill="black" stroke="black" points="789.58,-67.14 786.08,-57.14 782.58,-67.14 789.58,-67.14"/>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 21 KiB After Width: | Height: | Size: 24 KiB |
@@ -1,14 +1,17 @@
|
||||
<!DOCTYPE html>
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>MPR - Architecture</title>
|
||||
<link rel="stylesheet" href="styles.css">
|
||||
<link rel="stylesheet" href="styles.css" />
|
||||
</head>
|
||||
<body>
|
||||
<h1>MPR - Media Processor</h1>
|
||||
<p>A web-based media transcoding tool with professional architecture.</p>
|
||||
<p>
|
||||
Media transcoding platform with dual execution modes: local (Celery
|
||||
+ MinIO) and cloud (AWS Step Functions + Lambda + S3).
|
||||
</p>
|
||||
|
||||
<nav>
|
||||
<a href="#overview">System Overview</a>
|
||||
@@ -21,20 +24,42 @@
|
||||
<div class="diagram">
|
||||
<h3>Architecture</h3>
|
||||
<object type="image/svg+xml" data="01-system-overview.svg">
|
||||
<img src="01-system-overview.svg" alt="System Overview">
|
||||
<img src="01-system-overview.svg" alt="System Overview" />
|
||||
</object>
|
||||
<a href="01-system-overview.svg" target="_blank">Open full size</a>
|
||||
<a href="01-system-overview.svg" target="_blank"
|
||||
>Open full size</a
|
||||
>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="legend">
|
||||
<h3>Components</h3>
|
||||
<ul>
|
||||
<li><span class="color-box" style="background: #e8f4f8"></span> Reverse Proxy (nginx)</li>
|
||||
<li><span class="color-box" style="background: #f0f8e8"></span> Application Layer (Django, FastAPI, UI)</li>
|
||||
<li><span class="color-box" style="background: #fff8e8"></span> Worker Layer (Celery, Lambda)</li>
|
||||
<li><span class="color-box" style="background: #f8e8f0"></span> Data Layer (PostgreSQL, Redis, SQS)</li>
|
||||
<li><span class="color-box" style="background: #f0f0f0"></span> Storage (Local FS, S3)</li>
|
||||
<li>
|
||||
<span class="color-box" style="background: #e8f4f8"></span>
|
||||
Reverse Proxy (nginx)
|
||||
</li>
|
||||
<li>
|
||||
<span class="color-box" style="background: #f0f8e8"></span>
|
||||
Application Layer (Django Admin, FastAPI + GraphQL, Timeline
|
||||
UI)
|
||||
</li>
|
||||
<li>
|
||||
<span class="color-box" style="background: #fff8e8"></span>
|
||||
Worker Layer (Celery local mode)
|
||||
</li>
|
||||
<li>
|
||||
<span class="color-box" style="background: #fde8d0"></span>
|
||||
AWS (Step Functions, Lambda - cloud mode)
|
||||
</li>
|
||||
<li>
|
||||
<span class="color-box" style="background: #f8e8f0"></span>
|
||||
Data Layer (PostgreSQL, Redis)
|
||||
</li>
|
||||
<li>
|
||||
<span class="color-box" style="background: #f0f0f0"></span>
|
||||
S3 Storage (MinIO local / AWS S3 cloud)
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
@@ -43,7 +68,7 @@
|
||||
<div class="diagram">
|
||||
<h3>Entity Relationships</h3>
|
||||
<object type="image/svg+xml" data="02-data-model.svg">
|
||||
<img src="02-data-model.svg" alt="Data Model">
|
||||
<img src="02-data-model.svg" alt="Data Model" />
|
||||
</object>
|
||||
<a href="02-data-model.svg" target="_blank">Open full size</a>
|
||||
</div>
|
||||
@@ -52,9 +77,19 @@
|
||||
<div class="legend">
|
||||
<h3>Entities</h3>
|
||||
<ul>
|
||||
<li><span class="color-box" style="background: #4a90d9"></span> MediaAsset - Video/audio files with metadata</li>
|
||||
<li><span class="color-box" style="background: #50b050"></span> TranscodePreset - Encoding configurations</li>
|
||||
<li><span class="color-box" style="background: #d9534f"></span> TranscodeJob - Processing queue items</li>
|
||||
<li>
|
||||
<span class="color-box" style="background: #4a90d9"></span>
|
||||
MediaAsset - Video/audio files (S3 keys as paths)
|
||||
</li>
|
||||
<li>
|
||||
<span class="color-box" style="background: #50b050"></span>
|
||||
TranscodePreset - Encoding configurations
|
||||
</li>
|
||||
<li>
|
||||
<span class="color-box" style="background: #d9534f"></span>
|
||||
TranscodeJob - Processing queue (celery_task_id or
|
||||
execution_arn)
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
@@ -63,7 +98,7 @@
|
||||
<div class="diagram">
|
||||
<h3>Job Lifecycle</h3>
|
||||
<object type="image/svg+xml" data="03-job-flow.svg">
|
||||
<img src="03-job-flow.svg" alt="Job Flow">
|
||||
<img src="03-job-flow.svg" alt="Job Flow" />
|
||||
</object>
|
||||
<a href="03-job-flow.svg" target="_blank">Open full size</a>
|
||||
</div>
|
||||
@@ -72,30 +107,71 @@
|
||||
<div class="legend">
|
||||
<h3>Job States</h3>
|
||||
<ul>
|
||||
<li><span class="color-box" style="background: #ffc107"></span> PENDING - Waiting in queue</li>
|
||||
<li><span class="color-box" style="background: #17a2b8"></span> PROCESSING - Worker executing</li>
|
||||
<li><span class="color-box" style="background: #28a745"></span> COMPLETED - Success</li>
|
||||
<li><span class="color-box" style="background: #dc3545"></span> FAILED - Error occurred</li>
|
||||
<li><span class="color-box" style="background: #6c757d"></span> CANCELLED - User cancelled</li>
|
||||
<li>
|
||||
<span class="color-box" style="background: #ffc107"></span>
|
||||
PENDING - Waiting in queue
|
||||
</li>
|
||||
<li>
|
||||
<span class="color-box" style="background: #17a2b8"></span>
|
||||
PROCESSING - Worker executing
|
||||
</li>
|
||||
<li>
|
||||
<span class="color-box" style="background: #28a745"></span>
|
||||
COMPLETED - Success
|
||||
</li>
|
||||
<li>
|
||||
<span class="color-box" style="background: #dc3545"></span>
|
||||
FAILED - Error occurred
|
||||
</li>
|
||||
<li>
|
||||
<span class="color-box" style="background: #6c757d"></span>
|
||||
CANCELLED - User cancelled
|
||||
</li>
|
||||
</ul>
|
||||
<h3>Execution Modes</h3>
|
||||
<ul>
|
||||
<li>
|
||||
<span class="color-box" style="background: #e8f4e8"></span>
|
||||
Local: Celery + MinIO (S3 API) + FFmpeg
|
||||
</li>
|
||||
<li>
|
||||
<span class="color-box" style="background: #fde8d0"></span>
|
||||
Lambda: Step Functions + Lambda + AWS S3
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<h2>Quick Reference</h2>
|
||||
<pre><code># Generate SVGs from DOT files
|
||||
dot -Tsvg 01-system-overview.dot -o 01-system-overview.svg
|
||||
dot -Tsvg 02-data-model.dot -o 02-data-model.svg
|
||||
dot -Tsvg 03-job-flow.dot -o 03-job-flow.svg
|
||||
<h2>API Interfaces</h2>
|
||||
<pre><code># REST API
|
||||
http://mpr.local.ar/api/docs - Swagger UI
|
||||
POST /api/assets/scan - Scan S3 bucket for media
|
||||
POST /api/jobs/ - Create transcode job
|
||||
POST /api/jobs/{id}/callback - Lambda completion callback
|
||||
|
||||
# Or generate all at once
|
||||
for f in *.dot; do dot -Tsvg "$f" -o "${f%.dot}.svg"; done</code></pre>
|
||||
# GraphQL (GraphiQL)
|
||||
http://mpr.local.ar/graphql - GraphiQL IDE
|
||||
query { assets { id filename } }
|
||||
mutation { createJob(input: {...}) { id status } }
|
||||
mutation { scanMediaFolder { found registered } }</code></pre>
|
||||
|
||||
<h2>Access Points</h2>
|
||||
<pre><code># Add to /etc/hosts
|
||||
<pre><code># Local development
|
||||
127.0.0.1 mpr.local.ar
|
||||
|
||||
# URLs
|
||||
http://mpr.local.ar/admin - Django Admin
|
||||
http://mpr.local.ar/api - FastAPI (docs at /api/docs)
|
||||
http://mpr.local.ar/ui - Timeline UI</code></pre>
|
||||
http://mpr.local.ar/api/docs - FastAPI Swagger
|
||||
http://mpr.local.ar/graphql - GraphiQL
|
||||
http://mpr.local.ar/ - Timeline UI
|
||||
http://localhost:9001 - MinIO Console
|
||||
|
||||
# AWS deployment
|
||||
https://mpr.mcrn.ar/ - Production</code></pre>
|
||||
|
||||
<h2>Quick Reference</h2>
|
||||
<pre><code># Render SVGs from DOT files
|
||||
for f in *.dot; do dot -Tsvg "$f" -o "${f%.dot}.svg"; done
|
||||
|
||||
# Switch executor mode
|
||||
MPR_EXECUTOR=local # Celery + MinIO
|
||||
MPR_EXECUTOR=lambda # Step Functions + Lambda + S3</code></pre>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
122
docs/media-storage.md
Normal file
122
docs/media-storage.md
Normal file
@@ -0,0 +1,122 @@
|
||||
# Media Storage Architecture
|
||||
|
||||
## Overview
|
||||
|
||||
MPR uses **S3-compatible storage** everywhere. Locally via MinIO, in production via AWS S3. The same boto3 code and S3 keys work in both environments - the only difference is the `S3_ENDPOINT_URL` env var.
|
||||
|
||||
## Storage Strategy
|
||||
|
||||
### S3 Buckets
|
||||
|
||||
| Bucket | Env Var | Purpose |
|
||||
|--------|---------|---------|
|
||||
| `mpr-media-in` | `S3_BUCKET_IN` | Source media files |
|
||||
| `mpr-media-out` | `S3_BUCKET_OUT` | Transcoded/trimmed output |
|
||||
|
||||
### S3 Keys as File Paths
|
||||
- **Database**: Stores S3 object keys (e.g., `video1.mp4`, `subfolder/video3.mp4`)
|
||||
- **Local dev**: MinIO serves these via S3 API on port 9000
|
||||
- **AWS**: Real S3, same keys, different endpoint
|
||||
|
||||
### Why S3 Everywhere?
|
||||
1. **Identical code paths** - no branching between local and cloud
|
||||
2. **Seamless executor switching** - Celery and Lambda both use boto3
|
||||
3. **Cloud-native** - ready for production without refactoring
|
||||
|
||||
## Local Development (MinIO)
|
||||
|
||||
### Configuration
|
||||
```bash
|
||||
S3_ENDPOINT_URL=http://minio:9000
|
||||
S3_BUCKET_IN=mpr-media-in
|
||||
S3_BUCKET_OUT=mpr-media-out
|
||||
AWS_ACCESS_KEY_ID=minioadmin
|
||||
AWS_SECRET_ACCESS_KEY=minioadmin
|
||||
```
|
||||
|
||||
### How It Works
|
||||
- MinIO runs as a Docker container (port 9000 API, port 9001 console)
|
||||
- `minio-init` container creates buckets and sets public read access on startup
|
||||
- Nginx proxies `/media/in/` and `/media/out/` to MinIO buckets
|
||||
- Upload files via MinIO Console (http://localhost:9001) or `mc` CLI
|
||||
|
||||
### Upload Files to MinIO
|
||||
```bash
|
||||
# Using mc CLI
|
||||
mc alias set local http://localhost:9000 minioadmin minioadmin
|
||||
mc cp video.mp4 local/mpr-media-in/
|
||||
|
||||
# Using aws CLI with endpoint override
|
||||
aws --endpoint-url http://localhost:9000 s3 cp video.mp4 s3://mpr-media-in/
|
||||
```
|
||||
|
||||
## AWS Production (S3)
|
||||
|
||||
### Configuration
|
||||
```bash
|
||||
# No S3_ENDPOINT_URL = uses real AWS S3
|
||||
S3_BUCKET_IN=mpr-media-in
|
||||
S3_BUCKET_OUT=mpr-media-out
|
||||
AWS_REGION=us-east-1
|
||||
AWS_ACCESS_KEY_ID=<real-key>
|
||||
AWS_SECRET_ACCESS_KEY=<real-secret>
|
||||
```
|
||||
|
||||
### Upload Files to S3
|
||||
```bash
|
||||
aws s3 cp video.mp4 s3://mpr-media-in/
|
||||
aws s3 sync /local/media/ s3://mpr-media-in/
|
||||
```
|
||||
|
||||
## Storage Module
|
||||
|
||||
`core/storage.py` provides all S3 operations:
|
||||
|
||||
```python
|
||||
from core.storage import (
|
||||
get_s3_client, # boto3 client (MinIO or AWS)
|
||||
list_objects, # List bucket contents, filter by extension
|
||||
download_file, # Download S3 object to local path
|
||||
download_to_temp, # Download to temp file (caller cleans up)
|
||||
upload_file, # Upload local file to S3
|
||||
get_presigned_url, # Generate presigned URL
|
||||
BUCKET_IN, # Input bucket name
|
||||
BUCKET_OUT, # Output bucket name
|
||||
)
|
||||
```
|
||||
|
||||
## API Endpoints
|
||||
|
||||
### Scan Media (REST)
|
||||
```http
|
||||
POST /api/assets/scan
|
||||
```
|
||||
Lists objects in `S3_BUCKET_IN`, registers new media files.
|
||||
|
||||
### Scan Media (GraphQL)
|
||||
```graphql
|
||||
mutation { scanMediaFolder { found registered skipped files } }
|
||||
```
|
||||
|
||||
## Job Flow with S3
|
||||
|
||||
### Local Mode (Celery)
|
||||
1. Celery task receives `source_key` and `output_key`
|
||||
2. Downloads source from `S3_BUCKET_IN` to temp file
|
||||
3. Runs FFmpeg locally
|
||||
4. Uploads result to `S3_BUCKET_OUT`
|
||||
5. Cleans up temp files
|
||||
|
||||
### Lambda Mode (AWS)
|
||||
1. Step Functions invokes Lambda with S3 keys
|
||||
2. Lambda downloads source from `S3_BUCKET_IN` to `/tmp`
|
||||
3. Runs FFmpeg in container
|
||||
4. Uploads result to `S3_BUCKET_OUT`
|
||||
5. Calls back to API with result
|
||||
|
||||
Both paths use the same S3 buckets and key structure.
|
||||
|
||||
## Supported File Types
|
||||
|
||||
**Video:** `.mp4`, `.mkv`, `.avi`, `.mov`, `.webm`, `.flv`, `.wmv`, `.m4v`
|
||||
**Audio:** `.mp3`, `.wav`, `.flac`, `.aac`, `.ogg`, `.m4a`
|
||||
@@ -1,21 +0,0 @@
|
||||
"""
|
||||
MPR gRPC Module
|
||||
|
||||
Provides gRPC server and client for worker communication.
|
||||
|
||||
Generated stubs (worker_pb2.py, worker_pb2_grpc.py) are created by:
|
||||
python schema/generate.py --proto
|
||||
|
||||
Requires: grpcio, grpcio-tools
|
||||
"""
|
||||
|
||||
from .client import WorkerClient, get_client
|
||||
from .server import WorkerServicer, serve, update_job_progress
|
||||
|
||||
__all__ = [
|
||||
"WorkerClient",
|
||||
"WorkerServicer",
|
||||
"get_client",
|
||||
"serve",
|
||||
"update_job_progress",
|
||||
]
|
||||
0
media/in/.gitkeep
Normal file
0
media/in/.gitkeep
Normal file
0
media/out/.gitkeep
Normal file
0
media/out/.gitkeep
Normal file
41
modelgen/__init__.py
Normal file
41
modelgen/__init__.py
Normal file
@@ -0,0 +1,41 @@
|
||||
"""
|
||||
Modelgen - Generic Model Generation Tool
|
||||
|
||||
Generates typed models from various sources to various output formats.
|
||||
|
||||
Input sources:
|
||||
- Configuration files (soleprint config.json style)
|
||||
- Python dataclasses in schema/ folder
|
||||
- Existing codebases: Django, SQLAlchemy, Prisma (for extraction)
|
||||
|
||||
Output formats:
|
||||
- pydantic: Pydantic BaseModel classes
|
||||
- django: Django ORM models
|
||||
- typescript: TypeScript interfaces
|
||||
- protobuf: Protocol Buffer definitions
|
||||
- prisma: Prisma schema
|
||||
|
||||
Usage:
|
||||
python -m soleprint.station.tools.modelgen from-config -c config.json -o models.py
|
||||
python -m soleprint.station.tools.modelgen from-schema -o models/ --targets pydantic,typescript
|
||||
python -m soleprint.station.tools.modelgen extract --source /path/to/django --targets pydantic
|
||||
python -m soleprint.station.tools.modelgen list-formats
|
||||
"""
|
||||
|
||||
__version__ = "0.2.0"
|
||||
|
||||
from .generator import GENERATORS, BaseGenerator
|
||||
from .loader import ConfigLoader, load_config
|
||||
from .model_generator import ModelGenerator
|
||||
|
||||
# Backwards compatibility
|
||||
WRITERS = GENERATORS
|
||||
|
||||
__all__ = [
|
||||
"ModelGenerator",
|
||||
"ConfigLoader",
|
||||
"load_config",
|
||||
"GENERATORS",
|
||||
"WRITERS",
|
||||
"BaseGenerator",
|
||||
]
|
||||
367
modelgen/__main__.py
Normal file
367
modelgen/__main__.py
Normal file
@@ -0,0 +1,367 @@
|
||||
"""
|
||||
Modelgen - Generic Model Generation Tool
|
||||
|
||||
Generates typed models from various sources to various formats.
|
||||
|
||||
Input sources:
|
||||
- from-config: Configuration files (soleprint config.json style)
|
||||
- from-schema: Python dataclasses in schema/ folder
|
||||
- extract: Existing codebases (Django, SQLAlchemy, Prisma)
|
||||
|
||||
Output formats:
|
||||
- pydantic: Pydantic BaseModel classes
|
||||
- django: Django ORM models
|
||||
- typescript: TypeScript interfaces
|
||||
- protobuf: Protocol Buffer definitions
|
||||
- prisma: Prisma schema
|
||||
|
||||
Usage:
    python -m modelgen --help
    python -m modelgen from-config -c config.json -o models.py
    python -m modelgen from-schema -o models/ --targets pydantic,typescript
    python -m modelgen extract --source /path/to/django --targets pydantic
    python -m modelgen generate --config schema/modelgen.json
"""
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from .generator import GENERATORS
|
||||
|
||||
|
||||
def cmd_from_config(args):
    """Generate models from a configuration file (soleprint config.json style).

    Exits with status 1 when the config file does not exist.
    """
    from .loader import load_config
    from .model_generator import ModelGenerator

    config_path = Path(args.config)
    output_path = Path(args.output)

    # Guard clause: bail out before doing any work if the config is missing.
    if not config_path.exists():
        print(f"Error: Config file not found: {config_path}", file=sys.stderr)
        sys.exit(1)

    print(f"Loading config: {config_path}")
    config = load_config(config_path)

    print(f"Generating {args.format} models to: {output_path}")
    result_path = ModelGenerator(
        config=config,
        output_path=output_path,
        output_format=args.format,
    ).generate()

    print(f"Models generated: {result_path}")
|
||||
|
||||
|
||||
def cmd_from_schema(args):
    """Generate models from Python dataclasses in a schema/ folder.

    Loads the schema package (optionally filtered by --include groups),
    prints a summary of what was found, then runs each requested generator
    target. Exits with status 1 when the schema folder is missing.
    """
    # Fix: removed `from .writer import write_file` — it was never used here.
    from .loader import load_schema

    # Default to ./schema when no explicit path was given.
    schema_path = Path(args.schema) if args.schema else Path.cwd() / "schema"

    if not schema_path.exists():
        print(f"Error: Schema folder not found: {schema_path}", file=sys.stderr)
        print(
            "Create a schema/ folder with Python dataclasses and an __init__.py",
            file=sys.stderr,
        )
        print("that exports DATACLASSES and ENUMS lists.", file=sys.stderr)
        sys.exit(1)

    # Parse include groups (e.g. "dataclasses,enums"); None means load all.
    include = None
    if args.include:
        include = {g.strip() for g in args.include.split(",")}

    print(f"Loading schema: {schema_path}")
    schema = load_schema(schema_path, include=include)

    # Summarize what the loader picked up.
    loaded = []
    if schema.models:
        loaded.append(f"{len(schema.models)} models")
    if schema.enums:
        loaded.append(f"{len(schema.enums)} enums")
    if schema.api_models:
        loaded.append(f"{len(schema.api_models)} api models")
    if schema.grpc_messages:
        loaded.append(f"{len(schema.grpc_messages)} grpc messages")
    print(f"Found {', '.join(loaded)}")

    # Run each requested generator target.
    targets = [t.strip() for t in args.targets.split(",")]
    output_dir = Path(args.output)

    for target in targets:
        if target not in GENERATORS:
            print(f"Warning: Unknown target '{target}', skipping", file=sys.stderr)
            continue

        generator = GENERATORS[target]()
        ext = generator.file_extension()

        # Determine output filename (use target name to avoid overwrites);
        # a single target whose extension matches --output writes directly to it.
        if len(targets) == 1 and args.output.endswith(ext):
            output_file = output_dir
        else:
            output_file = output_dir / f"models_{target}{ext}"

        print(f"Generating {target} to: {output_file}")
        generator.generate(schema, output_file)

    print("Done!")
|
||||
|
||||
|
||||
def cmd_extract(args):
    """Extract models from existing codebase."""
    from .loader.extract import EXTRACTORS

    source_path = Path(args.source)
    if not source_path.exists():
        print(f"Error: Source path not found: {source_path}", file=sys.stderr)
        sys.exit(1)

    framework = args.framework
    extractor = None

    if framework == "auto":
        # Probe every registered extractor until one recognizes the codebase.
        for name, extractor_cls in EXTRACTORS.items():
            candidate = extractor_cls(source_path)
            if candidate.detect():
                framework = name
                extractor = candidate
                print(f"Detected framework: {framework}")
                break

        if not extractor:
            print("Error: Could not auto-detect framework", file=sys.stderr)
            print(f"Available frameworks: {list(EXTRACTORS.keys())}", file=sys.stderr)
            sys.exit(1)
    else:
        # Explicit framework: validate the name before instantiating.
        if framework not in EXTRACTORS:
            print(f"Error: Unknown framework: {framework}", file=sys.stderr)
            print(f"Available: {list(EXTRACTORS.keys())}", file=sys.stderr)
            sys.exit(1)
        extractor = EXTRACTORS[framework](source_path)

    print(f"Extracting from: {source_path}")
    models, enums = extractor.extract()

    print(f"Extracted {len(models)} models, {len(enums)} enums")

    # Fan the extracted definitions out to each requested generator.
    targets = [t.strip() for t in args.targets.split(",")]
    output_dir = Path(args.output)

    for target in targets:
        if target not in GENERATORS:
            print(f"Warning: Unknown target '{target}', skipping", file=sys.stderr)
            continue

        gen = GENERATORS[target]()
        suffix = gen.file_extension()

        # Determine output filename (use target name to avoid overwrites).
        single_file = len(targets) == 1 and args.output.endswith(suffix)
        output_file = output_dir if single_file else output_dir / f"models_{target}{suffix}"

        print(f"Generating {target} to: {output_file}")
        gen.generate((models, enums), output_file)

    print("Done!")
|
||||
|
||||
|
||||
def cmd_generate(args):
    """Generate all targets from a JSON config file."""
    import json

    from .loader import load_schema

    config_path = Path(args.config)
    if not config_path.exists():
        print(f"Error: Config file not found: {config_path}", file=sys.stderr)
        sys.exit(1)

    config = json.loads(config_path.read_text())

    # Paths in the config are resolved relative to the current working directory.
    schema_path = Path(config["schema"])
    if not schema_path.exists():
        print(f"Error: Schema folder not found: {schema_path}", file=sys.stderr)
        sys.exit(1)

    print(f"Loading schema: {schema_path}")

    for spec in config["targets"]:
        target = spec["target"]
        output = Path(spec["output"])
        include_groups = set(spec.get("include", []))

        if target not in GENERATORS:
            print(f"Warning: Unknown target '{target}', skipping", file=sys.stderr)
            continue

        # Reload the schema with this target's include filter applied
        # (an empty include list means "load everything").
        schema = load_schema(schema_path, include=include_groups or None)

        gen = GENERATORS[target](name_map=spec.get("name_map", {}))
        print(f"Generating {target} to: {output}")
        gen.generate(schema, output)

    print("Done!")
|
||||
|
||||
|
||||
def cmd_list_formats(args):
    """Print the names of all registered output formats, one per line."""
    lines = ["Available output formats:"]
    lines.extend(f"  - {fmt}" for fmt in GENERATORS)
    print("\n".join(lines))
|
||||
|
||||
|
||||
def main():
    """CLI entry point: build the argument parser and dispatch to a subcommand.

    Each subcommand registers its handler via set_defaults(func=...), so
    dispatch at the bottom is simply args.func(args).
    """
    parser = argparse.ArgumentParser(
        description="Modelgen - Generic Model Generation Tool",
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )

    subparsers = parser.add_subparsers(dest="command", required=True)

    # Available formats for help text
    formats = list(GENERATORS.keys())
    formats_str = ", ".join(formats)

    # from-config command
    config_parser = subparsers.add_parser(
        "from-config",
        help="Generate models from soleprint configuration file",
    )
    config_parser.add_argument(
        "--config",
        "-c",
        type=str,
        required=True,
        help="Path to configuration file (e.g., config.json)",
    )
    config_parser.add_argument(
        "--output",
        "-o",
        type=str,
        required=True,
        help="Output path (file or directory)",
    )
    config_parser.add_argument(
        "--format",
        "-f",
        type=str,
        default="pydantic",
        choices=["pydantic"],  # Only pydantic for config mode
        help="Output format (default: pydantic)",
    )
    config_parser.set_defaults(func=cmd_from_config)

    # from-schema command
    schema_parser = subparsers.add_parser(
        "from-schema",
        help="Generate models from Python dataclasses in schema/ folder",
    )
    schema_parser.add_argument(
        "--schema",
        "-s",
        type=str,
        default=None,
        help="Path to schema folder (default: ./schema)",
    )
    schema_parser.add_argument(
        "--output",
        "-o",
        type=str,
        required=True,
        help="Output path (file or directory)",
    )
    schema_parser.add_argument(
        "--targets",
        "-t",
        type=str,
        default="pydantic",
        help=f"Comma-separated output targets ({formats_str})",
    )
    schema_parser.add_argument(
        "--include",
        type=str,
        default=None,
        help="Comma-separated model groups to include (dataclasses,enums,api,grpc). Default: all.",
    )
    schema_parser.set_defaults(func=cmd_from_schema)

    # extract command
    extract_parser = subparsers.add_parser(
        "extract",
        help="Extract models from existing codebase",
    )
    extract_parser.add_argument(
        "--source",
        "-s",
        type=str,
        required=True,
        help="Path to source codebase",
    )
    extract_parser.add_argument(
        "--framework",
        "-f",
        type=str,
        choices=["django", "sqlalchemy", "prisma", "auto"],
        default="auto",
        help="Source framework (default: auto-detect)",
    )
    extract_parser.add_argument(
        "--output",
        "-o",
        type=str,
        required=True,
        help="Output path (file or directory)",
    )
    extract_parser.add_argument(
        "--targets",
        "-t",
        type=str,
        default="pydantic",
        help=f"Comma-separated output targets ({formats_str})",
    )
    extract_parser.set_defaults(func=cmd_extract)

    # generate command (config-driven multi-target)
    gen_parser = subparsers.add_parser(
        "generate",
        help="Generate all targets from a JSON config file",
    )
    gen_parser.add_argument(
        "--config",
        "-c",
        type=str,
        required=True,
        help="Path to generation config file (e.g., schema/modelgen.json)",
    )
    gen_parser.set_defaults(func=cmd_generate)

    # list-formats command
    formats_parser = subparsers.add_parser(
        "list-formats",
        help="List available output formats",
    )
    formats_parser.set_defaults(func=cmd_list_formats)

    # Dispatch to whichever handler the chosen subcommand registered.
    args = parser.parse_args()
    args.func(args)


if __name__ == "__main__":
    main()
|
||||
44
modelgen/generator/__init__.py
Normal file
44
modelgen/generator/__init__.py
Normal file
@@ -0,0 +1,44 @@
|
||||
"""
|
||||
Generator - Stack-specific code generators for modelgen.
|
||||
|
||||
Supported generators:
|
||||
- PydanticGenerator: Pydantic BaseModel classes
|
||||
- DjangoGenerator: Django ORM models
|
||||
- TypeScriptGenerator: TypeScript interfaces
|
||||
- ProtobufGenerator: Protocol Buffer definitions
|
||||
- PrismaGenerator: Prisma schema
|
||||
- GrapheneGenerator: Graphene ObjectType/InputObjectType classes
|
||||
"""
|
||||
|
||||
from typing import Dict, Type
|
||||
|
||||
from .base import BaseGenerator
|
||||
from .django import DjangoGenerator
|
||||
from .graphene import GrapheneGenerator
|
||||
from .prisma import PrismaGenerator
|
||||
from .protobuf import ProtobufGenerator
|
||||
from .pydantic import PydanticGenerator
|
||||
from .typescript import TypeScriptGenerator
|
||||
|
||||
# Registry of available generators
|
||||
GENERATORS: Dict[str, Type[BaseGenerator]] = {
|
||||
"pydantic": PydanticGenerator,
|
||||
"django": DjangoGenerator,
|
||||
"typescript": TypeScriptGenerator,
|
||||
"ts": TypeScriptGenerator, # Alias
|
||||
"protobuf": ProtobufGenerator,
|
||||
"proto": ProtobufGenerator, # Alias
|
||||
"prisma": PrismaGenerator,
|
||||
"graphene": GrapheneGenerator,
|
||||
}
|
||||
|
||||
__all__ = [
|
||||
"BaseGenerator",
|
||||
"PydanticGenerator",
|
||||
"DjangoGenerator",
|
||||
"GrapheneGenerator",
|
||||
"TypeScriptGenerator",
|
||||
"ProtobufGenerator",
|
||||
"PrismaGenerator",
|
||||
"GENERATORS",
|
||||
]
|
||||
30
modelgen/generator/base.py
Normal file
30
modelgen/generator/base.py
Normal file
@@ -0,0 +1,30 @@
|
||||
"""
|
||||
Base Generator
|
||||
|
||||
Abstract base class for all code generators.
|
||||
"""
|
||||
|
||||
from abc import ABC, abstractmethod
from pathlib import Path
from typing import Any, Dict, Optional
|
||||
|
||||
|
||||
class BaseGenerator(ABC):
    """Abstract base for code generators.

    Concrete generators implement ``generate`` (emit code for a set of
    models) and ``file_extension`` (the suffix for generated files).

    Args:
        name_map: Optional mapping of source model names to output names;
            ``map_name`` falls back to the original name when a name is
            not in the map.
    """

    # Fix: the parameter was annotated `Dict[str, str] = None`, an invalid
    # implicit-Optional; it is now an explicit Optional.
    def __init__(self, name_map: Optional[Dict[str, str]] = None):
        # `None` (or any falsy value) means "no renames".
        self.name_map = name_map or {}

    def map_name(self, name: str) -> str:
        """Apply name_map to a model name, returning it unchanged if unmapped."""
        return self.name_map.get(name, name)

    @abstractmethod
    def generate(self, models: Any, output_path: Path) -> None:
        """Generate code for the given models to the specified path."""
        pass

    @abstractmethod
    def file_extension(self) -> str:
        """Return the file extension for this format (e.g. ".py")."""
        pass
|
||||
270
modelgen/generator/django.py
Normal file
270
modelgen/generator/django.py
Normal file
@@ -0,0 +1,270 @@
|
||||
"""
|
||||
Django Generator
|
||||
|
||||
Generates Django ORM models from model definitions.
|
||||
"""
|
||||
|
||||
import dataclasses as dc
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Any, List, get_type_hints
|
||||
|
||||
from ..helpers import format_opts, get_origin_name, get_type_name, unwrap_optional
|
||||
from ..loader.schema import EnumDefinition, ModelDefinition
|
||||
from ..types import DJANGO_SPECIAL, DJANGO_TYPES
|
||||
from .base import BaseGenerator
|
||||
|
||||
|
||||
class DjangoGenerator(BaseGenerator):
    """Generates Django ORM model files.

    Accepts three input shapes in ``generate``: a schema-loader object
    exposing ``.models``/``.enums``, a ``(models, enums)`` tuple of
    definitions, or a plain list of Python dataclasses.
    """

    def file_extension(self) -> str:
        # Generated output is a Python module of Django models.
        return ".py"

    def generate(self, models, output_path: Path) -> None:
        """Generate Django models to output_path."""
        output_path.parent.mkdir(parents=True, exist_ok=True)

        # Handle different input types
        if hasattr(models, "models"):
            # SchemaLoader or similar
            content = self._generate_from_definitions(
                models.models, getattr(models, "enums", [])
            )
        elif isinstance(models, tuple):
            # (models, enums) tuple
            content = self._generate_from_definitions(models[0], models[1])
        elif isinstance(models, list):
            # List of dataclasses (MPR style)
            content = self._generate_from_dataclasses(models)
        else:
            raise ValueError(f"Unsupported input type: {type(models)}")

        output_path.write_text(content)

    def _generate_from_definitions(
        self, models: List[ModelDefinition], enums: List[EnumDefinition]
    ) -> str:
        """Generate from ModelDefinition objects."""
        lines = self._generate_header()

        # Generate enums as TextChoices
        for enum_def in enums:
            lines.extend(self._generate_text_choices(enum_def))
            lines.append("")

        # Generate models
        for model_def in models:
            lines.extend(self._generate_model_from_definition(model_def))
            lines.extend(["", ""])

        return "\n".join(lines)

    def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
        """Generate from Python dataclasses (MPR style)."""
        lines = self._generate_header()

        for cls in dataclasses:
            lines.extend(self._generate_model_from_dataclass(cls))
            lines.extend(["", ""])

        return "\n".join(lines)

    def _generate_header(self) -> List[str]:
        """Generate file header: module docstring plus the imports every
        generated model file needs."""
        return [
            '"""',
            "Django ORM Models - GENERATED FILE",
            "",
            "Do not edit directly. Regenerate using modelgen.",
            '"""',
            "",
            "import uuid",
            "from django.db import models",
            "",
        ]

    def _generate_text_choices(self, enum_def: EnumDefinition) -> List[str]:
        """Generate Django TextChoices from EnumDefinition."""
        lines = [
            f"class {enum_def.name}(models.TextChoices):",
        ]
        for name, value in enum_def.values:
            # Human-readable label derived from the member name.
            label = name.replace("_", " ").title()
            lines.append(f'    {name} = "{value}", "{label}"')
        return lines

    def _generate_model_from_definition(self, model_def: ModelDefinition) -> List[str]:
        """Generate Django model from ModelDefinition."""
        docstring = model_def.docstring or model_def.name
        # chr(10) is "\n" — avoids a backslash inside the f-string expression
        # (a syntax error before Python 3.12). Only the first line is kept.
        lines = [
            f"class {model_def.name}(models.Model):",
            f'    """{docstring.strip().split(chr(10))[0]}"""',
            "",
        ]

        for field in model_def.fields:
            django_field = self._resolve_field_type(
                field.name, field.type_hint, field.default, field.optional
            )
            lines.append(f"    {field.name} = {django_field}")

        # Add Meta and __str__
        lines.extend(
            [
                "",
                "    class Meta:",
                '        ordering = ["-created_at"]'
                if any(f.name == "created_at" for f in model_def.fields)
                else "        pass",
                "",
                "    def __str__(self):",
            ]
        )

        # Determine __str__ return: prefer filename, then name, else pk.
        field_names = [f.name for f in model_def.fields]
        if "filename" in field_names:
            lines.append("        return self.filename")
        elif "name" in field_names:
            lines.append("        return self.name")
        else:
            lines.append("        return str(self.id)")

        return lines

    def _generate_model_from_dataclass(self, cls: type) -> List[str]:
        """Generate Django model from a dataclass (MPR style)."""
        docstring = cls.__doc__ or cls.__name__
        lines = [
            f"class {cls.__name__}(models.Model):",
            f'    """{docstring.strip().split(chr(10))[0]}"""',
            "",
        ]

        hints = get_type_hints(cls)
        fields = {f.name: f for f in dc.fields(cls)}

        # Check for enums and add Status inner class if needed.
        # NOTE(review): only the FIRST enum-typed field produces an inner
        # class (the loop breaks), and it is always named `Status`, while
        # field declarations below reference the enum by its original name
        # via DJANGO_TYPES["enum"] — confirm the template's expectations.
        for type_hint in hints.values():
            base, _ = unwrap_optional(type_hint)
            if isinstance(base, type) and issubclass(base, Enum):
                lines.append("    class Status(models.TextChoices):")
                for member in base:
                    label = member.name.replace("_", " ").title()
                    lines.append(f'        {member.name} = "{member.value}", "{label}"')
                lines.append("")
                break

        # Generate fields (skip private/underscore attributes).
        for name, type_hint in hints.items():
            if name.startswith("_"):
                continue
            field = fields.get(name)
            default = dc.MISSING
            if field and field.default is not dc.MISSING:
                default = field.default
            django_field = self._resolve_field_type(name, type_hint, default, False)
            lines.append(f"    {name} = {django_field}")

        # Add Meta and __str__
        lines.extend(
            [
                "",
                "    class Meta:",
                '        ordering = ["-created_at"]'
                if "created_at" in hints
                else "        pass",
                "",
                "    def __str__(self):",
            ]
        )

        if "filename" in hints:
            lines.append("        return self.filename")
        elif "name" in hints:
            lines.append("        return self.name")
        else:
            lines.append("        return str(self.id)")

        return lines

    def _resolve_field_type(
        self, name: str, type_hint: Any, default: Any, optional: bool
    ) -> str:
        """Resolve Python type to Django field.

        Returns the right-hand-side expression (as a string) for the
        generated field assignment. Resolution order: special field names,
        containers, UUID/datetime, enums, name-based heuristics, scalars,
        then a CharField fallback.
        """
        # Special fields (e.g. id/created_at-style names handled by table).
        if name in DJANGO_SPECIAL:
            return DJANGO_SPECIAL[name]

        base, is_optional = unwrap_optional(type_hint)
        optional = optional or is_optional
        origin = get_origin_name(base)
        type_name = get_type_name(base)
        opts = format_opts(optional)

        # Container types.
        # NOTE(review): DJANGO_TYPES is indexed by string keys here but by
        # type objects (str/int/float/bool) below — confirm both key styles
        # exist in modelgen.types.
        if origin == "dict":
            return DJANGO_TYPES["dict"]
        if origin == "list":
            return DJANGO_TYPES["list"]

        # UUID / datetime
        if type_name == "UUID":
            return DJANGO_TYPES["UUID"].format(opts=opts)
        if type_name == "datetime":
            return DJANGO_TYPES["datetime"].format(opts=opts)

        # Enum
        if isinstance(base, type) and issubclass(base, Enum):
            enum_name = base.__name__
            extra = []
            if optional:
                extra.append("null=True, blank=True")
            if default is not dc.MISSING and isinstance(default, Enum):
                extra.append(f"default={enum_name}.{default.name}")
            return DJANGO_TYPES["enum"].format(
                enum_name=enum_name,
                opts=", " + ", ".join(extra) if extra else ""
            )

        # Text fields (based on name heuristics)
        if base is str and any(
            x in name for x in ("message", "comments", "description")
        ):
            return DJANGO_TYPES["text"]

        # BigInt fields
        if base is int and name in ("file_size", "bitrate"):
            return DJANGO_TYPES["bigint"].format(opts=opts)

        # String with max_length chosen by name heuristic:
        # paths get 1000, filenames 500, everything else 255.
        if base is str:
            max_length = 1000 if "path" in name else 500 if "filename" in name else 255
            return DJANGO_TYPES[str].format(
                max_length=max_length, opts=", " + opts if opts else ""
            )

        # Integer
        if base is int:
            extra = [opts] if opts else []
            if default is not dc.MISSING and not callable(default):
                extra.append(f"default={default}")
            return DJANGO_TYPES[int].format(opts=", ".join(extra))

        # Float
        if base is float:
            extra = [opts] if opts else []
            if default is not dc.MISSING and not callable(default):
                extra.append(f"default={default}")
            return DJANGO_TYPES[float].format(opts=", ".join(extra))

        # Boolean
        if base is bool:
            default_val = default if default is not dc.MISSING else False
            return DJANGO_TYPES[bool].format(default=default_val)

        # Fallback to CharField
        return DJANGO_TYPES[str].format(
            max_length=255, opts=", " + opts if opts else ""
        )
|
||||
236
modelgen/generator/graphene.py
Normal file
236
modelgen/generator/graphene.py
Normal file
@@ -0,0 +1,236 @@
|
||||
"""
|
||||
Graphene Generator
|
||||
|
||||
Generates graphene ObjectType and InputObjectType classes from model definitions.
|
||||
Only generates type definitions — queries, mutations, and resolvers are hand-written.
|
||||
"""
|
||||
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Any, List, get_type_hints
|
||||
|
||||
from ..helpers import get_origin_name, get_type_name, unwrap_optional
|
||||
from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
|
||||
from ..types import GRAPHENE_RESOLVERS
|
||||
from .base import BaseGenerator
|
||||
|
||||
|
||||
class GrapheneGenerator(BaseGenerator):
    """Generates graphene type definition files.

    Only type definitions are emitted — queries, mutations, and resolvers
    are hand-written. Accepts the same three input shapes as the other
    generators: a schema-loader object, a ``(models, enums)`` tuple, or a
    list of Python dataclasses.
    """

    def file_extension(self) -> str:
        # Generated output is a Python module of graphene types.
        return ".py"

    def generate(self, models, output_path: Path) -> None:
        """Generate graphene types to output_path."""
        output_path.parent.mkdir(parents=True, exist_ok=True)

        if hasattr(models, "models"):
            # SchemaLoader
            content = self._generate_from_definitions(
                models.models,
                getattr(models, "enums", []),
                getattr(models, "api_models", []),
            )
        elif isinstance(models, tuple):
            content = self._generate_from_definitions(models[0], models[1], [])
        elif isinstance(models, list):
            content = self._generate_from_dataclasses(models)
        else:
            raise ValueError(f"Unsupported input type: {type(models)}")

        output_path.write_text(content)

    def _generate_from_definitions(
        self,
        models: List[ModelDefinition],
        enums: List[EnumDefinition],
        api_models: List[ModelDefinition],
    ) -> str:
        """Generate from ModelDefinition objects."""
        lines = self._generate_header()

        # Generate enums as graphene.Enum
        for enum_def in enums:
            lines.extend(self._generate_enum(enum_def))
            lines.append("")
            lines.append("")

        # Generate domain models as ObjectType
        for model_def in models:
            lines.extend(self._generate_object_type(model_def))
            lines.append("")
            lines.append("")

        # Generate API models — request types as InputObjectType, others as ObjectType
        for model_def in api_models:
            if model_def.name.endswith("Request"):
                lines.extend(self._generate_input_type(model_def))
            else:
                lines.extend(self._generate_object_type(model_def))
            lines.append("")
            lines.append("")

        # Trim trailing blank separators and end with a single newline.
        return "\n".join(lines).rstrip() + "\n"

    def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
        """Generate from Python dataclasses."""
        lines = self._generate_header()

        # First pass: emit each referenced enum exactly once.
        enums_generated = set()
        for cls in dataclasses:
            hints = get_type_hints(cls)
            for type_hint in hints.values():
                base, _ = unwrap_optional(type_hint)
                if isinstance(base, type) and issubclass(base, Enum):
                    if base.__name__ not in enums_generated:
                        lines.extend(self._generate_enum_from_python(base))
                        lines.append("")
                        lines.append("")
                        enums_generated.add(base.__name__)

        # Second pass: emit one ObjectType per dataclass.
        for cls in dataclasses:
            lines.extend(self._generate_object_type_from_dataclass(cls))
            lines.append("")
            lines.append("")

        return "\n".join(lines).rstrip() + "\n"

    def _generate_header(self) -> List[str]:
        """Generate file header: module docstring plus the graphene import."""
        return [
            '"""',
            "Graphene Types - GENERATED FILE",
            "",
            "Do not edit directly. Regenerate using modelgen.",
            '"""',
            "",
            "import graphene",
            "",
            "",
        ]

    def _generate_enum(self, enum_def: EnumDefinition) -> List[str]:
        """Generate graphene.Enum from EnumDefinition."""
        lines = [f"class {enum_def.name}(graphene.Enum):"]
        for name, value in enum_def.values:
            lines.append(f'    {name} = "{value}"')
        return lines

    def _generate_enum_from_python(self, enum_cls: type) -> List[str]:
        """Generate graphene.Enum from Python Enum."""
        lines = [f"class {enum_cls.__name__}(graphene.Enum):"]
        for member in enum_cls:
            lines.append(f'    {member.name} = "{member.value}"')
        return lines

    def _generate_object_type(self, model_def: ModelDefinition) -> List[str]:
        """Generate graphene.ObjectType from ModelDefinition."""
        name = model_def.name
        # Append Type suffix if not already present
        type_name = f"{name}Type" if not name.endswith("Type") else name

        lines = [f"class {type_name}(graphene.ObjectType):"]
        if model_def.docstring:
            doc = model_def.docstring.strip().split("\n")[0]
            lines.append(f'    """{doc}"""')
            lines.append("")

        if not model_def.fields:
            lines.append("    pass")
        else:
            for field in model_def.fields:
                graphene_type = self._resolve_type(field.type_hint, field.optional)
                lines.append(f"    {field.name} = {graphene_type}")

        return lines

    def _generate_input_type(self, model_def: ModelDefinition) -> List[str]:
        """Generate graphene.InputObjectType from ModelDefinition."""
        import dataclasses as dc

        name = model_def.name
        # Convert FooRequest -> FooInput
        if name.endswith("Request"):
            input_name = name[: -len("Request")] + "Input"
        else:
            input_name = f"{name}Input"

        lines = [f"class {input_name}(graphene.InputObjectType):"]
        if model_def.docstring:
            doc = model_def.docstring.strip().split("\n")[0]
            lines.append(f'    """{doc}"""')
            lines.append("")

        if not model_def.fields:
            lines.append("    pass")
        else:
            for field in model_def.fields:
                graphene_type = self._resolve_type(field.type_hint, field.optional)
                # Required only if not optional AND no default value
                has_default = field.default is not dc.MISSING
                if not field.optional and not has_default:
                    graphene_type = self._make_required(graphene_type)
                elif has_default and not field.optional:
                    graphene_type = self._add_default(graphene_type, field.default)
                lines.append(f"    {field.name} = {graphene_type}")

        return lines

    def _generate_object_type_from_dataclass(self, cls: type) -> List[str]:
        """Generate graphene.ObjectType from a dataclass."""
        # Fix: removed an unused `import dataclasses as dc` that shadowed
        # nothing and was never referenced in this method.
        type_name = f"{cls.__name__}Type"
        lines = [f"class {type_name}(graphene.ObjectType):"]

        hints = get_type_hints(cls)
        for name, type_hint in hints.items():
            if name.startswith("_"):
                continue
            graphene_type = self._resolve_type(type_hint, False)
            lines.append(f"    {name} = {graphene_type}")

        return lines

    def _resolve_type(self, type_hint: Any, optional: bool) -> str:
        """Resolve Python type to graphene field call string."""
        base, is_optional = unwrap_optional(type_hint)
        # NOTE(review): `optional` is computed but currently unused — the
        # required/default decoration happens in _generate_input_type.
        optional = optional or is_optional
        origin = get_origin_name(base)
        type_name = get_type_name(base)

        # Look up resolver: container origin first, then type name, then the
        # raw type object, then the generic enum resolver.
        resolver = (
            GRAPHENE_RESOLVERS.get(origin)
            or GRAPHENE_RESOLVERS.get(type_name)
            or GRAPHENE_RESOLVERS.get(base)
            or (
                GRAPHENE_RESOLVERS["enum"]
                if isinstance(base, type) and issubclass(base, Enum)
                else None
            )
        )

        result = resolver(base) if resolver else "graphene.String"

        # List types already have () syntax from resolver
        if result.startswith("graphene.List("):
            return result

        # Scalar types: add () call
        return f"{result}()"

    def _make_required(self, field_str: str) -> str:
        """Add required=True to a graphene field.

        NOTE(review): only fields ending in "()" are decorated; List
        fields pass through unchanged — confirm that is intended.
        """
        if field_str.endswith("()"):
            return field_str[:-1] + "required=True)"
        return field_str

    def _add_default(self, field_str: str, default: Any) -> str:
        """Add default_value to a graphene field."""
        if callable(default):
            # default_factory — skip, graphene doesn't support factories
            return field_str
        if field_str.endswith("()"):
            return field_str[:-1] + f"default_value={default!r})"
        return field_str
|
||||
173
modelgen/generator/prisma.py
Normal file
173
modelgen/generator/prisma.py
Normal file
@@ -0,0 +1,173 @@
|
||||
"""
|
||||
Prisma Generator
|
||||
|
||||
Generates Prisma schema from model definitions.
|
||||
"""
|
||||
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Any, List, get_type_hints
|
||||
|
||||
from ..helpers import get_origin_name, get_type_name, unwrap_optional
|
||||
from ..loader.schema import EnumDefinition, ModelDefinition
|
||||
from ..types import PRISMA_SPECIAL, PRISMA_TYPES
|
||||
from .base import BaseGenerator
|
||||
|
||||
|
||||
class PrismaGenerator(BaseGenerator):
|
||||
"""Generates Prisma schema files."""
|
||||
|
||||
def file_extension(self) -> str:
    """Extension used for generated Prisma schema files."""
    return ".prisma"
|
||||
|
||||
def generate(self, models, output_path: Path) -> None:
    """Generate a Prisma schema file at output_path.

    Accepts a SchemaLoader-like object (with ``.models``), a
    ``(models, enums)`` tuple, or a plain list of dataclasses.
    """
    output_path.parent.mkdir(parents=True, exist_ok=True)

    # Dispatch on the shape of the input.
    if hasattr(models, "models"):
        # SchemaLoader
        schema_text = self._generate_from_definitions(
            models.models, getattr(models, "enums", [])
        )
    elif isinstance(models, tuple):
        # Pre-split (models, enums) pair
        model_defs, enum_defs = models[0], models[1]
        schema_text = self._generate_from_definitions(model_defs, enum_defs)
    elif isinstance(models, list):
        # List of dataclasses (MPR style)
        schema_text = self._generate_from_dataclasses(models)
    else:
        raise ValueError(f"Unsupported input type: {type(models)}")

    output_path.write_text(schema_text)
|
||||
|
||||
def _generate_from_definitions(
|
||||
self, models: List[ModelDefinition], enums: List[EnumDefinition]
|
||||
) -> str:
|
||||
"""Generate from ModelDefinition objects."""
|
||||
lines = self._generate_header()
|
||||
|
||||
# Generate enums
|
||||
for enum_def in enums:
|
||||
lines.extend(self._generate_enum(enum_def))
|
||||
lines.append("")
|
||||
|
||||
# Generate models
|
||||
for model_def in models:
|
||||
lines.extend(self._generate_model_from_definition(model_def))
|
||||
lines.append("")
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
def _generate_from_dataclasses(self, dataclasses: "List[type]") -> str:
    """Render a schema from plain Python dataclasses (MPR style)."""
    parts = self._generate_header()

    # Emit each Enum referenced by any field exactly once, before the models.
    seen_enums = set()
    for cls in dataclasses:
        for hint in get_type_hints(cls).values():
            inner, _ = unwrap_optional(hint)
            if not (isinstance(inner, type) and issubclass(inner, Enum)):
                continue
            if inner.__name__ in seen_enums:
                continue
            parts += self._generate_enum_from_python(inner)
            parts.append("")
            seen_enums.add(inner.__name__)

    for cls in dataclasses:
        parts += self._generate_model_from_dataclass(cls)
        parts.append("")

    return "\n".join(parts)
|
||||
|
||||
def _generate_header(self) -> List[str]:
|
||||
"""Generate file header with datasource and generator."""
|
||||
return [
|
||||
"// Prisma Schema - GENERATED FILE",
|
||||
"//",
|
||||
"// Do not edit directly. Regenerate using modelgen.",
|
||||
"",
|
||||
"generator client {",
|
||||
' provider = "prisma-client-py"',
|
||||
"}",
|
||||
"",
|
||||
"datasource db {",
|
||||
' provider = "postgresql"',
|
||||
' url = env("DATABASE_URL")',
|
||||
"}",
|
||||
"",
|
||||
]
|
||||
|
||||
def _generate_enum(self, enum_def: EnumDefinition) -> List[str]:
|
||||
"""Generate Prisma enum from EnumDefinition."""
|
||||
lines = [f"enum {enum_def.name} {{"]
|
||||
for name, _ in enum_def.values:
|
||||
lines.append(f" {name}")
|
||||
lines.append("}")
|
||||
return lines
|
||||
|
||||
def _generate_enum_from_python(self, enum_cls: type) -> List[str]:
|
||||
"""Generate Prisma enum from Python Enum."""
|
||||
lines = [f"enum {enum_cls.__name__} {{"]
|
||||
for member in enum_cls:
|
||||
lines.append(f" {member.name}")
|
||||
lines.append("}")
|
||||
return lines
|
||||
|
||||
def _generate_model_from_definition(self, model_def: ModelDefinition) -> List[str]:
|
||||
"""Generate Prisma model from ModelDefinition."""
|
||||
lines = [f"model {model_def.name} {{"]
|
||||
|
||||
for field in model_def.fields:
|
||||
prisma_type = self._resolve_type(
|
||||
field.name, field.type_hint, field.optional
|
||||
)
|
||||
lines.append(f" {field.name} {prisma_type}")
|
||||
|
||||
lines.append("}")
|
||||
return lines
|
||||
|
||||
def _generate_model_from_dataclass(self, cls: type) -> List[str]:
|
||||
"""Generate Prisma model from a dataclass."""
|
||||
lines = [f"model {cls.__name__} {{"]
|
||||
|
||||
for name, type_hint in get_type_hints(cls).items():
|
||||
if name.startswith("_"):
|
||||
continue
|
||||
prisma_type = self._resolve_type(name, type_hint, False)
|
||||
lines.append(f" {name} {prisma_type}")
|
||||
|
||||
lines.append("}")
|
||||
return lines
|
||||
|
||||
def _resolve_type(self, name: str, type_hint: Any, optional: bool) -> str:
    """Map a Python annotation to a Prisma type string, honoring optionality."""
    # Well-known field names (e.g. id/timestamps) have fixed renderings.
    if name in PRISMA_SPECIAL:
        return PRISMA_SPECIAL[name]

    inner, hint_optional = unwrap_optional(type_hint)
    optional = optional or hint_optional

    def finish(rendered: str) -> str:
        # Prisma marks nullable columns with a trailing "?".
        return f"{rendered}?" if optional else rendered

    origin = get_origin_name(inner)
    if origin in ("dict", "list"):
        # Container types are stored as Json columns by default.
        return finish(PRISMA_TYPES.get(origin, "Json"))

    type_name = get_type_name(inner)
    if type_name in ("UUID", "datetime"):
        return finish(PRISMA_TYPES.get(type_name, "String"))

    if isinstance(inner, type) and issubclass(inner, Enum):
        # Enum fields reference the generated Prisma enum by class name.
        return finish(inner.__name__)

    # Basic scalar types; unknown types degrade to String.
    return finish(PRISMA_TYPES.get(inner, "String"))
|
||||
168
modelgen/generator/protobuf.py
Normal file
168
modelgen/generator/protobuf.py
Normal file
@@ -0,0 +1,168 @@
|
||||
"""
|
||||
Protobuf Generator
|
||||
|
||||
Generates Protocol Buffer definitions from model definitions.
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any, List, get_type_hints
|
||||
|
||||
from ..helpers import get_origin_name, unwrap_optional
|
||||
from ..loader.schema import GrpcServiceDefinition, ModelDefinition
|
||||
from ..types import PROTO_RESOLVERS
|
||||
from .base import BaseGenerator
|
||||
|
||||
|
||||
class ProtobufGenerator(BaseGenerator):
|
||||
"""Generates Protocol Buffer definition files."""
|
||||
|
||||
def file_extension(self) -> str:
    """Extension used for generated Protocol Buffer files."""
    return ".proto"
|
||||
|
||||
def generate(self, models, output_path: Path) -> None:
    """Generate protobuf definitions to output_path.

    Accepts a SchemaLoader (exposing ``grpc_messages``), a two-element
    ``(messages, service_def)`` tuple, or a plain list of dataclasses.

    Raises:
        ValueError: if ``models`` matches none of the supported shapes.
    """
    output_path.parent.mkdir(parents=True, exist_ok=True)

    if hasattr(models, "grpc_messages"):
        # SchemaLoader with gRPC definitions
        content = self._generate_from_loader(models)
    elif isinstance(models, tuple) and len(models) >= 2:
        # (messages, service_def) tuple.
        # BUG FIX: the previous guard required len(models) >= 3, which a
        # two-element (messages, service_def) tuple can never satisfy, so
        # every valid tuple fell through to the ValueError below.
        content = self._generate_from_definitions(models[0], models[1])
    elif isinstance(models, list):
        # List of dataclasses (MPR style)
        content = self._generate_from_dataclasses(models)
    else:
        raise ValueError(f"Unsupported input type: {type(models)}")

    output_path.write_text(content)
|
||||
|
||||
def _generate_from_loader(self, loader) -> str:
    """Render a .proto file from a SchemaLoader's gRPC definitions."""
    service = loader.grpc_service
    # Fall back to placeholder package/service when the loader has none.
    if service:
        lines = self._generate_header(service.package, service.name, service.methods)
    else:
        lines = self._generate_header("service", "Service", [])

    for message_def in loader.grpc_messages:
        lines.extend(self._generate_message_from_definition(message_def))
        lines.append("")

    return "\n".join(lines)
|
||||
|
||||
def _generate_from_definitions(
    self, messages: "List[ModelDefinition]", service: "GrpcServiceDefinition"
) -> str:
    """Render a .proto file from message definitions plus a service."""
    lines = self._generate_header(service.package, service.name, service.methods)
    for message_def in messages:
        lines.extend(self._generate_message_from_definition(message_def))
        lines.append("")
    return "\n".join(lines)
|
||||
|
||||
def _generate_from_dataclasses(self, dataclasses: "List[type]") -> str:
    """Render a .proto file from dataclasses; no service block is emitted."""
    lines = self._generate_header("service", "Service", [])
    for cls in dataclasses:
        lines.extend(self._generate_message_from_dataclass(cls))
        lines.append("")
    return "\n".join(lines)
|
||||
|
||||
def _generate_header(
|
||||
self, package: str, service_name: str, methods: List[dict]
|
||||
) -> List[str]:
|
||||
"""Generate file header with service definition."""
|
||||
lines = [
|
||||
"// Protocol Buffer Definitions - GENERATED FILE",
|
||||
"//",
|
||||
"// Do not edit directly. Regenerate using modelgen.",
|
||||
"",
|
||||
'syntax = "proto3";',
|
||||
"",
|
||||
f"package {package};",
|
||||
"",
|
||||
]
|
||||
|
||||
if methods:
|
||||
lines.append(f"service {service_name} {{")
|
||||
for m in methods:
|
||||
req = (
|
||||
m["request"].__name__
|
||||
if hasattr(m["request"], "__name__")
|
||||
else str(m["request"])
|
||||
)
|
||||
resp = (
|
||||
m["response"].__name__
|
||||
if hasattr(m["response"], "__name__")
|
||||
else str(m["response"])
|
||||
)
|
||||
returns = f"stream {resp}" if m.get("stream_response") else resp
|
||||
lines.append(f" rpc {m['name']}({req}) returns ({returns});")
|
||||
lines.extend(["}", ""])
|
||||
|
||||
return lines
|
||||
|
||||
def _generate_message_from_definition(
|
||||
self, model_def: ModelDefinition
|
||||
) -> List[str]:
|
||||
"""Generate proto message from ModelDefinition."""
|
||||
lines = [f"message {model_def.name} {{"]
|
||||
|
||||
if not model_def.fields:
|
||||
lines.append(" // Empty")
|
||||
else:
|
||||
for i, field in enumerate(model_def.fields, 1):
|
||||
proto_type, optional = self._resolve_type(field.type_hint)
|
||||
prefix = (
|
||||
"optional "
|
||||
if optional and not proto_type.startswith("repeated")
|
||||
else ""
|
||||
)
|
||||
lines.append(f" {prefix}{proto_type} {field.name} = {i};")
|
||||
|
||||
lines.append("}")
|
||||
return lines
|
||||
|
||||
def _generate_message_from_dataclass(self, cls: type) -> List[str]:
|
||||
"""Generate proto message from a dataclass."""
|
||||
lines = [f"message {cls.__name__} {{"]
|
||||
|
||||
hints = get_type_hints(cls)
|
||||
if not hints:
|
||||
lines.append(" // Empty")
|
||||
else:
|
||||
for i, (name, type_hint) in enumerate(hints.items(), 1):
|
||||
proto_type, optional = self._resolve_type(type_hint)
|
||||
prefix = (
|
||||
"optional "
|
||||
if optional and not proto_type.startswith("repeated")
|
||||
else ""
|
||||
)
|
||||
lines.append(f" {prefix}{proto_type} {name} = {i};")
|
||||
|
||||
lines.append("}")
|
||||
return lines
|
||||
|
||||
def _resolve_type(self, type_hint: Any) -> tuple[str, bool]:
    """Map a Python annotation to ``(proto_type, is_optional)``.

    Unknown types degrade to "string"; repeated fields are never optional.
    """
    inner, is_optional = unwrap_optional(type_hint)
    resolver = PROTO_RESOLVERS.get(get_origin_name(inner)) or PROTO_RESOLVERS.get(inner)
    if resolver is None:
        return "string", is_optional
    rendered = resolver(inner)
    if rendered.startswith("repeated"):
        return rendered, False
    return rendered, is_optional
|
||||
548
modelgen/generator/pydantic.py
Normal file
548
modelgen/generator/pydantic.py
Normal file
@@ -0,0 +1,548 @@
|
||||
"""
|
||||
Pydantic Generator
|
||||
|
||||
Generates Pydantic BaseModel classes from model definitions.
|
||||
Supports two output modes:
|
||||
- File output: flat models (backwards compatible)
|
||||
- Directory output: CRUD variants (Create/Update/Response) per model
|
||||
"""
|
||||
|
||||
import dataclasses as dc
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Any, List, get_type_hints
|
||||
|
||||
from ..helpers import get_origin_name, get_type_name, unwrap_optional
|
||||
from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
|
||||
from ..types import PYDANTIC_RESOLVERS
|
||||
from .base import BaseGenerator
|
||||
|
||||
# Field names excluded from each generated CRUD variant: Create omits
# server-managed columns, Update additionally allows status fields but not
# identity/timestamps, Response keeps everything.
SKIP_FIELDS = {
    "Create": {"id", "created_at", "updated_at", "status", "error_message"},
    "Update": {"id", "created_at", "updated_at"},
    "Response": set(),
}
|
||||
|
||||
|
||||
class PydanticGenerator(BaseGenerator):
|
||||
"""Generates Pydantic model files."""
|
||||
|
||||
def file_extension(self) -> str:
    """Extension used for generated Pydantic model files."""
    return ".py"
|
||||
|
||||
def generate(self, models, output_path: Path) -> None:
    """Generate Pydantic models to output_path.

    A path ending in ``.py`` produces a single flat file (backwards
    compatible); any other path is treated as a directory and receives
    per-model CRUD variant files.
    """
    target = Path(output_path)
    if target.suffix == ".py":
        # File mode: flat models
        self._generate_flat_file(models, target)
    else:
        # Directory mode: CRUD variants
        self._generate_crud_directory(models, target)
|
||||
|
||||
def _generate_flat_file(self, models, output_path: Path) -> None:
|
||||
"""Generate flat models to a single file (original behavior)."""
|
||||
output_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
if hasattr(models, "get_shared_component"):
|
||||
content = self._generate_from_config(models)
|
||||
elif hasattr(models, "models"):
|
||||
content = self._generate_from_definitions(
|
||||
models.models, getattr(models, "enums", [])
|
||||
)
|
||||
elif isinstance(models, tuple):
|
||||
content = self._generate_from_definitions(models[0], models[1])
|
||||
elif isinstance(models, list):
|
||||
content = self._generate_from_dataclasses(models)
|
||||
else:
|
||||
raise ValueError(f"Unsupported input type: {type(models)}")
|
||||
|
||||
output_path.write_text(content)
|
||||
|
||||
def _generate_crud_directory(self, models, output_dir: Path) -> None:
    """Generate CRUD variant files in a directory.

    Emits base.py (shared BaseSchema with ORM mode), one module per model
    containing its enums plus Create/Update/Response classes, and an
    __init__.py re-exporting everything.

    Raises:
        ValueError: when models is neither a SchemaLoader-like object nor
        a (models, enums) tuple — dataclass lists are not supported here.
    """
    output_dir.mkdir(parents=True, exist_ok=True)

    # Normalize input into (model_defs, enum_defs).
    if hasattr(models, "models"):
        model_defs = models.models
        enum_defs = getattr(models, "enums", [])
    elif isinstance(models, tuple):
        model_defs = models[0]
        enum_defs = models[1]
    else:
        raise ValueError(f"Unsupported input type for CRUD mode: {type(models)}")

    # base.py — shared parent enabling from_attributes (ORM) mode.
    base_content = "\n".join([
        '"""Pydantic Base Schema - GENERATED FILE"""',
        "",
        "from pydantic import BaseModel, ConfigDict",
        "",
        "",
        "class BaseSchema(BaseModel):",
        ' """Base schema with ORM mode."""',
        " model_config = ConfigDict(from_attributes=True)",
        "",
    ])
    (output_dir / "base.py").write_text(base_content)

    # Per-model files; collect import/export lines for __init__.py as we go.
    imports = ["from .base import BaseSchema"]
    all_exports = ['"BaseSchema"']

    for model_def in model_defs:
        mapped = self.map_name(model_def.name)
        module_name = mapped.lower()

        lines = [
            f'"""{model_def.name} Schemas - GENERATED FILE"""',
            "",
            "from datetime import datetime",
            "from enum import Enum",
            "from typing import Any, Dict, List, Optional",
            "from uuid import UUID",
            "",
            "from .base import BaseSchema",
            "",
        ]

        # Inline enums used by this model
        model_enums = self._collect_model_enums(model_def, enum_defs)
        for enum_def in model_enums:
            lines.append("")
            lines.extend(self._generate_enum(enum_def))
            lines.append("")

        # CRUD variants
        for suffix in ["Create", "Update", "Response"]:
            lines.append("")
            lines.extend(self._generate_crud_model(model_def, mapped, suffix))

        lines.append("")
        content = "\n".join(lines)
        (output_dir / f"{module_name}.py").write_text(content)

        # Track imports
        imports.append(
            f"from .{module_name} import {mapped}Create, {mapped}Update, {mapped}Response"
        )
        all_exports.extend([
            f'"{mapped}Create"', f'"{mapped}Update"', f'"{mapped}Response"'
        ])

        for enum_def in model_enums:
            imports.append(f"from .{module_name} import {enum_def.name}")
            all_exports.append(f'"{enum_def.name}"')

    # __init__.py — aggregate re-exports with an explicit __all__.
    init_content = "\n".join([
        '"""API Schemas - GENERATED FILE"""',
        "",
        *imports,
        "",
        f"__all__ = [{', '.join(all_exports)}]",
        "",
    ])
    (output_dir / "__init__.py").write_text(init_content)
|
||||
|
||||
def _collect_model_enums(
    self, model_def: "ModelDefinition", enum_defs: "List[EnumDefinition]"
) -> "List[EnumDefinition]":
    """Return the EnumDefinitions whose Enum types appear in this model's fields."""
    referenced = set()
    for field in model_def.fields:
        inner, _ = unwrap_optional(field.type_hint)
        if isinstance(inner, type) and issubclass(inner, Enum):
            referenced.add(inner.__name__)
    return [e for e in enum_defs if e.name in referenced]
|
||||
|
||||
def _generate_crud_model(
    self, model_def: "ModelDefinition", mapped_name: str, suffix: str
) -> "List[str]":
    """Render one CRUD variant (Create/Update/Response) class for a model."""
    class_name = f"{mapped_name}{suffix}"
    skipped = SKIP_FIELDS.get(suffix, set())

    lines = [
        f"class {class_name}(BaseSchema):",
        f' """{class_name} schema."""',
    ]

    emitted = 0
    for field in model_def.fields:
        if field.name.startswith("_") or field.name in skipped:
            continue

        py_type = self._resolve_type(field.type_hint, field.optional)
        # Update payloads may omit any field, so everything becomes Optional.
        if suffix == "Update" and "Optional" not in py_type:
            py_type = f"Optional[{py_type}]"

        default = self._format_default(field.default, "Optional" in py_type)
        lines.append(f" {field.name}: {py_type}{default}")
        emitted += 1

    if not emitted:
        lines.append(" pass")

    return lines
|
||||
|
||||
# =========================================================================
|
||||
# Flat file generation (original behavior)
|
||||
# =========================================================================
|
||||
|
||||
def _generate_from_definitions(
|
||||
self, models: List[ModelDefinition], enums: List[EnumDefinition]
|
||||
) -> str:
|
||||
lines = self._generate_header()
|
||||
for enum_def in enums:
|
||||
lines.extend(self._generate_enum(enum_def))
|
||||
lines.append("")
|
||||
for model_def in models:
|
||||
lines.extend(self._generate_model_from_definition(model_def))
|
||||
lines.append("")
|
||||
return "\n".join(lines)
|
||||
|
||||
def _generate_from_dataclasses(self, dataclasses: "List[type]") -> str:
    """Render a flat models file from plain dataclasses."""
    out = self._generate_header()

    # Emit each referenced Enum exactly once, ahead of the model classes.
    seen = set()
    for cls in dataclasses:
        for hint in get_type_hints(cls).values():
            inner, _ = unwrap_optional(hint)
            if not (isinstance(inner, type) and issubclass(inner, Enum)):
                continue
            if inner.__name__ in seen:
                continue
            out += self._generate_enum_from_python(inner)
            out.append("")
            seen.add(inner.__name__)

    for cls in dataclasses:
        out += self._generate_model_from_dataclass(cls)
        out.append("")

    return "\n".join(out)
|
||||
|
||||
def _generate_header(self) -> List[str]:
|
||||
return [
|
||||
'"""',
|
||||
"Pydantic Models - GENERATED FILE",
|
||||
"",
|
||||
"Do not edit directly. Regenerate using modelgen.",
|
||||
'"""',
|
||||
"",
|
||||
"from datetime import datetime",
|
||||
"from enum import Enum",
|
||||
"from typing import Any, Dict, List, Optional",
|
||||
"from uuid import UUID",
|
||||
"",
|
||||
"from pydantic import BaseModel, Field",
|
||||
"",
|
||||
]
|
||||
|
||||
def _generate_enum(self, enum_def: EnumDefinition) -> List[str]:
|
||||
lines = [f"class {enum_def.name}(str, Enum):"]
|
||||
for name, value in enum_def.values:
|
||||
lines.append(f' {name} = "{value}"')
|
||||
return lines
|
||||
|
||||
def _generate_enum_from_python(self, enum_cls: type) -> List[str]:
|
||||
lines = [f"class {enum_cls.__name__}(str, Enum):"]
|
||||
for member in enum_cls:
|
||||
lines.append(f' {member.name} = "{member.value}"')
|
||||
return lines
|
||||
|
||||
def _generate_model_from_definition(self, model_def: ModelDefinition) -> List[str]:
|
||||
docstring = model_def.docstring or model_def.name
|
||||
lines = [
|
||||
f"class {model_def.name}(BaseModel):",
|
||||
f' """{docstring.strip().split(chr(10))[0]}"""',
|
||||
]
|
||||
if not model_def.fields:
|
||||
lines.append(" pass")
|
||||
else:
|
||||
for field in model_def.fields:
|
||||
py_type = self._resolve_type(field.type_hint, field.optional)
|
||||
default = self._format_default(field.default, field.optional)
|
||||
lines.append(f" {field.name}: {py_type}{default}")
|
||||
return lines
|
||||
|
||||
def _generate_model_from_dataclass(self, cls: type) -> List[str]:
|
||||
docstring = cls.__doc__ or cls.__name__
|
||||
lines = [
|
||||
f"class {cls.__name__}(BaseModel):",
|
||||
f' """{docstring.strip().split(chr(10))[0]}"""',
|
||||
]
|
||||
hints = get_type_hints(cls)
|
||||
fields = {f.name: f for f in dc.fields(cls)}
|
||||
for name, type_hint in hints.items():
|
||||
if name.startswith("_"):
|
||||
continue
|
||||
field = fields.get(name)
|
||||
default_val = dc.MISSING
|
||||
if field:
|
||||
if field.default is not dc.MISSING:
|
||||
default_val = field.default
|
||||
py_type = self._resolve_type(type_hint, False)
|
||||
default = self._format_default(default_val, "Optional" in py_type)
|
||||
lines.append(f" {name}: {py_type}{default}")
|
||||
return lines
|
||||
|
||||
def _resolve_type(self, type_hint: Any, optional: bool) -> str:
    """Resolve a Python annotation to a Pydantic field type string."""
    inner, hint_optional = unwrap_optional(type_hint)
    optional = optional or hint_optional

    # Resolver lookup order: container origin, type name, raw type object;
    # Enum subclasses fall back to the shared "enum" resolver.
    resolver = None
    for key in (get_origin_name(inner), get_type_name(inner), inner):
        resolver = PYDANTIC_RESOLVERS.get(key)
        if resolver:
            break
    if resolver is None and isinstance(inner, type) and issubclass(inner, Enum):
        resolver = PYDANTIC_RESOLVERS["enum"]

    rendered = resolver(inner) if resolver else "str"
    return f"Optional[{rendered}]" if optional else rendered
|
||||
|
||||
def _format_default(self, default: Any, optional: bool) -> str:
|
||||
if optional:
|
||||
return " = None"
|
||||
if default is dc.MISSING or default is None:
|
||||
return ""
|
||||
if isinstance(default, str):
|
||||
return f' = "{default}"'
|
||||
if isinstance(default, Enum):
|
||||
return f" = {default.__class__.__name__}.{default.name}"
|
||||
if callable(default):
|
||||
return " = Field(default_factory=list)" if "list" in str(default) else ""
|
||||
return f" = {default!r}"
|
||||
|
||||
def _generate_from_config(self, config) -> str:
    """Generate from ConfigLoader (soleprint config.json mode).

    Looks up the two shared components plus every per-system component
    from the loader, then renders one fixed template: enums, shared
    component models, system-specific models, composed types, and
    Collection wrappers for JSON files.
    """
    # Shared components (used by more than one system).
    config_comp = config.get_shared_component("config")
    data_comp = config.get_shared_component("data")

    # The three systems the template hard-codes.
    data_flow_sys = config.get_system("data_flow")
    doc_sys = config.get_system("documentation")
    exec_sys = config.get_system("execution")

    connector_comp = config.get_component("data_flow", "connector")
    pulse_comp = config.get_component("data_flow", "composed")

    pattern_comp = config.get_component("documentation", "pattern")
    doc_composed = config.get_component("documentation", "composed")

    tool_comp = config.get_component("execution", "utility")
    monitor_comp = config.get_component("execution", "watcher")
    cabinet_comp = config.get_component("execution", "container")
    exec_composed = config.get_component("execution", "composed")

    # NOTE(review): everything below is emitted verbatim into the generated
    # module; class/field names are substituted from the config components.
    return f'''"""
Pydantic models - Generated from {config.framework.name}.config.json

DO NOT EDIT MANUALLY - Regenerate from config
"""

from enum import Enum
from typing import List, Literal, Optional

from pydantic import BaseModel, Field


class Status(str, Enum):
    PENDING = "pending"
    PLANNED = "planned"
    BUILDING = "building"
    DEV = "dev"
    LIVE = "live"
    READY = "ready"


class System(str, Enum):
    {data_flow_sys.name.upper()} = "{data_flow_sys.name}"
    {doc_sys.name.upper()} = "{doc_sys.name}"
    {exec_sys.name.upper()} = "{exec_sys.name}"


class ToolType(str, Enum):
    APP = "app"
    CLI = "cli"


# === Shared Components ===


class {config_comp.title}(BaseModel):
    """{config_comp.description}. Shared across {data_flow_sys.name}, {exec_sys.name}."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    config_path: Optional[str] = None


class {data_comp.title}(BaseModel):
    """{data_comp.description}. Shared across all systems."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    source_template: Optional[str] = None
    data_path: Optional[str] = None


# === System-Specific Components ===


class {connector_comp.title}(BaseModel):
    """{connector_comp.description} ({data_flow_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    system: Literal["{data_flow_sys.name}"] = "{data_flow_sys.name}"
    mock: Optional[bool] = None
    description: Optional[str] = None


class {pattern_comp.title}(BaseModel):
    """{pattern_comp.description} ({doc_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    template_path: Optional[str] = None
    system: Literal["{doc_sys.name}"] = "{doc_sys.name}"


class {tool_comp.title}(BaseModel):
    """{tool_comp.description} ({exec_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    system: Literal["{exec_sys.name}"] = "{exec_sys.name}"
    type: Optional[ToolType] = None
    description: Optional[str] = None
    path: Optional[str] = None
    url: Optional[str] = None
    cli: Optional[str] = None


class {monitor_comp.title}(BaseModel):
    """{monitor_comp.description} ({exec_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    system: Literal["{exec_sys.name}"] = "{exec_sys.name}"


class {cabinet_comp.title}(BaseModel):
    """{cabinet_comp.description} ({exec_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    tools: List[{tool_comp.title}] = Field(default_factory=list)
    system: Literal["{exec_sys.name}"] = "{exec_sys.name}"


# === Composed Types ===


class {pulse_comp.title}(BaseModel):
    """{pulse_comp.description} ({data_flow_sys.name}). Formula: {pulse_comp.formula}."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    {connector_comp.name}: Optional[{connector_comp.title}] = None
    {config_comp.name}: Optional[{config_comp.title}] = None
    {data_comp.name}: Optional[{data_comp.title}] = None
    system: Literal["{data_flow_sys.name}"] = "{data_flow_sys.name}"


class {doc_composed.title}(BaseModel):
    """{doc_composed.description} ({doc_sys.name}). Formula: {doc_composed.formula}."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    template: Optional[{pattern_comp.title}] = None
    {data_comp.name}: Optional[{data_comp.title}] = None
    output_{data_comp.name}: Optional[{data_comp.title}] = None
    system: Literal["{doc_sys.name}"] = "{doc_sys.name}"


class {exec_composed.title}(BaseModel):
    """{exec_composed.description} ({exec_sys.name}). Formula: {exec_composed.formula}."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    cabinet: Optional[{cabinet_comp.title}] = None
    {config_comp.name}: Optional[{config_comp.title}] = None
    {data_comp.plural}: List[{data_comp.title}] = Field(default_factory=list)
    system: Literal["{exec_sys.name}"] = "{exec_sys.name}"


# === Collection wrappers for JSON files ===


class {config_comp.title}Collection(BaseModel):
    items: List[{config_comp.title}] = Field(default_factory=list)


class {data_comp.title}Collection(BaseModel):
    items: List[{data_comp.title}] = Field(default_factory=list)


class {connector_comp.title}Collection(BaseModel):
    items: List[{connector_comp.title}] = Field(default_factory=list)


class {pattern_comp.title}Collection(BaseModel):
    items: List[{pattern_comp.title}] = Field(default_factory=list)


class {tool_comp.title}Collection(BaseModel):
    items: List[{tool_comp.title}] = Field(default_factory=list)


class {monitor_comp.title}Collection(BaseModel):
    items: List[{monitor_comp.title}] = Field(default_factory=list)


class {cabinet_comp.title}Collection(BaseModel):
    items: List[{cabinet_comp.title}] = Field(default_factory=list)


class {pulse_comp.title}Collection(BaseModel):
    items: List[{pulse_comp.title}] = Field(default_factory=list)


class {doc_composed.title}Collection(BaseModel):
    items: List[{doc_composed.title}] = Field(default_factory=list)


class {exec_composed.title}Collection(BaseModel):
    items: List[{exec_composed.title}] = Field(default_factory=list)
'''
|
||||
145
modelgen/generator/typescript.py
Normal file
145
modelgen/generator/typescript.py
Normal file
@@ -0,0 +1,145 @@
|
||||
"""
|
||||
TypeScript Generator
|
||||
|
||||
Generates TypeScript interfaces from model definitions.
|
||||
"""
|
||||
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Any, List, get_type_hints
|
||||
|
||||
from ..helpers import get_origin_name, get_type_name, unwrap_optional
|
||||
from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
|
||||
from ..types import TS_RESOLVERS
|
||||
from .base import BaseGenerator
|
||||
|
||||
|
||||
class TypeScriptGenerator(BaseGenerator):
    """Generates TypeScript interface files.

    ``generate`` accepts three input shapes:
      * a SchemaLoader-like object (anything with a ``models`` attribute),
      * a ``(models, enums)`` tuple of definition lists,
      * a plain list of Python dataclasses (MPR style).
    """

    def file_extension(self) -> str:
        # Used by callers to derive the default output filename.
        return ".ts"

    def generate(self, models, output_path: Path) -> None:
        """Generate TypeScript types to output_path.

        Args:
            models: SchemaLoader, (models, enums) tuple, or list of dataclasses.
            output_path: File to write; parent directories are created.

        Raises:
            ValueError: if ``models`` is none of the supported shapes.
        """
        output_path.parent.mkdir(parents=True, exist_ok=True)

        # Handle different input types
        if hasattr(models, "models"):
            # SchemaLoader — include api_models if present
            all_models = models.models + getattr(models, "api_models", [])
            content = self._generate_from_definitions(
                all_models, getattr(models, "enums", [])
            )
        elif isinstance(models, tuple):
            # (models, enums) tuple
            content = self._generate_from_definitions(models[0], models[1])
        elif isinstance(models, list):
            # List of dataclasses (MPR style)
            content = self._generate_from_dataclasses(models)
        else:
            raise ValueError(f"Unsupported input type: {type(models)}")

        output_path.write_text(content)

    def _generate_from_definitions(
        self, models: List[ModelDefinition], enums: List[EnumDefinition]
    ) -> str:
        """Generate from ModelDefinition objects."""
        lines = self._generate_header()

        # Generate enums as union types (string-literal unions, not TS enums)
        for enum_def in enums:
            values = " | ".join(f'"{v}"' for _, v in enum_def.values)
            lines.append(f"export type {enum_def.name} = {values};")
            lines.append("")

        # Generate interfaces
        for model_def in models:
            lines.extend(self._generate_interface_from_definition(model_def))
            lines.append("")

        return "\n".join(lines)

    def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
        """Generate from Python dataclasses (MPR style)."""
        lines = self._generate_header()

        # Collect and generate enums first, deduplicated by class name.
        # NOTE(review): only enums appearing directly (or behind Optional) in
        # field hints are found; an enum nested inside e.g. List[Enum] is
        # missed — confirm whether that case occurs in the schemas.
        enums_generated = set()
        for cls in dataclasses:
            hints = get_type_hints(cls)
            for type_hint in hints.values():
                base, _ = unwrap_optional(type_hint)
                if isinstance(base, type) and issubclass(base, Enum):
                    if base.__name__ not in enums_generated:
                        values = " | ".join(f'"{m.value}"' for m in base)
                        lines.append(f"export type {base.__name__} = {values};")
                        enums_generated.add(base.__name__)
        lines.append("")

        # Generate interfaces
        for cls in dataclasses:
            lines.extend(self._generate_interface_from_dataclass(cls))
            lines.append("")

        return "\n".join(lines)

    def _generate_header(self) -> List[str]:
        """Generate file header."""
        return [
            "/**",
            " * TypeScript Types - GENERATED FILE",
            " *",
            " * Do not edit directly. Regenerate using modelgen.",
            " */",
            "",
        ]

    def _generate_interface_from_definition(
        self, model_def: ModelDefinition
    ) -> List[str]:
        """Generate TypeScript interface from ModelDefinition."""
        lines = [f"export interface {model_def.name} {{"]

        for field in model_def.fields:
            # Field-level optionality comes from the definition itself.
            ts_type = self._resolve_type(field.type_hint, field.optional)
            lines.append(f"  {field.name}: {ts_type};")

        lines.append("}")
        return lines

    def _generate_interface_from_dataclass(self, cls: type) -> List[str]:
        """Generate TypeScript interface from a dataclass."""
        lines = [f"export interface {cls.__name__} {{"]

        for name, type_hint in get_type_hints(cls).items():
            if name.startswith("_"):
                # Private attributes are not part of the public interface.
                continue
            # Optionality is detected from the hint inside _resolve_type.
            ts_type = self._resolve_type(type_hint, False)
            lines.append(f"  {name}: {ts_type};")

        lines.append("}")
        return lines

    def _resolve_type(self, type_hint: Any, optional: bool) -> str:
        """Resolve Python type to TypeScript type string.

        Args:
            type_hint: The Python type hint (possibly Optional[...]).
            optional: Caller-supplied optionality; OR-ed with any
                Optional[...] detected on the hint itself.
        """
        base, is_optional = unwrap_optional(type_hint)
        optional = optional or is_optional
        origin = get_origin_name(base)
        type_name = get_type_name(base)

        # Look up resolver: container origin first ("dict"/"list"), then the
        # type's name (e.g. "UUID", "datetime"), then the raw type object,
        # and finally the generic enum resolver.
        resolver = (
            TS_RESOLVERS.get(origin)
            or TS_RESOLVERS.get(type_name)
            or TS_RESOLVERS.get(base)
            or (
                TS_RESOLVERS["enum"]
                if isinstance(base, type) and issubclass(base, Enum)
                else None
            )
        )

        # Unknown types fall back to "string".
        result = resolver(base) if resolver else "string"
        return f"{result} | null" if optional else result
|
||||
72
modelgen/helpers.py
Normal file
72
modelgen/helpers.py
Normal file
@@ -0,0 +1,72 @@
|
||||
"""
|
||||
Type Helpers
|
||||
|
||||
Utilities for type introspection and resolution.
|
||||
Used by generators and loaders.
|
||||
"""
|
||||
|
||||
import dataclasses as dc
|
||||
from enum import Enum
|
||||
from typing import Any, Union, get_args, get_origin
|
||||
|
||||
|
||||
def unwrap_optional(type_hint: Any) -> tuple[Any, bool]:
    """Unwrap Optional[T] -> (T, True); return (type_hint, False) otherwise.

    Only unions that actually include ``None`` are treated as optional.
    The previous implementation marked *every* ``Union`` as optional, so
    ``Union[int, str]`` was misreported as an optional ``int``; it now
    matches the ``type(None) in args`` semantics used elsewhere in the
    loader.  For ``Optional[Union[A, B]]`` the first non-None member is
    returned, preserving the prior behavior for genuinely optional hints.
    """
    origin = get_origin(type_hint)
    if origin is Union:
        args = get_args(type_hint)
        if type(None) not in args:
            # A plain Union without None is not optional; leave it intact.
            return (type_hint, False)
        remaining = [a for a in args if a is not type(None)]
        # Fall back to str for the degenerate all-None case.
        return (remaining[0] if remaining else str, True)
    return (type_hint, False)
|
||||
|
||||
|
||||
def get_origin_name(type_hint: Any) -> str | None:
|
||||
"""Get origin type name: 'dict', 'list', or None."""
|
||||
origin = get_origin(type_hint)
|
||||
if origin is dict:
|
||||
return "dict"
|
||||
if origin is list:
|
||||
return "list"
|
||||
return None
|
||||
|
||||
|
||||
def get_type_name(type_hint: Any) -> str | None:
|
||||
"""Get type name for special types like UUID, datetime."""
|
||||
if hasattr(type_hint, "__name__"):
|
||||
return type_hint.__name__
|
||||
return None
|
||||
|
||||
|
||||
def get_list_inner(type_hint: Any) -> str:
    """Return the primitive name of List[T]'s inner type, defaulting to 'str'."""
    primitive_names = {str: "str", int: "int", float: "float", bool: "bool"}
    args = get_args(type_hint)
    if not args:
        # Bare `list` (or a non-generic hint) carries no inner type.
        return "str"
    # Non-primitive inner types also fall back to "str".
    return primitive_names.get(args[0], "str")
|
||||
|
||||
|
||||
def get_field_default(field: dc.Field) -> Any:
    """Return the field's declared default, or dc.MISSING when it has none.

    Note: ``default_factory`` is deliberately not consulted here; callers
    that care about factories inspect the field themselves.
    """
    # dc.Field.default is already dc.MISSING when no default was declared,
    # so it can be returned directly without an explicit sentinel check.
    return field.default
|
||||
|
||||
|
||||
def format_opts(optional: bool, extra: list[str] | None = None) -> str:
|
||||
"""Format field options string for Django."""
|
||||
parts = []
|
||||
if optional:
|
||||
parts.append("null=True, blank=True")
|
||||
if extra:
|
||||
parts.extend(extra)
|
||||
return ", ".join(parts)
|
||||
|
||||
|
||||
def is_enum(type_hint: Any) -> bool:
    """True when the (possibly Optional) hint resolves to an Enum subclass."""
    inner = unwrap_optional(type_hint)[0]
    if not isinstance(inner, type):
        # Subscripted generics etc. are not classes and cannot be enums.
        return False
    return issubclass(inner, Enum)
|
||||
|
||||
|
||||
def get_enum_values(enum_class: type) -> list[tuple[str, str]]:
    """Return the enum's (member name, member value) pairs in definition order."""
    # Iterating the Enum class itself skips aliases, matching Enum semantics.
    pairs: list[tuple[str, str]] = []
    for member in enum_class:
        pairs.append((member.name, member.value))
    return pairs
|
||||
37
modelgen/loader/__init__.py
Normal file
37
modelgen/loader/__init__.py
Normal file
@@ -0,0 +1,37 @@
|
||||
"""
|
||||
Loader - Input source handlers for modelgen.
|
||||
|
||||
Supported loaders:
|
||||
- ConfigLoader: Load from soleprint config.json
|
||||
- SchemaLoader: Load from Python dataclasses in schema/ folder
|
||||
- Extractors: Extract from existing codebases (Django, SQLAlchemy, Prisma)
|
||||
"""
|
||||
|
||||
from .config import ConfigLoader, load_config
|
||||
from .extract import EXTRACTORS, BaseExtractor, DjangoExtractor
|
||||
from .schema import (
|
||||
EnumDefinition,
|
||||
FieldDefinition,
|
||||
GrpcServiceDefinition,
|
||||
ModelDefinition,
|
||||
SchemaLoader,
|
||||
load_schema,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
# Config loader
|
||||
"ConfigLoader",
|
||||
"load_config",
|
||||
# Schema loader
|
||||
"SchemaLoader",
|
||||
"load_schema",
|
||||
# Model definitions
|
||||
"ModelDefinition",
|
||||
"FieldDefinition",
|
||||
"EnumDefinition",
|
||||
"GrpcServiceDefinition",
|
||||
# Extractors
|
||||
"BaseExtractor",
|
||||
"DjangoExtractor",
|
||||
"EXTRACTORS",
|
||||
]
|
||||
116
modelgen/loader/config.py
Normal file
116
modelgen/loader/config.py
Normal file
@@ -0,0 +1,116 @@
|
||||
"""
|
||||
Configuration Loader
|
||||
|
||||
Loads and validates framework configuration files (soleprint config.json style).
|
||||
"""
|
||||
|
||||
import json
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
|
||||
@dataclass
class FrameworkConfig:
    """Framework metadata (the ``framework`` section of the config file)."""

    name: str  # display name
    slug: str  # identifier-safe short name
    version: str
    description: str
    tagline: str
    icon: str
    hub_port: int  # presumably the hub service's listen port — confirm with deployment
|
||||
|
||||
|
||||
@dataclass
class SystemConfig:
    """System configuration (one entry of the ``systems`` list).

    Only ``key`` and ``name`` are required; the rest default to empty strings.
    """

    key: str  # unique lookup key used by ConfigLoader.get_system
    name: str
    slug: str = ""
    title: str = ""
    tagline: str = ""
    icon: str = ""
|
||||
|
||||
|
||||
@dataclass
class ComponentConfig:
    """Component configuration (entries under ``components`` in the config)."""

    name: str
    title: str
    description: str
    plural: Optional[str] = None  # explicit plural form, if any — TODO confirm usage
    formula: Optional[str] = None  # semantics not visible here; confirm with consumers
|
||||
|
||||
|
||||
class ConfigLoader:
    """Loads and parses framework configuration.

    Usage: ``ConfigLoader(path).load()`` — parsed results land on the
    ``framework``, ``systems`` and ``components`` attributes.
    """

    def __init__(self, config_path: Path):
        self.config_path = Path(config_path)
        self.raw_config: Dict[str, Any] = {}
        self.framework: Optional[FrameworkConfig] = None
        self.systems: List[SystemConfig] = []
        self.components: Dict[str, Dict[str, ComponentConfig]] = {}

    def load(self) -> "ConfigLoader":
        """Read the JSON config file and populate all parsed attributes."""
        with open(self.config_path) as fh:
            self.raw_config = json.load(fh)

        self._parse_framework()
        self._parse_systems()
        self._parse_components()
        return self

    def _parse_framework(self):
        """Build the FrameworkConfig from the ``framework`` section."""
        self.framework = FrameworkConfig(**self.raw_config["framework"])

    def _parse_systems(self):
        """Build SystemConfig entries from the ``systems`` list."""
        self.systems.extend(
            SystemConfig(**entry) for entry in self.raw_config["systems"]
        )

    def _parse_components(self):
        """Build ComponentConfig maps for shared and per-system components."""
        comps = self.raw_config["components"]
        # Shared components first, then the fixed set of system groups.
        for group in ("shared", "data_flow", "documentation", "execution"):
            self.components[group] = {
                comp_key: ComponentConfig(**comp_value)
                for comp_key, comp_value in comps.get(group, {}).items()
            }

    def get_system(self, key: str) -> Optional[SystemConfig]:
        """Return the system config with the given key, or None."""
        return next((sys for sys in self.systems if sys.key == key), None)

    def get_component(
        self, system_key: str, component_key: str
    ) -> Optional[ComponentConfig]:
        """Return a per-system component config, or None if absent."""
        return self.components.get(system_key, {}).get(component_key)

    def get_shared_component(self, key: str) -> Optional[ComponentConfig]:
        """Return a shared component config, or None if absent."""
        return self.components.get("shared", {}).get(key)
|
||||
|
||||
def get_shared_component(self, key: str) -> Optional[ComponentConfig]:
|
||||
"""Get shared component config"""
|
||||
return self.components.get("shared", {}).get(key)
|
||||
|
||||
|
||||
def load_config(config_path: str | Path) -> ConfigLoader:
    """Load and validate a configuration file; returns the populated loader."""
    return ConfigLoader(config_path).load()
|
||||
20
modelgen/loader/extract/__init__.py
Normal file
20
modelgen/loader/extract/__init__.py
Normal file
@@ -0,0 +1,20 @@
|
||||
"""
|
||||
Extractors - Extract model definitions from existing codebases.
|
||||
|
||||
Supported frameworks:
|
||||
- Django: Extract from Django ORM models
|
||||
- SQLAlchemy: Extract from SQLAlchemy models (planned)
|
||||
- Prisma: Extract from Prisma schema (planned)
|
||||
"""
|
||||
|
||||
from typing import Dict, Type
|
||||
|
||||
from .base import BaseExtractor
|
||||
from .django import DjangoExtractor
|
||||
|
||||
# Registry of available extractors, keyed by framework name.
# New extractors (SQLAlchemy, Prisma — see module docstring) are expected to
# be registered here so callers can look them up by key.
EXTRACTORS: Dict[str, Type[BaseExtractor]] = {
    "django": DjangoExtractor,
}

__all__ = ["BaseExtractor", "DjangoExtractor", "EXTRACTORS"]
|
||||
38
modelgen/loader/extract/base.py
Normal file
38
modelgen/loader/extract/base.py
Normal file
@@ -0,0 +1,38 @@
|
||||
"""
|
||||
Base Extractor
|
||||
|
||||
Abstract base class for model extractors.
|
||||
"""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
|
||||
from ..schema import EnumDefinition, ModelDefinition
|
||||
|
||||
|
||||
class BaseExtractor(ABC):
    """Abstract base for codebase model extractors.

    A concrete extractor points at a source tree and converts the
    framework's native model declarations into framework-agnostic
    definitions.
    """

    def __init__(self, source_path: Path):
        # Normalize so str arguments work as well as Path objects.
        self.source_path = Path(source_path)

    @abstractmethod
    def extract(self) -> tuple[List[ModelDefinition], List[EnumDefinition]]:
        """Extract model definitions from the source codebase.

        Returns:
            Tuple of (models, enums)
        """
        ...

    @abstractmethod
    def detect(self) -> bool:
        """Report whether this extractor can handle the source path.

        Returns:
            True if this extractor can handle the source
        """
        ...
|
||||
237
modelgen/loader/extract/django.py
Normal file
237
modelgen/loader/extract/django.py
Normal file
@@ -0,0 +1,237 @@
|
||||
"""
|
||||
Django Extractor
|
||||
|
||||
Extracts model definitions from Django ORM models.
|
||||
"""
|
||||
|
||||
import ast
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Any, List, Optional
|
||||
|
||||
from ..schema import EnumDefinition, FieldDefinition, ModelDefinition
|
||||
from .base import BaseExtractor
|
||||
|
||||
# Django field type mappings to Python types.
# Values are either real Python types (str, int, ...) or string tags
# ("UUID", "datetime", "dict", "bigint", "FK", "M2M") resolved by the
# downstream type tables — "FK"/"M2M" handling is not visible here; confirm
# with the generators that consume FieldDefinition.type_hint.
DJANGO_FIELD_TYPES = {
    "CharField": str,
    "TextField": str,
    "EmailField": str,
    "URLField": str,
    "SlugField": str,
    "UUIDField": "UUID",
    "IntegerField": int,
    "BigIntegerField": "bigint",
    "SmallIntegerField": int,
    "PositiveIntegerField": int,
    "FloatField": float,
    "DecimalField": float,  # NOTE(review): Decimal precision is flattened to float
    "BooleanField": bool,
    "NullBooleanField": bool,
    "DateField": "datetime",
    "DateTimeField": "datetime",
    "TimeField": "datetime",
    "JSONField": "dict",
    "ForeignKey": "FK",
    "OneToOneField": "FK",
    "ManyToManyField": "M2M",
}
|
||||
|
||||
|
||||
class DjangoExtractor(BaseExtractor):
    """Extracts models from Django ORM.

    Works purely on the AST of each ``models.py`` — the project is never
    imported, so no Django settings or environment are required.
    """

    def detect(self) -> bool:
        """Check if this is a Django project."""
        # Look for manage.py or settings.py
        manage_py = self.source_path / "manage.py"
        settings_py = self.source_path / "settings.py"

        if manage_py.exists():
            return True

        # Check for Django imports in any models.py
        for models_file in self.source_path.rglob("models.py"):
            content = models_file.read_text()
            if "from django.db import models" in content:
                return True

        # Last resort: a bare settings.py at the source root.
        return settings_py.exists()

    def extract(self) -> tuple[List[ModelDefinition], List[EnumDefinition]]:
        """Extract Django models using AST parsing.

        Returns:
            Tuple of (models, enums) aggregated over every models.py found.
        """
        models = []
        enums = []

        # Find all models.py files
        for models_file in self.source_path.rglob("models.py"):
            file_models, file_enums = self._extract_from_file(models_file)
            models.extend(file_models)
            enums.extend(file_enums)

        return models, enums

    def _extract_from_file(
        self, file_path: Path
    ) -> tuple[List[ModelDefinition], List[EnumDefinition]]:
        """Extract models from a single models.py file."""
        models = []
        enums = []

        content = file_path.read_text()
        tree = ast.parse(content)

        # ast.walk also visits nested classes, not just module-level ones.
        for node in ast.walk(tree):
            if isinstance(node, ast.ClassDef):
                # Check if it inherits from models.Model
                if self._is_django_model(node):
                    model_def = self._parse_model_class(node)
                    if model_def:
                        models.append(model_def)
                # Check if it's a TextChoices/IntegerChoices enum
                elif self._is_django_choices(node):
                    enum_def = self._parse_choices_class(node)
                    if enum_def:
                        enums.append(enum_def)

        return models, enums

    def _is_django_model(self, node: ast.ClassDef) -> bool:
        """Check if class inherits from models.Model (base-name heuristic)."""
        for base in node.bases:
            if isinstance(base, ast.Attribute):
                # e.g. ``models.Model`` — only the attribute name is checked,
                # so any ``X.Model`` base would also match.
                if base.attr == "Model":
                    return True
            elif isinstance(base, ast.Name):
                # Direct-name bases, including Django's abstract user bases.
                if base.id in ("Model", "AbstractUser", "AbstractBaseUser"):
                    return True
        return False

    def _is_django_choices(self, node: ast.ClassDef) -> bool:
        """Check if class is a Django TextChoices/IntegerChoices."""
        for base in node.bases:
            if isinstance(base, ast.Attribute):
                # e.g. ``models.TextChoices``
                if base.attr in ("TextChoices", "IntegerChoices"):
                    return True
            elif isinstance(base, ast.Name):
                # e.g. ``TextChoices`` imported directly
                if base.id in ("TextChoices", "IntegerChoices"):
                    return True
        return False

    def _parse_model_class(self, node: ast.ClassDef) -> Optional[ModelDefinition]:
        """Parse a Django model class into ModelDefinition."""
        fields = []

        for item in node.body:
            if isinstance(item, ast.Assign):
                field_def = self._parse_field_assignment(item)
                if field_def:
                    fields.append(field_def)
            elif isinstance(item, ast.AnnAssign):
                # Handle annotated assignments (Django 4.0+ style)
                field_def = self._parse_annotated_field(item)
                if field_def:
                    fields.append(field_def)

        # Get docstring
        docstring = ast.get_docstring(node)

        return ModelDefinition(
            name=node.name,
            fields=fields,
            docstring=docstring,
        )

    def _parse_field_assignment(self, node: ast.Assign) -> Optional[FieldDefinition]:
        """Parse a field assignment like: name = models.CharField(...)"""
        # Only simple single-name targets are considered field declarations.
        if not node.targets or not isinstance(node.targets[0], ast.Name):
            return None

        field_name = node.targets[0].id

        # Skip private fields and Meta class
        if field_name.startswith("_") or field_name == "Meta":
            return None

        # Parse the field call; non-call assignments (constants etc.) yield None.
        if isinstance(node.value, ast.Call):
            return self._parse_field_call(field_name, node.value)

        return None

    def _parse_annotated_field(self, node: ast.AnnAssign) -> Optional[FieldDefinition]:
        """Parse an annotated field assignment."""
        if not isinstance(node.target, ast.Name):
            return None

        field_name = node.target.id

        if field_name.startswith("_"):
            return None

        # The annotation itself is ignored; the field call drives the type.
        if node.value and isinstance(node.value, ast.Call):
            return self._parse_field_call(field_name, node.value)

        return None

    def _parse_field_call(
        self, field_name: str, call: ast.Call
    ) -> Optional[FieldDefinition]:
        """Parse a Django field call like models.CharField(max_length=100)."""
        # Get field type name
        field_type_name = None

        if isinstance(call.func, ast.Attribute):
            # ``models.CharField(...)`` — take the attribute name.
            field_type_name = call.func.attr
        elif isinstance(call.func, ast.Name):
            # ``CharField(...)`` imported directly.
            field_type_name = call.func.id

        if not field_type_name:
            return None

        # Map to Python type; unknown field classes default to str.
        python_type = DJANGO_FIELD_TYPES.get(field_type_name, str)

        # Check for null=True and a literal default
        optional = False
        default = None

        for keyword in call.keywords:
            if keyword.arg == "null":
                if isinstance(keyword.value, ast.Constant):
                    optional = keyword.value.value is True
            elif keyword.arg == "default":
                # Only literal constants are captured; callables and other
                # expressions used as defaults are ignored.
                if isinstance(keyword.value, ast.Constant):
                    default = keyword.value.value

        return FieldDefinition(
            name=field_name,
            type_hint=python_type,
            default=default if default is not None else None,
            optional=optional,
        )

    def _parse_choices_class(self, node: ast.ClassDef) -> Optional[EnumDefinition]:
        """Parse a Django TextChoices/IntegerChoices class."""
        values = []

        for item in node.body:
            if isinstance(item, ast.Assign):
                if item.targets and isinstance(item.targets[0], ast.Name):
                    name = item.targets[0].id
                    if name.isupper():  # Enum values are typically uppercase
                        # Get the value
                        value = name.lower()  # Default to lowercase name
                        if isinstance(item.value, ast.Constant):
                            value = str(item.value.value)
                        elif isinstance(item.value, ast.Tuple) and item.value.elts:
                            # TextChoices: NAME = "value", "Label"
                            if isinstance(item.value.elts[0], ast.Constant):
                                value = str(item.value.elts[0].value)

                        values.append((name, value))

        # Classes with no uppercase members produce no enum.
        if not values:
            return None

        return EnumDefinition(name=node.name, values=values)
|
||||
188
modelgen/loader/schema.py
Normal file
188
modelgen/loader/schema.py
Normal file
@@ -0,0 +1,188 @@
|
||||
"""
|
||||
Schema Loader
|
||||
|
||||
Loads Python dataclasses from a schema/ folder.
|
||||
Expects the folder to have an __init__.py that exports:
|
||||
- DATACLASSES: List of dataclass types to generate
|
||||
- ENUMS: List of Enum types to include
|
||||
- API_MODELS: (optional) List of API request/response types
|
||||
- GRPC_MESSAGES: (optional) List of gRPC message types
|
||||
- GRPC_SERVICE: (optional) gRPC service definition dict
|
||||
"""
|
||||
|
||||
import dataclasses as dc
|
||||
import importlib.util
|
||||
import sys
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional, Type, get_type_hints
|
||||
|
||||
|
||||
@dataclass
class FieldDefinition:
    """Represents a model field."""

    name: str
    type_hint: Any  # original Python type hint (may be Optional[...])
    default: Any = dc.MISSING  # dc.MISSING means "no default declared"
    optional: bool = False  # True when the hint is a Union including None
|
||||
|
||||
|
||||
@dataclass
class ModelDefinition:
    """Represents a model/dataclass."""

    name: str
    fields: List[FieldDefinition]
    docstring: Optional[str] = None  # source docstring, if any
|
||||
|
||||
|
||||
@dataclass
class EnumDefinition:
    """Represents an enum."""

    name: str
    values: List[tuple[str, str]]  # (name, value) pairs, in definition order
|
||||
|
||||
|
||||
@dataclass
class GrpcServiceDefinition:
    """Represents a gRPC service."""

    package: str  # protobuf package name
    name: str  # service name
    methods: List[Dict[str, Any]]  # raw method dicts taken from GRPC_SERVICE["methods"]
|
||||
|
||||
|
||||
class SchemaLoader:
    """Loads model definitions from Python dataclasses in a schema/ folder.

    The folder's ``__init__.py`` is imported and its module-level registries
    (``DATACLASSES``, ``ENUMS``, ``API_MODELS``, ``GRPC_MESSAGES``,
    ``GRPC_SERVICE``) are read into definition objects.
    """

    def __init__(self, schema_path: Path):
        self.schema_path = Path(schema_path)
        self.models: List[ModelDefinition] = []
        self.api_models: List[ModelDefinition] = []
        self.enums: List[EnumDefinition] = []
        self.grpc_messages: List[ModelDefinition] = []
        self.grpc_service: Optional[GrpcServiceDefinition] = None

    def load(self, include: Optional[set] = None) -> "SchemaLoader":
        """Load schema definitions from the schema folder.

        Args:
            include: Set of groups to load (dataclasses, enums, api, grpc).
                None means load all groups.

        Returns:
            self, for chaining.

        Raises:
            FileNotFoundError: if the folder has no __init__.py.
        """
        init_path = self.schema_path / "__init__.py"

        if not init_path.exists():
            raise FileNotFoundError(f"Schema folder must have __init__.py: {init_path}")

        # Import the schema module
        module = self._import_module(init_path)

        load_all = include is None

        # Extract DATACLASSES
        if load_all or "dataclasses" in include:
            for cls in getattr(module, "DATACLASSES", []):
                self.models.append(self._parse_dataclass(cls))

        # Extract API_MODELS (request/response types)
        if load_all or "api" in include:
            for cls in getattr(module, "API_MODELS", []):
                self.api_models.append(self._parse_dataclass(cls))

        # Extract ENUMS
        if load_all or "enums" in include:
            for enum_cls in getattr(module, "ENUMS", []):
                self.enums.append(self._parse_enum(enum_cls))

        # Extract gRPC messages and service (optional). Both belong to the
        # same "grpc" group; the previous version tested the condition twice
        # in two separate blocks.
        if load_all or "grpc" in include:
            for cls in getattr(module, "GRPC_MESSAGES", []):
                self.grpc_messages.append(self._parse_dataclass(cls))

            grpc_service = getattr(module, "GRPC_SERVICE", None)
            if grpc_service:
                self.grpc_service = GrpcServiceDefinition(
                    package=grpc_service.get("package", "service"),
                    name=grpc_service.get("name", "Service"),
                    methods=grpc_service.get("methods", []),
                )

        return self

    def _import_module(self, path: Path):
        """Import a Python module from a file path."""
        spec = importlib.util.spec_from_file_location("schema", path)
        if spec is None or spec.loader is None:
            raise ImportError(f"Could not load module from {path}")

        module = importlib.util.module_from_spec(spec)
        # NOTE(review): registering under the fixed name "schema" clobbers any
        # previously imported module called "schema" — confirm this is intended
        # (it is needed for intra-package imports inside the schema folder).
        sys.modules["schema"] = module
        spec.loader.exec_module(module)
        return module

    def _parse_dataclass(self, cls: Type) -> ModelDefinition:
        """Parse a dataclass into a ModelDefinition."""
        hints = get_type_hints(cls)
        fields_info = {f.name: f for f in dc.fields(cls)}

        fields = []
        for name, type_hint in hints.items():
            if name.startswith("_"):
                continue  # private attributes are not part of the model

            field_info = fields_info.get(name)
            default = dc.MISSING
            if field_info:
                if field_info.default is not dc.MISSING:
                    default = field_info.default
                elif field_info.default_factory is not dc.MISSING:
                    # The factory callable itself is stored, not its product —
                    # assumed intentional; confirm downstream handling.
                    default = field_info.default_factory

            # Check if optional (Union with None)
            optional = self._is_optional(type_hint)

            fields.append(
                FieldDefinition(
                    name=name,
                    type_hint=type_hint,
                    default=default,
                    optional=optional,
                )
            )

        return ModelDefinition(
            name=cls.__name__,
            fields=fields,
            docstring=cls.__doc__,
        )

    def _parse_enum(self, enum_cls: Type[Enum]) -> EnumDefinition:
        """Parse an Enum into an EnumDefinition."""
        values = [(m.name, m.value) for m in enum_cls]
        return EnumDefinition(name=enum_cls.__name__, values=values)

    def _is_optional(self, type_hint: Any) -> bool:
        """Check if a type hint is Optional (a Union that includes None)."""
        from typing import Union, get_args, get_origin

        if get_origin(type_hint) is Union:
            return type(None) in get_args(type_hint)
        return False
|
||||
|
||||
|
||||
def load_schema(schema_path: str | Path, include: Optional[set] = None) -> SchemaLoader:
    """Load schema definitions from a folder; returns the populated loader."""
    return SchemaLoader(schema_path).load(include=include)
|
||||
77
modelgen/model_generator.py
Normal file
77
modelgen/model_generator.py
Normal file
@@ -0,0 +1,77 @@
|
||||
"""
|
||||
Model Generator
|
||||
|
||||
Orchestrates model generation from various sources to various formats.
|
||||
Delegates to loaders for input and generators for output.
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Dict, Type
|
||||
|
||||
from .generator import GENERATORS, BaseGenerator
|
||||
from .loader import ConfigLoader
|
||||
|
||||
|
||||
class ModelGenerator:
    """
    Generates typed models from configuration.

    This is the main entry point for model generation: it validates the
    requested output format and delegates the work to the matching
    format-specific generator.
    """

    def __init__(
        self,
        config: ConfigLoader,
        output_path: Path,
        output_format: str = "pydantic",
    ):
        """
        Initialize the generator.

        Args:
            config: Loaded configuration
            output_path: Exact path where to write (file or directory depending on format)
            output_format: Output format (pydantic, django, prisma, typescript, protobuf)

        Raises:
            ValueError: if ``output_format`` has no registered generator.
        """
        if output_format not in GENERATORS:
            raise ValueError(
                f"Unknown output format: {output_format}. "
                f"Available: {list(GENERATORS.keys())}"
            )

        self.config = config
        self.output_path = Path(output_path)
        self.output_format = output_format
        self.generator = GENERATORS[output_format]()

    def generate(self) -> Path:
        """
        Generate models to the specified output path.

        Returns:
            Path to the generated file/directory
        """
        # A suffix means the caller named an explicit file; a bare directory
        # gets the generator's default filename appended.
        target = self.output_path
        if not target.suffix:
            target = target / f"__init__{self.generator.file_extension()}"

        self.generator.generate(self.config, target)
        print(f"Generated {self.output_format} models: {target}")
        return target

    @classmethod
    def available_formats(cls) -> list:
        """Return list of available output formats."""
        return list(GENERATORS)
|
||||
|
||||
|
||||
# Re-export for backwards compatibility
|
||||
WRITERS = GENERATORS
|
||||
0
modelgen/schema/.gitkeep
Normal file
0
modelgen/schema/.gitkeep
Normal file
172
modelgen/types.py
Normal file
172
modelgen/types.py
Normal file
@@ -0,0 +1,172 @@
|
||||
"""
|
||||
Type Dispatch Tables
|
||||
|
||||
Type mappings for each output format.
|
||||
Used by generators to convert Python types to target framework types.
|
||||
"""
|
||||
|
||||
from typing import Any, Callable, get_args
|
||||
|
||||
# =============================================================================
|
||||
# Django Type Mappings
|
||||
# =============================================================================
|
||||
|
||||
# Django field templates keyed by Python type or string tag.
# Placeholders ({max_length}, {opts}, {default}, {enum_name}) are presumably
# filled in by the Django generator (not shown in this module).
DJANGO_TYPES: dict[Any, str] = {
    str: "models.CharField(max_length={max_length}{opts})",
    int: "models.IntegerField({opts})",
    float: "models.FloatField({opts})",
    bool: "models.BooleanField(default={default})",  # uses {default}, not {opts}
    "UUID": "models.UUIDField({opts})",
    "datetime": "models.DateTimeField({opts})",
    "dict": "models.JSONField(default=dict, blank=True)",
    "list": "models.JSONField(default=list, blank=True)",
    "text": "models.TextField(blank=True, default='')",
    "bigint": "models.BigIntegerField({opts})",
    "enum": "models.CharField(max_length=20, choices={enum_name}.choices{opts})",
}
|
||||
|
||||
# Field names with a fixed, fully-specified Django definition — presumably
# consulted before DJANGO_TYPES by the Django generator (confirm there).
DJANGO_SPECIAL: dict[str, str] = {
    "id": "models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)",
    "created_at": "models.DateTimeField(auto_now_add=True)",
    "updated_at": "models.DateTimeField(auto_now=True)",
}
|
||||
|
||||
# =============================================================================
|
||||
# Pydantic Type Resolvers
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def _get_list_inner(type_hint: Any) -> str:
|
||||
"""Get inner type of List[T] for Pydantic."""
|
||||
args = get_args(type_hint)
|
||||
if args and args[0] in (str, int, float, bool):
|
||||
return {str: "str", int: "int", float: "float", bool: "bool"}[args[0]]
|
||||
return "str"
|
||||
|
||||
|
||||
PYDANTIC_RESOLVERS: dict[Any, Callable[[Any], str]] = {
|
||||
str: lambda _: "str",
|
||||
int: lambda _: "int",
|
||||
float: lambda _: "float",
|
||||
bool: lambda _: "bool",
|
||||
"UUID": lambda _: "UUID",
|
||||
"datetime": lambda _: "datetime",
|
||||
"dict": lambda _: "Dict[str, Any]",
|
||||
"list": lambda base: f"List[{_get_list_inner(base)}]",
|
||||
"enum": lambda base: base.__name__,
|
||||
}
|
||||
|
||||
# =============================================================================
|
||||
# TypeScript Type Resolvers
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def _resolve_ts_list(base: Any) -> str:
|
||||
"""Resolve TypeScript list type."""
|
||||
args = get_args(base)
|
||||
if args:
|
||||
inner = args[0]
|
||||
if inner is str:
|
||||
return "string[]"
|
||||
elif inner is int or inner is float:
|
||||
return "number[]"
|
||||
elif inner is bool:
|
||||
return "boolean[]"
|
||||
return "string[]"
|
||||
|
||||
|
||||
TS_RESOLVERS: dict[Any, Callable[[Any], str]] = {
|
||||
str: lambda _: "string",
|
||||
int: lambda _: "number",
|
||||
float: lambda _: "number",
|
||||
bool: lambda _: "boolean",
|
||||
"UUID": lambda _: "string",
|
||||
"datetime": lambda _: "string",
|
||||
"dict": lambda _: "Record<string, unknown>",
|
||||
"list": _resolve_ts_list,
|
||||
"enum": lambda base: base.__name__,
|
||||
}
|
||||
|
||||
# =============================================================================
|
||||
# Protobuf Type Resolvers
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def _resolve_proto_list(base: Any) -> str:
|
||||
"""Resolve Protobuf repeated type."""
|
||||
args = get_args(base)
|
||||
if args:
|
||||
inner = args[0]
|
||||
if inner is str:
|
||||
return "repeated string"
|
||||
elif inner is int:
|
||||
return "repeated int32"
|
||||
elif inner is float:
|
||||
return "repeated float"
|
||||
elif inner is bool:
|
||||
return "repeated bool"
|
||||
return "repeated string"
|
||||
|
||||
|
||||
PROTO_RESOLVERS: dict[Any, Callable[[Any], str]] = {
|
||||
str: lambda _: "string",
|
||||
int: lambda _: "int32",
|
||||
float: lambda _: "float",
|
||||
bool: lambda _: "bool",
|
||||
"list": _resolve_proto_list,
|
||||
}
|
||||
|
||||
# =============================================================================
|
||||
# Prisma Type Mappings
|
||||
# =============================================================================
|
||||
|
||||
PRISMA_TYPES: dict[Any, str] = {
|
||||
str: "String",
|
||||
int: "Int",
|
||||
float: "Float",
|
||||
bool: "Boolean",
|
||||
"UUID": "String @default(uuid())",
|
||||
"datetime": "DateTime",
|
||||
"dict": "Json",
|
||||
"list": "Json",
|
||||
"bigint": "BigInt",
|
||||
}
|
||||
|
||||
PRISMA_SPECIAL: dict[str, str] = {
|
||||
"id": "String @id @default(uuid())",
|
||||
"created_at": "DateTime @default(now())",
|
||||
"updated_at": "DateTime @updatedAt",
|
||||
}
|
||||
|
||||
# =============================================================================
|
||||
# Graphene Type Resolvers
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def _resolve_graphene_list(base: Any) -> str:
|
||||
"""Resolve graphene List type."""
|
||||
args = get_args(base)
|
||||
if args:
|
||||
inner = args[0]
|
||||
if inner is str:
|
||||
return "graphene.List(graphene.String)"
|
||||
elif inner is int:
|
||||
return "graphene.List(graphene.Int)"
|
||||
elif inner is float:
|
||||
return "graphene.List(graphene.Float)"
|
||||
elif inner is bool:
|
||||
return "graphene.List(graphene.Boolean)"
|
||||
return "graphene.List(graphene.String)"
|
||||
|
||||
|
||||
GRAPHENE_RESOLVERS: dict[Any, Callable[[Any], str]] = {
|
||||
str: lambda _: "graphene.String",
|
||||
int: lambda _: "graphene.Int",
|
||||
float: lambda _: "graphene.Float",
|
||||
bool: lambda _: "graphene.Boolean",
|
||||
"UUID": lambda _: "graphene.UUID",
|
||||
"datetime": lambda _: "graphene.DateTime",
|
||||
"dict": lambda _: "graphene.JSONString",
|
||||
"list": _resolve_graphene_list,
|
||||
"enum": lambda base: f"graphene.String", # Enums exposed as strings in GQL
|
||||
}
|
||||
7
modelgen/writer/__init__.py
Normal file
7
modelgen/writer/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
"""
|
||||
Writer - File writing utilities for modelgen.
|
||||
"""
|
||||
|
||||
from .file import write_file, write_multiple
|
||||
|
||||
__all__ = ["write_file", "write_multiple"]
|
||||
30
modelgen/writer/file.py
Normal file
30
modelgen/writer/file.py
Normal file
@@ -0,0 +1,30 @@
|
||||
"""
|
||||
File Writer
|
||||
|
||||
Utilities for writing generated files to disk.
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Dict
|
||||
|
||||
|
||||
def write_file(path: Path, content: str) -> None:
|
||||
"""Write content to file, creating directories as needed."""
|
||||
path = Path(path)
|
||||
path.parent.mkdir(parents=True, exist_ok=True)
|
||||
path.write_text(content)
|
||||
|
||||
|
||||
def write_multiple(directory: Path, files: Dict[str, str]) -> None:
|
||||
"""Write multiple files to a directory.
|
||||
|
||||
Args:
|
||||
directory: Target directory
|
||||
files: Dict mapping filename to content
|
||||
"""
|
||||
directory = Path(directory)
|
||||
directory.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
for filename, content in files.items():
|
||||
file_path = directory / filename
|
||||
file_path.write_text(content)
|
||||
@@ -7,3 +7,4 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mpr.settings")
|
||||
app = Celery("mpr")
|
||||
app.config_from_object("django.conf:settings", namespace="CELERY")
|
||||
app.autodiscover_tasks()
|
||||
app.autodiscover_tasks(["task"])
|
||||
|
||||
@@ -108,14 +108,13 @@ class TranscodePresetAdmin(admin.ModelAdmin):
|
||||
class TranscodeJobAdmin(admin.ModelAdmin):
|
||||
list_display = [
|
||||
"id_short",
|
||||
"source_asset",
|
||||
"preset",
|
||||
"source_asset_id_short",
|
||||
"status",
|
||||
"progress_display",
|
||||
"created_at",
|
||||
]
|
||||
list_filter = ["status", "preset"]
|
||||
search_fields = ["source_asset__filename", "output_filename"]
|
||||
list_filter = ["status"]
|
||||
search_fields = ["output_filename"]
|
||||
readonly_fields = [
|
||||
"id",
|
||||
"created_at",
|
||||
@@ -128,15 +127,14 @@ class TranscodeJobAdmin(admin.ModelAdmin):
|
||||
"celery_task_id",
|
||||
"preset_snapshot",
|
||||
]
|
||||
raw_id_fields = ["source_asset", "preset", "output_asset"]
|
||||
|
||||
fieldsets = [
|
||||
(None, {"fields": ["id", "source_asset", "status", "error_message"]}),
|
||||
(None, {"fields": ["id", "source_asset_id", "status", "error_message"]}),
|
||||
(
|
||||
"Configuration",
|
||||
{
|
||||
"fields": [
|
||||
"preset",
|
||||
"preset_id",
|
||||
"preset_snapshot",
|
||||
"trim_start",
|
||||
"trim_end",
|
||||
@@ -144,7 +142,7 @@ class TranscodeJobAdmin(admin.ModelAdmin):
|
||||
]
|
||||
},
|
||||
),
|
||||
("Output", {"fields": ["output_filename", "output_path", "output_asset"]}),
|
||||
("Output", {"fields": ["output_filename", "output_path", "output_asset_id"]}),
|
||||
(
|
||||
"Progress",
|
||||
{"fields": ["progress", "current_frame", "current_time", "speed"]},
|
||||
@@ -168,6 +166,11 @@ class TranscodeJobAdmin(admin.ModelAdmin):
|
||||
|
||||
id_short.short_description = "ID"
|
||||
|
||||
def source_asset_id_short(self, obj):
|
||||
return str(obj.source_asset_id)[:8] if obj.source_asset_id else "-"
|
||||
|
||||
source_asset_id_short.short_description = "Source"
|
||||
|
||||
def progress_display(self, obj):
|
||||
return f"{obj.progress:.1f}%"
|
||||
|
||||
|
||||
@@ -1,25 +1,31 @@
|
||||
"""
|
||||
Django ORM Models - GENERATED FILE
|
||||
|
||||
Do not edit directly. Modify schema/models/*.py and run:
|
||||
python schema/generate.py --django
|
||||
Do not edit directly. Regenerate using modelgen.
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from django.db import models
|
||||
|
||||
class MediaAsset(models.Model):
|
||||
"""A video/audio file registered in the system."""
|
||||
|
||||
class Status(models.TextChoices):
|
||||
class AssetStatus(models.TextChoices):
|
||||
PENDING = "pending", "Pending"
|
||||
READY = "ready", "Ready"
|
||||
ERROR = "error", "Error"
|
||||
|
||||
class JobStatus(models.TextChoices):
|
||||
PENDING = "pending", "Pending"
|
||||
PROCESSING = "processing", "Processing"
|
||||
COMPLETED = "completed", "Completed"
|
||||
FAILED = "failed", "Failed"
|
||||
CANCELLED = "cancelled", "Cancelled"
|
||||
|
||||
class MediaAsset(models.Model):
|
||||
"""A video/audio file registered in the system."""
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
|
||||
filename = models.CharField(max_length=500)
|
||||
file_path = models.CharField(max_length=1000)
|
||||
status = models.CharField(max_length=20, choices=Status.choices, default=Status.PENDING)
|
||||
status = models.CharField(max_length=20, choices=AssetStatus.choices, default=AssetStatus.PENDING)
|
||||
error_message = models.TextField(blank=True, default='')
|
||||
file_size = models.BigIntegerField(null=True, blank=True)
|
||||
duration = models.FloatField(null=True, blank=True, default=None)
|
||||
@@ -74,13 +80,6 @@ class TranscodePreset(models.Model):
|
||||
class TranscodeJob(models.Model):
|
||||
"""A transcoding or trimming job in the queue."""
|
||||
|
||||
class Status(models.TextChoices):
|
||||
PENDING = "pending", "Pending"
|
||||
PROCESSING = "processing", "Processing"
|
||||
COMPLETED = "completed", "Completed"
|
||||
FAILED = "failed", "Failed"
|
||||
CANCELLED = "cancelled", "Cancelled"
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
|
||||
source_asset_id = models.UUIDField()
|
||||
preset_id = models.UUIDField(null=True, blank=True)
|
||||
@@ -90,13 +89,14 @@ class TranscodeJob(models.Model):
|
||||
output_filename = models.CharField(max_length=500)
|
||||
output_path = models.CharField(max_length=1000, null=True, blank=True)
|
||||
output_asset_id = models.UUIDField(null=True, blank=True)
|
||||
status = models.CharField(max_length=20, choices=Status.choices, default=Status.PENDING)
|
||||
status = models.CharField(max_length=20, choices=JobStatus.choices, default=JobStatus.PENDING)
|
||||
progress = models.FloatField(default=0.0)
|
||||
current_frame = models.IntegerField(null=True, blank=True, default=None)
|
||||
current_time = models.FloatField(null=True, blank=True, default=None)
|
||||
speed = models.CharField(max_length=255, null=True, blank=True)
|
||||
error_message = models.TextField(blank=True, default='')
|
||||
celery_task_id = models.CharField(max_length=255, null=True, blank=True)
|
||||
execution_arn = models.CharField(max_length=255, null=True, blank=True)
|
||||
priority = models.IntegerField(default=0)
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
started_at = models.DateTimeField(null=True, blank=True)
|
||||
|
||||
@@ -15,6 +15,17 @@ redis>=5.0.0
|
||||
# FFmpeg
|
||||
ffmpeg-python>=0.2.0
|
||||
|
||||
# gRPC
|
||||
grpcio>=1.60.0
|
||||
grpcio-tools>=1.60.0
|
||||
|
||||
# AWS
|
||||
boto3>=1.34.0
|
||||
|
||||
# GraphQL
|
||||
graphene>=3.3
|
||||
starlette-graphene3>=0.6.0
|
||||
|
||||
# Testing
|
||||
pytest>=7.4.0
|
||||
pytest-django>=4.7.0
|
||||
|
||||
10
rpc/__init__.py
Normal file
10
rpc/__init__.py
Normal file
@@ -0,0 +1,10 @@
|
||||
"""
|
||||
MPR gRPC Module
|
||||
|
||||
Provides gRPC server and client for worker communication.
|
||||
|
||||
Generated stubs (worker_pb2.py, worker_pb2_grpc.py) are created by:
|
||||
python schema/generate.py --proto
|
||||
|
||||
Requires: grpcio, grpcio-tools
|
||||
"""
|
||||
@@ -10,11 +10,7 @@ from typing import Callable, Iterator, Optional
|
||||
import grpc
|
||||
|
||||
# Generated stubs - run `python schema/generate.py --proto` if missing
|
||||
try:
|
||||
from . import worker_pb2, worker_pb2_grpc
|
||||
except ImportError:
|
||||
import worker_pb2
|
||||
import worker_pb2_grpc
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
// MPR Worker Service - GENERATED FILE
|
||||
// Protocol Buffer Definitions - GENERATED FILE
|
||||
//
|
||||
// Do not edit directly. Modify schema/models/grpc.py and run:
|
||||
// python schema/generate.py --proto
|
||||
// Do not edit directly. Regenerate using modelgen.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
@@ -18,11 +18,7 @@ GRPC_PORT = int(os.environ.get("GRPC_PORT", "50051"))
|
||||
GRPC_MAX_WORKERS = int(os.environ.get("GRPC_MAX_WORKERS", "10"))
|
||||
|
||||
# Generated stubs - run `python schema/generate.py --proto` if missing
|
||||
try:
|
||||
from . import worker_pb2, worker_pb2_grpc
|
||||
except ImportError:
|
||||
import worker_pb2
|
||||
import worker_pb2_grpc
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -63,7 +59,7 @@ class WorkerServicer(worker_pb2_grpc.WorkerServiceServicer):
|
||||
|
||||
# Dispatch to Celery if available
|
||||
if self.celery_app:
|
||||
from worker.tasks import run_transcode_job
|
||||
from task.tasks import run_transcode_job
|
||||
|
||||
task = run_transcode_job.delay(
|
||||
job_id=job_id,
|
||||
@@ -205,7 +201,7 @@ def update_job_progress(
|
||||
"""
|
||||
Update job progress (called from worker tasks).
|
||||
|
||||
This updates the in-memory state that StreamProgress reads from.
|
||||
Updates both the in-memory gRPC state and the Django database.
|
||||
"""
|
||||
if job_id in _active_jobs:
|
||||
_active_jobs[job_id].update(
|
||||
@@ -219,6 +215,36 @@ def update_job_progress(
|
||||
}
|
||||
)
|
||||
|
||||
# Update Django database
|
||||
try:
|
||||
from django.utils import timezone
|
||||
|
||||
from mpr.media_assets.models import TranscodeJob
|
||||
|
||||
update_fields = ["progress", "current_frame", "current_time", "speed", "status"]
|
||||
updates = {
|
||||
"progress": progress,
|
||||
"current_frame": current_frame,
|
||||
"current_time": current_time,
|
||||
"speed": str(speed),
|
||||
"status": status,
|
||||
}
|
||||
|
||||
if error:
|
||||
updates["error_message"] = error
|
||||
update_fields.append("error_message")
|
||||
|
||||
if status == "processing":
|
||||
updates["started_at"] = timezone.now()
|
||||
update_fields.append("started_at")
|
||||
elif status in ("completed", "failed"):
|
||||
updates["completed_at"] = timezone.now()
|
||||
update_fields.append("completed_at")
|
||||
|
||||
TranscodeJob.objects.filter(id=job_id).update(**updates)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to update job {job_id} in DB: {e}")
|
||||
|
||||
|
||||
def serve(port: int = None, celery_app=None) -> grpc.Server:
|
||||
"""
|
||||
52
rpc/worker_pb2.py
Normal file
52
rpc/worker_pb2.py
Normal file
@@ -0,0 +1,52 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
# NO CHECKED-IN PROTOBUF GENCODE
|
||||
# source: worker.proto
|
||||
# Protobuf Python Version: 6.31.1
|
||||
"""Generated protocol buffer code."""
|
||||
from google.protobuf import descriptor as _descriptor
|
||||
from google.protobuf import descriptor_pool as _descriptor_pool
|
||||
from google.protobuf import runtime_version as _runtime_version
|
||||
from google.protobuf import symbol_database as _symbol_database
|
||||
from google.protobuf.internal import builder as _builder
|
||||
_runtime_version.ValidateProtobufRuntimeVersion(
|
||||
_runtime_version.Domain.PUBLIC,
|
||||
6,
|
||||
31,
|
||||
1,
|
||||
'',
|
||||
'worker.proto'
|
||||
)
|
||||
# @@protoc_insertion_point(imports)
|
||||
|
||||
_sym_db = _symbol_database.Default()
|
||||
|
||||
|
||||
|
||||
|
||||
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cworker.proto\x12\nmpr.worker\"\xa7\x01\n\nJobRequest\x12\x0e\n\x06job_id\x18\x01 \x01(\t\x12\x13\n\x0bsource_path\x18\x02 \x01(\t\x12\x13\n\x0boutput_path\x18\x03 \x01(\t\x12\x13\n\x0bpreset_json\x18\x04 \x01(\t\x12\x17\n\ntrim_start\x18\x05 \x01(\x02H\x00\x88\x01\x01\x12\x15\n\x08trim_end\x18\x06 \x01(\x02H\x01\x88\x01\x01\x42\r\n\x0b_trim_startB\x0b\n\t_trim_end\"@\n\x0bJobResponse\x12\x0e\n\x06job_id\x18\x01 \x01(\t\x12\x10\n\x08\x61\x63\x63\x65pted\x18\x02 \x01(\x08\x12\x0f\n\x07message\x18\x03 \x01(\t\"!\n\x0fProgressRequest\x12\x0e\n\x06job_id\x18\x01 \x01(\t\"\x9c\x01\n\x0eProgressUpdate\x12\x0e\n\x06job_id\x18\x01 \x01(\t\x12\x10\n\x08progress\x18\x02 \x01(\x05\x12\x15\n\rcurrent_frame\x18\x03 \x01(\x05\x12\x14\n\x0c\x63urrent_time\x18\x04 \x01(\x02\x12\r\n\x05speed\x18\x05 \x01(\x02\x12\x0e\n\x06status\x18\x06 \x01(\t\x12\x12\n\x05\x65rror\x18\x07 \x01(\tH\x00\x88\x01\x01\x42\x08\n\x06_error\"\x1f\n\rCancelRequest\x12\x0e\n\x06job_id\x18\x01 \x01(\t\"D\n\x0e\x43\x61ncelResponse\x12\x0e\n\x06job_id\x18\x01 \x01(\t\x12\x11\n\tcancelled\x18\x02 \x01(\x08\x12\x0f\n\x07message\x18\x03 \x01(\t\"g\n\x0cWorkerStatus\x12\x11\n\tavailable\x18\x01 \x01(\x08\x12\x13\n\x0b\x61\x63tive_jobs\x18\x02 \x01(\x05\x12\x18\n\x10supported_codecs\x18\x03 \x03(\t\x12\x15\n\rgpu_available\x18\x04 \x01(\x08\"\x07\n\x05\x45mpty2\x9e\x02\n\rWorkerService\x12<\n\tSubmitJob\x12\x16.mpr.worker.JobRequest\x1a\x17.mpr.worker.JobResponse\x12K\n\x0eStreamProgress\x12\x1b.mpr.worker.ProgressRequest\x1a\x1a.mpr.worker.ProgressUpdate0\x01\x12\x42\n\tCancelJob\x12\x19.mpr.worker.CancelRequest\x1a\x1a.mpr.worker.CancelResponse\x12>\n\x0fGetWorkerStatus\x12\x11.mpr.worker.Empty\x1a\x18.mpr.worker.WorkerStatusb\x06proto3')
|
||||
|
||||
_globals = globals()
|
||||
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
|
||||
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'worker_pb2', _globals)
|
||||
if not _descriptor._USE_C_DESCRIPTORS:
|
||||
DESCRIPTOR._loaded_options = None
|
||||
_globals['_JOBREQUEST']._serialized_start=29
|
||||
_globals['_JOBREQUEST']._serialized_end=196
|
||||
_globals['_JOBRESPONSE']._serialized_start=198
|
||||
_globals['_JOBRESPONSE']._serialized_end=262
|
||||
_globals['_PROGRESSREQUEST']._serialized_start=264
|
||||
_globals['_PROGRESSREQUEST']._serialized_end=297
|
||||
_globals['_PROGRESSUPDATE']._serialized_start=300
|
||||
_globals['_PROGRESSUPDATE']._serialized_end=456
|
||||
_globals['_CANCELREQUEST']._serialized_start=458
|
||||
_globals['_CANCELREQUEST']._serialized_end=489
|
||||
_globals['_CANCELRESPONSE']._serialized_start=491
|
||||
_globals['_CANCELRESPONSE']._serialized_end=559
|
||||
_globals['_WORKERSTATUS']._serialized_start=561
|
||||
_globals['_WORKERSTATUS']._serialized_end=664
|
||||
_globals['_EMPTY']._serialized_start=666
|
||||
_globals['_EMPTY']._serialized_end=673
|
||||
_globals['_WORKERSERVICE']._serialized_start=676
|
||||
_globals['_WORKERSERVICE']._serialized_end=962
|
||||
# @@protoc_insertion_point(module_scope)
|
||||
226
rpc/worker_pb2_grpc.py
Normal file
226
rpc/worker_pb2_grpc.py
Normal file
@@ -0,0 +1,226 @@
|
||||
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
|
||||
"""Client and server classes corresponding to protobuf-defined services."""
|
||||
import grpc
|
||||
import warnings
|
||||
|
||||
from . import worker_pb2 as worker__pb2
|
||||
|
||||
GRPC_GENERATED_VERSION = '1.76.0'
|
||||
GRPC_VERSION = grpc.__version__
|
||||
_version_not_supported = False
|
||||
|
||||
try:
|
||||
from grpc._utilities import first_version_is_lower
|
||||
_version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
|
||||
except ImportError:
|
||||
_version_not_supported = True
|
||||
|
||||
if _version_not_supported:
|
||||
raise RuntimeError(
|
||||
f'The grpc package installed is at version {GRPC_VERSION},'
|
||||
+ ' but the generated code in worker_pb2_grpc.py depends on'
|
||||
+ f' grpcio>={GRPC_GENERATED_VERSION}.'
|
||||
+ f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
|
||||
+ f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
|
||||
)
|
||||
|
||||
|
||||
class WorkerServiceStub(object):
|
||||
"""Missing associated documentation comment in .proto file."""
|
||||
|
||||
def __init__(self, channel):
|
||||
"""Constructor.
|
||||
|
||||
Args:
|
||||
channel: A grpc.Channel.
|
||||
"""
|
||||
self.SubmitJob = channel.unary_unary(
|
||||
'/mpr.worker.WorkerService/SubmitJob',
|
||||
request_serializer=worker__pb2.JobRequest.SerializeToString,
|
||||
response_deserializer=worker__pb2.JobResponse.FromString,
|
||||
_registered_method=True)
|
||||
self.StreamProgress = channel.unary_stream(
|
||||
'/mpr.worker.WorkerService/StreamProgress',
|
||||
request_serializer=worker__pb2.ProgressRequest.SerializeToString,
|
||||
response_deserializer=worker__pb2.ProgressUpdate.FromString,
|
||||
_registered_method=True)
|
||||
self.CancelJob = channel.unary_unary(
|
||||
'/mpr.worker.WorkerService/CancelJob',
|
||||
request_serializer=worker__pb2.CancelRequest.SerializeToString,
|
||||
response_deserializer=worker__pb2.CancelResponse.FromString,
|
||||
_registered_method=True)
|
||||
self.GetWorkerStatus = channel.unary_unary(
|
||||
'/mpr.worker.WorkerService/GetWorkerStatus',
|
||||
request_serializer=worker__pb2.Empty.SerializeToString,
|
||||
response_deserializer=worker__pb2.WorkerStatus.FromString,
|
||||
_registered_method=True)
|
||||
|
||||
|
||||
class WorkerServiceServicer(object):
|
||||
"""Missing associated documentation comment in .proto file."""
|
||||
|
||||
def SubmitJob(self, request, context):
|
||||
"""Missing associated documentation comment in .proto file."""
|
||||
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
||||
context.set_details('Method not implemented!')
|
||||
raise NotImplementedError('Method not implemented!')
|
||||
|
||||
def StreamProgress(self, request, context):
|
||||
"""Missing associated documentation comment in .proto file."""
|
||||
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
||||
context.set_details('Method not implemented!')
|
||||
raise NotImplementedError('Method not implemented!')
|
||||
|
||||
def CancelJob(self, request, context):
|
||||
"""Missing associated documentation comment in .proto file."""
|
||||
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
||||
context.set_details('Method not implemented!')
|
||||
raise NotImplementedError('Method not implemented!')
|
||||
|
||||
def GetWorkerStatus(self, request, context):
|
||||
"""Missing associated documentation comment in .proto file."""
|
||||
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
||||
context.set_details('Method not implemented!')
|
||||
raise NotImplementedError('Method not implemented!')
|
||||
|
||||
|
||||
def add_WorkerServiceServicer_to_server(servicer, server):
|
||||
rpc_method_handlers = {
|
||||
'SubmitJob': grpc.unary_unary_rpc_method_handler(
|
||||
servicer.SubmitJob,
|
||||
request_deserializer=worker__pb2.JobRequest.FromString,
|
||||
response_serializer=worker__pb2.JobResponse.SerializeToString,
|
||||
),
|
||||
'StreamProgress': grpc.unary_stream_rpc_method_handler(
|
||||
servicer.StreamProgress,
|
||||
request_deserializer=worker__pb2.ProgressRequest.FromString,
|
||||
response_serializer=worker__pb2.ProgressUpdate.SerializeToString,
|
||||
),
|
||||
'CancelJob': grpc.unary_unary_rpc_method_handler(
|
||||
servicer.CancelJob,
|
||||
request_deserializer=worker__pb2.CancelRequest.FromString,
|
||||
response_serializer=worker__pb2.CancelResponse.SerializeToString,
|
||||
),
|
||||
'GetWorkerStatus': grpc.unary_unary_rpc_method_handler(
|
||||
servicer.GetWorkerStatus,
|
||||
request_deserializer=worker__pb2.Empty.FromString,
|
||||
response_serializer=worker__pb2.WorkerStatus.SerializeToString,
|
||||
),
|
||||
}
|
||||
generic_handler = grpc.method_handlers_generic_handler(
|
||||
'mpr.worker.WorkerService', rpc_method_handlers)
|
||||
server.add_generic_rpc_handlers((generic_handler,))
|
||||
server.add_registered_method_handlers('mpr.worker.WorkerService', rpc_method_handlers)
|
||||
|
||||
|
||||
# This class is part of an EXPERIMENTAL API.
|
||||
class WorkerService(object):
|
||||
"""Missing associated documentation comment in .proto file."""
|
||||
|
||||
@staticmethod
|
||||
def SubmitJob(request,
|
||||
target,
|
||||
options=(),
|
||||
channel_credentials=None,
|
||||
call_credentials=None,
|
||||
insecure=False,
|
||||
compression=None,
|
||||
wait_for_ready=None,
|
||||
timeout=None,
|
||||
metadata=None):
|
||||
return grpc.experimental.unary_unary(
|
||||
request,
|
||||
target,
|
||||
'/mpr.worker.WorkerService/SubmitJob',
|
||||
worker__pb2.JobRequest.SerializeToString,
|
||||
worker__pb2.JobResponse.FromString,
|
||||
options,
|
||||
channel_credentials,
|
||||
insecure,
|
||||
call_credentials,
|
||||
compression,
|
||||
wait_for_ready,
|
||||
timeout,
|
||||
metadata,
|
||||
_registered_method=True)
|
||||
|
||||
@staticmethod
|
||||
def StreamProgress(request,
|
||||
target,
|
||||
options=(),
|
||||
channel_credentials=None,
|
||||
call_credentials=None,
|
||||
insecure=False,
|
||||
compression=None,
|
||||
wait_for_ready=None,
|
||||
timeout=None,
|
||||
metadata=None):
|
||||
return grpc.experimental.unary_stream(
|
||||
request,
|
||||
target,
|
||||
'/mpr.worker.WorkerService/StreamProgress',
|
||||
worker__pb2.ProgressRequest.SerializeToString,
|
||||
worker__pb2.ProgressUpdate.FromString,
|
||||
options,
|
||||
channel_credentials,
|
||||
insecure,
|
||||
call_credentials,
|
||||
compression,
|
||||
wait_for_ready,
|
||||
timeout,
|
||||
metadata,
|
||||
_registered_method=True)
|
||||
|
||||
@staticmethod
|
||||
def CancelJob(request,
|
||||
target,
|
||||
options=(),
|
||||
channel_credentials=None,
|
||||
call_credentials=None,
|
||||
insecure=False,
|
||||
compression=None,
|
||||
wait_for_ready=None,
|
||||
timeout=None,
|
||||
metadata=None):
|
||||
return grpc.experimental.unary_unary(
|
||||
request,
|
||||
target,
|
||||
'/mpr.worker.WorkerService/CancelJob',
|
||||
worker__pb2.CancelRequest.SerializeToString,
|
||||
worker__pb2.CancelResponse.FromString,
|
||||
options,
|
||||
channel_credentials,
|
||||
insecure,
|
||||
call_credentials,
|
||||
compression,
|
||||
wait_for_ready,
|
||||
timeout,
|
||||
metadata,
|
||||
_registered_method=True)
|
||||
|
||||
@staticmethod
|
||||
def GetWorkerStatus(request,
|
||||
target,
|
||||
options=(),
|
||||
channel_credentials=None,
|
||||
call_credentials=None,
|
||||
insecure=False,
|
||||
compression=None,
|
||||
wait_for_ready=None,
|
||||
timeout=None,
|
||||
metadata=None):
|
||||
return grpc.experimental.unary_unary(
|
||||
request,
|
||||
target,
|
||||
'/mpr.worker.WorkerService/GetWorkerStatus',
|
||||
worker__pb2.Empty.SerializeToString,
|
||||
worker__pb2.WorkerStatus.FromString,
|
||||
options,
|
||||
channel_credentials,
|
||||
insecure,
|
||||
call_credentials,
|
||||
compression,
|
||||
wait_for_ready,
|
||||
timeout,
|
||||
metadata,
|
||||
_registered_method=True)
|
||||
@@ -4,7 +4,7 @@ MPR Schema Definitions - Source of Truth
|
||||
This package defines the core data models as Python dataclasses.
|
||||
These definitions are used to generate:
|
||||
- Django ORM models (mpr/media_assets/models.py)
|
||||
- Pydantic schemas (api/schemas/*.py)
|
||||
- Pydantic schemas (api/schema/*.py)
|
||||
- TypeScript types (ui/timeline/src/types.ts)
|
||||
- Protobuf definitions (grpc/protos/worker.proto)
|
||||
|
||||
|
||||
@@ -1,709 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
MPR Model Generator
|
||||
|
||||
Generates framework-specific models from schema/models/:
|
||||
- Django ORM models -> mpr/media_assets/models.py
|
||||
- Pydantic schemas -> api/schemas/*.py
|
||||
- TypeScript types -> ui/timeline/src/types.ts
|
||||
- Protobuf -> grpc/protos/worker.proto
|
||||
|
||||
Usage:
|
||||
python schema/generate.py [--django] [--pydantic] [--typescript] [--proto] [--all]
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import dataclasses as dc
|
||||
import subprocess
|
||||
import sys
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Any, Callable, Union, get_args, get_origin, get_type_hints
|
||||
|
||||
PROJECT_ROOT = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(PROJECT_ROOT))
|
||||
|
||||
from schema.models import API_MODELS, DATACLASSES, ENUMS, GRPC_MESSAGES, GRPC_SERVICE
|
||||
|
||||
# =============================================================================
|
||||
# Type Dispatch Tables
|
||||
# =============================================================================
|
||||
|
||||
DJANGO_TYPES: dict[Any, str] = {
|
||||
str: "models.CharField(max_length={max_length}{opts})",
|
||||
int: "models.IntegerField({opts})",
|
||||
float: "models.FloatField({opts})",
|
||||
bool: "models.BooleanField(default={default})",
|
||||
"UUID": "models.UUIDField({opts})",
|
||||
"datetime": "models.DateTimeField({opts})",
|
||||
"dict": "models.JSONField(default=dict, blank=True)",
|
||||
"list": "models.JSONField(default=list, blank=True)",
|
||||
"text": "models.TextField(blank=True, default='')",
|
||||
"bigint": "models.BigIntegerField({opts})",
|
||||
"enum": "models.CharField(max_length=20, choices=Status.choices{opts})",
|
||||
}
|
||||
|
||||
DJANGO_SPECIAL: dict[str, str] = {
|
||||
"id": "models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)",
|
||||
"created_at": "models.DateTimeField(auto_now_add=True)",
|
||||
"updated_at": "models.DateTimeField(auto_now=True)",
|
||||
}
|
||||
|
||||
PYDANTIC_RESOLVERS: dict[Any, Callable[[Any], str]] = {
|
||||
str: lambda _: "str",
|
||||
int: lambda _: "int",
|
||||
float: lambda _: "float",
|
||||
bool: lambda _: "bool",
|
||||
"UUID": lambda _: "UUID",
|
||||
"datetime": lambda _: "datetime",
|
||||
"dict": lambda _: "Dict[str, Any]",
|
||||
"list": lambda base: f"List[{get_list_inner(base)}]",
|
||||
"enum": lambda base: base.__name__,
|
||||
}
|
||||
|
||||
TS_RESOLVERS: dict[Any, Callable[[Any], str]] = {
|
||||
str: lambda _: "string",
|
||||
int: lambda _: "number",
|
||||
float: lambda _: "number",
|
||||
bool: lambda _: "boolean",
|
||||
"UUID": lambda _: "string",
|
||||
"datetime": lambda _: "string",
|
||||
"dict": lambda _: "Record<string, unknown>",
|
||||
"list": lambda base: f"{TS_RESOLVERS.get(get_args(base)[0], lambda _: 'string')(None)}[]"
|
||||
if get_args(base)
|
||||
else "string[]",
|
||||
"enum": lambda base: base.__name__,
|
||||
}
|
||||
|
||||
PROTO_RESOLVERS: dict[Any, Callable[[Any], str]] = {
|
||||
str: lambda _: "string",
|
||||
int: lambda _: "int32",
|
||||
float: lambda _: "float",
|
||||
bool: lambda _: "bool",
|
||||
"list": lambda base: f"repeated {PROTO_RESOLVERS.get(get_args(base)[0], lambda _: 'string')(None)}"
|
||||
if get_args(base)
|
||||
else "repeated string",
|
||||
}
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Type Helpers
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def unwrap_optional(type_hint: Any) -> tuple[Any, bool]:
|
||||
"""Unwrap Optional[T] -> (T, True) or (T, False) if not optional."""
|
||||
origin = get_origin(type_hint)
|
||||
if origin is Union:
|
||||
args = [a for a in get_args(type_hint) if a is not type(None)]
|
||||
return (args[0] if args else str, True)
|
||||
return (type_hint, False)
|
||||
|
||||
|
||||
def get_origin_name(type_hint: Any) -> str | None:
|
||||
"""Get origin type name: 'dict', 'list', or None."""
|
||||
origin = get_origin(type_hint)
|
||||
if origin is dict:
|
||||
return "dict"
|
||||
if origin is list:
|
||||
return "list"
|
||||
return None
|
||||
|
||||
|
||||
def get_type_name(type_hint: Any) -> str | None:
|
||||
"""Get type name for special types like UUID, datetime."""
|
||||
if hasattr(type_hint, "__name__"):
|
||||
return type_hint.__name__
|
||||
return None
|
||||
|
||||
|
||||
def get_list_inner(type_hint: Any) -> str:
    """Name of List[T]'s element type for primitives; 'str' otherwise."""
    primitive_names = {str: "str", int: "int", float: "float", bool: "bool"}
    args = get_args(type_hint)
    if not args:
        return "str"
    return primitive_names.get(args[0], "str")
|
||||
|
||||
|
||||
def get_field_default(field: dc.Field) -> Any:
    """Return the field's declared default, or dc.MISSING when absent.

    ``dc.Field.default`` already *is* ``dc.MISSING`` for fields without
    a literal default, so no branching is needed. ``default_factory`` is
    deliberately not consulted here.
    """
    return field.default
|
||||
|
||||
|
||||
def format_opts(optional: bool, extra: list[str] | None = None) -> str:
|
||||
"""Format field options string."""
|
||||
parts = []
|
||||
if optional:
|
||||
parts.append("null=True, blank=True")
|
||||
if extra:
|
||||
parts.extend(extra)
|
||||
return ", ".join(parts)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Django Generator
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def resolve_django_type(name: str, type_hint: Any, default: Any) -> str:
    """Resolve a dataclass field to a Django field declaration string.

    Args:
        name: Field name; drives name-based heuristics (text vs char,
            bigint, max_length) and DJANGO_SPECIAL overrides.
        type_hint: The (possibly Optional[...]) annotation.
        default: The dataclass default, or dc.MISSING.

    Returns:
        A Django field expression, rendered from the DJANGO_TYPES
        templates (defined elsewhere in this module).
    """
    # Special fields: exact-name overrides take absolute precedence.
    if name in DJANGO_SPECIAL:
        return DJANGO_SPECIAL[name]

    base, optional = unwrap_optional(type_hint)
    origin = get_origin_name(base)
    type_name = get_type_name(base)
    opts = format_opts(optional)

    # Container types — note: `opts` is ignored here, so Optional[dict]
    # and Optional[list] render identically to required ones.
    if origin == "dict":
        return DJANGO_TYPES["dict"]
    if origin == "list":
        return DJANGO_TYPES["list"]

    # UUID / datetime
    if type_name == "UUID":
        return DJANGO_TYPES["UUID"].format(opts=opts)
    if type_name == "datetime":
        return DJANGO_TYPES["datetime"].format(opts=opts)

    # Enum: choices reference the inner `Status` TextChoices class that
    # generate_django_model() emits — the class name is hard-coded, so
    # this only works for the single-enum-per-model layout used here.
    if isinstance(base, type) and issubclass(base, Enum):
        extra = []
        if optional:
            extra.append("null=True, blank=True")
        if default is not dc.MISSING and isinstance(default, Enum):
            extra.append(f"default=Status.{default.name}")
        return DJANGO_TYPES["enum"].format(
            opts=", " + ", ".join(extra) if extra else ""
        )

    # Text fields: long free-form strings get TextField by name heuristic.
    if base is str and any(x in name for x in ("message", "comments", "description")):
        return DJANGO_TYPES["text"]

    # BigInt fields: byte counts / bitrates can exceed 32-bit range.
    if base is int and name in ("file_size", "bitrate"):
        return DJANGO_TYPES["bigint"].format(opts=opts)

    # Basic types. CharField max_length is a name heuristic: paths are
    # longest, filenames medium, everything else 255.
    if base is str:
        max_length = 1000 if "path" in name else 500 if "filename" in name else 255
        return DJANGO_TYPES[str].format(
            max_length=max_length, opts=", " + opts if opts else ""
        )

    if base is int:
        extra = [opts] if opts else []
        # Skip callable defaults (default_factory sentinels and the like).
        if default is not dc.MISSING and not callable(default):
            extra.append(f"default={default}")
        return DJANGO_TYPES[int].format(opts=", ".join(extra))

    if base is float:
        extra = [opts] if opts else []
        if default is not dc.MISSING and not callable(default):
            extra.append(f"default={default}")
        return DJANGO_TYPES[float].format(opts=", ".join(extra))

    if base is bool:
        # Booleans always render a concrete default (False when unset).
        default_val = default if default is not dc.MISSING else False
        return DJANGO_TYPES[bool].format(default=default_val)

    # Fallback: unknown types become a plain CharField.
    return DJANGO_TYPES[str].format(max_length=255, opts=", " + opts if opts else "")
|
||||
|
||||
|
||||
def generate_django_model(cls: type) -> list[str]:
    """Render one dataclass as Django model source lines.

    Emits the class header, an inner ``Status`` TextChoices class for
    the first enum-typed field (if any), one field per annotation, and a
    standard Meta/__str__ tail.
    """
    doc_line = (cls.__doc__ or cls.__name__).strip().split(chr(10))[0]
    out = [
        f"class {cls.__name__}(models.Model):",
        f'    """{doc_line}"""',
        "",
    ]

    hints = get_type_hints(cls)
    field_map = {f.name: f for f in dc.fields(cls)}

    # Inner Status class: only the first enum found is emitted, matching
    # the hard-coded `Status` reference in resolve_django_type().
    for hint in hints.values():
        base, _ = unwrap_optional(hint)
        if isinstance(base, type) and issubclass(base, Enum):
            out.append("    class Status(models.TextChoices):")
            for member in base:
                label = member.name.replace("_", " ").title()
                out.append(f'        {member.name} = "{member.value}", "{label}"')
            out.append("")
            break

    # One Django field per public annotation.
    for field_name, hint in hints.items():
        if field_name.startswith("_"):
            continue
        fld = field_map.get(field_name)
        default = get_field_default(fld) if fld else dc.MISSING
        rendered = resolve_django_type(field_name, hint, default)
        out.append(f"    {field_name} = {rendered}")

    out += [
        "",
        "    class Meta:",
        '        ordering = ["-created_at"]',
        "",
        "    def __str__(self):",
    ]

    # __str__ prefers filename, then name, then the primary key.
    if "filename" in hints:
        out.append("        return self.filename")
    elif "name" in hints:
        out.append("        return self.name")
    else:
        out.append("        return str(self.id)")

    return out
|
||||
|
||||
|
||||
def generate_django() -> str:
    """Assemble the complete generated Django models.py source."""
    parts = [
        '"""',
        "Django ORM Models - GENERATED FILE",
        "",
        "Do not edit directly. Modify schema/models/*.py and run:",
        "    python schema/generate.py --django",
        '"""',
        "",
        "import uuid",
        "from django.db import models",
        "",
    ]

    # Two blank lines between generated model classes (PEP 8).
    for model_cls in DATACLASSES:
        parts += generate_django_model(model_cls)
        parts += ["", ""]

    return "\n".join(parts)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Pydantic Generator
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def resolve_pydantic_type(type_hint: Any) -> str:
    """Resolve a Python annotation to a Pydantic type string.

    Resolver lookup order: container origin name, special type name,
    the raw type itself, then the enum fallback; anything unmatched
    degrades to "str". Optional hints are re-wrapped as Optional[...].
    """
    base, optional = unwrap_optional(type_hint)

    resolver = PYDANTIC_RESOLVERS.get(get_origin_name(base))
    if resolver is None:
        resolver = PYDANTIC_RESOLVERS.get(get_type_name(base))
    if resolver is None:
        resolver = PYDANTIC_RESOLVERS.get(base)
    if resolver is None and isinstance(base, type) and issubclass(base, Enum):
        resolver = PYDANTIC_RESOLVERS["enum"]

    rendered = resolver(base) if resolver else "str"
    return f"Optional[{rendered}]" if optional else rendered
|
||||
|
||||
|
||||
def generate_pydantic_schema(cls: type, suffix: str) -> list[str]:
    """Render one Create/Update/Response Pydantic schema for *cls*.

    The dataclass name is shortened (Transcode/Media prefixes dropped)
    and the suffix appended; server-managed fields are skipped per
    suffix, and Update schemas force every field Optional.
    """
    short_name = cls.__name__.replace("Transcode", "").replace("Media", "")
    class_name = f"{short_name}{suffix}"

    skip = {
        "Create": {"id", "created_at", "updated_at", "status", "error_message"},
        "Update": {"id", "created_at", "updated_at"},
        "Response": set(),
    }.get(suffix, set())

    out = [
        f"class {class_name}(BaseSchema):",
        f'    """{class_name} schema."""',
    ]

    dc_fields = {f.name: f for f in dc.fields(cls)}

    for field_name, hint in get_type_hints(cls).items():
        if field_name.startswith("_") or field_name in skip:
            continue

        py_type = resolve_pydantic_type(hint)

        # Update schemas accept partial payloads: everything Optional.
        if suffix == "Update" and "Optional" not in py_type:
            py_type = f"Optional[{py_type}]"

        fld = dc_fields.get(field_name)
        default = get_field_default(fld) if fld else dc.MISSING

        if "Optional" in py_type:
            out.append(f"    {field_name}: {py_type} = None")
        elif default is not dc.MISSING and not callable(default):
            if isinstance(default, str):
                out.append(f'    {field_name}: {py_type} = "{default}"')
            elif isinstance(default, Enum):
                out.append(
                    f"    {field_name}: {py_type} = "
                    f"{default.__class__.__name__}.{default.name}"
                )
            else:
                out.append(f"    {field_name}: {py_type} = {default!r}")
        else:
            out.append(f"    {field_name}: {py_type}")

    return out
|
||||
|
||||
|
||||
def generate_pydantic() -> dict[str, str]:
    """Generate all Pydantic schema files.

    Returns:
        Mapping of filename -> file content for the api/schemas package:
        base.py (shared BaseSchema), one module per dataclass with its
        Create/Update/Response schemas (plus a copied enum, if any),
        and __init__.py re-exporting everything.
    """
    files = {}

    # base.py: shared ORM-mode base class for every generated schema.
    files["base.py"] = "\n".join(
        [
            '"""Pydantic Base Schema - GENERATED FILE"""',
            "",
            "from pydantic import BaseModel, ConfigDict",
            "",
            "",
            "class BaseSchema(BaseModel):",
            '    """Base schema with ORM mode."""',
            "    model_config = ConfigDict(from_attributes=True)",
            "",
        ]
    )

    # Schema files per model
    for cls in DATACLASSES:
        # Module name mirrors the shortened class name used by
        # generate_pydantic_schema (Transcode/Media prefixes dropped).
        module_name = cls.__name__.replace("Transcode", "").replace("Media", "").lower()

        lines = [
            f'"""{cls.__name__} Schemas - GENERATED FILE"""',
            "",
            "from datetime import datetime",
            "from enum import Enum",
            "from typing import Any, Dict, List, Optional",
            "from uuid import UUID",
            "",
            "from .base import BaseSchema",
            "",
        ]

        # Add enum if present — only the FIRST enum-typed field is copied
        # into the generated module (note the break below).
        hints = get_type_hints(cls)
        for type_hint in hints.values():
            base, _ = unwrap_optional(type_hint)
            if isinstance(base, type) and issubclass(base, Enum):
                lines.extend(
                    [
                        "",
                        f"class {base.__name__}(str, Enum):",
                    ]
                )
                for m in base:
                    lines.append(f'    {m.name} = "{m.value}"')
                lines.append("")
                break

        # Schemas: the three standard variants per model.
        for suffix in ["Create", "Update", "Response"]:
            lines.append("")
            lines.extend(generate_pydantic_schema(cls, suffix))

        lines.append("")
        files[f"{module_name}.py"] = "\n".join(lines)

    # __init__.py: re-export every schema (and enum) at package level.
    imports = ["from .base import BaseSchema"]
    all_exports = ['"BaseSchema"']

    for cls in DATACLASSES:
        name = cls.__name__.replace("Transcode", "").replace("Media", "")
        module = name.lower()
        imports.append(
            f"from .{module} import {name}Create, {name}Update, {name}Response"
        )
        all_exports.extend([f'"{name}Create"', f'"{name}Update"', f'"{name}Response"'])

        # Add enum export (again, first enum per model only).
        hints = get_type_hints(cls)
        for type_hint in hints.values():
            base, _ = unwrap_optional(type_hint)
            if isinstance(base, type) and issubclass(base, Enum):
                imports.append(f"from .{module} import {base.__name__}")
                all_exports.append(f'"{base.__name__}"')
                break

    files["__init__.py"] = "\n".join(
        [
            '"""API Schemas - GENERATED FILE"""',
            "",
            *imports,
            "",
            f"__all__ = [{', '.join(all_exports)}]",
            "",
        ]
    )

    return files
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# TypeScript Generator
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def resolve_ts_type(type_hint: Any) -> str:
    """Resolve a Python annotation to a TypeScript type string.

    Same resolver-lookup order as resolve_pydantic_type: container
    origin name, special type name, the raw type, then the enum
    fallback; anything unmatched degrades to "string". Optional hints
    render as ``T | null``.
    """
    base, optional = unwrap_optional(type_hint)

    resolver = TS_RESOLVERS.get(get_origin_name(base))
    if resolver is None:
        resolver = TS_RESOLVERS.get(get_type_name(base))
    if resolver is None:
        resolver = TS_RESOLVERS.get(base)
    if resolver is None and isinstance(base, type) and issubclass(base, Enum):
        resolver = TS_RESOLVERS["enum"]

    rendered = resolver(base) if resolver else "string"
    return f"{rendered} | null" if optional else rendered
|
||||
|
||||
|
||||
def generate_ts_interface(cls: type) -> list[str]:
    """Render a dataclass as a TypeScript interface declaration."""
    members = [
        f"  {field_name}: {resolve_ts_type(hint)};"
        for field_name, hint in get_type_hints(cls).items()
        if not field_name.startswith("_")
    ]
    return [f"export interface {cls.__name__} {{", *members, "}"]
|
||||
|
||||
|
||||
def generate_typescript() -> str:
    """Assemble the complete generated TypeScript types file."""
    out = [
        "/**",
        " * MPR TypeScript Types - GENERATED FILE",
        " *",
        " * Do not edit directly. Modify schema/models/*.py and run:",
        " * python schema/generate.py --typescript",
        " */",
        "",
    ]

    # Enums become string-literal union types.
    for enum_cls in ENUMS:
        members = " | ".join(f'"{m.value}"' for m in enum_cls)
        out += [f"export type {enum_cls.__name__} = {members};", ""]

    # Domain model interfaces.
    for model_cls in DATACLASSES:
        out += generate_ts_interface(model_cls)
        out.append("")

    # API request/response interfaces.
    out += ["// API Request/Response Types", ""]
    for api_cls in API_MODELS:
        out += generate_ts_interface(api_cls)
        out.append("")

    return "\n".join(out)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Proto Generator
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def resolve_proto_type(type_hint: Any) -> tuple[str, bool]:
    """Resolve a Python annotation to a proto3 type.

    Returns (type_string, is_optional); repeated fields are never
    reported optional, since proto3 forbids `optional repeated`.
    Unmatched types degrade to "string".
    """
    base, optional = unwrap_optional(type_hint)

    resolver = PROTO_RESOLVERS.get(get_origin_name(base))
    if resolver is None:
        resolver = PROTO_RESOLVERS.get(base)
    if resolver is None:
        return "string", optional

    rendered = resolver(base)
    repeated = rendered.startswith("repeated")
    return rendered, optional and not repeated
|
||||
|
||||
|
||||
def generate_proto_message(cls: type) -> list[str]:
    """Render a dataclass as a proto3 message definition."""
    hints = get_type_hints(cls)
    out = [f"message {cls.__name__} {{"]

    if not hints:
        out.append("  // Empty")
    else:
        # Field numbers are assigned sequentially from 1 in annotation
        # order — reordering dataclass fields changes the wire format.
        for field_no, (field_name, hint) in enumerate(hints.items(), start=1):
            proto_type, optional = resolve_proto_type(hint)
            qualifier = ""
            if optional and not proto_type.startswith("repeated"):
                qualifier = "optional "
            out.append(f"  {qualifier}{proto_type} {field_name} = {field_no};")

    out.append("}")
    return out
|
||||
|
||||
|
||||
def generate_proto() -> str:
    """Assemble the complete worker.proto source from GRPC_SERVICE."""
    out = [
        "// MPR Worker Service - GENERATED FILE",
        "//",
        "// Do not edit directly. Modify schema/models/grpc.py and run:",
        "// python schema/generate.py --proto",
        "",
        'syntax = "proto3";',
        "",
        f"package {GRPC_SERVICE['package']};",
        "",
        f"service {GRPC_SERVICE['name']} {{",
    ]

    # RPC methods; server-streaming responses get the stream qualifier.
    for method in GRPC_SERVICE["methods"]:
        request = method["request"].__name__
        response = method["response"].__name__
        if method["stream_response"]:
            response = f"stream {response}"
        out.append(f"  rpc {method['name']}({request}) returns ({response});")

    out.extend(["}", ""])

    # Message definitions.
    for message_cls in GRPC_MESSAGES:
        out.extend(generate_proto_message(message_cls))
        out.append("")

    return "\n".join(out)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Writers
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def write_file(path: Path, content: str) -> None:
    """Write *content* to *path*, creating parent directories as needed.

    The encoding is pinned to UTF-8: Path.write_text without an explicit
    encoding uses the platform locale default, which would make the
    generated sources differ between machines (e.g. cp1252 on Windows).
    """
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(content, encoding="utf-8")
    print(f" {path}")
|
||||
|
||||
|
||||
def write_django(output_dir: Path) -> None:
    """Write the generated Django models module under *output_dir*."""
    target = output_dir / "mpr" / "media_assets" / "models.py"
    write_file(target, generate_django())
|
||||
|
||||
|
||||
def write_pydantic(output_dir: Path) -> None:
    """Write every generated Pydantic schema file under *output_dir*."""
    schemas_dir = output_dir / "api" / "schemas"
    for filename, content in generate_pydantic().items():
        write_file(schemas_dir / filename, content)
|
||||
|
||||
|
||||
def write_typescript(output_dir: Path) -> None:
    """Write the generated TypeScript types file under *output_dir*."""
    target = output_dir / "ui" / "timeline" / "src" / "types.ts"
    write_file(target, generate_typescript())
|
||||
|
||||
|
||||
def write_proto(output_dir: Path) -> None:
    """Write worker.proto and compile Python gRPC stubs from it.

    Stub generation requires grpcio-tools. A failing protoc run is
    reported as a warning rather than aborting the other targets, and
    protoc's stderr is now printed so the failure is diagnosable
    (previously capture_output swallowed it entirely).
    """
    proto_dir = output_dir / "grpc" / "protos"
    proto_path = proto_dir / "worker.proto"
    write_file(proto_path, generate_proto())

    # Generate Python stubs next to the proto package.
    grpc_dir = output_dir / "grpc"
    result = subprocess.run(
        [
            sys.executable,
            "-m",
            "grpc_tools.protoc",
            f"-I{proto_dir}",
            f"--python_out={grpc_dir}",
            f"--grpc_python_out={grpc_dir}",
            str(proto_path),
        ],
        capture_output=True,
        text=True,
    )

    if result.returncode == 0:
        print(f" {grpc_dir}/worker_pb2.py")
        print(f" {grpc_dir}/worker_pb2_grpc.py")
    else:
        print(" Warning: grpc_tools failed - pip install grpcio-tools")
        # Surface the captured diagnostics instead of discarding them.
        if result.stderr:
            print(result.stderr.strip())
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Main
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def main() -> None:
    """CLI entry point: parse target flags and run selected generators."""
    parser = argparse.ArgumentParser(description="Generate from schema")
    for flag in ("--django", "--pydantic", "--typescript", "--proto", "--all"):
        parser.add_argument(flag, action="store_true")
    parser.add_argument("--output", type=Path, default=PROJECT_ROOT)
    args = parser.parse_args()

    # No explicit selection means "generate everything".
    if not any([args.django, args.pydantic, args.typescript, args.proto, args.all]):
        args.all = True

    print(f"Generating to {args.output}\n")

    targets: list[tuple[bool, str, Callable]] = [
        (args.django or args.all, "Django", write_django),
        (args.pydantic or args.all, "Pydantic", write_pydantic),
        (args.typescript or args.all, "TypeScript", write_typescript),
        (args.proto or args.all, "Proto", write_proto),
    ]

    for enabled, label, writer in targets:
        if not enabled:
            continue
        print(f"{label}:")
        writer(args.output)
        print()

    print("Done!")
|
||||
|
||||
|
||||
# Script entry point: run the generator CLI when executed directly.
if __name__ == "__main__":
    main()
|
||||
35
schema/modelgen.json
Normal file
35
schema/modelgen.json
Normal file
@@ -0,0 +1,35 @@
|
||||
{
|
||||
"schema": "schema/models",
|
||||
"targets": [
|
||||
{
|
||||
"target": "django",
|
||||
"output": "mpr/media_assets/models.py",
|
||||
"include": ["dataclasses", "enums"]
|
||||
},
|
||||
{
|
||||
"target": "pydantic",
|
||||
"output": "api/schema/",
|
||||
"include": ["dataclasses", "enums"],
|
||||
"name_map": {
|
||||
"TranscodeJob": "Job",
|
||||
"MediaAsset": "Asset",
|
||||
"TranscodePreset": "Preset"
|
||||
}
|
||||
},
|
||||
{
|
||||
"target": "graphene",
|
||||
"output": "api/schema/graphql.py",
|
||||
"include": ["dataclasses", "enums", "api"]
|
||||
},
|
||||
{
|
||||
"target": "typescript",
|
||||
"output": "ui/timeline/src/types.ts",
|
||||
"include": ["dataclasses", "enums", "api"]
|
||||
},
|
||||
{
|
||||
"target": "protobuf",
|
||||
"output": "rpc/protos/worker.proto",
|
||||
"include": ["grpc"]
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -5,7 +5,7 @@ This module exports all dataclasses, enums, and constants that the generator
|
||||
should process. Add new models here to have them included in generation.
|
||||
"""
|
||||
|
||||
from .api import CreateJobRequest, SystemStatus
|
||||
from .api import CreateJobRequest, ScanResult, SystemStatus
|
||||
from .grpc import (
|
||||
GRPC_SERVICE,
|
||||
CancelRequest,
|
||||
@@ -26,7 +26,7 @@ DATACLASSES = [MediaAsset, TranscodePreset, TranscodeJob]
|
||||
|
||||
# API request/response models - generates TypeScript only (no Django)
|
||||
# WorkerStatus from grpc.py is reused here
|
||||
API_MODELS = [CreateJobRequest, SystemStatus, WorkerStatus]
|
||||
API_MODELS = [CreateJobRequest, SystemStatus, ScanResult, WorkerStatus]
|
||||
|
||||
# Status enums - included in generated code
|
||||
ENUMS = [AssetStatus, JobStatus]
|
||||
@@ -50,6 +50,7 @@ __all__ = [
|
||||
"TranscodeJob",
|
||||
# API Models
|
||||
"CreateJobRequest",
|
||||
"ScanResult",
|
||||
"SystemStatus",
|
||||
# Enums
|
||||
"AssetStatus",
|
||||
|
||||
@@ -5,8 +5,8 @@ These are separate from the main domain models and represent
|
||||
the shape of data sent to/from the API endpoints.
|
||||
"""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
from dataclasses import dataclass, field
|
||||
from typing import List, Optional
|
||||
from uuid import UUID
|
||||
|
||||
|
||||
@@ -19,6 +19,7 @@ class CreateJobRequest:
|
||||
trim_start: Optional[float] = None # seconds
|
||||
trim_end: Optional[float] = None # seconds
|
||||
output_filename: Optional[str] = None
|
||||
priority: int = 0
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -29,4 +30,14 @@ class SystemStatus:
|
||||
version: str
|
||||
|
||||
|
||||
@dataclass
|
||||
class ScanResult:
|
||||
"""Result of scanning the media input bucket."""
|
||||
|
||||
found: int = 0
|
||||
registered: int = 0
|
||||
skipped: int = 0
|
||||
files: List[str] = field(default_factory=list)
|
||||
|
||||
|
||||
# Note: WorkerStatus is defined in grpc.py and reused here
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
gRPC message definitions for MPR worker communication.
|
||||
|
||||
This is the source of truth for gRPC messages. The generator creates:
|
||||
- grpc/protos/worker.proto (protobuf definition)
|
||||
- grpc/worker_pb2.py (generated Python classes)
|
||||
- grpc/worker_pb2_grpc.py (generated gRPC stubs)
|
||||
- rpc/protos/worker.proto (protobuf definition)
|
||||
- rpc/worker_pb2.py (generated Python classes)
|
||||
- rpc/worker_pb2_grpc.py (generated gRPC stubs)
|
||||
"""
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
@@ -63,6 +63,7 @@ class TranscodeJob:
|
||||
|
||||
# Worker tracking
|
||||
celery_task_id: Optional[str] = None
|
||||
execution_arn: Optional[str] = None # AWS Step Functions execution ARN
|
||||
priority: int = 0 # Lower = higher priority
|
||||
|
||||
# Timestamps
|
||||
|
||||
@@ -110,7 +110,16 @@ class LocalExecutor(Executor):
|
||||
|
||||
|
||||
class LambdaExecutor(Executor):
|
||||
"""Execute jobs via AWS Lambda (future implementation)."""
|
||||
"""Execute jobs via AWS Step Functions + Lambda."""
|
||||
|
||||
def __init__(self):
|
||||
import boto3
|
||||
|
||||
region = os.environ.get("AWS_REGION", "us-east-1")
|
||||
self.sfn = boto3.client("stepfunctions", region_name=region)
|
||||
self.state_machine_arn = os.environ["STEP_FUNCTION_ARN"]
|
||||
self.callback_url = os.environ.get("CALLBACK_URL", "")
|
||||
self.callback_api_key = os.environ.get("CALLBACK_API_KEY", "")
|
||||
|
||||
def run(
|
||||
self,
|
||||
@@ -123,8 +132,36 @@ class LambdaExecutor(Executor):
|
||||
duration: Optional[float] = None,
|
||||
progress_callback: Optional[Callable[[int, Dict[str, Any]], None]] = None,
|
||||
) -> bool:
|
||||
"""Execute job via AWS Lambda."""
|
||||
raise NotImplementedError("LambdaExecutor not yet implemented")
|
||||
"""Start a Step Functions execution for this job."""
|
||||
import json
|
||||
|
||||
payload = {
|
||||
"job_id": job_id,
|
||||
"source_key": source_path,
|
||||
"output_key": output_path,
|
||||
"preset": preset,
|
||||
"trim_start": trim_start,
|
||||
"trim_end": trim_end,
|
||||
"duration": duration,
|
||||
"callback_url": self.callback_url,
|
||||
"api_key": self.callback_api_key,
|
||||
}
|
||||
|
||||
response = self.sfn.start_execution(
|
||||
stateMachineArn=self.state_machine_arn,
|
||||
name=f"mpr-{job_id}",
|
||||
input=json.dumps(payload),
|
||||
)
|
||||
|
||||
# Store execution ARN on the job
|
||||
execution_arn = response["executionArn"]
|
||||
try:
|
||||
from mpr.media_assets.models import TranscodeJob
|
||||
TranscodeJob.objects.filter(id=job_id).update(execution_arn=execution_arn)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return True
|
||||
|
||||
|
||||
# Executor registry
|
||||
148
task/lambda_handler.py
Normal file
148
task/lambda_handler.py
Normal file
@@ -0,0 +1,148 @@
|
||||
"""
|
||||
AWS Lambda handler for media transcoding.
|
||||
|
||||
Receives a job payload from Step Functions, downloads source from S3,
|
||||
runs FFmpeg, uploads result to S3, and calls back to the API.
|
||||
|
||||
Uses the same core/ffmpeg module as the local Celery worker.
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
import boto3
|
||||
import requests
|
||||
|
||||
logger = logging.getLogger()
|
||||
logger.setLevel(logging.INFO)
|
||||
|
||||
# S3 config
|
||||
S3_BUCKET_IN = os.environ.get("S3_BUCKET_IN", "mpr-media-in")
|
||||
S3_BUCKET_OUT = os.environ.get("S3_BUCKET_OUT", "mpr-media-out")
|
||||
AWS_REGION = os.environ.get("AWS_REGION", "us-east-1")
|
||||
|
||||
s3 = boto3.client("s3", region_name=AWS_REGION)
|
||||
|
||||
|
||||
def handler(event, context):
    """
    Lambda entry point.

    Event payload (from Step Functions):
        {
            "job_id": "uuid",
            "source_key": "path/to/source.mp4",
            "output_key": "output_filename.mp4",
            "preset": {...} or null,
            "trim_start": float or null,
            "trim_end": float or null,
            "duration": float or null,
            "callback_url": "https://mpr.mcrn.ar/api",
            "api_key": "secret"
        }

    Downloads the source from S3, transcodes it with the shared
    core/ffmpeg module, uploads the result, and POSTs the outcome back
    to the API (best-effort). Temp files are always cleaned up.
    """
    job_id = event["job_id"]
    source_key = event["source_key"]
    output_key = event["output_key"]
    preset = event.get("preset")
    trim_start = event.get("trim_start")
    trim_end = event.get("trim_end")
    duration = event.get("duration")
    callback_url = event.get("callback_url", "")
    api_key = event.get("api_key", "")

    logger.info(f"Starting job {job_id}: {source_key} -> {output_key}")

    # Download source from S3. tempfile.mkstemp replaces the deprecated,
    # race-prone tempfile.mktemp: the file is created atomically and we
    # only need its path, so the fd is closed immediately. This matches
    # the mkstemp + os.close pattern the Celery worker uses.
    ext_in = Path(source_key).suffix or ".mp4"
    fd_in, tmp_source = tempfile.mkstemp(suffix=ext_in, dir="/tmp")
    os.close(fd_in)
    logger.info(f"Downloading s3://{S3_BUCKET_IN}/{source_key}")
    s3.download_file(S3_BUCKET_IN, source_key, tmp_source)

    # Prepare output temp file (same atomic-creation pattern).
    ext_out = Path(output_key).suffix or ".mp4"
    fd_out, tmp_output = tempfile.mkstemp(suffix=ext_out, dir="/tmp")
    os.close(fd_out)

    try:
        # Import ffmpeg module (bundled in container)
        from core.ffmpeg.transcode import TranscodeConfig, transcode

        if preset:
            config = TranscodeConfig(
                input_path=tmp_source,
                output_path=tmp_output,
                video_codec=preset.get("video_codec", "libx264"),
                video_bitrate=preset.get("video_bitrate"),
                video_crf=preset.get("video_crf"),
                video_preset=preset.get("video_preset"),
                resolution=preset.get("resolution"),
                framerate=preset.get("framerate"),
                audio_codec=preset.get("audio_codec", "aac"),
                audio_bitrate=preset.get("audio_bitrate"),
                audio_channels=preset.get("audio_channels"),
                audio_samplerate=preset.get("audio_samplerate"),
                container=preset.get("container", "mp4"),
                extra_args=preset.get("extra_args", []),
                trim_start=trim_start,
                trim_end=trim_end,
            )
        else:
            # No preset: stream-copy (trim-only) job.
            config = TranscodeConfig(
                input_path=tmp_source,
                output_path=tmp_output,
                video_codec="copy",
                audio_codec="copy",
                trim_start=trim_start,
                trim_end=trim_end,
            )

        success = transcode(config, duration=duration)

        if not success:
            raise RuntimeError("Transcode returned False")

        # Upload result to S3
        logger.info(f"Uploading s3://{S3_BUCKET_OUT}/{output_key}")
        s3.upload_file(tmp_output, S3_BUCKET_OUT, output_key)

        result = {"status": "completed", "job_id": job_id, "output_key": output_key}

        # Callback to API (best-effort; _callback never raises).
        _callback(callback_url, job_id, api_key, {"status": "completed"})

        return result

    except Exception as e:
        logger.exception(f"Job {job_id} failed: {e}")

        _callback(callback_url, job_id, api_key, {
            "status": "failed",
            "error": str(e),
        })

        return {"status": "failed", "job_id": job_id, "error": str(e)}

    finally:
        # Always remove temp files to keep /tmp from filling across
        # warm Lambda invocations.
        for f in [tmp_source, tmp_output]:
            try:
                os.unlink(f)
            except OSError:
                pass
|
||||
|
||||
def _callback(callback_url, job_id, api_key, payload):
|
||||
"""Call back to API with job result."""
|
||||
if not callback_url:
|
||||
return
|
||||
try:
|
||||
url = f"{callback_url}/jobs/{job_id}/callback"
|
||||
headers = {}
|
||||
if api_key:
|
||||
headers["X-API-Key"] = api_key
|
||||
resp = requests.post(url, json=payload, headers=headers, timeout=10)
|
||||
logger.info(f"Callback response: {resp.status_code}")
|
||||
except Exception as e:
|
||||
logger.warning(f"Callback failed: {e}")
|
||||
@@ -8,21 +8,19 @@ from typing import Any, Dict, Optional
|
||||
|
||||
from celery import shared_task
|
||||
|
||||
from grpc.server import update_job_progress
|
||||
from worker.executor import get_executor
|
||||
from core.storage import BUCKET_IN, BUCKET_OUT, download_to_temp, upload_file
|
||||
from rpc.server import update_job_progress
|
||||
from task.executor import get_executor
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Media paths from environment
|
||||
MEDIA_ROOT = os.environ.get("MEDIA_ROOT", "/app/media")
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=3, default_retry_delay=60)
|
||||
@shared_task(bind=True, queue="transcode", max_retries=3, default_retry_delay=60)
|
||||
def run_transcode_job(
|
||||
self,
|
||||
job_id: str,
|
||||
source_path: str,
|
||||
output_path: str,
|
||||
source_key: str,
|
||||
output_key: str,
|
||||
preset: Optional[Dict[str, Any]] = None,
|
||||
trim_start: Optional[float] = None,
|
||||
trim_end: Optional[float] = None,
|
||||
@@ -31,25 +29,25 @@ def run_transcode_job(
|
||||
"""
|
||||
Celery task to run a transcode/trim job.
|
||||
|
||||
Args:
|
||||
job_id: Unique job identifier
|
||||
source_path: Path to source file
|
||||
output_path: Path for output file
|
||||
preset: Transcode preset dict (optional)
|
||||
trim_start: Trim start time in seconds (optional)
|
||||
trim_end: Trim end time in seconds (optional)
|
||||
duration: Source duration for progress calculation
|
||||
|
||||
Returns:
|
||||
Result dict with status and output_path
|
||||
Downloads source from S3, runs FFmpeg, uploads result to S3.
|
||||
"""
|
||||
logger.info(f"Starting job {job_id}: {source_path} -> {output_path}")
|
||||
logger.info(f"Starting job {job_id}: {source_key} -> {output_key}")
|
||||
|
||||
# Update status to processing
|
||||
update_job_progress(job_id, progress=0, status="processing")
|
||||
|
||||
# Download source from S3 to temp file
|
||||
logger.info(f"Downloading {source_key} from {BUCKET_IN}")
|
||||
tmp_source = download_to_temp(BUCKET_IN, source_key)
|
||||
|
||||
# Create temp output path with same extension
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
ext = Path(output_key).suffix or ".mp4"
|
||||
fd, tmp_output = tempfile.mkstemp(suffix=ext)
|
||||
os.close(fd)
|
||||
|
||||
def progress_callback(percent: int, details: Dict[str, Any]) -> None:
|
||||
"""Update gRPC progress state."""
|
||||
update_job_progress(
|
||||
job_id,
|
||||
progress=percent,
|
||||
@@ -61,8 +59,8 @@ def run_transcode_job(
|
||||
executor = get_executor()
|
||||
success = executor.run(
|
||||
job_id=job_id,
|
||||
source_path=source_path,
|
||||
output_path=output_path,
|
||||
source_path=tmp_source,
|
||||
output_path=tmp_output,
|
||||
preset=preset,
|
||||
trim_start=trim_start,
|
||||
trim_end=trim_end,
|
||||
@@ -71,12 +69,16 @@ def run_transcode_job(
|
||||
)
|
||||
|
||||
if success:
|
||||
# Upload result to S3
|
||||
logger.info(f"Uploading {output_key} to {BUCKET_OUT}")
|
||||
upload_file(tmp_output, BUCKET_OUT, output_key)
|
||||
|
||||
logger.info(f"Job {job_id} completed successfully")
|
||||
update_job_progress(job_id, progress=100, status="completed")
|
||||
return {
|
||||
"status": "completed",
|
||||
"job_id": job_id,
|
||||
"output_path": output_path,
|
||||
"output_key": output_key,
|
||||
}
|
||||
else:
|
||||
raise RuntimeError("Executor returned False")
|
||||
@@ -85,7 +87,6 @@ def run_transcode_job(
|
||||
logger.exception(f"Job {job_id} failed: {e}")
|
||||
update_job_progress(job_id, progress=0, status="failed", error=str(e))
|
||||
|
||||
# Retry on transient errors
|
||||
if self.request.retries < self.max_retries:
|
||||
raise self.retry(exc=e)
|
||||
|
||||
@@ -94,3 +95,11 @@ def run_transcode_job(
|
||||
"job_id": job_id,
|
||||
"error": str(e),
|
||||
}
|
||||
|
||||
finally:
|
||||
# Clean up temp files
|
||||
for f in [tmp_source, tmp_output]:
|
||||
try:
|
||||
os.unlink(f)
|
||||
except OSError:
|
||||
pass
|
||||
@@ -5,7 +5,8 @@
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
|
||||
font-family:
|
||||
-apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
|
||||
background: #1a1a1a;
|
||||
color: #e0e0e0;
|
||||
}
|
||||
@@ -46,16 +47,91 @@ body {
|
||||
background: #202020;
|
||||
border-right: 1px solid #333;
|
||||
overflow-y: auto;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.sidebar h2 {
|
||||
.sidebar-section {
|
||||
border-bottom: 1px solid #333;
|
||||
}
|
||||
|
||||
.sidebar-section:first-child {
|
||||
flex: 1;
|
||||
min-height: 0;
|
||||
overflow-y: auto;
|
||||
}
|
||||
|
||||
.sidebar-count {
|
||||
font-size: 0.7rem;
|
||||
background: #333;
|
||||
color: #888;
|
||||
padding: 0.125rem 0.375rem;
|
||||
border-radius: 8px;
|
||||
}
|
||||
|
||||
.sidebar-list {
|
||||
max-height: 200px;
|
||||
overflow-y: auto;
|
||||
}
|
||||
|
||||
.sidebar-empty {
|
||||
padding: 0.5rem 1rem;
|
||||
font-size: 0.8rem;
|
||||
color: #555;
|
||||
}
|
||||
|
||||
.output-item {
|
||||
display: block;
|
||||
padding: 0.5rem 1rem;
|
||||
font-size: 0.8rem;
|
||||
color: #10b981;
|
||||
text-decoration: none;
|
||||
border-bottom: 1px solid #2a2a2a;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.output-item:hover {
|
||||
background: #2a2a2a;
|
||||
}
|
||||
|
||||
.sidebar-header {
|
||||
padding: 1rem;
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
gap: 0.5rem;
|
||||
}
|
||||
|
||||
.sidebar-header h2 {
|
||||
font-size: 0.875rem;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.05em;
|
||||
color: #888;
|
||||
}
|
||||
|
||||
.scan-button {
|
||||
padding: 0.375rem 0.75rem;
|
||||
font-size: 0.75rem;
|
||||
background: #3b82f6;
|
||||
color: white;
|
||||
border: none;
|
||||
border-radius: 4px;
|
||||
cursor: pointer;
|
||||
transition: background 0.2s;
|
||||
}
|
||||
|
||||
.scan-button:hover:not(:disabled) {
|
||||
background: #2563eb;
|
||||
}
|
||||
|
||||
.scan-button:disabled {
|
||||
background: #4b5563;
|
||||
cursor: not-allowed;
|
||||
opacity: 0.6;
|
||||
}
|
||||
|
||||
.asset-list {
|
||||
list-style: none;
|
||||
}
|
||||
@@ -148,41 +224,221 @@ body {
|
||||
}
|
||||
|
||||
.timeline-container {
|
||||
height: 120px;
|
||||
background: #252525;
|
||||
border-top: 1px solid #333;
|
||||
padding: 0.75rem 1rem;
|
||||
}
|
||||
|
||||
.timeline-placeholder {
|
||||
/* Timeline component */
|
||||
|
||||
.timeline {
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
.timeline-times {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
height: 100%;
|
||||
color: #666;
|
||||
justify-content: space-between;
|
||||
font-size: 0.75rem;
|
||||
color: #aaa;
|
||||
margin-bottom: 0.5rem;
|
||||
font-variant-numeric: tabular-nums;
|
||||
}
|
||||
|
||||
.info {
|
||||
padding: 1rem;
|
||||
.timeline-track {
|
||||
position: relative;
|
||||
height: 40px;
|
||||
background: #333;
|
||||
border-radius: 4px;
|
||||
cursor: pointer;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.timeline-dim {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
height: 100%;
|
||||
background: rgba(0, 0, 0, 0.5);
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
.timeline-selection {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
height: 100%;
|
||||
background: rgba(59, 130, 246, 0.15);
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
.timeline-playhead {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
width: 2px;
|
||||
height: 100%;
|
||||
background: #fff;
|
||||
pointer-events: none;
|
||||
transform: translateX(-1px);
|
||||
z-index: 2;
|
||||
}
|
||||
|
||||
.timeline-handle {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
width: 12px;
|
||||
height: 100%;
|
||||
cursor: ew-resize;
|
||||
transform: translateX(-6px);
|
||||
z-index: 3;
|
||||
border-radius: 2px;
|
||||
transition: background 0.1s;
|
||||
}
|
||||
|
||||
.timeline-handle::after {
|
||||
content: "";
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 5px;
|
||||
width: 2px;
|
||||
height: 100%;
|
||||
background: #3b82f6;
|
||||
}
|
||||
|
||||
.timeline-handle:hover,
|
||||
.timeline-handle.dragging {
|
||||
background: rgba(59, 130, 246, 0.3);
|
||||
}
|
||||
|
||||
.timeline-handle.dragging {
|
||||
cursor: grabbing;
|
||||
}
|
||||
|
||||
.timeline-duration {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
font-size: 0.625rem;
|
||||
color: #666;
|
||||
margin-top: 0.25rem;
|
||||
}
|
||||
|
||||
/* Job panel */
|
||||
|
||||
.job-panel {
|
||||
padding: 0.75rem 1rem;
|
||||
background: #202020;
|
||||
border-top: 1px solid #333;
|
||||
}
|
||||
|
||||
.info h3 {
|
||||
margin-bottom: 0.5rem;
|
||||
font-size: 1rem;
|
||||
.job-controls {
|
||||
display: flex;
|
||||
gap: 0.5rem;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.info dl {
|
||||
display: grid;
|
||||
grid-template-columns: auto 1fr;
|
||||
gap: 0.25rem 1rem;
|
||||
font-size: 0.875rem;
|
||||
}
|
||||
|
||||
.info dt {
|
||||
color: #888;
|
||||
}
|
||||
|
||||
.info dd {
|
||||
.preset-select {
|
||||
flex: 1;
|
||||
padding: 0.375rem 0.5rem;
|
||||
font-size: 0.8rem;
|
||||
background: #333;
|
||||
color: #e0e0e0;
|
||||
border: 1px solid #444;
|
||||
border-radius: 4px;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.preset-select:focus {
|
||||
outline: none;
|
||||
border-color: #3b82f6;
|
||||
}
|
||||
|
||||
.enqueue-button {
|
||||
padding: 0.375rem 1rem;
|
||||
font-size: 0.8rem;
|
||||
background: #10b981;
|
||||
color: #000;
|
||||
border: none;
|
||||
border-radius: 4px;
|
||||
cursor: pointer;
|
||||
font-weight: 500;
|
||||
white-space: nowrap;
|
||||
transition: background 0.2s;
|
||||
}
|
||||
|
||||
.enqueue-button:hover:not(:disabled) {
|
||||
background: #059669;
|
||||
}
|
||||
|
||||
.enqueue-button:disabled {
|
||||
background: #4b5563;
|
||||
color: #888;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
/* Job items */
|
||||
|
||||
.job-item {
|
||||
padding: 0.5rem 1rem;
|
||||
border-bottom: 1px solid #2a2a2a;
|
||||
font-size: 0.8rem;
|
||||
}
|
||||
|
||||
.job-item-header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.job-filename {
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
color: #ccc;
|
||||
}
|
||||
|
||||
.job-status {
|
||||
font-size: 0.7rem;
|
||||
padding: 0.125rem 0.375rem;
|
||||
border-radius: 3px;
|
||||
text-transform: uppercase;
|
||||
font-weight: 500;
|
||||
flex-shrink: 0;
|
||||
margin-left: 0.5rem;
|
||||
}
|
||||
|
||||
.job-status.pending {
|
||||
background: #f59e0b;
|
||||
color: #000;
|
||||
}
|
||||
|
||||
.job-status.processing {
|
||||
background: #3b82f6;
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
.job-status.completed {
|
||||
background: #10b981;
|
||||
color: #000;
|
||||
}
|
||||
|
||||
.job-status.failed {
|
||||
background: #ef4444;
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
.job-status.cancelled {
|
||||
background: #6b7280;
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
.job-progress-bar {
|
||||
height: 4px;
|
||||
background: #444;
|
||||
border-radius: 2px;
|
||||
margin-top: 0.375rem;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.job-progress-fill {
|
||||
height: 100%;
|
||||
background: #3b82f6;
|
||||
border-radius: 2px;
|
||||
transition: width 0.3s;
|
||||
}
|
||||
|
||||
@@ -1,14 +1,25 @@
|
||||
import { useState, useEffect } from 'react'
|
||||
import { getAssets, getSystemStatus } from './api'
|
||||
import type { MediaAsset, SystemStatus } from './types'
|
||||
import './App.css'
|
||||
import { useState, useEffect, useRef, useCallback } from "react";
|
||||
import { getAssets, getJobs, getSystemStatus, scanMediaFolder } from "./api";
|
||||
import type { MediaAsset, TranscodeJob, SystemStatus } from "./types";
|
||||
import Timeline from "./Timeline";
|
||||
import JobPanel from "./JobPanel";
|
||||
import "./App.css";
|
||||
|
||||
function App() {
|
||||
const [assets, setAssets] = useState<MediaAsset[]>([])
|
||||
const [status, setStatus] = useState<SystemStatus | null>(null)
|
||||
const [selectedAsset, setSelectedAsset] = useState<MediaAsset | null>(null)
|
||||
const [loading, setLoading] = useState(true)
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const [assets, setAssets] = useState<MediaAsset[]>([]);
|
||||
const [jobs, setJobs] = useState<TranscodeJob[]>([]);
|
||||
const [status, setStatus] = useState<SystemStatus | null>(null);
|
||||
const [selectedAsset, setSelectedAsset] = useState<MediaAsset | null>(null);
|
||||
const [loading, setLoading] = useState(true);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [scanning, setScanning] = useState(false);
|
||||
|
||||
// Video sync state
|
||||
const videoRef = useRef<HTMLVideoElement>(null);
|
||||
const [currentTime, setCurrentTime] = useState(0);
|
||||
const [duration, setDuration] = useState(0);
|
||||
const [trimStart, setTrimStart] = useState(0);
|
||||
const [trimEnd, setTrimEnd] = useState(0);
|
||||
|
||||
useEffect(() => {
|
||||
async function load() {
|
||||
@@ -16,25 +27,99 @@ function App() {
|
||||
const [assetsData, statusData] = await Promise.all([
|
||||
getAssets(),
|
||||
getSystemStatus(),
|
||||
])
|
||||
setAssets(assetsData)
|
||||
setStatus(statusData)
|
||||
]);
|
||||
setAssets(
|
||||
assetsData.sort((a, b) => a.filename.localeCompare(b.filename)),
|
||||
);
|
||||
setStatus(statusData);
|
||||
} catch (e) {
|
||||
setError(e instanceof Error ? e.message : 'Failed to load')
|
||||
setError(e instanceof Error ? e.message : "Failed to load");
|
||||
} finally {
|
||||
setLoading(false)
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
load()
|
||||
}, [])
|
||||
load();
|
||||
}, []);
|
||||
|
||||
if (loading) {
|
||||
return <div className="loading">Loading...</div>
|
||||
// Poll jobs
|
||||
useEffect(() => {
|
||||
let active = true;
|
||||
const fetchJobs = () => {
|
||||
getJobs()
|
||||
.then((data) => {
|
||||
if (active) setJobs(data);
|
||||
})
|
||||
.catch(console.error);
|
||||
};
|
||||
fetchJobs();
|
||||
const interval = setInterval(fetchJobs, 3000);
|
||||
return () => {
|
||||
active = false;
|
||||
clearInterval(interval);
|
||||
};
|
||||
}, []);
|
||||
|
||||
// Reset trim state when asset changes
|
||||
useEffect(() => {
|
||||
setTrimStart(0);
|
||||
setTrimEnd(0);
|
||||
setCurrentTime(0);
|
||||
setDuration(0);
|
||||
}, [selectedAsset?.id]);
|
||||
|
||||
const handleTimeUpdate = useCallback(() => {
|
||||
if (videoRef.current) setCurrentTime(videoRef.current.currentTime);
|
||||
}, []);
|
||||
|
||||
const handleLoadedMetadata = useCallback(() => {
|
||||
if (videoRef.current) {
|
||||
const dur = videoRef.current.duration;
|
||||
setDuration(dur);
|
||||
setTrimEnd(dur);
|
||||
}
|
||||
}, []);
|
||||
|
||||
const handleSeek = useCallback((time: number) => {
|
||||
if (videoRef.current) {
|
||||
videoRef.current.currentTime = time;
|
||||
setCurrentTime(time);
|
||||
}
|
||||
}, []);
|
||||
|
||||
const handleTrimChange = useCallback((start: number, end: number) => {
|
||||
setTrimStart(start);
|
||||
setTrimEnd(end);
|
||||
}, []);
|
||||
|
||||
async function handleScan() {
|
||||
setScanning(true);
|
||||
setError(null);
|
||||
try {
|
||||
const result = await scanMediaFolder();
|
||||
alert(
|
||||
`Scan complete!\nFound: ${result.found}\nRegistered: ${result.registered}\nSkipped: ${result.skipped}`,
|
||||
);
|
||||
const assetsData = await getAssets();
|
||||
setAssets(
|
||||
assetsData.sort((a, b) => a.filename.localeCompare(b.filename)),
|
||||
);
|
||||
} catch (e) {
|
||||
setError(e instanceof Error ? e.message : "Scan failed");
|
||||
} finally {
|
||||
setScanning(false);
|
||||
}
|
||||
}
|
||||
|
||||
if (error) {
|
||||
return <div className="error">Error: {error}</div>
|
||||
}
|
||||
const refreshJobs = async () => {
|
||||
const data = await getJobs();
|
||||
setJobs(data);
|
||||
};
|
||||
|
||||
const assetJobs = jobs.filter((j) => j.source_asset_id === selectedAsset?.id);
|
||||
const completedJobs = jobs.filter((j) => j.status === "completed");
|
||||
|
||||
if (loading) return <div className="loading">Loading...</div>;
|
||||
if (error) return <div className="error">Error: {error}</div>;
|
||||
|
||||
return (
|
||||
<div className="app">
|
||||
@@ -49,21 +134,88 @@ function App() {
|
||||
|
||||
<div className="layout">
|
||||
<aside className="sidebar">
|
||||
<div className="sidebar-section">
|
||||
<div className="sidebar-header">
|
||||
<h2>Assets</h2>
|
||||
<button
|
||||
onClick={handleScan}
|
||||
disabled={scanning}
|
||||
className="scan-button"
|
||||
>
|
||||
{scanning ? "Scanning..." : "Scan Folder"}
|
||||
</button>
|
||||
</div>
|
||||
<ul className="asset-list">
|
||||
{assets.map((asset) => (
|
||||
<li
|
||||
key={asset.id}
|
||||
className={selectedAsset?.id === asset.id ? 'selected' : ''}
|
||||
className={selectedAsset?.id === asset.id ? "selected" : ""}
|
||||
onClick={() => setSelectedAsset(asset)}
|
||||
title={asset.filename}
|
||||
>
|
||||
<span className="filename">{asset.filename}</span>
|
||||
<span className={`status-badge ${asset.status}`}>
|
||||
{asset.status}
|
||||
</span>
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div className="sidebar-section">
|
||||
<div className="sidebar-header">
|
||||
<h2>Jobs</h2>
|
||||
<span className="sidebar-count">{jobs.length}</span>
|
||||
</div>
|
||||
<div className="sidebar-list">
|
||||
{jobs.length === 0 ? (
|
||||
<div className="sidebar-empty">No jobs</div>
|
||||
) : (
|
||||
jobs.map((job) => (
|
||||
<div key={job.id} className="job-item">
|
||||
<div className="job-item-header">
|
||||
<span className="job-filename">
|
||||
{job.output_filename}
|
||||
</span>
|
||||
<span className={`job-status ${job.status}`}>
|
||||
{job.status}
|
||||
</span>
|
||||
</div>
|
||||
{job.status === "processing" && (
|
||||
<div className="job-progress-bar">
|
||||
<div
|
||||
className="job-progress-fill"
|
||||
style={{ width: `${job.progress}%` }}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="sidebar-section">
|
||||
<div className="sidebar-header">
|
||||
<h2>Output</h2>
|
||||
<span className="sidebar-count">{completedJobs.length}</span>
|
||||
</div>
|
||||
<div className="sidebar-list">
|
||||
{completedJobs.length === 0 ? (
|
||||
<div className="sidebar-empty">No output files</div>
|
||||
) : (
|
||||
completedJobs.map((job) => (
|
||||
<a
|
||||
key={job.id}
|
||||
className="output-item"
|
||||
href={`/media/out/${job.output_filename}`}
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
title={job.output_filename}
|
||||
>
|
||||
<span className="filename">{job.output_filename}</span>
|
||||
</a>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</aside>
|
||||
|
||||
<main className="main">
|
||||
@@ -71,29 +223,29 @@ function App() {
|
||||
<div className="editor">
|
||||
<div className="video-container">
|
||||
<video
|
||||
ref={videoRef}
|
||||
controls
|
||||
src={`/media/${selectedAsset.file_path}`}
|
||||
src={`/media/in/${selectedAsset.file_path}`}
|
||||
onTimeUpdate={handleTimeUpdate}
|
||||
onLoadedMetadata={handleLoadedMetadata}
|
||||
/>
|
||||
</div>
|
||||
<div className="timeline-container">
|
||||
{/* Timeline component will go here */}
|
||||
<div className="timeline-placeholder">
|
||||
Timeline: {selectedAsset.duration?.toFixed(1)}s
|
||||
</div>
|
||||
</div>
|
||||
<div className="info">
|
||||
<h3>{selectedAsset.filename}</h3>
|
||||
<dl>
|
||||
<dt>Duration</dt>
|
||||
<dd>{selectedAsset.duration?.toFixed(2)}s</dd>
|
||||
<dt>Resolution</dt>
|
||||
<dd>{selectedAsset.width}x{selectedAsset.height}</dd>
|
||||
<dt>Video</dt>
|
||||
<dd>{selectedAsset.video_codec}</dd>
|
||||
<dt>Audio</dt>
|
||||
<dd>{selectedAsset.audio_codec}</dd>
|
||||
</dl>
|
||||
<Timeline
|
||||
duration={duration}
|
||||
currentTime={currentTime}
|
||||
trimStart={trimStart}
|
||||
trimEnd={trimEnd}
|
||||
onTrimChange={handleTrimChange}
|
||||
onSeek={handleSeek}
|
||||
/>
|
||||
</div>
|
||||
<JobPanel
|
||||
asset={selectedAsset}
|
||||
trimStart={trimStart}
|
||||
trimEnd={trimEnd}
|
||||
onJobCreated={refreshJobs}
|
||||
/>
|
||||
</div>
|
||||
) : (
|
||||
<div className="empty">Select an asset to begin</div>
|
||||
@@ -101,7 +253,7 @@ function App() {
|
||||
</main>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
export default App
|
||||
export default App;
|
||||
|
||||
79
ui/timeline/src/JobPanel.tsx
Normal file
79
ui/timeline/src/JobPanel.tsx
Normal file
@@ -0,0 +1,79 @@
|
||||
import { useState, useEffect } from "react";
|
||||
import { getPresets, createJob } from "./api";
|
||||
import type { MediaAsset, TranscodePreset } from "./types";
|
||||
|
||||
/** Props for the job-creation panel (preset selector + enqueue button). */
interface JobPanelProps {
  // Asset the new transcode/trim job will read from.
  asset: MediaAsset;
  // Trim in-point in seconds (0 = from the start of the asset).
  trimStart: number;
  // Trim out-point in seconds.
  trimEnd: number;
  // Invoked after a job is successfully created so the parent can refresh its job list.
  onJobCreated: () => void;
}
|
||||
|
||||
export default function JobPanel({
|
||||
asset,
|
||||
trimStart,
|
||||
trimEnd,
|
||||
onJobCreated,
|
||||
}: JobPanelProps) {
|
||||
const [presets, setPresets] = useState<TranscodePreset[]>([]);
|
||||
const [selectedPresetId, setSelectedPresetId] = useState<string>("");
|
||||
const [submitting, setSubmitting] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
getPresets().then(setPresets).catch(console.error);
|
||||
}, []);
|
||||
|
||||
const hasTrim =
|
||||
trimStart > 0 || (asset.duration != null && trimEnd < asset.duration);
|
||||
const hasPreset = selectedPresetId !== "";
|
||||
const canSubmit = hasTrim || hasPreset;
|
||||
|
||||
const buttonLabel = hasPreset
|
||||
? "Transcode"
|
||||
: hasTrim
|
||||
? "Trim (Copy)"
|
||||
: "Select trim or preset";
|
||||
|
||||
async function handleSubmit() {
|
||||
setSubmitting(true);
|
||||
try {
|
||||
await createJob({
|
||||
source_asset_id: asset.id,
|
||||
preset_id: selectedPresetId || null,
|
||||
trim_start: hasTrim ? trimStart : null,
|
||||
trim_end: hasTrim ? trimEnd : null,
|
||||
});
|
||||
onJobCreated();
|
||||
} catch (e) {
|
||||
alert(e instanceof Error ? e.message : "Failed to create job");
|
||||
} finally {
|
||||
setSubmitting(false);
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="job-panel">
|
||||
<div className="job-controls">
|
||||
<select
|
||||
value={selectedPresetId}
|
||||
onChange={(e) => setSelectedPresetId(e.target.value)}
|
||||
className="preset-select"
|
||||
>
|
||||
<option value="">No preset (trim only)</option>
|
||||
{presets.map((p) => (
|
||||
<option key={p.id} value={p.id}>
|
||||
{p.name}
|
||||
</option>
|
||||
))}
|
||||
</select>
|
||||
<button
|
||||
onClick={handleSubmit}
|
||||
disabled={!canSubmit || submitting}
|
||||
className="enqueue-button"
|
||||
>
|
||||
{submitting ? "Submitting..." : buttonLabel}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
121
ui/timeline/src/Timeline.tsx
Normal file
121
ui/timeline/src/Timeline.tsx
Normal file
@@ -0,0 +1,121 @@
|
||||
import { useRef, useCallback, useState, useEffect } from "react";
|
||||
|
||||
/** Props for the trim/seek timeline scrubber (fully controlled component). */
interface TimelineProps {
  // Total media duration in seconds; when <= 0 track interaction maps to time 0.
  duration: number;
  // Current playback position in seconds (drives the playhead marker).
  currentTime: number;
  // Trim in-point in seconds.
  trimStart: number;
  // Trim out-point in seconds.
  trimEnd: number;
  // Reports the new (start, end) while an in/out handle is dragged.
  onTrimChange: (start: number, end: number) => void;
  // Reports the target time when the user clicks the track.
  onSeek: (time: number) => void;
}
|
||||
|
||||
function formatTime(seconds: number): string {
|
||||
const m = Math.floor(seconds / 60);
|
||||
const s = Math.floor(seconds % 60);
|
||||
const ms = Math.floor((seconds % 1) * 10);
|
||||
return `${m}:${s.toString().padStart(2, "0")}.${ms}`;
|
||||
}
|
||||
|
||||
/**
 * Interactive trim timeline: a clickable track with draggable in/out handles,
 * a playhead marker, and in/selection/out time readouts.
 *
 * Fully controlled — trim bounds and playback position come from props; user
 * interaction is reported via `onTrimChange` (handle drags) and `onSeek`
 * (track clicks). Dragging is tracked with document-level mouse listeners so
 * the handle keeps following the cursor outside the track.
 */
export default function Timeline({
  duration,
  currentTime,
  trimStart,
  trimEnd,
  onTrimChange,
  onSeek,
}: TimelineProps) {
  const trackRef = useRef<HTMLDivElement>(null);
  // Which handle is being dragged, or null when idle.
  const [dragging, setDragging] = useState<"in" | "out" | null>(null);

  // Map a time to a percentage of track width; guards against duration 0.
  const timeToPercent = (t: number) => (duration > 0 ? (t / duration) * 100 : 0);

  // Map a viewport X coordinate to a time, clamped to [0, duration].
  const positionToTime = useCallback(
    (clientX: number) => {
      const track = trackRef.current;
      if (!track || duration <= 0) return 0;
      const rect = track.getBoundingClientRect();
      const ratio = Math.max(0, Math.min(1, (clientX - rect.left) / rect.width));
      return ratio * duration;
    },
    [duration],
  );

  // Clicking the track seeks (ignored mid-drag).
  // NOTE(review): the browser fires click after mouseup; whether a drag
  // release also seeks depends on React having re-rendered first — confirm.
  const handleTrackClick = (e: React.MouseEvent) => {
    if (dragging) return;
    onSeek(positionToTime(e.clientX));
  };

  // stopPropagation keeps the handle press from also triggering a track click-seek.
  const handleMouseDown = (handle: "in" | "out") => (e: React.MouseEvent) => {
    e.stopPropagation();
    setDragging(handle);
  };

  // While dragging, follow the mouse at document level and push trim updates;
  // listeners are detached on mouseup (dragging -> null) or unmount.
  useEffect(() => {
    if (!dragging) return;

    // Minimum in/out separation in seconds, so the selection can't collapse.
    const minGap = 0.1;

    const handleMove = (e: MouseEvent) => {
      const time = positionToTime(e.clientX);
      if (dragging === "in") {
        onTrimChange(Math.min(time, trimEnd - minGap), trimEnd);
      } else {
        onTrimChange(trimStart, Math.max(time, trimStart + minGap));
      }
    };

    const handleUp = () => setDragging(null);

    document.addEventListener("mousemove", handleMove);
    document.addEventListener("mouseup", handleUp);
    return () => {
      document.removeEventListener("mousemove", handleMove);
      document.removeEventListener("mouseup", handleUp);
    };
  }, [dragging, trimStart, trimEnd, positionToTime, onTrimChange]);

  const inPct = timeToPercent(trimStart);
  const outPct = timeToPercent(trimEnd);
  const playheadPct = timeToPercent(currentTime);
  const selectionDuration = trimEnd - trimStart;

  return (
    <div className="timeline">
      <div className="timeline-times">
        <span>In: {formatTime(trimStart)}</span>
        <span>Selection: {formatTime(selectionDuration)}</span>
        <span>Out: {formatTime(trimEnd)}</span>
      </div>
      <div className="timeline-track" ref={trackRef} onClick={handleTrackClick}>
        {/* Dimmed regions */}
        <div className="timeline-dim" style={{ left: 0, width: `${inPct}%` }} />
        <div className="timeline-dim" style={{ left: `${outPct}%`, width: `${100 - outPct}%` }} />

        {/* Selection highlight */}
        <div
          className="timeline-selection"
          style={{ left: `${inPct}%`, width: `${outPct - inPct}%` }}
        />

        {/* Playhead */}
        <div className="timeline-playhead" style={{ left: `${playheadPct}%` }} />

        {/* Handles */}
        <div
          className={`timeline-handle timeline-handle-in ${dragging === "in" ? "dragging" : ""}`}
          style={{ left: `${inPct}%` }}
          onMouseDown={handleMouseDown("in")}
        />
        <div
          className={`timeline-handle timeline-handle-out ${dragging === "out" ? "dragging" : ""}`}
          style={{ left: `${outPct}%` }}
          onMouseDown={handleMouseDown("out")}
        />
      </div>
      <div className="timeline-duration">
        <span>0:00</span>
        <span>{formatTime(duration)}</span>
      </div>
    </div>
  );
}
||||
@@ -38,6 +38,17 @@ export async function getAsset(id: string): Promise<MediaAsset> {
|
||||
return request(`/assets/${id}`);
|
||||
}
|
||||
|
||||
export async function scanMediaFolder(): Promise<{
|
||||
found: number;
|
||||
registered: number;
|
||||
skipped: number;
|
||||
files: string[];
|
||||
}> {
|
||||
return request("/assets/scan", {
|
||||
method: "POST",
|
||||
});
|
||||
}
|
||||
|
||||
// Presets
|
||||
export async function getPresets(): Promise<TranscodePreset[]> {
|
||||
return request("/presets/");
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
/**
|
||||
* MPR TypeScript Types - GENERATED FILE
|
||||
* TypeScript Types - GENERATED FILE
|
||||
*
|
||||
* Do not edit directly. Modify schema/models/*.py and run:
|
||||
* python schema/generate.py --typescript
|
||||
* Do not edit directly. Regenerate using modelgen.
|
||||
*/
|
||||
|
||||
export type AssetStatus = "pending" | "ready" | "error";
|
||||
@@ -67,20 +66,20 @@ export interface TranscodeJob {
|
||||
speed: string | null;
|
||||
error_message: string | null;
|
||||
celery_task_id: string | null;
|
||||
execution_arn: string | null;
|
||||
priority: number;
|
||||
created_at: string | null;
|
||||
started_at: string | null;
|
||||
completed_at: string | null;
|
||||
}
|
||||
|
||||
// API Request/Response Types
|
||||
|
||||
export interface CreateJobRequest {
|
||||
source_asset_id: string;
|
||||
preset_id: string | null;
|
||||
trim_start: number | null;
|
||||
trim_end: number | null;
|
||||
output_filename: string | null;
|
||||
priority: number;
|
||||
}
|
||||
|
||||
export interface SystemStatus {
|
||||
@@ -88,6 +87,13 @@ export interface SystemStatus {
|
||||
version: string;
|
||||
}
|
||||
|
||||
export interface ScanResult {
|
||||
found: number;
|
||||
registered: number;
|
||||
skipped: number;
|
||||
files: string[];
|
||||
}
|
||||
|
||||
export interface WorkerStatus {
|
||||
available: boolean;
|
||||
active_jobs: number;
|
||||
|
||||
@@ -6,6 +6,7 @@ export default defineConfig({
|
||||
server: {
|
||||
host: "0.0.0.0",
|
||||
port: 5173,
|
||||
allowedHosts: process.env.VITE_ALLOWED_HOSTS?.split(",") || [],
|
||||
proxy: {
|
||||
"/api": {
|
||||
target: "http://fastapi:8702",
|
||||
|
||||
Reference in New Issue
Block a user