major refactor

This commit is contained in:
2026-03-13 01:07:02 -03:00
parent eaaf2ad60c
commit 3eeedebb15
61 changed files with 441 additions and 242 deletions

View File

@@ -71,12 +71,12 @@ docker compose logs -f
docker compose logs -f celery
# Create admin user
docker compose exec django python manage.py createsuperuser
docker compose exec django python admin/manage.py createsuperuser
```
## Code Generation
Models are defined as dataclasses in `schema/models/` and generated via `modelgen`:
Models are defined as dataclasses in `core/schema/models/` and generated via `modelgen`:
- **Django ORM** models (`--include dataclasses,enums`)
- **Pydantic** schemas (`--include dataclasses,enums`)
- **TypeScript** types (`--include dataclasses,enums,api`)
@@ -113,26 +113,29 @@ See [docs/media-storage.md](docs/media-storage.md) for full details.
```
mpr/
├── api/ # FastAPI application
│ ├── routes/ # API endpoints
│ └── schemas/ # Pydantic models (generated)
├── core/ # Core utilities
│ └── ffmpeg/ # FFmpeg wrappers
├── admin/ # Django project
│ ├── manage.py # Django management script
│ └── mpr/ # Django settings & app
│ └── media_assets/# Django app
├── core/ # Core application logic
│ ├── api/ # FastAPI + GraphQL API
│ │ └── schema/ # GraphQL types (generated)
│ ├── ffmpeg/ # FFmpeg wrappers
│ ├── rpc/ # gRPC server & client
│ │ └── protos/ # Protobuf definitions (generated)
│ ├── schema/ # Source of truth
│ │ └── models/ # Dataclass definitions
│ ├── storage/ # S3/GCP/local storage backends
│ └── task/ # Celery job execution
│ ├── executor.py # Executor abstraction
│ └── tasks.py # Celery tasks
├── ctrl/ # Docker & deployment
│ ├── docker-compose.yml
│ └── nginx.conf
├── media/
│ ├── in/ # Source media files
│ └── out/ # Transcoded output
├── rpc/ # gRPC server & client
│ └── protos/ # Protobuf definitions (generated)
├── mpr/ # Django project
│ └── media_assets/ # Django app
├── schema/ # Source of truth
│ └── models/ # Dataclass definitions
├── task/ # Celery job execution
│ ├── executor.py # Executor abstraction
│ └── tasks.py # Celery tasks
├── modelgen/ # Code generation tool
└── ui/ # Frontend
└── timeline/ # React app
```

View File

@@ -6,7 +6,9 @@ import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mpr.settings')
# Ensure project root is on sys.path
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'admin.mpr.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:

View File

@@ -11,6 +11,6 @@ import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mpr.settings')
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'admin.mpr.settings')
application = get_asgi_application()

View File

@@ -2,9 +2,9 @@ import os
from celery import Celery
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mpr.settings")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "admin.mpr.settings")
app = Celery("mpr")
app.config_from_object("django.conf:settings", namespace="CELERY")
app.autodiscover_tasks()
app.autodiscover_tasks(["task"])
app.autodiscover_tasks(["core.task"])

View File

@@ -3,5 +3,6 @@ from django.apps import AppConfig
class MediaAssetsConfig(AppConfig):
default_auto_field = "django.db.models.BigAutoField"
name = "mpr.media_assets"
name = "admin.mpr.media_assets"
label = "media_assets"
verbose_name = "Media Assets"

View File

@@ -4,10 +4,10 @@ from pathlib import Path
from django.core.management.base import BaseCommand
from mpr.media_assets.models import TranscodePreset
from admin.mpr.media_assets.models import TranscodePreset
sys.path.insert(0, str(Path(__file__).resolve().parent.parent.parent.parent.parent))
from schema.models import BUILTIN_PRESETS
sys.path.insert(0, str(Path(__file__).resolve().parent.parent.parent.parent.parent.parent))
from core.schema.models import BUILTIN_PRESETS
class Command(BaseCommand):

View File

@@ -1,8 +1,7 @@
# Generated by Django 6.0.1 on 2026-02-01 15:13
# Generated by Django 4.2.29 on 2026-03-13 04:04
import django.db.models.deletion
import uuid
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
@@ -13,47 +12,21 @@ class Migration(migrations.Migration):
]
operations = [
migrations.CreateModel(
name='TranscodePreset',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('name', models.CharField(max_length=100, unique=True)),
('description', models.TextField(blank=True, default='')),
('is_builtin', models.BooleanField(default=False)),
('container', models.CharField(default='mp4', max_length=20)),
('video_codec', models.CharField(default='libx264', max_length=50)),
('video_bitrate', models.CharField(blank=True, max_length=20, null=True)),
('video_crf', models.IntegerField(blank=True, null=True)),
('video_preset', models.CharField(blank=True, max_length=20, null=True)),
('resolution', models.CharField(blank=True, max_length=20, null=True)),
('framerate', models.FloatField(blank=True, null=True)),
('audio_codec', models.CharField(default='aac', max_length=50)),
('audio_bitrate', models.CharField(blank=True, max_length=20, null=True)),
('audio_channels', models.IntegerField(blank=True, null=True)),
('audio_samplerate', models.IntegerField(blank=True, null=True)),
('extra_args', models.JSONField(blank=True, default=list)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'ordering': ['name'],
},
),
migrations.CreateModel(
name='MediaAsset',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('filename', models.CharField(max_length=500)),
('file_path', models.CharField(max_length=1000)),
('status', models.CharField(choices=[('pending', 'Pending Probe'), ('ready', 'Ready'), ('error', 'Error')], default='pending', max_length=20)),
('error_message', models.TextField(blank=True, null=True)),
('status', models.CharField(choices=[('pending', 'Pending'), ('ready', 'Ready'), ('error', 'Error')], default='pending', max_length=20)),
('error_message', models.TextField(blank=True, default='')),
('file_size', models.BigIntegerField(blank=True, null=True)),
('duration', models.FloatField(blank=True, null=True)),
('video_codec', models.CharField(blank=True, max_length=50, null=True)),
('audio_codec', models.CharField(blank=True, max_length=50, null=True)),
('width', models.IntegerField(blank=True, null=True)),
('height', models.IntegerField(blank=True, null=True)),
('framerate', models.FloatField(blank=True, null=True)),
('duration', models.FloatField(blank=True, default=None, null=True)),
('video_codec', models.CharField(blank=True, max_length=255, null=True)),
('audio_codec', models.CharField(blank=True, max_length=255, null=True)),
('width', models.IntegerField(blank=True, default=None, null=True)),
('height', models.IntegerField(blank=True, default=None, null=True)),
('framerate', models.FloatField(blank=True, default=None, null=True)),
('bitrate', models.BigIntegerField(blank=True, null=True)),
('properties', models.JSONField(blank=True, default=dict)),
('comments', models.TextField(blank=True, default='')),
@@ -63,36 +36,61 @@ class Migration(migrations.Migration):
],
options={
'ordering': ['-created_at'],
'indexes': [models.Index(fields=['status'], name='media_asset_status_9ea2f2_idx'), models.Index(fields=['created_at'], name='media_asset_created_368039_idx')],
},
),
migrations.CreateModel(
name='TranscodeJob',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('source_asset_id', models.UUIDField()),
('preset_id', models.UUIDField(blank=True, null=True)),
('preset_snapshot', models.JSONField(blank=True, default=dict)),
('trim_start', models.FloatField(blank=True, null=True)),
('trim_end', models.FloatField(blank=True, null=True)),
('trim_start', models.FloatField(blank=True, default=None, null=True)),
('trim_end', models.FloatField(blank=True, default=None, null=True)),
('output_filename', models.CharField(max_length=500)),
('output_path', models.CharField(blank=True, max_length=1000, null=True)),
('output_asset_id', models.UUIDField(blank=True, null=True)),
('status', models.CharField(choices=[('pending', 'Pending'), ('processing', 'Processing'), ('completed', 'Completed'), ('failed', 'Failed'), ('cancelled', 'Cancelled')], default='pending', max_length=20)),
('progress', models.FloatField(default=0.0)),
('current_frame', models.IntegerField(blank=True, null=True)),
('current_time', models.FloatField(blank=True, null=True)),
('speed', models.CharField(blank=True, max_length=20, null=True)),
('error_message', models.TextField(blank=True, null=True)),
('celery_task_id', models.CharField(blank=True, max_length=100, null=True)),
('current_frame', models.IntegerField(blank=True, default=None, null=True)),
('current_time', models.FloatField(blank=True, default=None, null=True)),
('speed', models.CharField(blank=True, max_length=255, null=True)),
('error_message', models.TextField(blank=True, default='')),
('celery_task_id', models.CharField(blank=True, max_length=255, null=True)),
('execution_arn', models.CharField(blank=True, max_length=255, null=True)),
('priority', models.IntegerField(default=0)),
('created_at', models.DateTimeField(auto_now_add=True)),
('started_at', models.DateTimeField(blank=True, null=True)),
('completed_at', models.DateTimeField(blank=True, null=True)),
('output_asset', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='source_jobs', to='media_assets.mediaasset')),
('source_asset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='transcode_jobs', to='media_assets.mediaasset')),
('preset', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='jobs', to='media_assets.transcodepreset')),
],
options={
'ordering': ['priority', 'created_at'],
'indexes': [models.Index(fields=['status', 'priority'], name='media_asset_status_e6ac18_idx'), models.Index(fields=['created_at'], name='media_asset_created_ba3a46_idx'), models.Index(fields=['celery_task_id'], name='media_asset_celery__81a88e_idx')],
'ordering': ['-created_at'],
},
),
migrations.CreateModel(
name='TranscodePreset',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('name', models.CharField(max_length=255)),
('description', models.TextField(blank=True, default='')),
('is_builtin', models.BooleanField(default=False)),
('container', models.CharField(max_length=255)),
('video_codec', models.CharField(max_length=255)),
('video_bitrate', models.CharField(blank=True, max_length=255, null=True)),
('video_crf', models.IntegerField(blank=True, default=None, null=True)),
('video_preset', models.CharField(blank=True, max_length=255, null=True)),
('resolution', models.CharField(blank=True, max_length=255, null=True)),
('framerate', models.FloatField(blank=True, default=None, null=True)),
('audio_codec', models.CharField(max_length=255)),
('audio_bitrate', models.CharField(blank=True, max_length=255, null=True)),
('audio_channels', models.IntegerField(blank=True, default=None, null=True)),
('audio_samplerate', models.IntegerField(blank=True, default=None, null=True)),
('extra_args', models.JSONField(blank=True, default=list)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'ordering': ['-created_at'],
},
),
]

View File

@@ -7,7 +7,7 @@ from pathlib import Path
import environ
BASE_DIR = Path(__file__).resolve().parent.parent
BASE_DIR = Path(__file__).resolve().parent.parent.parent
env = environ.Env(
DEBUG=(bool, False),
@@ -27,7 +27,7 @@ INSTALLED_APPS = [
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"mpr.media_assets",
"admin.mpr.media_assets",
]
MIDDLEWARE = [
@@ -40,7 +40,7 @@ MIDDLEWARE = [
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = "mpr.urls"
ROOT_URLCONF = "admin.mpr.urls"
TEMPLATES = [
{
@@ -57,7 +57,7 @@ TEMPLATES = [
},
]
WSGI_APPLICATION = "mpr.wsgi.application"
WSGI_APPLICATION = "admin.mpr.wsgi.application"
# Database
DATABASE_URL = env("DATABASE_URL", default="sqlite:///db.sqlite3")

View File

@@ -11,6 +11,6 @@ import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mpr.settings')
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'admin.mpr.settings')
application = get_wsgi_application()

View File

@@ -2,7 +2,7 @@
GraphQL API using strawberry, served via FastAPI.
Primary API for MPR — all client interactions go through GraphQL.
Uses Django ORM directly for data access.
Uses core.db for data access.
Types are generated from schema/ via modelgen; see api/schema/graphql.py.
"""
@@ -11,9 +11,10 @@ from typing import List, Optional
from uuid import UUID
import strawberry
from strawberry.schema.config import StrawberryConfig
from strawberry.types import Info
from api.schema.graphql import (
from core.api.schema.graphql import (
CreateJobInput,
DeleteResultType,
MediaAssetType,
@@ -44,22 +45,17 @@ class Query:
status: Optional[str] = None,
search: Optional[str] = None,
) -> List[MediaAssetType]:
from mpr.media_assets.models import MediaAsset
from core.db import list_assets
qs = MediaAsset.objects.all()
if status:
qs = qs.filter(status=status)
if search:
qs = qs.filter(filename__icontains=search)
return list(qs)
return list_assets(status=status, search=search)
@strawberry.field
def asset(self, info: Info, id: UUID) -> Optional[MediaAssetType]:
from mpr.media_assets.models import MediaAsset
from core.db import get_asset
try:
return MediaAsset.objects.get(id=id)
except MediaAsset.DoesNotExist:
return get_asset(id)
except Exception:
return None
@strawberry.field
@@ -69,29 +65,24 @@ class Query:
status: Optional[str] = None,
source_asset_id: Optional[UUID] = None,
) -> List[TranscodeJobType]:
from mpr.media_assets.models import TranscodeJob
from core.db import list_jobs
qs = TranscodeJob.objects.all()
if status:
qs = qs.filter(status=status)
if source_asset_id:
qs = qs.filter(source_asset_id=source_asset_id)
return list(qs)
return list_jobs(status=status, source_asset_id=source_asset_id)
@strawberry.field
def job(self, info: Info, id: UUID) -> Optional[TranscodeJobType]:
from mpr.media_assets.models import TranscodeJob
from core.db import get_job
try:
return TranscodeJob.objects.get(id=id)
except TranscodeJob.DoesNotExist:
return get_job(id)
except Exception:
return None
@strawberry.field
def presets(self, info: Info) -> List[TranscodePresetType]:
from mpr.media_assets.models import TranscodePreset
from core.db import list_presets
return list(TranscodePreset.objects.all())
return list_presets()
@strawberry.field
def system_status(self, info: Info) -> SystemStatusType:
@@ -107,10 +98,10 @@ class Query:
class Mutation:
@strawberry.mutation
def scan_media_folder(self, info: Info) -> ScanResultType:
from mpr.media_assets.models import MediaAsset
from core.db import create_asset, get_asset_filenames
objects = list_objects(BUCKET_IN, extensions=MEDIA_EXTS)
existing = set(MediaAsset.objects.values_list("filename", flat=True))
existing = get_asset_filenames()
registered = []
skipped = []
@@ -120,7 +111,7 @@ class Mutation:
skipped.append(obj["filename"])
continue
try:
MediaAsset.objects.create(
create_asset(
filename=obj["filename"],
file_path=obj["key"],
file_size=obj["size"],
@@ -140,25 +131,25 @@ class Mutation:
def create_job(self, info: Info, input: CreateJobInput) -> TranscodeJobType:
from pathlib import Path
from mpr.media_assets.models import MediaAsset, TranscodeJob, TranscodePreset
from core.db import create_job, get_asset, get_preset
try:
source = MediaAsset.objects.get(id=input.source_asset_id)
except MediaAsset.DoesNotExist:
source = get_asset(input.source_asset_id)
except Exception:
raise Exception("Source asset not found")
preset = None
preset_snapshot = {}
if input.preset_id:
try:
preset = TranscodePreset.objects.get(id=input.preset_id)
preset = get_preset(input.preset_id)
preset_snapshot = {
"name": preset.name,
"container": preset.container,
"video_codec": preset.video_codec,
"audio_codec": preset.audio_codec,
}
except TranscodePreset.DoesNotExist:
except Exception:
raise Exception("Preset not found")
if not preset and not input.trim_start and not input.trim_end:
@@ -170,7 +161,7 @@ class Mutation:
ext = preset_snapshot.get("container", "mp4") if preset else "mp4"
output_filename = f"{stem}_output.{ext}"
job = TranscodeJob.objects.create(
job = create_job(
source_asset_id=source.id,
preset_id=preset.id if preset else None,
preset_snapshot=preset_snapshot,
@@ -183,7 +174,7 @@ class Mutation:
executor_mode = os.environ.get("MPR_EXECUTOR", "local")
if executor_mode in ("lambda", "gcp"):
from task.executor import get_executor
from core.task.executor import get_executor
get_executor().run(
job_id=str(job.id),
@@ -195,7 +186,7 @@ class Mutation:
duration=source.duration,
)
else:
from task.tasks import run_transcode_job
from core.task.tasks import run_transcode_job
result = run_transcode_job.delay(
job_id=str(job.id),
@@ -213,69 +204,61 @@ class Mutation:
@strawberry.mutation
def cancel_job(self, info: Info, id: UUID) -> TranscodeJobType:
from mpr.media_assets.models import TranscodeJob
from core.db import get_job, update_job
try:
job = TranscodeJob.objects.get(id=id)
except TranscodeJob.DoesNotExist:
job = get_job(id)
except Exception:
raise Exception("Job not found")
if job.status not in ("pending", "processing"):
raise Exception(f"Cannot cancel job with status: {job.status}")
job.status = "cancelled"
job.save(update_fields=["status"])
return job
return update_job(job, status="cancelled")
@strawberry.mutation
def retry_job(self, info: Info, id: UUID) -> TranscodeJobType:
from mpr.media_assets.models import TranscodeJob
from core.db import get_job, update_job
try:
job = TranscodeJob.objects.get(id=id)
except TranscodeJob.DoesNotExist:
job = get_job(id)
except Exception:
raise Exception("Job not found")
if job.status != "failed":
raise Exception("Only failed jobs can be retried")
job.status = "pending"
job.progress = 0
job.error_message = None
job.save(update_fields=["status", "progress", "error_message"])
return job
return update_job(job, status="pending", progress=0, error_message=None)
@strawberry.mutation
def update_asset(self, info: Info, id: UUID, input: UpdateAssetInput) -> MediaAssetType:
from mpr.media_assets.models import MediaAsset
from core.db import get_asset, update_asset
try:
asset = MediaAsset.objects.get(id=id)
except MediaAsset.DoesNotExist:
asset = get_asset(id)
except Exception:
raise Exception("Asset not found")
update_fields = []
fields = {}
if input.comments is not None:
asset.comments = input.comments
update_fields.append("comments")
fields["comments"] = input.comments
if input.tags is not None:
asset.tags = input.tags
update_fields.append("tags")
fields["tags"] = input.tags
if update_fields:
asset.save(update_fields=update_fields)
if fields:
asset = update_asset(asset, **fields)
return asset
@strawberry.mutation
def delete_asset(self, info: Info, id: UUID) -> DeleteResultType:
from mpr.media_assets.models import MediaAsset
from core.db import delete_asset, get_asset
try:
asset = MediaAsset.objects.get(id=id)
asset.delete()
asset = get_asset(id)
delete_asset(asset)
return DeleteResultType(ok=True)
except MediaAsset.DoesNotExist:
except Exception:
raise Exception("Asset not found")
@@ -283,4 +266,8 @@ class Mutation:
# Schema
# ---------------------------------------------------------------------------
schema = strawberry.Schema(query=Query, mutation=Mutation)
schema = strawberry.Schema(
query=Query,
mutation=Mutation,
config=StrawberryConfig(auto_camel_case=False),
)

View File

@@ -10,10 +10,10 @@ from typing import Optional
from uuid import UUID
# Add project root to path
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
# Initialize Django before importing models
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mpr.settings")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "admin.mpr.settings")
import django
@@ -23,7 +23,7 @@ from fastapi import FastAPI, Header, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from strawberry.fastapi import GraphQLRouter
from api.graphql import schema as graphql_schema
from core.api.graphql import schema as graphql_schema
CALLBACK_API_KEY = os.environ.get("CALLBACK_API_KEY", "")
@@ -74,26 +74,25 @@ def job_callback(
from django.utils import timezone
from mpr.media_assets.models import TranscodeJob
from core.db import get_job, update_job
try:
job = TranscodeJob.objects.get(id=job_id)
except TranscodeJob.DoesNotExist:
job = get_job(job_id)
except Exception:
raise HTTPException(status_code=404, detail="Job not found")
status = payload.get("status", "failed")
job.status = status
job.progress = 100.0 if status == "completed" else job.progress
update_fields = ["status", "progress"]
fields = {
"status": status,
"progress": 100.0 if status == "completed" else job.progress,
}
if payload.get("error"):
job.error_message = payload["error"]
update_fields.append("error_message")
fields["error_message"] = payload["error"]
if status in ("completed", "failed"):
job.completed_at = timezone.now()
update_fields.append("completed_at")
fields["completed_at"] = timezone.now()
job.save(update_fields=update_fields)
update_job(job, **fields)
return {"ok": True}

19
core/db/__init__.py Normal file
View File

@@ -0,0 +1,19 @@
"""Public surface of the core.db persistence layer.

Re-exports the asset, job, and preset helpers so that API / task code can
do ``from core.db import get_job`` instead of importing Django models
directly.
"""
from .assets import (
    create_asset,
    delete_asset,
    get_asset,
    get_asset_filenames,
    list_assets,
    update_asset,
)
from .jobs import (
    create_job,
    get_job,
    list_jobs,
    update_job,
    update_job_fields,
)
from .presets import (
    get_preset,
    list_presets,
)

48
core/db/assets.py Normal file
View File

@@ -0,0 +1,48 @@
"""Database operations for MediaAsset."""
from typing import Optional
from uuid import UUID
def list_assets(status: Optional[str] = None, search: Optional[str] = None):
    """Return MediaAsset rows as a list, optionally filtered.

    status: exact match on the asset's status column.
    search: case-insensitive substring match on the filename.
    """
    from admin.mpr.media_assets.models import MediaAsset

    filters = {}
    if status:
        filters["status"] = status
    if search:
        filters["filename__icontains"] = search
    # filter() with no kwargs is equivalent to all()
    return list(MediaAsset.objects.filter(**filters))
def get_asset(id: UUID):
    """Fetch a single MediaAsset by primary key.

    Raises MediaAsset.DoesNotExist when no row matches; callers in the
    GraphQL layer catch this and translate it into a not-found result.
    """
    from admin.mpr.media_assets.models import MediaAsset

    return MediaAsset.objects.get(pk=id)
def get_asset_filenames() -> set[str]:
    """Return the set of filenames already registered (used by the
    media-folder scan to skip files that exist in the database)."""
    from admin.mpr.media_assets.models import MediaAsset

    names = MediaAsset.objects.values_list("filename", flat=True)
    return set(names)
def create_asset(*, filename: str, file_path: str, file_size: int):
    """Insert a new MediaAsset row and return it.

    All arguments are keyword-only to keep call sites explicit.
    """
    from admin.mpr.media_assets.models import MediaAsset

    fields = {
        "filename": filename,
        "file_path": file_path,
        "file_size": file_size,
    }
    return MediaAsset.objects.create(**fields)
def update_asset(asset, **fields):
    """Assign each keyword argument onto ``asset`` and persist exactly
    those columns via ``save(update_fields=...)``.

    Returns the same (now-saved) instance for chaining.
    """
    changed = []
    for name, value in fields.items():
        setattr(asset, name, value)
        changed.append(name)
    asset.save(update_fields=changed)
    return asset
def delete_asset(asset):
    """Delete the given asset row.

    Improvement: propagate ``asset.delete()``'s return value (for Django
    models, a ``(total_deleted, per_model_counts)`` tuple) so callers can
    inspect the cascade result. Existing callers that ignore the return
    value are unaffected — the interface stays backward compatible.
    """
    return asset.delete()

40
core/db/jobs.py Normal file
View File

@@ -0,0 +1,40 @@
"""Database operations for TranscodeJob."""
from typing import Optional
from uuid import UUID
def list_jobs(status: Optional[str] = None, source_asset_id: Optional[UUID] = None):
    """Return TranscodeJob rows as a list, optionally filtered by status
    and/or the source asset they transcode."""
    from admin.mpr.media_assets.models import TranscodeJob

    filters = {}
    if status:
        filters["status"] = status
    if source_asset_id:
        filters["source_asset_id"] = source_asset_id
    # filter() with no kwargs is equivalent to all()
    return list(TranscodeJob.objects.filter(**filters))
def get_job(id: UUID):
    """Fetch a single TranscodeJob by primary key.

    Raises TranscodeJob.DoesNotExist when no row matches; GraphQL callers
    catch this and report the job as not found.
    """
    from admin.mpr.media_assets.models import TranscodeJob

    return TranscodeJob.objects.get(pk=id)
def create_job(**fields):
    """Insert a new TranscodeJob row built from the given field values
    and return the saved instance."""
    from admin.mpr.media_assets.models import TranscodeJob

    job = TranscodeJob.objects.create(**fields)
    return job
def update_job(job, **fields):
    """Assign each keyword argument onto ``job`` and persist exactly
    those columns via ``save(update_fields=...)``.

    Returns the same (now-saved) instance for chaining.
    """
    changed = []
    for name, value in fields.items():
        setattr(job, name, value)
        changed.append(name)
    job.save(update_fields=changed)
    return job
def update_job_fields(job_id, **fields):
    """Bulk-update a job row by id without loading the instance.

    Uses ``QuerySet.update()``, so no model ``save()`` runs; an unknown
    ``job_id`` is silently a no-op (zero rows updated).
    """
    from admin.mpr.media_assets.models import TranscodeJob

    TranscodeJob.objects.filter(pk=job_id).update(**fields)

15
core/db/presets.py Normal file
View File

@@ -0,0 +1,15 @@
"""Database operations for TranscodePreset."""
from uuid import UUID
def list_presets():
    """Return every TranscodePreset row as a list."""
    from admin.mpr.media_assets.models import TranscodePreset

    return [preset for preset in TranscodePreset.objects.all()]
def get_preset(id: UUID):
    """Fetch a single TranscodePreset by primary key.

    Raises TranscodePreset.DoesNotExist when no row matches.
    """
    from admin.mpr.media_assets.models import TranscodePreset

    return TranscodePreset.objects.get(pk=id)

View File

@@ -59,7 +59,7 @@ class WorkerServicer(worker_pb2_grpc.WorkerServiceServicer):
# Dispatch to Celery if available
if self.celery_app:
from task.tasks import run_transcode_job
from core.task.tasks import run_transcode_job
task = run_transcode_job.delay(
job_id=job_id,
@@ -219,9 +219,8 @@ def update_job_progress(
try:
from django.utils import timezone
from mpr.media_assets.models import TranscodeJob
from core.db import update_job_fields
update_fields = ["progress", "current_frame", "current_time", "speed", "status"]
updates = {
"progress": progress,
"current_frame": current_frame,
@@ -232,16 +231,13 @@ def update_job_progress(
if error:
updates["error_message"] = error
update_fields.append("error_message")
if status == "processing":
updates["started_at"] = timezone.now()
update_fields.append("started_at")
elif status in ("completed", "failed"):
updates["completed_at"] = timezone.now()
update_fields.append("completed_at")
TranscodeJob.objects.filter(id=job_id).update(**updates)
update_job_fields(job_id, **updates)
except Exception as e:
logger.warning(f"Failed to update job {job_id} in DB: {e}")

View File

@@ -1,14 +1,14 @@
{
"schema": "schema/models",
"schema": "core/schema/models",
"targets": [
{
"target": "django",
"output": "mpr/media_assets/models.py",
"output": "admin/mpr/media_assets/models.py",
"include": ["dataclasses", "enums"]
},
{
"target": "graphene",
"output": "api/schema/graphql.py",
"output": "core/api/schema/graphql.py",
"include": ["dataclasses", "enums", "api"]
},
{
@@ -18,7 +18,7 @@
},
{
"target": "protobuf",
"output": "rpc/protos/worker.proto",
"output": "core/rpc/protos/worker.proto",
"include": ["grpc"]
}
]

10
core/storage/__init__.py Normal file
View File

@@ -0,0 +1,10 @@
"""Public surface of the storage package.

Currently re-exports the S3 backend (gcp.py and local.py are placeholders),
so ``from core.storage import ...`` keeps working if the backend changes.
"""
from .s3 import (
    BUCKET_IN,
    BUCKET_OUT,
    download_file,
    download_to_temp,
    get_presigned_url,
    get_s3_client,
    list_objects,
    upload_file,
)

1
core/storage/gcp.py Normal file
View File

@@ -0,0 +1 @@
"""GCP Cloud Storage backend (placeholder)."""

1
core/storage/local.py Normal file
View File

@@ -0,0 +1 @@
"""Local filesystem storage backend (placeholder)."""

View File

@@ -156,8 +156,8 @@ class LambdaExecutor(Executor):
# Store execution ARN on the job
execution_arn = response["executionArn"]
try:
from mpr.media_assets.models import TranscodeJob
TranscodeJob.objects.filter(id=job_id).update(execution_arn=execution_arn)
from core.db import update_job_fields
update_job_fields(job_id, execution_arn=execution_arn)
except Exception:
pass
@@ -228,9 +228,9 @@ class GCPExecutor(Executor):
execution_name = operation.metadata.name
try:
from mpr.media_assets.models import TranscodeJob
from core.db import update_job_fields
TranscodeJob.objects.filter(id=job_id).update(execution_arn=execution_name)
update_job_fields(job_id, execution_arn=execution_name)
except Exception:
pass

View File

@@ -9,8 +9,8 @@ from typing import Any, Dict, Optional
from celery import shared_task
from core.storage import BUCKET_IN, BUCKET_OUT, download_to_temp, upload_file
from rpc.server import update_job_progress
from task.executor import get_executor
from core.rpc.server import update_job_progress
from core.task.executor import get_executor
logger = logging.getLogger(__name__)

View File

@@ -16,7 +16,7 @@ REDIS_URL=redis://redis:6379/0
# Django
DEBUG=1
DJANGO_SETTINGS_MODULE=mpr.settings
DJANGO_SETTINGS_MODULE=admin.mpr.settings
SECRET_KEY=change-this-in-production
# Worker

View File

@@ -1,9 +1,5 @@
FROM python:3.11-slim
RUN apt-get update && apt-get install -y \
ffmpeg \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /app
COPY requirements.txt .
@@ -11,4 +7,4 @@ RUN pip install --no-cache-dir -r requirements.txt
COPY . .
CMD ["python", "manage.py", "runserver", "0.0.0.0:8000"]
CMD ["python", "admin/manage.py", "runserver", "0.0.0.0:8000"]

14
ctrl/Dockerfile.worker Normal file
View File

@@ -0,0 +1,14 @@
# Celery worker image. Unlike the web/API image, this one bundles FFmpeg
# (the transcode tasks shell out to it) and installs the worker-specific
# requirements set, which pulls in requirements.txt plus ffmpeg-python.
FROM python:3.11-slim
RUN apt-get update && apt-get install -y \
ffmpeg \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /app
# Both files are copied because requirements-worker.txt does `-r requirements.txt`.
COPY requirements.txt requirements-worker.txt ./
RUN pip install --no-cache-dir -r requirements-worker.txt
COPY . .
# Default command; compose overrides this with queue/concurrency flags.
CMD ["celery", "-A", "admin.mpr", "worker", "--loglevel=info"]

View File

@@ -1,7 +1,7 @@
x-common-env: &common-env
DATABASE_URL: postgresql://mpr_user:mpr_pass@postgres:5432/mpr
REDIS_URL: redis://redis:6379/0
DJANGO_SETTINGS_MODULE: mpr.settings
DJANGO_SETTINGS_MODULE: admin.mpr.settings
DEBUG: 1
GRPC_HOST: grpc
GRPC_PORT: 50051
@@ -96,9 +96,9 @@ services:
context: ..
dockerfile: ctrl/Dockerfile
command: >
bash -c "python manage.py migrate &&
python manage.py loadbuiltins || true &&
python manage.py runserver 0.0.0.0:8701"
bash -c "python admin/manage.py migrate &&
python admin/manage.py loadbuiltins || true &&
python admin/manage.py runserver 0.0.0.0:8701"
ports:
- "8701:8701"
environment:
@@ -115,11 +115,12 @@ services:
build:
context: ..
dockerfile: ctrl/Dockerfile
command: uvicorn api.main:app --host 0.0.0.0 --port 8702 --reload
command: uvicorn core.api.main:app --host 0.0.0.0 --port 8702 --reload
ports:
- "8702:8702"
environment:
<<: *common-env
DJANGO_ALLOW_ASYNC_UNSAFE: "true"
volumes:
- ..:/app
depends_on:
@@ -132,7 +133,7 @@ services:
build:
context: ..
dockerfile: ctrl/Dockerfile
command: python -m rpc.server
command: python -m core.rpc.server
ports:
- "50052:50051"
environment:
@@ -150,8 +151,8 @@ services:
celery:
build:
context: ..
dockerfile: ctrl/Dockerfile
command: celery -A mpr worker -l info -Q transcode -c 2
dockerfile: ctrl/Dockerfile.worker
command: celery -A admin.mpr worker -l info -Q transcode -c 2
environment:
<<: *common-env
MPR_EXECUTOR: local

View File

@@ -1,22 +1,22 @@
#!/bin/bash
# Model generation script for MPR
# Generates all targets from schema/modelgen.json config
# Generates all targets from core/schema/modelgen.json config
set -e
cd "$(dirname "$0")/.."
echo "Generating models from schema/models..."
python -m modelgen generate --config schema/modelgen.json
echo "Generating models from core/schema/models..."
python -m modelgen generate --config core/schema/modelgen.json
# Generate gRPC stubs from proto
echo "Generating gRPC stubs..."
python -m grpc_tools.protoc \
-I rpc/protos \
--python_out=rpc \
--grpc_python_out=rpc \
rpc/protos/worker.proto
-I core/rpc/protos \
--python_out=core/rpc \
--grpc_python_out=core/rpc \
core/rpc/protos/worker.proto
# Fix relative import in generated grpc stub
sed -i 's/^import worker_pb2/from . import worker_pb2/' rpc/worker_pb2_grpc.py
sed -i 's/^import worker_pb2/from . import worker_pb2/' core/rpc/worker_pb2_grpc.py
echo "Done!"

View File

@@ -14,8 +14,8 @@ COPY ctrl/lambda/requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Copy application code
COPY task/lambda_handler.py ${LAMBDA_TASK_ROOT}/task/lambda_handler.py
COPY task/__init__.py ${LAMBDA_TASK_ROOT}/task/__init__.py
COPY core/task/lambda_handler.py ${LAMBDA_TASK_ROOT}/core/task/lambda_handler.py
COPY core/task/__init__.py ${LAMBDA_TASK_ROOT}/core/task/__init__.py
COPY core/ ${LAMBDA_TASK_ROOT}/core/
CMD ["task.lambda_handler.handler"]
CMD ["core.task.lambda_handler.handler"]

View File

@@ -44,9 +44,9 @@ http {
proxy_set_header Host $host;
}
# FastAPI
# FastAPI — trailing slash strips /api prefix before forwarding
location /api/ {
proxy_pass http://fastapi;
proxy_pass http://fastapi/;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;

View File

@@ -70,7 +70,7 @@ aws s3 sync /local/media/ s3://mpr-media-in/
## GCP Production (GCS via S3 compatibility)
GCS exposes an S3-compatible API. The same `core/storage.py` boto3 code works
GCS exposes an S3-compatible API. The same `core/storage/s3.py` boto3 code works
with no changes — only the endpoint and credentials differ.
### GCS HMAC Keys
@@ -103,15 +103,15 @@ aws --endpoint-url https://storage.googleapis.com s3 cp video.mp4 s3://mpr-media
```
### Cloud Run Job Handler
`task/gcp_handler.py` is the Cloud Run Job entrypoint. It reads the job payload
`core/task/gcp_handler.py` is the Cloud Run Job entrypoint. It reads the job payload
from `MPR_JOB_PAYLOAD` (injected by `GCPExecutor`), uses `core/storage` for all
GCS access (S3 compat), and POSTs the completion callback to the API.
Set the Cloud Run Job command to: `python -m task.gcp_handler`
Set the Cloud Run Job command to: `python -m core.task.gcp_handler`
## Storage Module
`core/storage.py` provides all S3 operations:
The `core/storage/` package provides all S3 operations:
```python
from core.storage import (
@@ -157,7 +157,7 @@ mutation { scanMediaFolder { found registered skipped files } }
### Cloud Run Job Mode (GCP)
1. `GCPExecutor` triggers Cloud Run Job with payload in `MPR_JOB_PAYLOAD`
2. `task/gcp_handler.py` downloads source from `S3_BUCKET_IN` (GCS S3 compat)
2. `core/task/gcp_handler.py` downloads source from `S3_BUCKET_IN` (GCS S3 compat)
3. Runs FFmpeg in container
4. Uploads result to `S3_BUCKET_OUT` (GCS S3 compat)
5. Calls back to API with result

2
requirements-worker.txt Normal file
View File

@@ -0,0 +1,2 @@
-r requirements.txt
ffmpeg-python>=0.2.0

View File

@@ -12,9 +12,6 @@ pydantic>=2.5.0
celery[redis]>=5.3.0
redis>=5.0.0
# FFmpeg
ffmpeg-python>=0.2.0
# gRPC
grpcio>=1.60.0
grpcio-tools>=1.60.0

View File

@@ -1,5 +1,5 @@
/**
* API client for FastAPI backend
* GraphQL API client
*/
import type {
@@ -8,34 +8,51 @@ import type {
TranscodeJob,
CreateJobRequest,
SystemStatus,
WorkerStatus,
} from "./types";
const API_BASE = "/api";
const GRAPHQL_URL = "/api/graphql";
async function request<T>(path: string, options?: RequestInit): Promise<T> {
const response = await fetch(`${API_BASE}${path}`, {
headers: {
"Content-Type": "application/json",
},
...options,
async function gql<T>(query: string, variables?: Record<string, unknown>): Promise<T> {
const response = await fetch(GRAPHQL_URL, {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ query, variables }),
});
if (!response.ok) {
const error = await response.text();
throw new Error(`API error: ${response.status} - ${error}`);
const json = await response.json();
if (json.errors?.length) {
throw new Error(json.errors[0].message);
}
return response.json();
return json.data as T;
}
// Assets
export async function getAssets(): Promise<MediaAsset[]> {
return request("/assets/");
const data = await gql<{ assets: MediaAsset[] }>(`
query {
assets {
id filename file_path status error_message file_size duration
video_codec audio_codec width height framerate bitrate
properties comments tags created_at updated_at
}
}
`);
return data.assets;
}
export async function getAsset(id: string): Promise<MediaAsset> {
return request(`/assets/${id}`);
const data = await gql<{ asset: MediaAsset }>(`
query($id: UUID!) {
asset(id: $id) {
id filename file_path status error_message file_size duration
video_codec audio_codec width height framerate bitrate
properties comments tags created_at updated_at
}
}
`, { id });
return data.asset;
}
export async function scanMediaFolder(): Promise<{
@@ -44,43 +61,95 @@ export async function scanMediaFolder(): Promise<{
skipped: number;
files: string[];
}> {
return request("/assets/scan", {
method: "POST",
});
const data = await gql<{ scan_media_folder: { found: number; registered: number; skipped: number; files: string[] } }>(`
mutation {
scan_media_folder { found registered skipped files }
}
`);
return data.scan_media_folder;
}
// Presets
export async function getPresets(): Promise<TranscodePreset[]> {
return request("/presets/");
const data = await gql<{ presets: TranscodePreset[] }>(`
query {
presets {
id name description is_builtin container
video_codec video_bitrate video_crf video_preset resolution framerate
audio_codec audio_bitrate audio_channels audio_samplerate
extra_args created_at updated_at
}
}
`);
return data.presets;
}
// Jobs
export async function getJobs(): Promise<TranscodeJob[]> {
return request("/jobs/");
const data = await gql<{ jobs: TranscodeJob[] }>(`
query {
jobs {
id source_asset_id preset_id preset_snapshot trim_start trim_end
output_filename output_path output_asset_id status progress
current_frame current_time speed error_message celery_task_id
execution_arn priority created_at started_at completed_at
}
}
`);
return data.jobs;
}
export async function getJob(id: string): Promise<TranscodeJob> {
return request(`/jobs/${id}`);
const data = await gql<{ job: TranscodeJob }>(`
query($id: UUID!) {
job(id: $id) {
id source_asset_id preset_id preset_snapshot trim_start trim_end
output_filename output_path output_asset_id status progress
current_frame current_time speed error_message celery_task_id
execution_arn priority created_at started_at completed_at
}
}
`, { id });
return data.job;
}
export async function createJob(data: CreateJobRequest): Promise<TranscodeJob> {
return request("/jobs/", {
method: "POST",
body: JSON.stringify(data),
export async function createJob(req: CreateJobRequest): Promise<TranscodeJob> {
const data = await gql<{ create_job: TranscodeJob }>(`
mutation($input: CreateJobInput!) {
create_job(input: $input) {
id source_asset_id status output_filename progress created_at
}
}
`, {
input: {
source_asset_id: req.source_asset_id,
preset_id: req.preset_id,
trim_start: req.trim_start,
trim_end: req.trim_end,
output_filename: req.output_filename ?? null,
priority: req.priority ?? 0,
},
});
return data.create_job;
}
export async function cancelJob(id: string): Promise<TranscodeJob> {
return request(`/jobs/${id}/cancel`, {
method: "POST",
});
const data = await gql<{ cancel_job: TranscodeJob }>(`
mutation($id: UUID!) {
cancel_job(id: $id) {
id status
}
}
`, { id });
return data.cancel_job;
}
// System
export async function getSystemStatus(): Promise<SystemStatus> {
return request("/system/status");
}
export async function getWorkerStatus(): Promise<WorkerStatus> {
return request("/system/worker");
const data = await gql<{ system_status: SystemStatus }>(`
query {
system_status { status version }
}
`);
return data.system_status;
}