executor abstraction, graphene to strawberry

This commit is contained in:
2026-03-12 23:27:34 -03:00
parent 4e9d731cff
commit eaaf2ad60c
13 changed files with 796 additions and 276 deletions

View File

@@ -1,5 +1,5 @@
"""
GraphQL API using graphene, mounted on FastAPI/Starlette.
GraphQL API using strawberry, served via FastAPI.
Primary API for MPR — all client interactions go through GraphQL.
Uses Django ORM directly for data access.
@@ -7,8 +7,11 @@ Types are generated from schema/ via modelgen — see api/schema/graphql.py.
"""
import os
from typing import List, Optional
from uuid import UUID
import graphene
import strawberry
from strawberry.types import Info
from api.schema.graphql import (
CreateJobInput,
@@ -22,7 +25,6 @@ from api.schema.graphql import (
)
from core.storage import BUCKET_IN, list_objects
# Media extensions (same as assets route)
VIDEO_EXTS = {".mp4", ".mkv", ".avi", ".mov", ".webm", ".flv", ".wmv", ".m4v"}
AUDIO_EXTS = {".mp3", ".wav", ".flac", ".aac", ".ogg", ".m4a"}
MEDIA_EXTS = VIDEO_EXTS | AUDIO_EXTS
@@ -33,23 +35,15 @@ MEDIA_EXTS = VIDEO_EXTS | AUDIO_EXTS
# ---------------------------------------------------------------------------
class Query(graphene.ObjectType):
assets = graphene.List(
MediaAssetType,
status=graphene.String(),
search=graphene.String(),
)
asset = graphene.Field(MediaAssetType, id=graphene.UUID(required=True))
jobs = graphene.List(
TranscodeJobType,
status=graphene.String(),
source_asset_id=graphene.UUID(),
)
job = graphene.Field(TranscodeJobType, id=graphene.UUID(required=True))
presets = graphene.List(TranscodePresetType)
system_status = graphene.Field(SystemStatusType)
def resolve_assets(self, info, status=None, search=None):
@strawberry.type
class Query:
@strawberry.field
def assets(
self,
info: Info,
status: Optional[str] = None,
search: Optional[str] = None,
) -> List[MediaAssetType]:
from mpr.media_assets.models import MediaAsset
qs = MediaAsset.objects.all()
@@ -57,9 +51,10 @@ class Query(graphene.ObjectType):
qs = qs.filter(status=status)
if search:
qs = qs.filter(filename__icontains=search)
return qs
return list(qs)
def resolve_asset(self, info, id):
@strawberry.field
def asset(self, info: Info, id: UUID) -> Optional[MediaAssetType]:
from mpr.media_assets.models import MediaAsset
try:
@@ -67,7 +62,13 @@ class Query(graphene.ObjectType):
except MediaAsset.DoesNotExist:
return None
def resolve_jobs(self, info, status=None, source_asset_id=None):
@strawberry.field
def jobs(
self,
info: Info,
status: Optional[str] = None,
source_asset_id: Optional[UUID] = None,
) -> List[TranscodeJobType]:
from mpr.media_assets.models import TranscodeJob
qs = TranscodeJob.objects.all()
@@ -75,9 +76,10 @@ class Query(graphene.ObjectType):
qs = qs.filter(status=status)
if source_asset_id:
qs = qs.filter(source_asset_id=source_asset_id)
return qs
return list(qs)
def resolve_job(self, info, id):
@strawberry.field
def job(self, info: Info, id: UUID) -> Optional[TranscodeJobType]:
from mpr.media_assets.models import TranscodeJob
try:
@@ -85,13 +87,15 @@ class Query(graphene.ObjectType):
except TranscodeJob.DoesNotExist:
return None
def resolve_presets(self, info):
@strawberry.field
def presets(self, info: Info) -> List[TranscodePresetType]:
from mpr.media_assets.models import TranscodePreset
return TranscodePreset.objects.all()
return list(TranscodePreset.objects.all())
def resolve_system_status(self, info):
return {"status": "ok", "version": "0.1.0"}
@strawberry.field
def system_status(self, info: Info) -> SystemStatusType:
return SystemStatusType(status="ok", version="0.1.0")
# ---------------------------------------------------------------------------
@@ -99,13 +103,10 @@ class Query(graphene.ObjectType):
# ---------------------------------------------------------------------------
class ScanMediaFolder(graphene.Mutation):
class Arguments:
pass
Output = ScanResultType
def mutate(self, info):
@strawberry.type
class Mutation:
@strawberry.mutation
def scan_media_folder(self, info: Info) -> ScanResultType:
from mpr.media_assets.models import MediaAsset
objects = list_objects(BUCKET_IN, extensions=MEDIA_EXTS)
@@ -135,14 +136,8 @@ class ScanMediaFolder(graphene.Mutation):
files=registered,
)
class CreateJob(graphene.Mutation):
class Arguments:
input = CreateJobInput(required=True)
Output = TranscodeJobType
def mutate(self, info, input):
@strawberry.mutation
def create_job(self, info: Info, input: CreateJobInput) -> TranscodeJobType:
from pathlib import Path
from mpr.media_assets.models import MediaAsset, TranscodeJob, TranscodePreset
@@ -186,9 +181,8 @@ class CreateJob(graphene.Mutation):
priority=input.priority or 0,
)
# Dispatch
executor_mode = os.environ.get("MPR_EXECUTOR", "local")
if executor_mode == "lambda":
if executor_mode in ("lambda", "gcp"):
from task.executor import get_executor
get_executor().run(
@@ -217,14 +211,8 @@ class CreateJob(graphene.Mutation):
return job
class CancelJob(graphene.Mutation):
class Arguments:
id = graphene.UUID(required=True)
Output = TranscodeJobType
def mutate(self, info, id):
@strawberry.mutation
def cancel_job(self, info: Info, id: UUID) -> TranscodeJobType:
from mpr.media_assets.models import TranscodeJob
try:
@@ -239,14 +227,8 @@ class CancelJob(graphene.Mutation):
job.save(update_fields=["status"])
return job
class RetryJob(graphene.Mutation):
class Arguments:
id = graphene.UUID(required=True)
Output = TranscodeJobType
def mutate(self, info, id):
@strawberry.mutation
def retry_job(self, info: Info, id: UUID) -> TranscodeJobType:
from mpr.media_assets.models import TranscodeJob
try:
@@ -263,15 +245,8 @@ class RetryJob(graphene.Mutation):
job.save(update_fields=["status", "progress", "error_message"])
return job
class UpdateAsset(graphene.Mutation):
class Arguments:
id = graphene.UUID(required=True)
input = UpdateAssetInput(required=True)
Output = MediaAssetType
def mutate(self, info, id, input):
@strawberry.mutation
def update_asset(self, info: Info, id: UUID, input: UpdateAssetInput) -> MediaAssetType:
from mpr.media_assets.models import MediaAsset
try:
@@ -292,14 +267,8 @@ class UpdateAsset(graphene.Mutation):
return asset
class DeleteAsset(graphene.Mutation):
class Arguments:
id = graphene.UUID(required=True)
Output = DeleteResultType
def mutate(self, info, id):
@strawberry.mutation
def delete_asset(self, info: Info, id: UUID) -> DeleteResultType:
from mpr.media_assets.models import MediaAsset
try:
@@ -310,17 +279,8 @@ class DeleteAsset(graphene.Mutation):
raise Exception("Asset not found")
class Mutation(graphene.ObjectType):
scan_media_folder = ScanMediaFolder.Field()
create_job = CreateJob.Field()
cancel_job = CancelJob.Field()
retry_job = RetryJob.Field()
update_asset = UpdateAsset.Field()
delete_asset = DeleteAsset.Field()
# ---------------------------------------------------------------------------
# Schema
# ---------------------------------------------------------------------------
schema = graphene.Schema(query=Query, mutation=Mutation)
schema = strawberry.Schema(query=Query, mutation=Mutation)

View File

@@ -21,7 +21,7 @@ django.setup()
from fastapi import FastAPI, Header, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from starlette_graphene3 import GraphQLApp, make_graphiql_handler
from strawberry.fastapi import GraphQLRouter
from api.graphql import schema as graphql_schema
@@ -45,7 +45,8 @@ app.add_middleware(
)
# GraphQL
app.mount("/graphql", GraphQLApp(schema=graphql_schema, on_get=make_graphiql_handler()))
graphql_router = GraphQLRouter(schema=graphql_schema, graphql_ide="graphiql")
app.include_router(graphql_router, prefix="/graphql")
@app.get("/")

View File

@@ -1,19 +1,26 @@
"""
Graphene Types - GENERATED FILE
Strawberry Types - GENERATED FILE
Do not edit directly. Regenerate using modelgen.
"""
import graphene
import strawberry
from enum import Enum
from typing import List, Optional
from uuid import UUID
from datetime import datetime
from strawberry.scalars import JSON
class AssetStatus(graphene.Enum):
@strawberry.enum
class AssetStatus(Enum):
PENDING = "pending"
READY = "ready"
ERROR = "error"
class JobStatus(graphene.Enum):
@strawberry.enum
class JobStatus(Enum):
PENDING = "pending"
PROCESSING = "processing"
COMPLETED = "completed"
@@ -21,122 +28,131 @@ class JobStatus(graphene.Enum):
CANCELLED = "cancelled"
class MediaAssetType(graphene.ObjectType):
@strawberry.type
class MediaAssetType:
"""A video/audio file registered in the system."""
id = graphene.UUID()
filename = graphene.String()
file_path = graphene.String()
status = graphene.String()
error_message = graphene.String()
file_size = graphene.Int()
duration = graphene.Float()
video_codec = graphene.String()
audio_codec = graphene.String()
width = graphene.Int()
height = graphene.Int()
framerate = graphene.Float()
bitrate = graphene.Int()
properties = graphene.JSONString()
comments = graphene.String()
tags = graphene.List(graphene.String)
created_at = graphene.DateTime()
updated_at = graphene.DateTime()
id: Optional[UUID] = None
filename: Optional[str] = None
file_path: Optional[str] = None
status: Optional[str] = None
error_message: Optional[str] = None
file_size: Optional[int] = None
duration: Optional[float] = None
video_codec: Optional[str] = None
audio_codec: Optional[str] = None
width: Optional[int] = None
height: Optional[int] = None
framerate: Optional[float] = None
bitrate: Optional[int] = None
properties: Optional[JSON] = None
comments: Optional[str] = None
tags: Optional[List[str]] = None
created_at: Optional[datetime] = None
updated_at: Optional[datetime] = None
class TranscodePresetType(graphene.ObjectType):
@strawberry.type
class TranscodePresetType:
"""A reusable transcoding configuration (like Handbrake presets)."""
id = graphene.UUID()
name = graphene.String()
description = graphene.String()
is_builtin = graphene.Boolean()
container = graphene.String()
video_codec = graphene.String()
video_bitrate = graphene.String()
video_crf = graphene.Int()
video_preset = graphene.String()
resolution = graphene.String()
framerate = graphene.Float()
audio_codec = graphene.String()
audio_bitrate = graphene.String()
audio_channels = graphene.Int()
audio_samplerate = graphene.Int()
extra_args = graphene.List(graphene.String)
created_at = graphene.DateTime()
updated_at = graphene.DateTime()
id: Optional[UUID] = None
name: Optional[str] = None
description: Optional[str] = None
is_builtin: Optional[bool] = None
container: Optional[str] = None
video_codec: Optional[str] = None
video_bitrate: Optional[str] = None
video_crf: Optional[int] = None
video_preset: Optional[str] = None
resolution: Optional[str] = None
framerate: Optional[float] = None
audio_codec: Optional[str] = None
audio_bitrate: Optional[str] = None
audio_channels: Optional[int] = None
audio_samplerate: Optional[int] = None
extra_args: Optional[List[str]] = None
created_at: Optional[datetime] = None
updated_at: Optional[datetime] = None
class TranscodeJobType(graphene.ObjectType):
@strawberry.type
class TranscodeJobType:
"""A transcoding or trimming job in the queue."""
id = graphene.UUID()
source_asset_id = graphene.UUID()
preset_id = graphene.UUID()
preset_snapshot = graphene.JSONString()
trim_start = graphene.Float()
trim_end = graphene.Float()
output_filename = graphene.String()
output_path = graphene.String()
output_asset_id = graphene.UUID()
status = graphene.String()
progress = graphene.Float()
current_frame = graphene.Int()
current_time = graphene.Float()
speed = graphene.String()
error_message = graphene.String()
celery_task_id = graphene.String()
execution_arn = graphene.String()
priority = graphene.Int()
created_at = graphene.DateTime()
started_at = graphene.DateTime()
completed_at = graphene.DateTime()
id: Optional[UUID] = None
source_asset_id: Optional[UUID] = None
preset_id: Optional[UUID] = None
preset_snapshot: Optional[JSON] = None
trim_start: Optional[float] = None
trim_end: Optional[float] = None
output_filename: Optional[str] = None
output_path: Optional[str] = None
output_asset_id: Optional[UUID] = None
status: Optional[str] = None
progress: Optional[float] = None
current_frame: Optional[int] = None
current_time: Optional[float] = None
speed: Optional[str] = None
error_message: Optional[str] = None
celery_task_id: Optional[str] = None
execution_arn: Optional[str] = None
priority: Optional[int] = None
created_at: Optional[datetime] = None
started_at: Optional[datetime] = None
completed_at: Optional[datetime] = None
class CreateJobInput(graphene.InputObjectType):
@strawberry.input
class CreateJobInput:
"""Request body for creating a transcode/trim job."""
source_asset_id = graphene.UUID(required=True)
preset_id = graphene.UUID()
trim_start = graphene.Float()
trim_end = graphene.Float()
output_filename = graphene.String()
priority = graphene.Int(default_value=0)
source_asset_id: UUID
preset_id: Optional[UUID] = None
trim_start: Optional[float] = None
trim_end: Optional[float] = None
output_filename: Optional[str] = None
priority: int = 0
class UpdateAssetInput(graphene.InputObjectType):
@strawberry.input
class UpdateAssetInput:
"""Request body for updating asset metadata."""
comments = graphene.String()
tags = graphene.List(graphene.String)
comments: Optional[str] = None
tags: Optional[List[str]] = None
class SystemStatusType(graphene.ObjectType):
@strawberry.type
class SystemStatusType:
"""System status response."""
status = graphene.String()
version = graphene.String()
status: Optional[str] = None
version: Optional[str] = None
class ScanResultType(graphene.ObjectType):
@strawberry.type
class ScanResultType:
"""Result of scanning the media input bucket."""
found = graphene.Int()
registered = graphene.Int()
skipped = graphene.Int()
files = graphene.List(graphene.String)
found: Optional[int] = None
registered: Optional[int] = None
skipped: Optional[int] = None
files: Optional[List[str]] = None
class DeleteResultType(graphene.ObjectType):
@strawberry.type
class DeleteResultType:
"""Result of a delete operation."""
ok = graphene.Boolean()
ok: Optional[bool] = None
class WorkerStatusType(graphene.ObjectType):
@strawberry.type
class WorkerStatusType:
"""Worker health and capabilities."""
available = graphene.Boolean()
active_jobs = graphene.Int()
supported_codecs = graphene.List(graphene.String)
gpu_available = graphene.Boolean()
available: Optional[bool] = None
active_jobs: Optional[int] = None
supported_codecs: Optional[List[str]] = None
gpu_available: Optional[bool] = None

View File

@@ -0,0 +1,83 @@
// MPR deployment topology, GCP executor mode (Graphviz DOT source).
// Browser traffic enters through nginx and fans out to the Django admin,
// the FastAPI GraphQL API, and the Timeline UI. PostgreSQL stays local,
// while transcoding runs in a Cloud Run Job that reads/writes GCS buckets
// over their S3-compatible API and reports back via an HTTP callback.
// Regenerate the SVG with Graphviz: dot -Tsvg <this file> -o <output>.svg
digraph gcp_architecture {
// Top-to-bottom layout; orthogonal splines keep the diagram grid-like.
rankdir=TB
node [shape=box, style=rounded, fontname="Helvetica"]
edge [fontname="Helvetica", fontsize=10]
labelloc="t"
label="MPR - GCP Architecture (Cloud Run Jobs + GCS)"
fontsize=16
fontname="Helvetica-Bold"
graph [splines=ortho, nodesep=0.8, ranksep=0.8]
// External actors (the user's browser)
subgraph cluster_external {
label="External"
style=dashed
color=gray
browser [label="Browser\nmpr.mcrn.ar", shape=ellipse]
}
// Nginx reverse proxy — single public entry point
subgraph cluster_proxy {
label="Reverse Proxy"
style=filled
fillcolor="#e8f4f8"
nginx [label="nginx\nport 80"]
}
// Application layer — the three services routed behind nginx
subgraph cluster_apps {
label="Application Layer"
style=filled
fillcolor="#f0f8e8"
django [label="Django Admin\n/admin\nport 8701"]
fastapi [label="GraphQL API\n/graphql\nport 8702"]
timeline [label="Timeline UI\n/\nport 5173"]
}
// Data layer (still local, not migrated to GCP)
subgraph cluster_data {
label="Data Layer"
style=filled
fillcolor="#f8e8f0"
postgres [label="PostgreSQL\nport 5436", shape=cylinder]
}
// GCP layer — transcoding compute plus object storage
subgraph cluster_gcp {
label="Google Cloud"
style=filled
fillcolor="#e8f0fd"
cloud_run_job [label="Cloud Run Job\nFFmpeg container\ntranscoding"]
gcs [label="GCS Buckets\n(S3-compat API)", shape=folder]
bucket_in [label="mpr-media-in/\ninput videos", shape=note]
bucket_out [label="mpr-media-out/\ntranscoded output", shape=note]
}
// Connections — edge labels name the protocol or operation on each hop
browser -> nginx [label="HTTP"]
nginx -> django [xlabel="/admin"]
nginx -> fastapi [xlabel="/graphql"]
nginx -> timeline [xlabel="/"]
timeline -> fastapi [label="GraphQL"]
django -> postgres
fastapi -> postgres [label="read/write jobs"]
fastapi -> cloud_run_job [label="google-cloud-run\nrun_job() + payload\nexecution_name"]
cloud_run_job -> gcs [label="S3 compat (HMAC)\ndownload input\nupload output"]
cloud_run_job -> fastapi [label="POST /jobs/{id}/callback\nupdate status"]
fastapi -> postgres [label="callback updates\njob status"]
// Dotted, arrowless edges: the buckets are contents of GCS, not data flows.
gcs -> bucket_in [style=dotted, arrowhead=none]
gcs -> bucket_out [style=dotted, arrowhead=none]
}

View File

@@ -0,0 +1,210 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<!-- Generated by graphviz version 14.1.2 (0)
-->
<!-- Title: gcp_architecture Pages: 1 -->
<svg width="653pt" height="957pt"
viewBox="0.00 0.00 653.00 957.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 953.35)">
<title>gcp_architecture</title>
<polygon fill="white" stroke="none" points="-4,4 -4,-953.35 649.25,-953.35 649.25,4 -4,4"/>
<text xml:space="preserve" text-anchor="middle" x="322.62" y="-930.15" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">MPR &#45; GCP Architecture (Cloud Run Jobs + GCS)</text>
<g id="clust1" class="cluster">
<title>cluster_external</title>
<polygon fill="none" stroke="gray" stroke-dasharray="5,2" points="155,-810.25 155,-913.85 315,-913.85 315,-810.25 155,-810.25"/>
<text xml:space="preserve" text-anchor="middle" x="235" y="-894.65" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">External</text>
</g>
<g id="clust2" class="cluster">
<title>cluster_proxy</title>
<polygon fill="#e8f4f8" stroke="black" points="162,-682.5 162,-768.5 308,-768.5 308,-682.5 162,-682.5"/>
<text xml:space="preserve" text-anchor="middle" x="235" y="-749.3" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Reverse Proxy</text>
</g>
<g id="clust3" class="cluster">
<title>cluster_apps</title>
<polygon fill="#f0f8e8" stroke="black" points="8,-418.75 8,-652.5 290,-652.5 290,-418.75 8,-418.75"/>
<text xml:space="preserve" text-anchor="middle" x="149" y="-633.3" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Application Layer</text>
</g>
<g id="clust4" class="cluster">
<title>cluster_data</title>
<polygon fill="#f8e8f0" stroke="black" points="27,-248.91 27,-350.84 141,-350.84 141,-248.91 27,-248.91"/>
<text xml:space="preserve" text-anchor="middle" x="84" y="-331.64" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Data Layer</text>
</g>
<g id="clust5" class="cluster">
<title>cluster_gcp</title>
<polygon fill="#e8f0fd" stroke="black" points="299,-8 299,-351.5 631,-351.5 631,-8 299,-8"/>
<text xml:space="preserve" text-anchor="middle" x="465" y="-332.3" font-family="Helvetica,sans-Serif" font-weight="bold" font-size="16.00">Google Cloud</text>
</g>
<!-- browser -->
<g id="node1" class="node">
<title>browser</title>
<ellipse fill="none" stroke="black" cx="235" cy="-848.3" rx="71.77" ry="30.05"/>
<text xml:space="preserve" text-anchor="middle" x="235" y="-852.25" font-family="Helvetica,sans-Serif" font-size="14.00">Browser</text>
<text xml:space="preserve" text-anchor="middle" x="235" y="-835" font-family="Helvetica,sans-Serif" font-size="14.00">mpr.mcrn.ar</text>
</g>
<!-- nginx -->
<g id="node2" class="node">
<title>nginx</title>
<path fill="none" stroke="black" d="M256.5,-733C256.5,-733 213.5,-733 213.5,-733 207.5,-733 201.5,-727 201.5,-721 201.5,-721 201.5,-702.5 201.5,-702.5 201.5,-696.5 207.5,-690.5 213.5,-690.5 213.5,-690.5 256.5,-690.5 256.5,-690.5 262.5,-690.5 268.5,-696.5 268.5,-702.5 268.5,-702.5 268.5,-721 268.5,-721 268.5,-727 262.5,-733 256.5,-733"/>
<text xml:space="preserve" text-anchor="middle" x="235" y="-715.7" font-family="Helvetica,sans-Serif" font-size="14.00">nginx</text>
<text xml:space="preserve" text-anchor="middle" x="235" y="-698.45" font-family="Helvetica,sans-Serif" font-size="14.00">port 80</text>
</g>
<!-- browser&#45;&gt;nginx -->
<g id="edge1" class="edge">
<title>browser&#45;&gt;nginx</title>
<path fill="none" stroke="black" d="M235,-818C235,-818 235,-745 235,-745"/>
<polygon fill="black" stroke="black" points="238.5,-745 235,-735 231.5,-745 238.5,-745"/>
<text xml:space="preserve" text-anchor="middle" x="247.75" y="-779.75" font-family="Helvetica,sans-Serif" font-size="10.00">HTTP</text>
</g>
<!-- django -->
<g id="node3" class="node">
<title>django</title>
<path fill="none" stroke="black" d="M117.75,-617C117.75,-617 28.25,-617 28.25,-617 22.25,-617 16.25,-611 16.25,-605 16.25,-605 16.25,-569.25 16.25,-569.25 16.25,-563.25 22.25,-557.25 28.25,-557.25 28.25,-557.25 117.75,-557.25 117.75,-557.25 123.75,-557.25 129.75,-563.25 129.75,-569.25 129.75,-569.25 129.75,-605 129.75,-605 129.75,-611 123.75,-617 117.75,-617"/>
<text xml:space="preserve" text-anchor="middle" x="73" y="-599.7" font-family="Helvetica,sans-Serif" font-size="14.00">Django Admin</text>
<text xml:space="preserve" text-anchor="middle" x="73" y="-582.45" font-family="Helvetica,sans-Serif" font-size="14.00">/admin</text>
<text xml:space="preserve" text-anchor="middle" x="73" y="-565.2" font-family="Helvetica,sans-Serif" font-size="14.00">port 8701</text>
</g>
<!-- nginx&#45;&gt;django -->
<g id="edge2" class="edge">
<title>nginx&#45;&gt;django</title>
<path fill="none" stroke="black" d="M201.04,-719C153.54,-719 73,-719 73,-719 73,-719 73,-628.89 73,-628.89"/>
<polygon fill="black" stroke="black" points="76.5,-628.89 73,-618.89 69.5,-628.89 76.5,-628.89"/>
<text xml:space="preserve" text-anchor="middle" x="75.09" y="-722.25" font-family="Helvetica,sans-Serif" font-size="10.00">/admin</text>
</g>
<!-- fastapi -->
<g id="node4" class="node">
<title>fastapi</title>
<path fill="none" stroke="black" d="M270.25,-486.5C270.25,-486.5 189.75,-486.5 189.75,-486.5 183.75,-486.5 177.75,-480.5 177.75,-474.5 177.75,-474.5 177.75,-438.75 177.75,-438.75 177.75,-432.75 183.75,-426.75 189.75,-426.75 189.75,-426.75 270.25,-426.75 270.25,-426.75 276.25,-426.75 282.25,-432.75 282.25,-438.75 282.25,-438.75 282.25,-474.5 282.25,-474.5 282.25,-480.5 276.25,-486.5 270.25,-486.5"/>
<text xml:space="preserve" text-anchor="middle" x="230" y="-469.2" font-family="Helvetica,sans-Serif" font-size="14.00">GraphQL API</text>
<text xml:space="preserve" text-anchor="middle" x="230" y="-451.95" font-family="Helvetica,sans-Serif" font-size="14.00">/graphql</text>
<text xml:space="preserve" text-anchor="middle" x="230" y="-434.7" font-family="Helvetica,sans-Serif" font-size="14.00">port 8702</text>
</g>
<!-- nginx&#45;&gt;fastapi -->
<g id="edge3" class="edge">
<title>nginx&#45;&gt;fastapi</title>
<path fill="none" stroke="black" d="M201.11,-705C191.15,-705 182.88,-705 182.88,-705 182.88,-705 182.88,-498.1 182.88,-498.1"/>
<polygon fill="black" stroke="black" points="186.38,-498.1 182.88,-488.1 179.38,-498.1 186.38,-498.1"/>
<text xml:space="preserve" text-anchor="middle" x="163" y="-613.91" font-family="Helvetica,sans-Serif" font-size="10.00">/graphql</text>
</g>
<!-- timeline -->
<g id="node5" class="node">
<title>timeline</title>
<path fill="none" stroke="black" d="M270,-617C270,-617 200,-617 200,-617 194,-617 188,-611 188,-605 188,-605 188,-569.25 188,-569.25 188,-563.25 194,-557.25 200,-557.25 200,-557.25 270,-557.25 270,-557.25 276,-557.25 282,-563.25 282,-569.25 282,-569.25 282,-605 282,-605 282,-611 276,-617 270,-617"/>
<text xml:space="preserve" text-anchor="middle" x="235" y="-599.7" font-family="Helvetica,sans-Serif" font-size="14.00">Timeline UI</text>
<text xml:space="preserve" text-anchor="middle" x="235" y="-582.45" font-family="Helvetica,sans-Serif" font-size="14.00">/</text>
<text xml:space="preserve" text-anchor="middle" x="235" y="-565.2" font-family="Helvetica,sans-Serif" font-size="14.00">port 5173</text>
</g>
<!-- nginx&#45;&gt;timeline -->
<g id="edge4" class="edge">
<title>nginx&#45;&gt;timeline</title>
<path fill="none" stroke="black" d="M235,-690.04C235,-690.04 235,-628.97 235,-628.97"/>
<polygon fill="black" stroke="black" points="238.5,-628.97 235,-618.97 231.5,-628.97 238.5,-628.97"/>
<text xml:space="preserve" text-anchor="middle" x="233.5" y="-662.75" font-family="Helvetica,sans-Serif" font-size="10.00">/</text>
</g>
<!-- postgres -->
<g id="node6" class="node">
<title>postgres</title>
<path fill="none" stroke="black" d="M131.75,-310.03C131.75,-312.96 110.35,-315.34 84,-315.34 57.65,-315.34 36.25,-312.96 36.25,-310.03 36.25,-310.03 36.25,-262.22 36.25,-262.22 36.25,-259.29 57.65,-256.91 84,-256.91 110.35,-256.91 131.75,-259.29 131.75,-262.22 131.75,-262.22 131.75,-310.03 131.75,-310.03"/>
<path fill="none" stroke="black" d="M131.75,-310.03C131.75,-307.1 110.35,-304.72 84,-304.72 57.65,-304.72 36.25,-307.1 36.25,-310.03"/>
<text xml:space="preserve" text-anchor="middle" x="84" y="-290.07" font-family="Helvetica,sans-Serif" font-size="14.00">PostgreSQL</text>
<text xml:space="preserve" text-anchor="middle" x="84" y="-272.82" font-family="Helvetica,sans-Serif" font-size="14.00">port 5436</text>
</g>
<!-- django&#45;&gt;postgres -->
<g id="edge6" class="edge">
<title>django&#45;&gt;postgres</title>
<path fill="none" stroke="black" d="M59.62,-556.89C59.62,-556.89 59.62,-326.97 59.62,-326.97"/>
<polygon fill="black" stroke="black" points="63.13,-326.97 59.63,-316.97 56.13,-326.97 63.13,-326.97"/>
</g>
<!-- fastapi&#45;&gt;postgres -->
<g id="edge7" class="edge">
<title>fastapi&#45;&gt;postgres</title>
<path fill="none" stroke="black" d="M177.34,-467C135.16,-467 83,-467 83,-467 83,-467 83,-327.1 83,-327.1"/>
<polygon fill="black" stroke="black" points="86.5,-327.1 83,-317.1 79.5,-327.1 86.5,-327.1"/>
<text xml:space="preserve" text-anchor="middle" x="266.38" y="-375.5" font-family="Helvetica,sans-Serif" font-size="10.00">read/write jobs</text>
</g>
<!-- fastapi&#45;&gt;postgres -->
<g id="edge11" class="edge">
<title>fastapi&#45;&gt;postgres</title>
<path fill="none" stroke="black" d="M177.57,-447C143.88,-447 106.38,-447 106.38,-447 106.38,-447 106.38,-327.15 106.38,-327.15"/>
<polygon fill="black" stroke="black" points="109.88,-327.15 106.38,-317.15 102.88,-327.15 109.88,-327.15"/>
<text xml:space="preserve" text-anchor="middle" x="125.25" y="-381.88" font-family="Helvetica,sans-Serif" font-size="10.00">callback updates</text>
<text xml:space="preserve" text-anchor="middle" x="125.25" y="-369.12" font-family="Helvetica,sans-Serif" font-size="10.00">job status</text>
</g>
<!-- cloud_run_job -->
<g id="node7" class="node">
<title>cloud_run_job</title>
<path fill="none" stroke="black" d="M505,-316C505,-316 387,-316 387,-316 381,-316 375,-310 375,-304 375,-304 375,-268.25 375,-268.25 375,-262.25 381,-256.25 387,-256.25 387,-256.25 505,-256.25 505,-256.25 511,-256.25 517,-262.25 517,-268.25 517,-268.25 517,-304 517,-304 517,-310 511,-316 505,-316"/>
<text xml:space="preserve" text-anchor="middle" x="446" y="-298.7" font-family="Helvetica,sans-Serif" font-size="14.00">Cloud Run Job</text>
<text xml:space="preserve" text-anchor="middle" x="446" y="-281.45" font-family="Helvetica,sans-Serif" font-size="14.00">FFmpeg container</text>
<text xml:space="preserve" text-anchor="middle" x="446" y="-264.2" font-family="Helvetica,sans-Serif" font-size="14.00">transcoding</text>
</g>
<!-- fastapi&#45;&gt;cloud_run_job -->
<g id="edge8" class="edge">
<title>fastapi&#45;&gt;cloud_run_job</title>
<path fill="none" stroke="black" d="M247.42,-426.41C247.42,-379.88 247.42,-296 247.42,-296 247.42,-296 363.07,-296 363.07,-296"/>
<polygon fill="black" stroke="black" points="363.07,-299.5 373.07,-296 363.07,-292.5 363.07,-299.5"/>
<text xml:space="preserve" text-anchor="middle" x="414.38" y="-388.25" font-family="Helvetica,sans-Serif" font-size="10.00">google&#45;cloud&#45;run</text>
<text xml:space="preserve" text-anchor="middle" x="414.38" y="-375.5" font-family="Helvetica,sans-Serif" font-size="10.00">run_job() + payload</text>
<text xml:space="preserve" text-anchor="middle" x="414.38" y="-362.75" font-family="Helvetica,sans-Serif" font-size="10.00">execution_name</text>
</g>
<!-- timeline&#45;&gt;fastapi -->
<g id="edge5" class="edge">
<title>timeline&#45;&gt;fastapi</title>
<path fill="none" stroke="black" d="M235,-556.86C235,-556.86 235,-498.24 235,-498.24"/>
<polygon fill="black" stroke="black" points="238.5,-498.24 235,-488.24 231.5,-498.24 238.5,-498.24"/>
<text xml:space="preserve" text-anchor="middle" x="253" y="-518.75" font-family="Helvetica,sans-Serif" font-size="10.00">GraphQL</text>
</g>
<!-- cloud_run_job&#45;&gt;fastapi -->
<g id="edge10" class="edge">
<title>cloud_run_job&#45;&gt;fastapi</title>
<path fill="none" stroke="black" d="M374.7,-276C306.06,-276 212.58,-276 212.58,-276 212.58,-276 212.58,-414.88 212.58,-414.88"/>
<polygon fill="black" stroke="black" points="209.08,-414.88 212.58,-424.88 216.08,-414.88 209.08,-414.88"/>
<text xml:space="preserve" text-anchor="middle" x="585.62" y="-381.88" font-family="Helvetica,sans-Serif" font-size="10.00">POST /jobs/{id}/callback</text>
<text xml:space="preserve" text-anchor="middle" x="585.62" y="-369.12" font-family="Helvetica,sans-Serif" font-size="10.00">update status</text>
</g>
<!-- gcs -->
<g id="node8" class="node">
<title>gcs</title>
<polygon fill="none" stroke="black" points="510.25,-160 507.25,-164 486.25,-164 483.25,-160 381.75,-160 381.75,-117.5 510.25,-117.5 510.25,-160"/>
<text xml:space="preserve" text-anchor="middle" x="446" y="-142.7" font-family="Helvetica,sans-Serif" font-size="14.00">GCS Buckets</text>
<text xml:space="preserve" text-anchor="middle" x="446" y="-125.45" font-family="Helvetica,sans-Serif" font-size="14.00">(S3&#45;compat API)</text>
</g>
<!-- cloud_run_job&#45;&gt;gcs -->
<g id="edge9" class="edge">
<title>cloud_run_job&#45;&gt;gcs</title>
<path fill="none" stroke="black" d="M446,-255.95C446,-255.95 446,-171.81 446,-171.81"/>
<polygon fill="black" stroke="black" points="449.5,-171.81 446,-161.81 442.5,-171.81 449.5,-171.81"/>
<text xml:space="preserve" text-anchor="middle" x="492.12" y="-217.75" font-family="Helvetica,sans-Serif" font-size="10.00">S3 compat (HMAC)</text>
<text xml:space="preserve" text-anchor="middle" x="492.12" y="-205" font-family="Helvetica,sans-Serif" font-size="10.00">download input</text>
<text xml:space="preserve" text-anchor="middle" x="492.12" y="-192.25" font-family="Helvetica,sans-Serif" font-size="10.00">upload output</text>
</g>
<!-- bucket_in -->
<g id="node9" class="node">
<title>bucket_in</title>
<polygon fill="none" stroke="black" points="414.75,-58.5 307.25,-58.5 307.25,-16 420.75,-16 420.75,-52.5 414.75,-58.5"/>
<polyline fill="none" stroke="black" points="414.75,-58.5 414.75,-52.5"/>
<polyline fill="none" stroke="black" points="420.75,-52.5 414.75,-52.5"/>
<text xml:space="preserve" text-anchor="middle" x="364" y="-41.2" font-family="Helvetica,sans-Serif" font-size="14.00">mpr&#45;media&#45;in/</text>
<text xml:space="preserve" text-anchor="middle" x="364" y="-23.95" font-family="Helvetica,sans-Serif" font-size="14.00">input videos</text>
</g>
<!-- gcs&#45;&gt;bucket_in -->
<g id="edge12" class="edge">
<title>gcs&#45;&gt;bucket_in</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M401.25,-117.22C401.25,-100 401.25,-75.96 401.25,-58.74"/>
</g>
<!-- bucket_out -->
<g id="node10" class="node">
<title>bucket_out</title>
<polygon fill="none" stroke="black" points="617.12,-58.5 478.88,-58.5 478.88,-16 623.12,-16 623.12,-52.5 617.12,-58.5"/>
<polyline fill="none" stroke="black" points="617.12,-58.5 617.12,-52.5"/>
<polyline fill="none" stroke="black" points="623.12,-52.5 617.12,-52.5"/>
<text xml:space="preserve" text-anchor="middle" x="551" y="-41.2" font-family="Helvetica,sans-Serif" font-size="14.00">mpr&#45;media&#45;out/</text>
<text xml:space="preserve" text-anchor="middle" x="551" y="-23.95" font-family="Helvetica,sans-Serif" font-size="14.00">transcoded output</text>
</g>
<!-- gcs&#45;&gt;bucket_out -->
<g id="edge13" class="edge">
<title>gcs&#45;&gt;bucket_out</title>
<path fill="none" stroke="black" stroke-dasharray="1,5" d="M494.56,-117.22C494.56,-100 494.56,-75.96 494.56,-58.74"/>
</g>
</g>
</svg>

After

Width:  |  Height:  |  Size: 15 KiB

View File

@@ -68,6 +68,47 @@ aws s3 cp video.mp4 s3://mpr-media-in/
aws s3 sync /local/media/ s3://mpr-media-in/
```
## GCP Production (GCS via S3 compatibility)
GCS exposes an S3-compatible API. The same `core/storage.py` boto3 code works
with no changes — only the endpoint and credentials differ.
### GCS HMAC Keys
Generate under **Cloud Storage → Settings → Interoperability** in the GCP console.
These act as `AWS_ACCESS_KEY_ID` / `AWS_SECRET_ACCESS_KEY`.
### Configuration
```bash
S3_ENDPOINT_URL=https://storage.googleapis.com
S3_BUCKET_IN=mpr-media-in
S3_BUCKET_OUT=mpr-media-out
AWS_ACCESS_KEY_ID=<GCS HMAC access key>
AWS_SECRET_ACCESS_KEY=<GCS HMAC secret>
# Executor
MPR_EXECUTOR=gcp
GCP_PROJECT_ID=my-project
GCP_REGION=us-central1
CLOUD_RUN_JOB=mpr-transcode
CALLBACK_URL=https://mpr.mcrn.ar/api
CALLBACK_API_KEY=<secret>
```
### Upload Files to GCS
```bash
gcloud storage cp video.mp4 gs://mpr-media-in/
# Or with the aws CLI via compat endpoint
aws --endpoint-url https://storage.googleapis.com s3 cp video.mp4 s3://mpr-media-in/
```
### Cloud Run Job Handler
`task/gcp_handler.py` is the Cloud Run Job entrypoint. It reads the job payload
from `MPR_JOB_PAYLOAD` (injected by `GCPExecutor`), uses `core/storage` for all
GCS access (S3 compat), and POSTs the completion callback to the API.
Set the Cloud Run Job command to: `python -m task.gcp_handler`
## Storage Module
`core/storage.py` provides all S3 operations:
@@ -114,7 +155,14 @@ mutation { scanMediaFolder { found registered skipped files } }
4. Uploads result to `S3_BUCKET_OUT`
5. Calls back to API with result
Both paths use the same S3 buckets and key structure.
### Cloud Run Job Mode (GCP)
1. `GCPExecutor` triggers Cloud Run Job with payload in `MPR_JOB_PAYLOAD`
2. `task/gcp_handler.py` downloads source from `S3_BUCKET_IN` (GCS S3 compat)
3. Runs FFmpeg in container
4. Uploads result to `S3_BUCKET_OUT` (GCS S3 compat)
5. Calls back to API with result
All three paths use the same S3-compatible bucket names and key structure.
## Supported File Types

View File

@@ -9,8 +9,9 @@
<body>
<h1>MPR - Media Processor</h1>
<p>
Media transcoding platform with dual execution modes: local (Celery
+ MinIO) and cloud (AWS Step Functions + Lambda + S3).
Media transcoding platform with three execution modes: local (Celery
+ MinIO), AWS (Step Functions + Lambda + S3), and GCP (Cloud Run
Jobs + GCS). Storage is S3-compatible across all environments.
</p>
<nav>
@@ -54,6 +55,21 @@
>Open full size</a
>
</div>
<div class="diagram">
<h3>GCP Architecture (Production)</h3>
<object
type="image/svg+xml"
data="architecture/01c-gcp-architecture.svg"
>
<img
src="architecture/01c-gcp-architecture.svg"
alt="GCP Architecture"
/>
</object>
<a href="architecture/01c-gcp-architecture.svg" target="_blank"
>Open full size</a
>
</div>
</div>
<div class="legend">
@@ -73,7 +89,11 @@
</li>
<li>
<span class="color-box" style="background: #fde8d0"></span>
AWS (Step Functions, Lambda - cloud mode)
AWS (Step Functions, Lambda)
</li>
<li>
<span class="color-box" style="background: #e8f0fd"></span>
GCP (Cloud Run Jobs + GCS)
</li>
<li>
<span class="color-box" style="background: #f8e8f0"></span>
@@ -81,7 +101,7 @@
</li>
<li>
<span class="color-box" style="background: #f0f0f0"></span>
S3 Storage (MinIO local / AWS S3 cloud)
S3-compatible Storage (MinIO / AWS S3 / GCS)
</li>
</ul>
</div>

View File

@@ -7,17 +7,17 @@ Supported generators:
- TypeScriptGenerator: TypeScript interfaces
- ProtobufGenerator: Protocol Buffer definitions
- PrismaGenerator: Prisma schema
- GrapheneGenerator: Graphene ObjectType/InputObjectType classes
- StrawberryGenerator: Strawberry type/input/enum classes
"""
from typing import Dict, Type
from .base import BaseGenerator
from .django import DjangoGenerator
from .graphene import GrapheneGenerator
from .prisma import PrismaGenerator
from .protobuf import ProtobufGenerator
from .pydantic import PydanticGenerator
from .strawberry import StrawberryGenerator
from .typescript import TypeScriptGenerator
# Registry of available generators
@@ -29,14 +29,14 @@ GENERATORS: Dict[str, Type[BaseGenerator]] = {
"protobuf": ProtobufGenerator,
"proto": ProtobufGenerator, # Alias
"prisma": PrismaGenerator,
"graphene": GrapheneGenerator,
"strawberry": StrawberryGenerator,
}
__all__ = [
"BaseGenerator",
"PydanticGenerator",
"DjangoGenerator",
"GrapheneGenerator",
"StrawberryGenerator",
"TypeScriptGenerator",
"ProtobufGenerator",
"PrismaGenerator",

View File

@@ -1,28 +1,29 @@
"""
Graphene Generator
Strawberry Generator
Generates graphene ObjectType and InputObjectType classes from model definitions.
Generates strawberry type, input, and enum classes from model definitions.
Only generates type definitions — queries, mutations, and resolvers are hand-written.
"""
import dataclasses as dc
from enum import Enum
from pathlib import Path
from typing import Any, List, get_type_hints
from ..helpers import get_origin_name, get_type_name, unwrap_optional
from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
from ..types import GRAPHENE_RESOLVERS
from ..types import STRAWBERRY_RESOLVERS
from .base import BaseGenerator
class GrapheneGenerator(BaseGenerator):
"""Generates graphene type definition files."""
class StrawberryGenerator(BaseGenerator):
"""Generates strawberry type definition files."""
def file_extension(self) -> str:
return ".py"
def generate(self, models, output_path: Path) -> None:
"""Generate graphene types to output_path."""
"""Generate strawberry types to output_path."""
output_path.parent.mkdir(parents=True, exist_ok=True)
if hasattr(models, "models"):
@@ -47,22 +48,18 @@ class GrapheneGenerator(BaseGenerator):
enums: List[EnumDefinition],
api_models: List[ModelDefinition],
) -> str:
"""Generate from ModelDefinition objects."""
lines = self._generate_header()
# Generate enums as graphene.Enum
for enum_def in enums:
lines.extend(self._generate_enum(enum_def))
lines.append("")
lines.append("")
# Generate domain models as ObjectType
for model_def in models:
lines.extend(self._generate_object_type(model_def))
lines.append("")
lines.append("")
# Generate API models — request types as InputObjectType, others as ObjectType
for model_def in api_models:
if model_def.name.endswith("Request"):
lines.extend(self._generate_input_type(model_def))
@@ -74,7 +71,6 @@ class GrapheneGenerator(BaseGenerator):
return "\n".join(lines).rstrip() + "\n"
def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
"""Generate from Python dataclasses."""
lines = self._generate_header()
enums_generated = set()
@@ -99,37 +95,38 @@ class GrapheneGenerator(BaseGenerator):
def _generate_header(self) -> List[str]:
return [
'"""',
"Graphene Types - GENERATED FILE",
"Strawberry Types - GENERATED FILE",
"",
"Do not edit directly. Regenerate using modelgen.",
'"""',
"",
"import graphene",
"import strawberry",
"from enum import Enum",
"from typing import List, Optional",
"from uuid import UUID",
"from datetime import datetime",
"from strawberry.scalars import JSON",
"",
"",
]
def _generate_enum(self, enum_def: EnumDefinition) -> List[str]:
"""Generate graphene.Enum from EnumDefinition."""
lines = [f"class {enum_def.name}(graphene.Enum):"]
lines = ["@strawberry.enum", f"class {enum_def.name}(Enum):"]
for name, value in enum_def.values:
lines.append(f' {name} = "{value}"')
return lines
def _generate_enum_from_python(self, enum_cls: type) -> List[str]:
"""Generate graphene.Enum from Python Enum."""
lines = [f"class {enum_cls.__name__}(graphene.Enum):"]
lines = ["@strawberry.enum", f"class {enum_cls.__name__}(Enum):"]
for member in enum_cls:
lines.append(f' {member.name} = "{member.value}"')
return lines
def _generate_object_type(self, model_def: ModelDefinition) -> List[str]:
"""Generate graphene.ObjectType from ModelDefinition."""
name = model_def.name
# Append Type suffix if not already present
type_name = f"{name}Type" if not name.endswith("Type") else name
lines = [f"class {type_name}(graphene.ObjectType):"]
lines = ["@strawberry.type", f"class {type_name}:"]
if model_def.docstring:
doc = model_def.docstring.strip().split("\n")[0]
lines.append(f' """{doc}"""')
@@ -139,23 +136,19 @@ class GrapheneGenerator(BaseGenerator):
lines.append(" pass")
else:
for field in model_def.fields:
graphene_type = self._resolve_type(field.type_hint, field.optional)
lines.append(f" {field.name} = {graphene_type}")
type_str = self._resolve_type(field.type_hint, optional=True)
lines.append(f" {field.name}: {type_str} = None")
return lines
def _generate_input_type(self, model_def: ModelDefinition) -> List[str]:
"""Generate graphene.InputObjectType from ModelDefinition."""
import dataclasses as dc
name = model_def.name
# Convert FooRequest -> FooInput
if name.endswith("Request"):
input_name = name[: -len("Request")] + "Input"
else:
input_name = f"{name}Input"
lines = [f"class {input_name}(graphene.InputObjectType):"]
lines = ["@strawberry.input", f"class {input_name}:"]
if model_def.docstring:
doc = model_def.docstring.strip().split("\n")[0]
lines.append(f' """{doc}"""')
@@ -164,73 +157,64 @@ class GrapheneGenerator(BaseGenerator):
if not model_def.fields:
lines.append(" pass")
else:
# Required fields first, then optional/defaulted
required = []
optional = []
for field in model_def.fields:
graphene_type = self._resolve_type(field.type_hint, field.optional)
# Required only if not optional AND no default value
has_default = field.default is not dc.MISSING
if not field.optional and not has_default:
graphene_type = self._make_required(graphene_type)
elif has_default and not field.optional:
graphene_type = self._add_default(graphene_type, field.default)
lines.append(f" {field.name} = {graphene_type}")
required.append(field)
else:
optional.append(field)
for field in required:
type_str = self._resolve_type(field.type_hint, optional=False)
lines.append(f" {field.name}: {type_str}")
for field in optional:
has_default = field.default is not dc.MISSING
if has_default and not callable(field.default):
type_str = self._resolve_type(field.type_hint, optional=False)
lines.append(f" {field.name}: {type_str} = {field.default!r}")
else:
type_str = self._resolve_type(field.type_hint, optional=True)
lines.append(f" {field.name}: {type_str} = None")
return lines
def _generate_object_type_from_dataclass(self, cls: type) -> List[str]:
"""Generate graphene.ObjectType from a dataclass."""
import dataclasses as dc
type_name = f"{cls.__name__}Type"
lines = [f"class {type_name}(graphene.ObjectType):"]
lines = ["@strawberry.type", f"class {type_name}:"]
hints = get_type_hints(cls)
for name, type_hint in hints.items():
if name.startswith("_"):
continue
graphene_type = self._resolve_type(type_hint, False)
lines.append(f" {name} = {graphene_type}")
type_str = self._resolve_type(type_hint, optional=True)
lines.append(f" {name}: {type_str} = None")
return lines
def _resolve_type(self, type_hint: Any, optional: bool) -> str:
"""Resolve Python type to graphene field call string."""
"""Resolve Python type hint to a strawberry annotation string."""
base, is_optional = unwrap_optional(type_hint)
optional = optional or is_optional
origin = get_origin_name(base)
type_name = get_type_name(base)
# Look up resolver
resolver = (
GRAPHENE_RESOLVERS.get(origin)
or GRAPHENE_RESOLVERS.get(type_name)
or GRAPHENE_RESOLVERS.get(base)
STRAWBERRY_RESOLVERS.get(origin)
or STRAWBERRY_RESOLVERS.get(type_name)
or STRAWBERRY_RESOLVERS.get(base)
or (
GRAPHENE_RESOLVERS["enum"]
STRAWBERRY_RESOLVERS["enum"]
if isinstance(base, type) and issubclass(base, Enum)
else None
)
)
result = resolver(base) if resolver else "graphene.String"
inner = resolver(base) if resolver else "str"
# List types already have () syntax from resolver
if result.startswith("graphene.List("):
return result
# Scalar types: add () call
return f"{result}()"
def _make_required(self, field_str: str) -> str:
"""Add required=True to a graphene field."""
if field_str.endswith("()"):
return field_str[:-1] + "required=True)"
return field_str
def _add_default(self, field_str: str, default: Any) -> str:
"""Add default_value to a graphene field."""
if callable(default):
# default_factory — skip, graphene doesn't support factories
return field_str
if field_str.endswith("()"):
return field_str[:-1] + f"default_value={default!r})"
return field_str
if optional:
return f"Optional[{inner}]"
return inner

View File

@@ -139,34 +139,34 @@ PRISMA_SPECIAL: dict[str, str] = {
}
# =============================================================================
# Graphene Type Resolvers
# Strawberry Type Resolvers
# =============================================================================
def _resolve_graphene_list(base: Any) -> str:
"""Resolve graphene List type."""
def _resolve_strawberry_list(base: Any) -> str:
"""Resolve strawberry List type annotation."""
args = get_args(base)
if args:
inner = args[0]
if inner is str:
return "graphene.List(graphene.String)"
return "List[str]"
elif inner is int:
return "graphene.List(graphene.Int)"
return "List[int]"
elif inner is float:
return "graphene.List(graphene.Float)"
return "List[float]"
elif inner is bool:
return "graphene.List(graphene.Boolean)"
return "graphene.List(graphene.String)"
return "List[bool]"
return "List[str]"
GRAPHENE_RESOLVERS: dict[Any, Callable[[Any], str]] = {
str: lambda _: "graphene.String",
int: lambda _: "graphene.Int",
float: lambda _: "graphene.Float",
bool: lambda _: "graphene.Boolean",
"UUID": lambda _: "graphene.UUID",
"datetime": lambda _: "graphene.DateTime",
"dict": lambda _: "graphene.JSONString",
"list": _resolve_graphene_list,
"enum": lambda base: f"graphene.String", # Enums exposed as strings in GQL
STRAWBERRY_RESOLVERS: dict[Any, Callable[[Any], str]] = {
str: lambda _: "str",
int: lambda _: "int",
float: lambda _: "float",
bool: lambda _: "bool",
"UUID": lambda _: "UUID",
"datetime": lambda _: "datetime",
"dict": lambda _: "JSON",
"list": _resolve_strawberry_list,
"enum": lambda base: base.__name__,
}

View File

@@ -21,10 +21,13 @@ grpcio-tools>=1.60.0
# AWS
boto3>=1.34.0
requests>=2.31.0
# GCP (optional — only needed when MPR_EXECUTOR=gcp)
google-cloud-run>=0.10.0
# GraphQL
graphene>=3.3
starlette-graphene3>=0.6.0
strawberry-graphql[fastapi]>=0.311.0
# Testing
pytest>=7.4.0

View File

@@ -164,10 +164,84 @@ class LambdaExecutor(Executor):
return True
class GCPExecutor(Executor):
    """Execute jobs via Google Cloud Run Jobs.

    Triggers an execution of an already-deployed Cloud Run Job, handing the
    job parameters to the container through the ``MPR_JOB_PAYLOAD``
    environment variable (consumed by ``task/gcp_handler.py``).
    """

    def __init__(self):
        # Imported lazily so google-cloud-run is only required when
        # MPR_EXECUTOR=gcp is actually selected.
        from google.cloud import run_v2

        self.client = run_v2.JobsClient()
        self.project_id = os.environ["GCP_PROJECT_ID"]
        self.region = os.environ.get("GCP_REGION", "us-central1")
        self.job_name = os.environ["CLOUD_RUN_JOB"]
        self.callback_url = os.environ.get("CALLBACK_URL", "")
        self.callback_api_key = os.environ.get("CALLBACK_API_KEY", "")

    def run(
        self,
        job_id: str,
        source_path: str,
        output_path: str,
        preset: Optional[Dict[str, Any]] = None,
        trim_start: Optional[float] = None,
        trim_end: Optional[float] = None,
        duration: Optional[float] = None,
        progress_callback: Optional[Callable[[int, Dict[str, Any]], None]] = None,
    ) -> bool:
        """Trigger a Cloud Run Job execution for this job.

        ``progress_callback`` is accepted for interface parity with the other
        executors but is never invoked here — Cloud Run Jobs report back only
        via the completion callback posted by the handler.
        """
        import json
        from google.cloud import run_v2

        # Everything the container-side handler needs, serialized into one
        # env var override on the job execution.
        env_payload = json.dumps(
            {
                "job_id": job_id,
                "source_key": source_path,
                "output_key": output_path,
                "preset": preset,
                "trim_start": trim_start,
                "trim_end": trim_end,
                "duration": duration,
                "callback_url": self.callback_url,
                "api_key": self.callback_api_key,
            }
        )

        overrides = run_v2.RunJobRequest.Overrides(
            container_overrides=[
                run_v2.RunJobRequest.Overrides.ContainerOverride(
                    env=[
                        run_v2.EnvVar(name="MPR_JOB_PAYLOAD", value=env_payload)
                    ]
                )
            ]
        )
        fq_job = (
            f"projects/{self.project_id}/locations/{self.region}/jobs/{self.job_name}"
        )
        operation = self.client.run_job(
            request=run_v2.RunJobRequest(name=fq_job, overrides=overrides)
        )
        execution_name = operation.metadata.name

        # Best-effort: persist the execution identifier on the job row;
        # a failure here must not fail the dispatch itself.
        try:
            from mpr.media_assets.models import TranscodeJob

            TranscodeJob.objects.filter(id=job_id).update(
                execution_arn=execution_name
            )
        except Exception:
            pass
        return True
# Executor registry
# Maps the executor mode name (e.g. the MPR_EXECUTOR setting) to its class.
_executors: Dict[str, type] = {
    "local": LocalExecutor,
    "lambda": LambdaExecutor,
    "gcp": GCPExecutor,
}

# Cached executor instance; None until one is created.
# NOTE(review): the accessor that populates this is not visible in this chunk.
_executor_instance: Optional[Executor] = None

121
task/gcp_handler.py Normal file
View File

@@ -0,0 +1,121 @@
"""
Google Cloud Run Job handler for media transcoding.
Reads job payload from the MPR_JOB_PAYLOAD env var (injected by GCPExecutor),
downloads source from S3-compatible storage (GCS via HMAC + S3 API),
runs FFmpeg, uploads result, and calls back to the API.
Uses core/storage and core/ffmpeg — same modules as the Celery worker.
No cloud-provider SDK required here; storage goes through core.storage (boto3 + S3 compat).
Entry point: python -m task.gcp_handler (set as Cloud Run Job command)
"""
import json
import logging
import os
import sys
import tempfile
from pathlib import Path
import requests
from core.ffmpeg.transcode import TranscodeConfig, transcode
from core.storage import BUCKET_IN, BUCKET_OUT, download_to_temp, upload_file
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
def _build_config(tmp_source, tmp_output, preset, trim_start, trim_end):
    """Map the payload's preset (or its absence) onto a TranscodeConfig.

    With no preset, fall back to stream copy (remux/trim only, no re-encode).
    """
    if preset:
        return TranscodeConfig(
            input_path=tmp_source,
            output_path=tmp_output,
            video_codec=preset.get("video_codec", "libx264"),
            video_bitrate=preset.get("video_bitrate"),
            video_crf=preset.get("video_crf"),
            video_preset=preset.get("video_preset"),
            resolution=preset.get("resolution"),
            framerate=preset.get("framerate"),
            audio_codec=preset.get("audio_codec", "aac"),
            audio_bitrate=preset.get("audio_bitrate"),
            audio_channels=preset.get("audio_channels"),
            audio_samplerate=preset.get("audio_samplerate"),
            container=preset.get("container", "mp4"),
            extra_args=preset.get("extra_args", []),
            trim_start=trim_start,
            trim_end=trim_end,
        )
    return TranscodeConfig(
        input_path=tmp_source,
        output_path=tmp_output,
        video_codec="copy",
        audio_codec="copy",
        trim_start=trim_start,
        trim_end=trim_end,
    )


def main() -> None:
    """Cloud Run Job entrypoint: run one transcode described by MPR_JOB_PAYLOAD.

    Exits 0 on success, 1 on failure. On failure a best-effort "failed"
    callback is posted to the API before exiting.
    """
    raw = os.environ.get("MPR_JOB_PAYLOAD")
    if not raw:
        logger.error("MPR_JOB_PAYLOAD not set")
        sys.exit(1)

    event = json.loads(raw)
    job_id = event["job_id"]
    source_key = event["source_key"]
    output_key = event["output_key"]
    callback_url = event.get("callback_url", "")
    api_key = event.get("api_key", "")

    logger.info("Starting job %s: %s -> %s", job_id, source_key, output_key)

    # Create temp paths inside the try so that a failed download (or mkstemp)
    # still reports a failure callback; cleanup guards against None.
    tmp_source = None
    tmp_output = None
    try:
        tmp_source = download_to_temp(BUCKET_IN, source_key)
        fd, tmp_output = tempfile.mkstemp(suffix=Path(output_key).suffix or ".mp4")
        os.close(fd)  # transcode writes via the path, not the descriptor

        config = _build_config(
            tmp_source,
            tmp_output,
            event.get("preset"),
            event.get("trim_start"),
            event.get("trim_end"),
        )
        if not transcode(config, duration=event.get("duration")):
            raise RuntimeError("Transcode returned False")

        logger.info("Uploading to %s/%s", BUCKET_OUT, output_key)
        upload_file(tmp_output, BUCKET_OUT, output_key)
        _callback(callback_url, job_id, api_key, {"status": "completed"})
        logger.info("Job %s completed", job_id)
        # SystemExit is a BaseException, so this is NOT caught by the
        # `except Exception` below; the finally block still runs cleanup.
        sys.exit(0)
    except Exception as e:
        logger.exception(f"Job {job_id} failed: {e}")
        _callback(callback_url, job_id, api_key, {"status": "failed", "error": str(e)})
        sys.exit(1)
    finally:
        for path in (tmp_source, tmp_output):
            if path:
                try:
                    os.unlink(path)
                except OSError:
                    pass
def _callback(callback_url: str, job_id: str, api_key: str, payload: dict) -> None:
if not callback_url:
return
try:
url = f"{callback_url}/jobs/{job_id}/callback"
headers = {"X-API-Key": api_key} if api_key else {}
resp = requests.post(url, json=payload, headers=headers, timeout=10)
logger.info(f"Callback response: {resp.status_code}")
except Exception as e:
logger.warning(f"Callback failed: {e}")
if __name__ == "__main__":
main()