shoehorning GraphQL, Step Functions and Lambdas. AWS deployment scripts

2026-02-06 18:25:42 -03:00
parent 013587d108
commit e642908abb
35 changed files with 2354 additions and 930 deletions

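The file below reworks run_transcode_job so that it takes S3 object keys instead of local filesystem paths. A producer would enqueue it roughly as in this sketch; only run_transcode_job and its source_key/output_key/preset/trim parameters appear in the diff, while the import path, job_id scheme, duration value, and preset contents are assumptions:

# Hypothetical producer-side sketch; import path and preset shape are assumptions.
import uuid

from worker.tasks import run_transcode_job  # assumed module location

job_id = str(uuid.uuid4())
async_result = run_transcode_job.apply_async(
    kwargs={
        "job_id": job_id,
        "source_key": f"uploads/{job_id}/input.mov",
        "output_key": f"renders/{job_id}/output.mp4",
        "preset": {"vcodec": "libx264", "crf": 23},   # assumed preset keys
        "trim_start": 5.0,
        "trim_end": 65.0,
        "duration": 60.0,                             # used for progress calculation
    },
)
print(f"enqueued {job_id} as Celery task {async_result.id}")

Routing to the "transcode" queue is already set by the @shared_task decorator, so the caller does not need to pass a queue explicitly.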

@@ -8,21 +8,19 @@ from typing import Any, Dict, Optional
from celery import shared_task
from core.storage import BUCKET_IN, BUCKET_OUT, download_to_temp, upload_file
from rpc.server import update_job_progress
from task.executor import get_executor
logger = logging.getLogger(__name__)
# Media paths from environment
MEDIA_ROOT = os.environ.get("MEDIA_ROOT", "/app/media")
@shared_task(bind=True, queue="transcode", max_retries=3, default_retry_delay=60)
def run_transcode_job(
self,
job_id: str,
source_path: str,
output_path: str,
source_key: str,
output_key: str,
preset: Optional[Dict[str, Any]] = None,
trim_start: Optional[float] = None,
trim_end: Optional[float] = None,
@@ -31,25 +29,25 @@ def run_transcode_job(
"""
Celery task to run a transcode/trim job.
Args:
job_id: Unique job identifier
source_key: S3 key of the source object in BUCKET_IN
output_key: S3 key for the transcoded output in BUCKET_OUT
preset: Transcode preset dict (optional)
trim_start: Trim start time in seconds (optional)
trim_end: Trim end time in seconds (optional)
duration: Source duration for progress calculation
Returns:
Result dict with status and output_key
Downloads source from S3, runs FFmpeg, uploads result to S3.
"""
logger.info(f"Starting job {job_id}: {source_path} -> {output_path}")
logger.info(f"Starting job {job_id}: {source_key} -> {output_key}")
# Update status to processing
update_job_progress(job_id, progress=0, status="processing")
# Download source from S3 to temp file
logger.info(f"Downloading {source_key} from {BUCKET_IN}")
tmp_source = download_to_temp(BUCKET_IN, source_key)
# Create temp output path with same extension
import tempfile
from pathlib import Path
ext = Path(output_key).suffix or ".mp4"
fd, tmp_output = tempfile.mkstemp(suffix=ext)
os.close(fd)
def progress_callback(percent: int, details: Dict[str, Any]) -> None:
"""Update gRPC progress state."""
update_job_progress(
job_id,
progress=percent,
@@ -61,8 +59,8 @@ def run_transcode_job(
executor = get_executor()
success = executor.run(
job_id=job_id,
source_path=source_path,
output_path=output_path,
source_path=tmp_source,
output_path=tmp_output,
preset=preset,
trim_start=trim_start,
trim_end=trim_end,
@@ -71,12 +69,16 @@ def run_transcode_job(
)
if success:
# Upload result to S3
logger.info(f"Uploading {output_key} to {BUCKET_OUT}")
upload_file(tmp_output, BUCKET_OUT, output_key)
logger.info(f"Job {job_id} completed successfully")
update_job_progress(job_id, progress=100, status="completed")
return {
"status": "completed",
"job_id": job_id,
"output_path": output_path,
"output_key": output_key,
}
else:
raise RuntimeError("Executor returned False")
@@ -85,7 +87,6 @@ def run_transcode_job(
logger.exception(f"Job {job_id} failed: {e}")
update_job_progress(job_id, progress=0, status="failed", error=str(e))
# Retry on transient errors
if self.request.retries < self.max_retries:
raise self.retry(exc=e)
@@ -94,3 +95,11 @@ def run_transcode_job(
"job_id": job_id,
"error": str(e),
}
finally:
# Clean up temp files
for f in [tmp_source, tmp_output]:
try:
os.unlink(f)
except OSError:
pass
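For context, the task relies on download_to_temp and upload_file from core.storage, which this commit does not touch. A minimal sketch of what such helpers usually look like with boto3, assuming BUCKET_IN and BUCKET_OUT are plain S3 bucket names:

# Hypothetical sketch of the core.storage helpers used above; the real module
# is not part of this diff, and the client configuration is an assumption.
import os
import tempfile
from pathlib import Path

import boto3

s3 = boto3.client("s3")


def download_to_temp(bucket: str, key: str) -> str:
    """Download s3://bucket/key to a temp file and return the local path."""
    fd, path = tempfile.mkstemp(suffix=Path(key).suffix)
    os.close(fd)
    s3.download_file(bucket, key, path)
    return path


def upload_file(local_path: str, bucket: str, key: str) -> None:
    """Upload a local file to s3://bucket/key."""
    s3.upload_file(local_path, bucket, key)

Helpers in this mkstemp style leave cleanup to the caller, which is why the task's finally block unlinks both temp paths itself.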