"""
Celery tasks for job processing.
"""
|
|
|
|
import logging
|
|
import os
|
|
from typing import Any, Dict, Optional
|
|
|
|
from celery import shared_task
|
|
|
|
from core.storage import BUCKET_IN, BUCKET_OUT, download_to_temp, upload_file
|
|
from rpc.server import update_job_progress
|
|
from task.executor import get_executor
|
|
|
|
# Module-level logger named after this module, per the standard logging convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
|
@shared_task(bind=True, queue="transcode", max_retries=3, default_retry_delay=60)
def run_transcode_job(
    self,
    job_id: str,
    source_key: str,
    output_key: str,
    preset: Optional[Dict[str, Any]] = None,
    trim_start: Optional[float] = None,
    trim_end: Optional[float] = None,
    duration: Optional[float] = None,
) -> Dict[str, Any]:
    """
    Celery task to run a transcode/trim job.

    Downloads the source object from S3, runs the FFmpeg executor on it,
    and uploads the result back to S3, reporting progress along the way.

    Args:
        job_id: Unique identifier of the job, used for progress reporting.
        source_key: Object key of the input file in ``BUCKET_IN``.
        output_key: Object key for the result in ``BUCKET_OUT``.
        preset: Optional encoder preset parameters passed to the executor.
        trim_start: Optional trim start position (seconds), forwarded to the executor.
        trim_end: Optional trim end position (seconds), forwarded to the executor.
        duration: Optional media duration (seconds), forwarded to the executor.

    Returns:
        A dict with ``status`` ("completed" or "failed"), ``job_id``, and
        either ``output_key`` (on success) or ``error`` (on final failure).

    Raises:
        celery.exceptions.Retry: raised via ``self.retry`` while retry
            attempts remain; the final failed attempt returns the
            "failed" dict instead of raising.
    """
    import tempfile
    from pathlib import Path

    logger.info("Starting job %s: %s -> %s", job_id, source_key, output_key)
    update_job_progress(job_id, progress=0, status="processing")

    # Initialized upfront so the finally-cleanup is safe even when the
    # download or temp-file creation below raises (previously those ran
    # before the try, so such failures never marked the job failed and
    # could leak the downloaded temp file).
    tmp_source: Optional[str] = None
    tmp_output: Optional[str] = None

    def progress_callback(percent: int, details: Dict[str, Any]) -> None:
        # Relay executor progress events to the job-status store.
        update_job_progress(
            job_id,
            progress=percent,
            current_time=details.get("time", 0.0),
            status="processing",
        )

    try:
        # Download source from S3 to a temp file.
        logger.info("Downloading %s from %s", source_key, BUCKET_IN)
        tmp_source = download_to_temp(BUCKET_IN, source_key)

        # Create a temp output path with the output key's extension so the
        # executor can infer the container format; default to .mp4.
        ext = Path(output_key).suffix or ".mp4"
        fd, tmp_output = tempfile.mkstemp(suffix=ext)
        os.close(fd)

        executor = get_executor()
        success = executor.run(
            job_id=job_id,
            source_path=tmp_source,
            output_path=tmp_output,
            preset=preset,
            trim_start=trim_start,
            trim_end=trim_end,
            duration=duration,
            progress_callback=progress_callback,
        )

        if not success:
            raise RuntimeError("Executor returned False")

        # Upload result to S3.
        logger.info("Uploading %s to %s", output_key, BUCKET_OUT)
        upload_file(tmp_output, BUCKET_OUT, output_key)

        logger.info("Job %s completed successfully", job_id)
        update_job_progress(job_id, progress=100, status="completed")
        return {
            "status": "completed",
            "job_id": job_id,
            "output_key": output_key,
        }

    except Exception as e:
        logger.exception("Job %s failed: %s", job_id, e)
        update_job_progress(job_id, progress=0, status="failed", error=str(e))

        if self.request.retries < self.max_retries:
            # self.retry raises celery.exceptions.Retry; the explicit raise
            # makes the non-returning control flow obvious to readers.
            raise self.retry(exc=e)

        # Out of retries: report terminal failure to the caller.
        return {
            "status": "failed",
            "job_id": job_id,
            "error": str(e),
        }

    finally:
        # Best-effort cleanup of temp files; skip paths never created.
        for path in (tmp_source, tmp_output):
            if path is not None:
                try:
                    os.unlink(path)
                except OSError:
                    pass
|