"""
Celery task for job processing.

Generic dispatcher — routes to the appropriate handler based on job_type.
"""
import logging
from typing import Any, Dict

from celery import shared_task

from core.rpc.server import update_job_progress

logger = logging.getLogger(__name__)
|
@shared_task(bind=True, max_retries=3, default_retry_delay=60)
def run_job(
    self,
    job_type: str,
    job_id: str,
    payload: Dict[str, Any],
) -> Dict[str, Any]:
    """Generic Celery task — dispatch to the registered handler for *job_type*.

    Args:
        job_type: Key used to look up the handler in the local registry.
        job_id: Job identifier, reported back via ``update_job_progress``.
        payload: Handler-specific job parameters.

    Returns:
        The handler's result dict on success, or a
        ``{"status": "failed", "job_id": ..., "error": ...}`` dict once all
        retries are exhausted.
    """
    # Lazy %-style args: formatting is skipped when the log level is disabled.
    logger.info("Starting %s job %s", job_type, job_id)

    update_job_progress(job_id, progress=0, status="processing")

    def progress_callback(percent: int, details: Dict[str, Any]) -> None:
        # Handlers invoke this to stream intermediate progress to the RPC server.
        # NOTE(review): assumes details may carry a "time" key — defaults to 0.0.
        update_job_progress(
            job_id,
            progress=percent,
            current_time=details.get("time", 0.0),
            status="processing",
        )

    try:
        # Imported lazily — presumably to avoid a circular import at module
        # load time (registry may import task modules). TODO confirm.
        from .registry import get_handler

        handler = get_handler(job_type)
        result = handler.process(
            job_id=job_id,
            payload=payload,
            progress_callback=progress_callback,
        )

        logger.info("Job %s completed successfully", job_id)
        update_job_progress(job_id, progress=100, status="completed")
        return result

    except Exception as e:  # broad catch is deliberate: top-level task boundary
        logger.exception("Job %s failed: %s", job_id, e)
        update_job_progress(job_id, progress=0, status="failed", error=str(e))

        if self.request.retries < self.max_retries:
            # self.retry() raises celery.exceptions.Retry; re-raising lets
            # Celery reschedule this task with the configured delay.
            raise self.retry(exc=e)

        # Retries exhausted — report the terminal failure instead of raising.
        return {
            "status": "failed",
            "job_id": job_id,
            "error": str(e),
        }
|