Files
mediaproc/core/api/chunker_sse.py
2026-03-15 16:03:53 -03:00

74 lines
2.0 KiB
Python

"""
SSE endpoint for chunker pipeline events.
Uses Redis as the event bus between Celery workers and the SSE stream.
Celery worker pushes events via core.events, SSE endpoint polls them.
GET /chunker/stream/{job_id} → text/event-stream
"""
import asyncio
import json
import logging
import time
from typing import AsyncGenerator
from fastapi import APIRouter
from starlette.responses import StreamingResponse
from core.events import poll_events
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/chunker", tags=["chunker"])
async def _event_generator(job_id: str) -> AsyncGenerator[str, None]:
    """
    Yield SSE-formatted frames for a chunk pipeline job.

    Polls Redis (via ``poll_events``) for events published by the Celery
    worker and converts each one into a ``text/event-stream`` frame.
    The stream ends with a ``done`` frame after a terminal pipeline
    event, or with a ``timeout`` frame once the 10-minute budget runs out.

    Args:
        job_id: Identifier of the chunk pipeline job to follow.

    Yields:
        SSE frames of the form ``event: <type>\\ndata: <json>\\n\\n``.
    """
    cursor = 0
    deadline = time.monotonic() + 600  # 10 min max stream lifetime
    while time.monotonic() < deadline:
        # poll_events is a synchronous Redis call; run it in a worker
        # thread so it never blocks the event loop serving other clients.
        events, cursor = await asyncio.to_thread(poll_events, job_id, cursor)
        if not events:
            # Heartbeat so clients/proxies know the connection is alive.
            yield f"event: waiting\ndata: {json.dumps({'job_id': job_id})}\n\n"
            await asyncio.sleep(0.1)
            continue
        for data in events:
            event_type = data.pop("event", "update")
            payload = {**data, "job_id": job_id}
            yield f"event: {event_type}\ndata: {json.dumps(payload)}\n\n"
            if event_type in ("pipeline_complete", "pipeline_error", "cancelled"):
                # Terminal event: emit 'done' so the client can close
                # its EventSource instead of auto-reconnecting.
                yield f"event: done\ndata: {json.dumps({'job_id': job_id})}\n\n"
                return
        await asyncio.sleep(0.05)
    yield f"event: timeout\ndata: {json.dumps({'job_id': job_id})}\n\n"
@router.get("/stream/{job_id}")
async def stream_chunk_job(job_id: str):
    """
    Open an SSE stream that follows a chunk pipeline job.

    The UI subscribes with a native EventSource:
        const es = new EventSource('/api/chunker/stream/<job_id>');
        es.addEventListener('processing', (e) => { ... });
    """
    # Disable caching and nginx proxy buffering so each event reaches
    # the browser as soon as the generator yields it.
    sse_headers = {
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
        "X-Accel-Buffering": "no",
    }
    return StreamingResponse(
        _event_generator(job_id),
        media_type="text/event-stream",
        headers=sse_headers,
    )