# File: mediaproc/core/api/detect_sse.py
# Snapshot: 2026-03-23 09:58:40 -03:00 — 73 lines, 2.1 KiB, Python.

"""
SSE endpoint for detection pipeline events.
Uses Redis as the event bus between pipeline workers and the SSE stream.
Mirrors chunker_sse.py but polls detect_events:{job_id}.
GET /detect/stream/{job_id} → text/event-stream
"""
import asyncio
import json
import logging
import time
from typing import AsyncGenerator
from fastapi import APIRouter
from starlette.responses import StreamingResponse
from core.events import poll_events
from detect.events import DETECT_EVENTS_PREFIX, TERMINAL_EVENTS
# Module-level logger named after this module so log routing/filters work.
logger = logging.getLogger(__name__)
# All endpoints below are mounted under /detect and grouped in OpenAPI docs.
router = APIRouter(prefix="/detect", tags=["detect"])
async def _event_generator(job_id: str) -> AsyncGenerator[str, None]:
    """Yield SSE-formatted frames for one detection job until it finishes.

    Repeatedly polls the Redis-backed event list for *job_id* (via
    ``poll_events`` with the detect prefix), emitting:

    - a ``waiting`` frame while no new events are available (doubles as a
      keep-alive heartbeat for the client),
    - one frame per pipeline event, tagged with the event's own type,
    - a final ``done`` frame and stream close once a terminal event arrives,
    - a ``timeout`` frame if the job is still running after one hour.
    """
    next_index = 0
    # Detection jobs are long-running; give the stream a hard 1-hour cap.
    deadline = time.monotonic() + 3600

    while time.monotonic() < deadline:
        # NOTE(review): poll_events appears to be a synchronous call on the
        # event loop — assumed cheap/non-blocking; confirm against core.events.
        batch, next_index = poll_events(
            job_id, next_index, prefix=DETECT_EVENTS_PREFIX
        )

        if not batch:
            # Idle: send a heartbeat so the client knows the stream is alive,
            # then back off briefly before polling again.
            yield f"event: waiting\ndata: {json.dumps({'job_id': job_id})}\n\n"
            await asyncio.sleep(0.1)
            continue

        for record in batch:
            # The stored event's own "event" key becomes the SSE event type;
            # everything else is forwarded as the payload, tagged with job_id.
            kind = record.pop("event", "update")
            enriched = {**record, "job_id": job_id}
            yield f"event: {kind}\ndata: {json.dumps(enriched)}\n\n"
            if kind in TERMINAL_EVENTS:
                # Explicit end-of-stream marker so EventSource clients can
                # close instead of auto-reconnecting.
                yield f"event: done\ndata: {json.dumps({'job_id': job_id})}\n\n"
                return

        # Events were flowing; poll again quickly.
        await asyncio.sleep(0.05)

    yield f"event: timeout\ndata: {json.dumps({'job_id': job_id})}\n\n"
@router.get("/stream/{job_id}")
async def stream_detect_job(job_id: str):
    """
    Stream detection-pipeline events for *job_id* as Server-Sent Events.

    Browser clients subscribe with the native EventSource API:
        const es = new EventSource('/api/detect/stream/<job_id>');
        es.addEventListener('graph_update', (e) => { ... });
        es.addEventListener('detection', (e) => { ... });
    """
    # Headers that keep the SSE connection unbuffered end-to-end:
    # no caching, persistent connection, and no nginx proxy buffering.
    sse_headers = {
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
        "X-Accel-Buffering": "no",
    }
    return StreamingResponse(
        _event_generator(job_id),
        media_type="text/event-stream",
        headers=sse_headers,
    )