phase 1
This commit is contained in:
79
core/api/detect/sse.py
Normal file
79
core/api/detect/sse.py
Normal file
@@ -0,0 +1,79 @@
|
||||
"""
|
||||
SSE endpoint for detection pipeline events.
|
||||
|
||||
Uses Redis as the event bus between pipeline workers and the SSE stream.
|
||||
Mirrors chunker_sse.py but polls detect_events:{job_id}.
|
||||
|
||||
GET /detect/stream/{job_id} → text/event-stream
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
import time
|
||||
from typing import AsyncGenerator
|
||||
|
||||
from fastapi import APIRouter
|
||||
from starlette.responses import StreamingResponse
|
||||
|
||||
from core.events import poll_events
|
||||
from detect.events import DETECT_EVENTS_PREFIX, TERMINAL_EVENTS
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/detect", tags=["detect"])
|
||||
|
||||
|
||||
async def _event_generator(job_id: str) -> AsyncGenerator[str, None]:
    """Yield SSE-formatted event strings for *job_id* polled from Redis.

    Polls the detect event bus (prefix ``DETECT_EVENTS_PREFIX``) until a
    terminal event arrives or one hour elapses, emitting each event in
    ``event: <type>\\ndata: <json>\\n\\n`` wire format.
    """
    position = 0
    # Hard cap on the stream's lifetime: one hour from connect.
    deadline = time.monotonic() + 3600

    while time.monotonic() < deadline:
        batch, position = poll_events(job_id, position, prefix=DETECT_EVENTS_PREFIX)

        # Nothing new yet — back off briefly and poll again.
        if not batch:
            await asyncio.sleep(0.2)
            continue

        saw_terminal = False
        for record in batch:
            kind = record.pop("event", "update")
            body = {**record, "job_id": job_id}

            yield f"event: {kind}\ndata: {json.dumps(body)}\n\n"

            if kind in TERMINAL_EVENTS:
                saw_terminal = True

        if saw_terminal:
            yield f"event: done\ndata: {json.dumps({'job_id': job_id})}\n\n"
            # Don't return — keep connection alive so EventSource doesn't reconnect.
            # Just idle until the client disconnects or timeout.
            while time.monotonic() < deadline:
                await asyncio.sleep(5)
            return

        await asyncio.sleep(0.05)

    yield f"event: timeout\ndata: {json.dumps({'job_id': job_id})}\n\n"
|
||||
|
||||
|
||||
@router.get("/stream/{job_id}")
async def stream_detect_job(job_id: str):
    """
    SSE stream for a detection pipeline job.

    The UI connects via native EventSource:
        const es = new EventSource('/api/detect/stream/<job_id>');
        es.addEventListener('graph_update', (e) => { ... });
        es.addEventListener('detection', (e) => { ... });
    """
    # Headers required for SSE to work behind proxies: disable caching,
    # keep the connection open, and turn off nginx response buffering.
    sse_headers = {
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
        "X-Accel-Buffering": "no",
    }
    return StreamingResponse(
        _event_generator(job_id),
        media_type="text/event-stream",
        headers=sse_headers,
    )
|
||||
Reference in New Issue
Block a user