Files
mediaproc/core/api/chunker_sse.py
2026-03-13 14:29:38 -03:00

79 lines
2.2 KiB
Python

"""
SSE endpoint for chunker pipeline events.
Bridges gRPC StreamProgress to browser-native EventSource.
GET /api/chunker/stream/{job_id} → text/event-stream
"""
import asyncio
import json
import logging
import time
from typing import AsyncGenerator
from fastapi import APIRouter
from starlette.responses import StreamingResponse
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/chunker", tags=["chunker"])
async def _event_generator(job_id: str) -> AsyncGenerator[str, None]:
    """
    Generate SSE events for *job_id* by polling the shared gRPC job table.

    Yields server-sent events in the format:
        event: <event_type>
        data: <json_payload>

    Event sequence:
      * ``waiting``  — job not yet present in ``_active_jobs``
      * job status   — emitted whenever the job's state dict changes
      * ``done``     — follows a terminal status (completed/failed/cancelled),
                       then the stream closes
      * ``timeout``  — only if the 10-minute deadline expires first
    """
    # Local import: presumably avoids a circular import at module load — TODO confirm.
    from core.rpc.server import _active_jobs

    last_state = None
    deadline = time.monotonic() + 600  # hard cap: 10 minutes per stream
    while time.monotonic() < deadline:
        job_state = _active_jobs.get(job_id)
        if job_state is None:
            # Job not registered yet — it may not have started.
            yield f"event: waiting\ndata: {json.dumps({'job_id': job_id})}\n\n"
            await asyncio.sleep(0.5)
            continue
        # Snapshot once so the change check, the event type, and the payload
        # all see the same state even if the gRPC side mutates the dict
        # between these reads (the original re-read the live dict here).
        snapshot = dict(job_state)
        if snapshot != last_state:
            last_state = snapshot
            event_type = snapshot.get("status", "update")
            yield f"event: {event_type}\ndata: {json.dumps({**snapshot, 'job_id': job_id})}\n\n"
            # Terminal status: signal completion and close the stream.
            # Must `return`, not `break` — breaking would fall through to the
            # timeout event below and emit a bogus `timeout` after `done`.
            if event_type in ("completed", "failed", "cancelled"):
                yield f"event: done\ndata: {json.dumps({'job_id': job_id})}\n\n"
                return
        await asyncio.sleep(0.2)
    yield f"event: timeout\ndata: {json.dumps({'job_id': job_id})}\n\n"
@router.get("/stream/{job_id}")
async def stream_chunk_job(job_id: str):
    """
    Open an SSE stream reporting progress for one chunk-pipeline job.

    Browsers consume this with the native EventSource API:
        const es = new EventSource('/api/chunker/stream/<job_id>');
        es.addEventListener('processing', (e) => { ... });
    """
    # Headers that keep intermediaries from caching or buffering the stream
    # (X-Accel-Buffering targets nginx specifically).
    sse_headers = {
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
        "X-Accel-Buffering": "no",
    }
    return StreamingResponse(
        content=_event_generator(job_id),
        media_type="text/event-stream",
        headers=sse_headers,
    )