add heavy logging

This commit is contained in:
2026-03-26 10:59:56 -03:00
parent a85722f96a
commit beb0416280
27 changed files with 502 additions and 64 deletions

View File

@@ -168,7 +168,7 @@ def run_pipeline(req: RunRequest):
from detect.state import DetectState
local_path = _resolve_video_path(req.video_path)
job_id = str(uuid.uuid4())[:8]
job_id = str(uuid.uuid4())
if req.skip_vlm:
os.environ["SKIP_VLM"] = "1"
@@ -200,8 +200,6 @@ def run_pipeline(req: RunRequest):
source_asset_id=req.source_asset_id,
)
import traceback
from detect.graph import PipelineCancelled, set_cancel_check, clear_cancel_check
set_cancel_check(job_id, lambda: job_id in _cancelled_jobs)
@@ -218,9 +216,17 @@ def run_pipeline(req: RunRequest):
emit.job_complete(job_id, {"status": "cancelled"})
except Exception as e:
logger.exception("Pipeline run %s failed: %s", job_id, e)
tb = traceback.format_exc()
# Mark the current/last stage as error in the graph
from detect.graph import _node_states, NODES
if job_id in _node_states:
states = _node_states[job_id]
for node in reversed(NODES):
if states.get(node) in ("running", "done"):
states[node] = "error"
break
nodes = [{"id": n, "status": states[n]} for n in NODES]
emit.graph_update(job_id, nodes)
emit.log(job_id, "Pipeline", "ERROR", str(e))
emit.log(job_id, "Pipeline", "DEBUG", tb)
emit.job_complete(job_id, {"status": "failed", "error": str(e)})
finally:
_running_jobs.pop(job_id, None)

View File

@@ -26,16 +26,16 @@ router = APIRouter(prefix="/detect", tags=["detect"])
async def _event_generator(job_id: str) -> AsyncGenerator[str, None]:
cursor = 0
timeout = time.monotonic() + 3600 # 1 hour max (detection jobs are long)
timeout = time.monotonic() + 3600 # 1 hour max
while time.monotonic() < timeout:
events, cursor = poll_events(job_id, cursor, prefix=DETECT_EVENTS_PREFIX)
if not events:
yield f"event: waiting\ndata: {json.dumps({'job_id': job_id})}\n\n"
await asyncio.sleep(0.1)
await asyncio.sleep(0.2)
continue
is_terminal = False
for data in events:
event_type = data.pop("event", "update")
payload = {**data, "job_id": job_id}
@@ -43,8 +43,15 @@ async def _event_generator(job_id: str) -> AsyncGenerator[str, None]:
yield f"event: {event_type}\ndata: {json.dumps(payload)}\n\n"
if event_type in TERMINAL_EVENTS:
yield f"event: done\ndata: {json.dumps({'job_id': job_id})}\n\n"
return
is_terminal = True
if is_terminal:
yield f"event: done\ndata: {json.dumps({'job_id': job_id})}\n\n"
# Don't return — keep connection alive so EventSource doesn't reconnect.
# Just idle until the client disconnects or timeout.
while time.monotonic() < timeout:
await asyncio.sleep(5)
return
await asyncio.sleep(0.05)