This commit is contained in:
2026-03-26 02:54:56 -03:00
parent dfa3c12514
commit 08b67f2bb7
21 changed files with 1622 additions and 16 deletions

View File

@@ -20,6 +20,9 @@ from detect.stages.scene_filter import scene_filter
from detect.stages.yolo_detector import detect_objects
from detect.stages.ocr_stage import run_ocr
from detect.stages.brand_resolver import resolve_brands
from detect.stages.vlm_local import escalate_vlm
from detect.stages.vlm_cloud import escalate_cloud
from detect.stages.aggregator import compile_report
from detect.tracing import trace_node, flush as flush_traces
INFERENCE_URL = os.environ.get("INFERENCE_URL") # None = local mode
def node_escalate_vlm(state: DetectState) -> dict:
    """Escalate candidates unresolved by earlier stages to the local VLM.

    Reads ``unresolved_candidates`` from the pipeline state, runs the local
    VLM matcher over them, and returns a partial state update containing:
    the combined detections (existing + newly matched), the candidates the
    VLM still could not resolve, and the updated pipeline stats.
    """
    _emit_transition(state, "escalate_vlm", "running")
    with trace_node(state, "escalate_vlm") as span:
        profile = _get_profile(state)
        candidates = state.get("unresolved_candidates", [])
        job_id = state.get("job_id")
        vlm_matched, still_unresolved = escalate_vlm(
            candidates,
            vlm_prompt_fn=profile.vlm_prompt,
            inference_url=INFERENCE_URL,  # None => local inference mode
            content_type=profile.name,
            job_id=job_id,
        )
        stats = state.get("stats", PipelineStats())
        # Every candidate handed to this stage counts as escalated,
        # whether or not the VLM ultimately matched it.
        stats.regions_escalated_to_local_vlm = len(candidates)
        span.set_output({"candidates": len(candidates), "matched": len(vlm_matched),
                         "still_unresolved": len(still_unresolved)})
    existing = state.get("detections", [])
    _emit_transition(state, "escalate_vlm", "done")
    return {
        "detections": existing + vlm_matched,
        "unresolved_candidates": still_unresolved,
        "stats": stats,
    }
def node_escalate_cloud(state: DetectState) -> dict:
    """Escalate still-unresolved candidates to the cloud LLM.

    Runs the cloud matcher over ``unresolved_candidates`` and returns a
    partial state update with cloud-matched detections appended to the
    existing ones and the stats object (call count, estimated USD cost)
    carried forward.
    """
    _emit_transition(state, "escalate_cloud", "running")
    with trace_node(state, "escalate_cloud") as span:
        profile = _get_profile(state)
        candidates = state.get("unresolved_candidates", [])
        job_id = state.get("job_id")
        stats = state.get("stats", PipelineStats())
        cloud_matched = escalate_cloud(
            candidates,
            vlm_prompt_fn=profile.vlm_prompt,
            stats=stats,  # mutated in place: cloud_llm_calls / cost accumulate here
            content_type=profile.name,
            job_id=job_id,
        )
        span.set_output({"candidates": len(candidates), "matched": len(cloud_matched),
                         "cloud_calls": stats.cloud_llm_calls,
                         "cost_usd": stats.estimated_cloud_cost_usd})
    existing = state.get("detections", [])
    _emit_transition(state, "escalate_cloud", "done")
    return {"detections": existing + cloud_matched, "stats": stats}
def node_compile_report(state: DetectState) -> dict:
_emit_transition(state, "compile_report", "running")
with trace_node(state, "compile_report") as span:
job_id = state.get("job_id")
profile = _get_profile(state)
detections = state.get("detections", [])
report = profile.aggregate(detections)
report.video_source = state.get("video_path", "")
stats = state.get("stats", PipelineStats())
job_id = state.get("job_id")
report = compile_report(
detections=detections,
stats=stats,
video_source=state.get("video_path", ""),
content_type=profile.name,
job_id=job_id,
)
emit.log(job_id, "Aggregator", "INFO",
f"Report: {len(report.brands)} brands, {len(report.timeline)} detections")
emit.job_complete(job_id, {
"video_source": report.video_source,
"content_type": report.content_type,
"brands": {k: {"total_appearances": v.total_appearances} for k, v in report.brands.items()},
})
span.set_output({"brands": len(report.brands), "detections": len(report.timeline)})
flush_traces()