"""
Pydantic Models - GENERATED FILE

Do not edit directly. Regenerate using modelgen.
"""

from datetime import datetime
from enum import Enum
from typing import Any, Dict, List, Optional
from uuid import UUID

from pydantic import BaseModel, Field


class GraphNode(BaseModel):
    """A pipeline stage node."""

    id: str
    status: str = "idle"
    items_in: int = 0
    items_out: int = 0


class GraphEdge(BaseModel):
    """An edge between pipeline stages."""

    source: str
    target: str
    throughput: int = 0


class BoundingBoxEvent(BaseModel):
    """Bounding box in SSE event payloads."""

    x: int
    y: int
    w: int
    h: int
    confidence: float
    label: str
    resolved_brand: Optional[str] = None
    source: Optional[str] = None
    stage: Optional[str] = None


class BrandSummary(BaseModel):
    """Per-brand stats in the final report."""

    brand: str
    total_appearances: int = 0
    total_screen_time: float = 0.0
    avg_confidence: float = 0.0
    first_seen: float = 0.0
    last_seen: float = 0.0


class GraphUpdate(BaseModel):
    """Pipeline node state transition. SSE event: graph_update"""

    # default_factory avoids a shared mutable default across instances
    nodes: List[GraphNode] = Field(default_factory=list)
    edges: List[GraphEdge] = Field(default_factory=list)
    active_path: List[str] = Field(default_factory=list)


class StatsUpdate(BaseModel):
    """Funnel statistics snapshot. SSE event: stats_update"""

    frames_extracted: int = 0
    frames_after_scene_filter: int = 0
    cv_regions_detected: int = 0
    regions_detected: int = 0
    regions_resolved_by_ocr: int = 0
    regions_escalated_to_local_vlm: int = 0
    regions_escalated_to_cloud_llm: int = 0
    cloud_llm_calls: int = 0
    processing_time_seconds: float = 0.0
    estimated_cloud_cost_usd: float = 0.0
    run_id: Optional[str] = None
    parent_job_id: Optional[str] = None
    run_type: str = "initial"


class FrameUpdate(BaseModel):
    """Current frame being processed. SSE event: frame_update"""

    frame_ref: int
    timestamp: float
    jpeg_b64: str
    boxes: List[BoundingBoxEvent] = Field(default_factory=list)


class Detection(BaseModel):
    """A confirmed brand detection. 
SSE event: detection"""

    brand: str
    timestamp: float
    duration: float
    confidence: float
    source: str
    content_type: str
    bbox: Optional[BoundingBoxEvent] = None
    frame_ref: Optional[int] = None


class LogEvent(BaseModel):
    """Pipeline log line. SSE event: log"""

    level: str
    stage: str
    msg: str
    ts: str
    trace_id: Optional[str] = None


class DetectionReportSummary(BaseModel):
    """Final detection report summary."""

    video_source: str
    content_type: str
    duration_seconds: float
    total_detections: int = 0
    brands: List[BrandSummary] = Field(default_factory=list)
    stats: Optional[StatsUpdate] = None


class JobComplete(BaseModel):
    """Final report when pipeline finishes. SSE event: job_complete"""

    job_id: str
    report: Optional[DetectionReportSummary] = None


class RunContext(BaseModel):
    """Run context injected into all SSE events for grouping."""

    run_id: str
    parent_job_id: str
    run_type: str = "initial"


class CheckpointInfo(BaseModel):
    """Available checkpoint for a stage."""

    stage: str
    is_scenario: bool = False
    scenario_label: str = ""


class ReplayRequest(BaseModel):
    """Request to replay pipeline from a specific stage."""

    job_id: str
    start_stage: str
    config_overrides: Optional[Dict[str, Any]] = None


class ReplayResponse(BaseModel):
    """Result of a replay invocation."""

    status: str
    job_id: str
    start_stage: str
    detections: int = 0
    brands_found: int = 0


class RetryRequest(BaseModel):
    """Request to queue async retry with different config."""

    job_id: str
    config_overrides: Optional[Dict[str, Any]] = None
    start_stage: str = "escalate_vlm"
    schedule_seconds: Optional[float] = None


class RetryResponse(BaseModel):
    """Result of queueing a retry task."""

    status: str
    task_id: str
    job_id: str