# mediaproc/tests/detect/manual/push_logs.py
#!/usr/bin/env python3
"""
Push a stream of log events to Redis for UI testing.
Usage:
python tests/detect/manual/push_logs.py [--job JOB_ID] [--port PORT] [--count N] [--delay SECS]
Opens: http://mpr.local.ar/detection/?job=<JOB_ID>
"""
import argparse
import json
import logging
import random
import time
from datetime import datetime, timezone
import redis
# Fix: original format was "%(name)s%(message)s" (no separator), which fused the
# logger name into the message, e.g. "INFO    __main__Pushing 50 log events...".
logging.basicConfig(level=logging.INFO, format="%(levelname)-7s %(name)s %(message)s")
logger = logging.getLogger(__name__)
# Names of the detection-pipeline stages that fake events are attributed to.
STAGES = ["FrameExtractor", "SceneFilter", "YOLODetector", "OCRStage", "BrandResolver", "VLMLocal", "Aggregator"]
# Sampling pool for log levels; "INFO" is repeated so random.choice() is
# biased toward it (4/7 INFO, 1/7 each WARNING/DEBUG/ERROR).
LEVELS = ["INFO", "INFO", "INFO", "INFO", "WARNING", "DEBUG", "ERROR"]  # weighted toward INFO
# Canned, realistic-looking log lines per stage; one is chosen at random
# for each pushed event. Keys must match the entries in STAGES.
MESSAGES = {
    "FrameExtractor": [
        "Starting extraction: sample.mp4 (120.0s, 1920x1080, fps=2)",
        "Extracted 240 frames",
        "Frame extraction complete",
    ],
    "SceneFilter": [
        "Filtering duplicate scenes (hamming_threshold=8)",
        "Removed 180 duplicate frames",
        "Kept 60 unique frames (75% reduction)",
    ],
    "YOLODetector": [
        "Loading yolov8n.pt (fp16)",
        "Processing batch 1/3 (20 frames)",
        "Processing batch 2/3 (20 frames)",
        "Processing batch 3/3 (20 frames)",
        "Detected 45 regions across 60 frames",
    ],
    "OCRStage": [
        "Running PaddleOCR on 45 regions",
        "Extracted text from 32 regions",
        "Resolved 28 brands via OCR",
    ],
    "BrandResolver": [
        "Matching against brand dictionary (12 brands)",
        "Exact matches: 20, Fuzzy matches: 8",
        "Unresolved: 4 regions → escalating to VLM",
    ],
    "VLMLocal": [
        "Loading moondream2 (int4, 2.1GB VRAM)",
        "Processing 4 unresolved crops",
        "Resolved 3/4 crops, 1 → cloud escalation",
    ],
    "Aggregator": [
        "Compiling detection report",
        "Found 6 unique brands, 31 total appearances",
        "Report complete",
    ],
}
def main():
    """Push --count synthetic log events to a Redis list for UI testing.

    Each event is a JSON dict (event/level/stage/msg/ts) RPUSHed onto
    ``detect_events:<job>`` at --delay second intervals, so the detection
    UI at http://mpr.local.ar/detection/?job=<job> can stream them.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    # Fix: was f"logs-{int(__import__('time').time()) % 100000}" — `time` is
    # already imported at module level, so the dynamic import was redundant.
    # Time-derived default keeps repeated runs on distinct job keys.
    parser.add_argument("--job", default=f"logs-{int(time.time()) % 100000}")
    parser.add_argument("--port", type=int, default=6382)
    parser.add_argument("--count", type=int, default=50)
    parser.add_argument("--delay", type=float, default=0.2)
    args = parser.parse_args()

    r = redis.Redis(port=args.port, decode_responses=True)
    key = f"detect_events:{args.job}"
    logger.info("Pushing %d log events to %s (redis port %d)", args.count, key, args.port)
    logger.info("Open: http://mpr.local.ar/detection/?job=%s", args.job)
    input("\nPress Enter to start...")

    for i in range(args.count):
        stage = random.choice(STAGES)
        level = random.choice(LEVELS)          # biased toward INFO (see LEVELS)
        msg = random.choice(MESSAGES[stage])
        event = {
            "event": "log",
            "level": level,
            "stage": stage,
            "msg": f"[{i+1}/{args.count}] {msg}",
            "ts": datetime.now(timezone.utc).isoformat(),
        }
        r.rpush(key, json.dumps(event))
        # Mirror the event locally at the matching level; unknown level
        # strings fall back to INFO via getattr's default.
        logger.log(getattr(logging, level, logging.INFO), "[%s] %s", stage, msg)
        time.sleep(args.delay)
    logger.info("Done. %d events pushed.", args.count)


if __name__ == "__main__":
    main()