Scrub optimization: generate low-res MJPEG proxies in the background for frame-accurate scrubbing
This commit is contained in:
0
cht/scrub/__init__.py
Normal file
0
cht/scrub/__init__.py
Normal file
92
cht/scrub/manager.py
Normal file
92
cht/scrub/manager.py
Normal file
@@ -0,0 +1,92 @@
|
||||
"""Proxy manager — background generation and lifecycle of scrub proxies."""
|
||||
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from threading import Thread
|
||||
|
||||
from gi.repository import GLib
|
||||
|
||||
from cht.scrub.proxy import proxy_path_for, generate_proxy, cleanup_proxies, PROXY_HEIGHT
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ProxyManager:
|
||||
"""Manages background proxy generation for scrub mode.
|
||||
|
||||
Usage:
|
||||
pm = ProxyManager(session_id="20260403_120000")
|
||||
pm.request(segment_path, on_ready=lambda path: ...)
|
||||
pm.cancel() # stop pending work
|
||||
"""
|
||||
|
||||
# Proxy states
|
||||
PENDING = "pending"
|
||||
GENERATING = "generating"
|
||||
READY = "ready"
|
||||
FAILED = "failed"
|
||||
|
||||
def __init__(self, session_id: str):
|
||||
self._session_id = session_id
|
||||
self._state: dict[str, str] = {} # segment_path_str → state
|
||||
self._proxies: dict[str, Path] = {} # segment_path_str → proxy_path
|
||||
self._cancelled = False
|
||||
|
||||
def request(self, segment_path: Path, on_ready=None, on_error=None) -> None:
|
||||
"""Request proxy for a segment. Calls back on GTK main thread when ready.
|
||||
|
||||
If proxy already exists, calls back immediately.
|
||||
"""
|
||||
key = str(segment_path)
|
||||
|
||||
# Already ready
|
||||
proxy = proxy_path_for(segment_path, self._session_id)
|
||||
if proxy.exists():
|
||||
self._state[key] = self.READY
|
||||
self._proxies[key] = proxy
|
||||
if on_ready:
|
||||
GLib.idle_add(on_ready, proxy)
|
||||
return
|
||||
|
||||
# Already generating
|
||||
if self._state.get(key) == self.GENERATING:
|
||||
return
|
||||
|
||||
self._state[key] = self.GENERATING
|
||||
|
||||
def _generate():
|
||||
if self._cancelled:
|
||||
return
|
||||
try:
|
||||
result = generate_proxy(segment_path, proxy)
|
||||
self._state[key] = self.READY
|
||||
self._proxies[key] = result
|
||||
if on_ready and not self._cancelled:
|
||||
GLib.idle_add(on_ready, result)
|
||||
except Exception as e:
|
||||
self._state[key] = self.FAILED
|
||||
log.error("Proxy generation failed: %s", e)
|
||||
if on_error and not self._cancelled:
|
||||
GLib.idle_add(on_error, str(e))
|
||||
|
||||
Thread(target=_generate, daemon=True,
|
||||
name=f"proxy_{segment_path.stem}").start()
|
||||
|
||||
def get_state(self, segment_path: Path) -> str | None:
|
||||
"""Return current state of proxy for segment, or None if not requested."""
|
||||
return self._state.get(str(segment_path))
|
||||
|
||||
def get_proxy(self, segment_path: Path) -> Path | None:
|
||||
"""Return proxy path if ready, None otherwise."""
|
||||
return self._proxies.get(str(segment_path))
|
||||
|
||||
def cancel(self) -> None:
|
||||
"""Cancel pending work. Already-running ffmpeg will finish but callbacks are suppressed."""
|
||||
self._cancelled = True
|
||||
|
||||
def cleanup(self) -> None:
|
||||
"""Delete all proxies for this session."""
|
||||
self.cancel()
|
||||
cleanup_proxies(self._session_id)
|
||||
self._state.clear()
|
||||
self._proxies.clear()
|
||||
81
cht/scrub/proxy.py
Normal file
81
cht/scrub/proxy.py
Normal file
@@ -0,0 +1,81 @@
|
||||
"""Proxy generation — low-res MJPEG for frame-accurate scrubbing.
|
||||
|
||||
Each completed recording segment gets a lightweight proxy video where every
|
||||
frame is a keyframe (MJPEG). mpv can seek frame-accurately in these files
|
||||
with hr-seek=yes, giving DaVinci Resolve-style scrubbing speed.
|
||||
|
||||
Proxies are ephemeral — stored in /tmp, regenerated on demand.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
import ffmpeg as ffmpeg_lib
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
PROXY_DIR = Path("/tmp/cht_proxy")
|
||||
PROXY_HEIGHT = 360 # pixels — low enough for speed, high enough to see content
|
||||
|
||||
|
||||
def proxy_path_for(segment_path: Path, session_id: str | None = None) -> Path:
|
||||
"""Return the proxy path for a given segment."""
|
||||
subdir = session_id or "default"
|
||||
return PROXY_DIR / subdir / f"{segment_path.stem}_proxy.avi"
|
||||
|
||||
|
||||
def generate_proxy(segment_path: Path, output_path: Path,
                   height: int = PROXY_HEIGHT) -> Path:
    """Transcode a segment to MJPEG proxy at reduced resolution.

    Every frame is a keyframe — enables O(1) seeking.

    Args:
        segment_path: source video segment.
        output_path: destination proxy file; parent directories are created.
        height: target frame height in pixels (scale=-2 keeps the width
            even, as many encoders require).

    Returns:
        output_path on success.

    Raises:
        ffmpeg.Error: if the transcode fails. Any partially-written output
            file is removed first — callers treat output_path.exists() as
            "proxy ready", so a truncated file must never be left behind.
    """
    output_path.parent.mkdir(parents=True, exist_ok=True)

    stream = ffmpeg_lib.input(str(segment_path))
    output = (
        ffmpeg_lib.output(
            stream, str(output_path),
            vcodec="mjpeg",
            vf=f"scale=-2:{height}",
            # MJPEG: every frame is a keyframe by nature
            **{"q:v": "5"},  # quality 2-31, lower = better
            an=None,  # strip audio
        )
        .overwrite_output()
        .global_args("-hide_banner", "-loglevel", "warning")
    )

    log.info("Generating proxy: %s → %s", segment_path.name, output_path)
    try:
        output.run(capture_stdout=True, capture_stderr=True)
    except ffmpeg_lib.Error as e:
        stderr = (e.stderr or b"").decode("utf-8", errors="replace")
        log.error("Proxy generation failed for %s: %s", segment_path.name, stderr.strip())
        # BUGFIX: drop any partial file ffmpeg left behind, otherwise the
        # next exists() check would serve a corrupt proxy as READY.
        output_path.unlink(missing_ok=True)
        raise

    log.info("Proxy ready: %s (%.1f MB)",
             output_path.name, output_path.stat().st_size / 1_000_000)
    return output_path
|
||||
|
||||
|
||||
def ensure_proxy(segment_path: Path, session_id: str | None = None,
                 height: int = PROXY_HEIGHT) -> Path:
    """Return the proxy path for *segment_path*, generating it if missing.

    Synchronous convenience wrapper: reuses an existing proxy file when one
    is already on disk, otherwise transcodes one now (blocking).
    """
    candidate = proxy_path_for(segment_path, session_id)
    if not candidate.exists():
        return generate_proxy(segment_path, candidate, height)
    return candidate
|
||||
|
||||
|
||||
def cleanup_proxies(session_id: str | None = None) -> None:
    """Delete proxy files for a session, or all proxies if session_id is None."""
    # A session id narrows deletion to its subdirectory; otherwise wipe the
    # whole proxy root (proxies are ephemeral and regenerated on demand).
    target = PROXY_DIR / session_id if session_id else PROXY_DIR
    if not target.exists():
        return
    shutil.rmtree(target)
    log.info("Cleaned up proxies: %s", target)
|
||||
Reference in New Issue
Block a user