diff --git a/ui/detection-app/src/App.vue b/ui/detection-app/src/App.vue
index c683875..2e15a97 100644
--- a/ui/detection-app/src/App.vue
+++ b/ui/detection-app/src/App.vue
@@ -1,8 +1,6 @@
diff --git a/ui/detection-app/src/composables/useCheckpointLoader.ts b/ui/detection-app/src/composables/useCheckpointLoader.ts
new file mode 100644
index 0000000..40e853c
--- /dev/null
+++ b/ui/detection-app/src/composables/useCheckpointLoader.ts
@@ -0,0 +1,98 @@
+import { ref, computed, watch } from 'vue'
+import type { Ref } from 'vue'
+import type { DataSource } from 'mpr-ui-framework'
+import { usePipelineStore } from '../stores/pipeline'
+
+interface CheckpointFrame {
+  seq: number // frame sequence number within the source video
+  timestamp: number
+  jpeg_b64: string // base64-encoded JPEG payload for display
+}
+
+export function useCheckpointLoader( // loads per-stage checkpoint frames and tracks the current frame
+  jobId: Ref<string>,
+  source: DataSource,
+) {
+  const pipeline = usePipelineStore()
+
+  const currentFrameImage = ref<string | null>(null)
+  const currentFrameRef = ref<number | null>(null)
+
+  const checkpointFrames = ref<CheckpointFrame[]>([])
+  const checkpointFrameIndex = ref(0)
+  const checkpointStage = ref<string | null>(null)
+
+  const stripSelStart = ref(0)
+  const stripSelEndOverride = ref<number | null>(null) // null = select through last frame
+  const stripSelEnd = computed(() =>
+    stripSelEndOverride.value ?? Math.max(0, checkpointFrames.value.length - 1),
+  )
+
+  // Track current frame from SSE
+  source.on<{ frame_ref: number; jpeg_b64: string }>('frame_update', (e) => {
+    currentFrameImage.value = e.jpeg_b64
+    currentFrameRef.value = e.frame_ref
+  })
+
+  async function loadCheckpoint(job: string, stage: string) { // fetch checkpoint frames; resets strip selection
+    try {
+      const resp = await fetch(`/api/detect/checkpoints/${job}/${stage}`)
+      if (!resp.ok) return
+
+      const data: { frames?: CheckpointFrame[] } = await resp.json()
+      checkpointFrames.value = data.frames ?? []
+      checkpointStage.value = stage
+
+      if (checkpointFrames.value.length > 0) { // snap display to the first checkpoint frame
+        checkpointFrameIndex.value = 0
+        const first = checkpointFrames.value[0]
+        currentFrameImage.value = first.jpeg_b64
+        currentFrameRef.value = first.seq
+      }
+
+      stripSelStart.value = 0
+      stripSelEndOverride.value = null
+    } catch (e) {
+      console.error('Failed to load checkpoint:', e)
+    }
+  }
+
+  function setCheckpointFrame(index: number) { // select a loaded frame by index (no-op if out of range)
+    if (index < 0 || index >= checkpointFrames.value.length) return
+    checkpointFrameIndex.value = index
+    const frame = checkpointFrames.value[index]
+    currentFrameImage.value = frame.jpeg_b64
+    currentFrameRef.value = frame.seq
+  }
+
+  // Auto-load checkpoint when entering editor mode
+  watch(
+    () => [pipeline.layoutMode, pipeline.editorStage, jobId.value] as const,
+    ([mode, stage, job]) => {
+      if (mode === 'bbox_editor' && stage && job) {
+        const stageMap: Record<string, string> = { // editor stage -> upstream checkpoint stage to load
+          detect_edges: 'filter_scenes',
+          detect_contours: 'detect_edges',
+          detect_color: 'detect_contours',
+          merge_regions: 'detect_color',
+        }
+        const cpStage = stageMap[stage] ?? 'filter_scenes'
+        void loadCheckpoint(job, cpStage) // fire-and-forget; errors are logged inside
+      }
+    },
+    { immediate: true },
+  )
+
+  return {
+    currentFrameImage,
+    currentFrameRef,
+    checkpointFrames,
+    checkpointFrameIndex,
+    checkpointStage,
+    stripSelStart,
+    stripSelEnd,
+    stripSelEndOverride,
+    loadCheckpoint,
+    setCheckpointFrame,
+  }
+}
diff --git a/ui/detection-app/src/composables/useEditorState.ts b/ui/detection-app/src/composables/useEditorState.ts
new file mode 100644
index 0000000..06ebe5e
--- /dev/null
+++ b/ui/detection-app/src/composables/useEditorState.ts
@@ -0,0 +1,120 @@
+import { ref } from 'vue'
+import type { Ref } from 'vue'
+import type { FrameOverlay, FrameBBox } from 'mpr-ui-framework'
+import { matchTracks, renderTracksToImageData, imageDataToPngB64 } from '@/cv'
+
+export type RegionBox = {
+  x: number
+  y: number
+  w: number
+  h: number
+  confidence: number
+  label: string
+}
+
+export function useEditorState(currentFrameRef: Ref<number | null>) { // per-frame boxes + debug/track overlays for the bbox editor
+  const editorOverlays = ref<FrameOverlay[]>([])
+  const editorBoxes = ref<FrameBBox[]>([])
+
+  const allFrameRegions = ref<Record<number, RegionBox[]>>({}) // keyed by frame seq
+  const allFrameDebug = ref<Record<number, { edge_overlay_b64?: string; lines_overlay_b64?: string }>>({})
+  const frameDimensions = ref<{ w: number; h: number } | null>(null)
+
+  function updateDisplayForFrame(seq: number) { // rebuild boxes/overlays for the given frame, preserving user overlay toggles
+    const regions = allFrameRegions.value[seq] ?? []
+    editorBoxes.value = regions.map(r => ({
+      x: r.x, y: r.y, w: r.w, h: r.h,
+      confidence: r.confidence,
+      label: r.label ?? 'edge_region',
+      stage: 'detect_edges',
+    }))
+
+    const debug = allFrameDebug.value[seq]
+    if (debug) {
+      const overlays: FrameOverlay[] = []
+      if (debug.edge_overlay_b64) {
+        const existing = editorOverlays.value.find(o => o.label === 'Canny edges')
+        overlays.push({ src: debug.edge_overlay_b64, label: 'Canny edges', visible: existing?.visible ?? true, opacity: existing?.opacity ?? 0.25 })
+      }
+      if (debug.lines_overlay_b64) {
+        const existing = editorOverlays.value.find(o => o.label === 'Hough lines')
+        overlays.push({ src: debug.lines_overlay_b64, label: 'Hough lines', visible: existing?.visible ?? true, opacity: existing?.opacity ?? 0.25 })
+      }
+      const trackOverlay = editorOverlays.value.find(o => o.label === 'Motion tracks')
+      if (trackOverlay) overlays.push(trackOverlay) // keep existing track overlay until recomputed below
+      editorOverlays.value = overlays
+    }
+
+    if (Object.keys(allFrameRegions.value).length >= 2 && frameDimensions.value) {
+      void updateTrackOverlay(seq) // fire-and-forget; overlay swaps in when the PNG is ready
+    }
+  }
+
+  async function updateTrackOverlay(currentSeq: number) { // re-render the motion-track PNG overlay from all known regions
+    const dims = frameDimensions.value
+    if (!dims || Object.keys(allFrameRegions.value).length < 2) return
+    const tracks = matchTracks(allFrameRegions.value)
+    const imageData = renderTracksToImageData(tracks, dims.w, dims.h, currentSeq)
+    const b64 = await imageDataToPngB64(imageData)
+    const existing = editorOverlays.value.find(o => o.label === 'Motion tracks')
+    const trackOverlay: FrameOverlay = {
+      src: b64,
+      label: 'Motion tracks',
+      visible: existing?.visible ?? true,
+      opacity: existing?.opacity ?? 0.9,
+      srcFormat: 'png',
+    }
+    editorOverlays.value = [
+      ...editorOverlays.value.filter(o => o.label !== 'Motion tracks'),
+      trackOverlay,
+    ]
+  }
+
+  function onReplayResult(result: { // merge a replay response into per-frame state and refresh the display
+    regions_by_frame?: Record<string, RegionBox[]>
+    debug?: Record<string, { edge_overlay_b64?: string; lines_overlay_b64?: string }>
+    frameWidth?: number
+    frameHeight?: number
+  }) {
+    if (result.frameWidth && result.frameHeight) {
+      frameDimensions.value = { w: result.frameWidth, h: result.frameHeight }
+    }
+
+    if (result.regions_by_frame) {
+      for (const [seqStr, regions] of Object.entries(result.regions_by_frame)) {
+        allFrameRegions.value[Number(seqStr)] = regions
+      }
+    }
+
+    if (result.debug) {
+      for (const [seqStr, dbg] of Object.entries(result.debug)) {
+        allFrameDebug.value[Number(seqStr)] = {
+          edge_overlay_b64: dbg.edge_overlay_b64,
+          lines_overlay_b64: dbg.lines_overlay_b64,
+        }
+      }
+    }
+
+    const currentSeq = currentFrameRef.value ?? 0
+    updateDisplayForFrame(currentSeq)
+  }
+
+  function resetEditorState() { // clear all accumulated frame data and editor visuals
+    allFrameRegions.value = {}
+    allFrameDebug.value = {}
+    frameDimensions.value = null
+    editorOverlays.value = []
+    editorBoxes.value = []
+  }
+
+  return {
+    editorOverlays,
+    editorBoxes,
+    allFrameRegions,
+    allFrameDebug,
+    frameDimensions,
+    updateDisplayForFrame,
+    onReplayResult,
+    resetEditorState,
+  }
+}
diff --git a/ui/detection-app/src/composables/useSSEConnection.ts b/ui/detection-app/src/composables/useSSEConnection.ts
new file mode 100644
index 0000000..28bea6b
--- /dev/null
+++ b/ui/detection-app/src/composables/useSSEConnection.ts
@@ -0,0 +1,106 @@
+import { ref, watch } from 'vue'
+import { SSEDataSource } from 'mpr-ui-framework'
+import type { DataSource } from 'mpr-ui-framework'
+import type { StatsUpdate, RunContext } from '../types/sse-contract'
+import { usePipelineStore } from '../stores/pipeline'
+
+type AppStatus = 'idle' | 'live' | 'processing' | 'error'
+
+const STATUS_MAP: Record<string, AppStatus> = { // SSEDataSource status -> app-level status
+  idle: 'idle',
+  connecting: 'processing',
+  live: 'live',
+  error: 'error',
+}
+
+export function useSSEConnection() { // owns the job SSE stream and derived connection/run state
+  const pipeline = usePipelineStore()
+
+  const jobParam = new URLSearchParams(window.location.search).get('job')
+  const jobId = ref(jobParam || '')
+  const stats = ref<StatsUpdate | null>(null)
+  const runContext = ref<RunContext | null>(null)
+  const status = ref<AppStatus>('idle')
+  const sseConnected = ref(false)
+
+  // No job selected and no hash route → open source selector
+  if (!jobParam && !window.location.hash.replace(/^#\/?/, '')) {
+    pipeline.openSourceSelector()
+  }
+
+  const source = new SSEDataSource({
+    id: 'detect-stream',
+    url: jobId.value ? `/api/detect/stream/${jobId.value}` : '',
+    eventTypes: ['graph_update', 'stats_update', 'frame_update', 'detection', 'log', 'job_complete', 'waiting'],
+  })
+
+  source.on<StatsUpdate>('stats_update', (e) => {
+    stats.value = e
+    if (!runContext.value && e.run_id) { // capture run context once, from the first stats event carrying it
+      runContext.value = {
+        run_id: e.run_id!,
+        parent_job_id: e.parent_job_id!,
+        run_type: e.run_type ?? 'initial',
+      }
+    }
+  })
+
+  source.on<{ report?: { status?: string; error?: string } }>('job_complete', (e) => {
+    if (e.report?.status === 'failed') {
+      status.value = 'error'
+    }
+  })
+
+  // Reactive status sync — replaces setInterval polling
+  watch(
+    () => source.status.value,
+    (sourceStatus) => {
+      if (sseConnected.value) {
+        status.value = STATUS_MAP[sourceStatus] ?? 'idle'
+      }
+    },
+  )
+
+  // Only connect SSE for live pipeline runs
+  const isScenarioMode = pipeline.isEditing || pipeline.layoutMode !== 'normal'
+  if (jobId.value && !isScenarioMode) {
+    source.connect()
+    sseConnected.value = true
+  }
+
+  async function stopPipeline() { // request server-side stop of the current job
+    if (!jobId.value) return
+    try {
+      await fetch(`/api/detect/stop/${jobId.value}`, { method: 'POST' })
+    } catch { /* ignore — UI will see the cancel event via SSE */ }
+  }
+
+  function onJobStarted(newJobId: string) { // switch all state over to a freshly started job
+    jobId.value = newJobId
+    stats.value = null
+    runContext.value = null
+    status.value = 'processing'
+    pipeline.reset()
+    pipeline.setStatus('running')
+    // Update URL without reload
+    const url = new URL(window.location.href)
+    url.searchParams.set('job', newJobId)
+    window.history.pushState({}, '', url.toString())
+    // Connect SSE to new job
+    source.disconnect()
+    source.setUrl(`/api/detect/stream/${newJobId}`)
+    source.connect()
+    sseConnected.value = true
+  }
+
+  return {
+    jobId,
+    stats,
+    runContext,
+    status,
+    sseConnected,
+    source: source as DataSource,
+    stopPipeline,
+    onJobStarted,
+  }
+}