timeline
@@ -81,8 +81,6 @@ __all__ = [
    "Job",
    "Timeline",
    "Checkpoint",
    "KnownBrand",
    "SourceBrandSighting",
    # Enums
    "AssetStatus",
    "JobStatus",
@@ -48,6 +48,11 @@ class StageDefinition:
    io: StageIO = field(default_factory=StageIO)
    config_fields: List[StageConfigField] = field(default_factory=list)

    # The box label this stage produces that should be time-tracked in the editor.
    # Set to the label string (e.g. "edge_region") for stages that have a
    # meaningful temporal element. None means no motion tracker overlay.
    tracks_element: Optional[str] = None
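    # (See the EdgeDetectionStage hunk below for a concrete
    #  tracks_element="edge_region" usage.)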

    # Legacy fields — used by old registry pattern during migration.
    # New stages use Stage subclass instead.
    fn: Any = None
@@ -1,8 +1,7 @@
"""
Model serializers — one module per model group, mirroring core/schema/models/.

models/detect_pipeline.py → serializers/detect_pipeline.py
models/detect_jobs.py → serializers/detect_jobs.py
models/pipeline.py → serializers/pipeline.py
models/detect.py → serializers/detect.py (SSE events)

Common utilities in _common.py.
@@ -51,6 +51,7 @@ class EdgeDetectionStage(Stage):
            StageConfigField("edge_pair_max_distance", "int", 200, "Max distance between line pair (px)", min=10, max=500),
            StageConfigField("edge_pair_min_distance", "int", 15, "Min distance between line pair (px)", min=5, max=200),
        ],
        tracks_element="edge_region",
    )

    def run(self, frames: list[Frame], config: dict) -> dict[int, list[BoundingBox]]:
@@ -1,5 +1,5 @@
<script setup lang="ts">
import { ref } from 'vue'
import { ref, computed } from 'vue'
import { SSEDataSource, Panel, ResizeHandle } from 'mpr-ui-framework'
import 'mpr-ui-framework/src/tokens.css'
import LogPanel from './panels/LogPanel.vue'
@@ -11,8 +11,10 @@ import TimelinePanel from './panels/TimelinePanel.vue'
import CostStatsPanel from './panels/CostStatsPanel.vue'
import SourceSelector from './panels/SourceSelector.vue'
import StageConfigSliders from './components/StageConfigSliders.vue'
import FrameStrip from './components/FrameStrip.vue'
import type { StatsUpdate, RunContext } from './types/sse-contract'
import type { FrameOverlay } from 'mpr-ui-framework/src/renderers/FrameRenderer.vue'
import { matchTracks, renderTracksToImageData, imageDataToPngB64 } from 'mpr-ui-framework/src/cv'
import { usePipelineStore } from './stores/pipeline'

const pipeline = usePipelineStore()
@@ -127,6 +129,18 @@ const checkpointFrames = ref<{ seq: number; timestamp: number; jpeg_b64: string
const checkpointFrameIndex = ref(0)
const checkpointStage = ref<string | null>(null) // which stage the checkpoint is at

// Frame strip selection (indices into checkpointFrames)
const stripSelStart = ref(0)
const stripSelEnd = computed(() =>
  stripSelEndOverride.value ?? Math.max(0, checkpointFrames.value.length - 1),
)
const stripSelEndOverride = ref<number | null>(null)
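// Illustrative: with 8 checkpoint frames and no drag yet, stripSelEnd is 7
// (the last index); once the end handle is dragged, stripSelEndOverride is
// set and wins via the ?? fallback above.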

// Per-frame CV results — accumulated across all processed frames
const allFrameRegions = ref<Record<number, Array<{ x: number; y: number; w: number; h: number; confidence: number; label: string }>>>({})
const allFrameDebug = ref<Record<number, { edge_overlay_b64: string; lines_overlay_b64: string }>>({})
const frameDimensions = ref<{ w: number; h: number } | null>(null)

source.on<{ frame_ref: number; jpeg_b64: string }>('frame_update', (e) => {
  currentFrameImage.value = e.jpeg_b64
@@ -151,6 +165,13 @@ async function loadCheckpoint(job: string, stage: string) {
      currentFrameRef.value = first.seq
    }

    // Reset accumulated CV results and strip selection for this checkpoint
    allFrameRegions.value = {}
    allFrameDebug.value = {}
    frameDimensions.value = null
    stripSelStart.value = 0
    stripSelEndOverride.value = null

    status.value = 'idle'
  } catch (e) {
    console.error('Failed to load checkpoint:', e)
@@ -163,6 +184,61 @@ function setCheckpointFrame(index: number) {
  const frame = checkpointFrames.value[index]
  currentFrameImage.value = frame.jpeg_b64
  currentFrameRef.value = frame.seq
  updateDisplayForFrame(frame.seq)
}

function updateDisplayForFrame(seq: number) {
  // Update boxes for this frame from accumulated results
  const regions = allFrameRegions.value[seq] ?? []
  editorBoxes.value = regions.map(r => ({
    x: r.x, y: r.y, w: r.w, h: r.h,
    confidence: r.confidence,
    label: r.label ?? 'edge_region',
    stage: 'detect_edges',
  }))

  // Update Canny/Hough overlays for this frame (preserving visibility/opacity)
  const debug = allFrameDebug.value[seq]
  if (debug) {
    const overlays: FrameOverlay[] = []
    if (debug.edge_overlay_b64) {
      const existing = editorOverlays.value.find(o => o.label === 'Canny edges')
      overlays.push({ src: debug.edge_overlay_b64, label: 'Canny edges', visible: existing?.visible ?? true, opacity: existing?.opacity ?? 0.25 })
    }
    if (debug.lines_overlay_b64) {
      const existing = editorOverlays.value.find(o => o.label === 'Hough lines')
      overlays.push({ src: debug.lines_overlay_b64, label: 'Hough lines', visible: existing?.visible ?? true, opacity: existing?.opacity ?? 0.25 })
    }
    // Re-append track overlay if it exists
    const trackOverlay = editorOverlays.value.find(o => o.label === 'Motion tracks')
    if (trackOverlay) overlays.push(trackOverlay)
    editorOverlays.value = overlays
  }

  // Re-render track overlay with updated currentSeq
  if (Object.keys(allFrameRegions.value).length >= 2 && frameDimensions.value) {
    updateTrackOverlay(seq)
  }
}

async function updateTrackOverlay(currentSeq: number) {
  const dims = frameDimensions.value
  if (!dims || Object.keys(allFrameRegions.value).length < 2) return
  const tracks = matchTracks(allFrameRegions.value)
  const imageData = renderTracksToImageData(tracks, dims.w, dims.h, currentSeq)
  const b64 = await imageDataToPngB64(imageData)
  const existing = editorOverlays.value.find(o => o.label === 'Motion tracks')
  const trackOverlay: FrameOverlay = {
    src: b64,
    label: 'Motion tracks',
    visible: existing?.visible ?? true,
    opacity: existing?.opacity ?? 0.9,
    srcFormat: 'png',
  }
  editorOverlays.value = [
    ...editorOverlays.value.filter(o => o.label !== 'Motion tracks'),
    trackOverlay,
  ]
}
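
// Note: re-appending 'Motion tracks' last keeps it above the Canny/Hough
// layers — FrameRenderer is assumed to composite overlays in array order.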

// Load checkpoint when in editor mode with a job (scenario URL)
@@ -194,37 +270,36 @@ const editorBoxes = ref<import('mpr-ui-framework/src/renderers/FrameRenderer.vue
function onReplayResult(result: {
  regions_by_frame?: Record<string, unknown[]>
  debug?: Record<string, { edge_overlay_b64: string; lines_overlay_b64: string; horizontal_count: number; pair_count: number }>
  frameWidth?: number
  frameHeight?: number
}) {
  // Update boxes
  if (result.regions_by_frame) {
    const firstRegions = Object.values(result.regions_by_frame)[0] as any[] ?? []
    editorBoxes.value = firstRegions.map((r: any) => ({
      x: r.x, y: r.y, w: r.w, h: r.h,
      confidence: r.confidence,
      label: r.label ?? 'edge_region',
      stage: 'detect_edges',
    }))
    // Store frame dimensions for track overlay rendering
    if (result.frameWidth && result.frameHeight) {
      frameDimensions.value = { w: result.frameWidth, h: result.frameHeight }
    }

  // Update overlays — only when debug data is present, preserve existing otherwise
  // Merge incoming per-frame regions into accumulated store
  if (result.regions_by_frame) {
    for (const [seqStr, regions] of Object.entries(result.regions_by_frame)) {
      allFrameRegions.value[Number(seqStr)] = regions as any[]
    }
  }

  // Merge incoming per-frame debug overlays into accumulated store
  if (result.debug) {
    const firstDebug = Object.values(result.debug)[0]
    if (firstDebug) {
      const overlays: FrameOverlay[] = []
      if (firstDebug.edge_overlay_b64) {
        // Preserve visibility/opacity from existing overlays if they exist
        const existing = editorOverlays.value.find(o => o.label === 'Canny edges')
        overlays.push({ src: firstDebug.edge_overlay_b64, label: 'Canny edges', visible: existing?.visible ?? true, opacity: existing?.opacity ?? 0.25 })
      }
      if (firstDebug.lines_overlay_b64) {
        const existing = editorOverlays.value.find(o => o.label === 'Hough lines')
        overlays.push({ src: firstDebug.lines_overlay_b64, label: 'Hough lines', visible: existing?.visible ?? true, opacity: existing?.opacity ?? 0.25 })
      }
      editorOverlays.value = overlays
    for (const [seqStr, dbg] of Object.entries(result.debug)) {
      allFrameDebug.value[Number(seqStr)] = {
        edge_overlay_b64: dbg.edge_overlay_b64,
        lines_overlay_b64: dbg.lines_overlay_b64,
      }
    }
  }

  // Update display for the currently shown frame
  const currentSeq = currentFrameRef.value ?? 0
  updateDisplayForFrame(currentSeq)
}

function onJobStarted(newJobId: string) {
  jobId.value = newJobId
  // Reset UI state
@@ -338,10 +413,25 @@ function onJobStarted(newJobId: string) {
          :job-id="jobId"
          :frame-image="currentFrameImage"
          :frame-ref="currentFrameRef"
          :frames="checkpointFrames"
          :selection-start="stripSelStart"
          :selection-end="stripSelEnd"
          @replay-result="onReplayResult"
        />
      </div>
    </div>

    <!-- Frame strip: thumbnails + selection handles -->
    <FrameStrip
      v-if="checkpointFrames.length > 0"
      :frames="checkpointFrames"
      :current-index="checkpointFrameIndex"
      :selection-start="stripSelStart"
      :selection-end="stripSelEnd"
      @frame-click="setCheckpointFrame"
      @selection-change="(s, e) => { stripSelStart = s; stripSelEndOverride = e }"
    />

    <!-- Bottom: debug overlays + close -->
    <div class="editor-bottom">
      <div class="overlay-controls">
ui/detection-app/src/components/FrameStrip.vue (new file, 263 lines)
@@ -0,0 +1,263 @@
<script setup lang="ts">
import { ref, computed, onMounted, onUnmounted } from 'vue'

const props = defineProps<{
  frames: Array<{ seq: number; timestamp: number; jpeg_b64: string }>
  currentIndex: number
  selectionStart: number
  selectionEnd: number
}>()

const emit = defineEmits<{
  'frame-click': [index: number]
  'selection-change': [start: number, end: number]
}>()

const stripEl = ref<HTMLElement | null>(null)

// --- Drag handle logic ---
type DragTarget = 'start' | 'end' | null
const dragging = ref<DragTarget>(null)

function onHandleMousedown(target: DragTarget, e: MouseEvent) {
  dragging.value = target
  e.preventDefault()
}

function onMousemove(e: MouseEvent) {
  if (!dragging.value || !stripEl.value) return
  const rect = stripEl.value.getBoundingClientRect()
  const x = e.clientX - rect.left
  const ratio = Math.max(0, Math.min(1, x / rect.width))
  const idx = Math.round(ratio * (props.frames.length - 1))

  if (dragging.value === 'start') {
    const newStart = Math.min(idx, props.selectionEnd)
    emit('selection-change', newStart, props.selectionEnd)
  } else {
    const newEnd = Math.max(idx, props.selectionStart)
    emit('selection-change', props.selectionStart, newEnd)
  }
}
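
// Illustrative mapping: on an 800px-wide strip showing 11 frames, x = 400
// gives ratio 0.5 → Math.round(0.5 * 10) = frame index 5.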

function onMouseup() {
  dragging.value = null
}

onMounted(() => {
  window.addEventListener('mousemove', onMousemove)
  window.addEventListener('mouseup', onMouseup)
})
onUnmounted(() => {
  window.removeEventListener('mousemove', onMousemove)
  window.removeEventListener('mouseup', onMouseup)
})

// Handle positions as % of strip width
const startPct = computed(() => {
  if (props.frames.length <= 1) return 0
  return (props.selectionStart / (props.frames.length - 1)) * 100
})
const endPct = computed(() => {
  if (props.frames.length <= 1) return 100
  return (props.selectionEnd / (props.frames.length - 1)) * 100
})
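
// Illustrative: 11 frames with selectionStart 2 and selectionEnd 8 put the
// handles at 20% and 80%; the length <= 1 guards pin them to the strip edges
// instead of dividing by zero.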

function isInSelection(idx: number) {
  return idx >= props.selectionStart && idx <= props.selectionEnd
}

// --- Scrub slider ---
function onScrub(e: Event) {
  const val = Number((e.target as HTMLInputElement).value)
  emit('frame-click', val)
}
</script>

<template>
  <div class="frame-strip" ref="stripEl">
    <!-- Thumbnails -->
    <div class="thumbs">
      <div
        v-for="(frame, idx) in frames"
        :key="frame.seq"
        class="thumb"
        :class="{
          current: idx === currentIndex,
          dimmed: !isInSelection(idx),
        }"
        @click="emit('frame-click', idx)"
        :title="`Frame ${frame.seq} · ${frame.timestamp.toFixed(2)}s`"
      >
        <img :src="`data:image/jpeg;base64,${frame.jpeg_b64}`" />
      </div>
    </div>

    <!-- Scrub slider -->
    <div class="scrub-row">
      <input
        type="range"
        class="scrub-slider"
        :min="0"
        :max="frames.length - 1"
        :value="currentIndex"
        @input="onScrub"
      />
      <span class="scrub-label">{{ currentIndex + 1 }}/{{ frames.length }}</span>
    </div>

    <!-- Selection handles -->
    <div class="handles">
      <div
        class="handle handle-start"
        :style="{ left: startPct + '%' }"
        @mousedown="onHandleMousedown('start', $event)"
        title="Drag to set selection start"
      />
      <div
        class="handle handle-end"
        :style="{ left: endPct + '%' }"
        @mousedown="onHandleMousedown('end', $event)"
        title="Drag to set selection end"
      />
      <div
        class="selection-range"
        :style="{ left: startPct + '%', width: (endPct - startPct) + '%' }"
      />
    </div>
  </div>
</template>

<style scoped>
.frame-strip {
  position: relative;
  display: flex;
  flex-direction: column;
  background: var(--surface-1);
  border-top: var(--panel-border);
  user-select: none;
}

.thumbs {
  display: flex;
  flex-direction: row;
  gap: 2px;
  padding: 4px 6px 0;
  overflow-x: auto;
  scrollbar-width: none;
}
.thumbs::-webkit-scrollbar { display: none; }

.thumb {
  flex-shrink: 0;
  width: 80px;
  height: 50px;
  border: 2px solid transparent;
  border-radius: 2px;
  overflow: hidden;
  cursor: pointer;
  transition: border-color 0.1s, opacity 0.15s;
}

.thumb img {
  width: 100%;
  height: 100%;
  object-fit: cover;
  display: block;
}

.thumb.current {
  border-color: #00bcd4;
}

.thumb.dimmed {
  opacity: 0.35;
}

.thumb:hover:not(.current) {
  border-color: var(--surface-3);
}

/* Scrub slider row */
.scrub-row {
  display: flex;
  align-items: center;
  gap: 6px;
  padding: 2px 6px;
}

.scrub-slider {
  -webkit-appearance: none;
  appearance: none;
  flex: 1;
  height: 4px;
  background: var(--surface-3);
  border-radius: 2px;
  outline: none;
}

.scrub-slider::-webkit-slider-thumb {
  -webkit-appearance: none;
  appearance: none;
  width: 12px;
  height: 12px;
  border-radius: 50%;
  background: #00bcd4;
  cursor: pointer;
}

.scrub-slider::-moz-range-thumb {
  width: 12px;
  height: 12px;
  border-radius: 50%;
  background: #00bcd4;
  cursor: pointer;
  border: none;
}

.scrub-label {
  font-size: 9px;
  color: var(--text-dim);
  font-family: var(--font-mono);
  min-width: 30px;
  text-align: right;
}

/* Handle track area */
.handles {
  position: relative;
  height: 14px;
  margin: 0 6px;
}

.selection-range {
  position: absolute;
  top: 5px;
  height: 4px;
  background: #00bcd4;
  opacity: 0.35;
  pointer-events: none;
}

.handle {
  position: absolute;
  top: 0;
  width: 10px;
  height: 14px;
  transform: translateX(-50%);
  cursor: ew-resize;
  z-index: 2;
}

.handle::after {
  content: '';
  position: absolute;
  left: 50%;
  top: 2px;
  transform: translateX(-50%);
  width: 3px;
  height: 10px;
  background: #00bcd4;
  border-radius: 2px;
}
</style>
@@ -27,12 +27,20 @@ const props = defineProps<{
  frameImage?: string | null
  /** Currently displayed frame sequence number */
  frameRef?: number | null
  /** All checkpoint frames — when provided, Apply runs on the selection range */
  frames?: Array<{ seq: number; jpeg_b64: string }>
  /** Index into frames[] for selection start (default 0) */
  selectionStart?: number
  /** Index into frames[] for selection end (default frames.length - 1) */
  selectionEnd?: number
}>()

const emit = defineEmits<{
  'replay-result': [result: {
    regions_by_frame: Record<string, unknown[]>
    debug: Record<string, { edge_overlay_b64: string; lines_overlay_b64: string; horizontal_count: number; pair_count: number }>
    frameWidth?: number
    frameHeight?: number
  }]
}>()
@@ -45,6 +53,7 @@ const debugEnabled = ref(true)
const autoApply = ref(true) // auto-run on slider change (fast CV); uncheck for heavy stages
const execMode = ref<'local' | 'server'>('local')
const execTimeMs = ref<number | null>(null)
const processingIndex = ref<number | null>(null) // current frame index during multi-frame run

// Config field defaults for detect_edges (used when API is unavailable)
const EDGE_DEFAULTS: ConfigField[] = [
@@ -89,6 +98,11 @@ watch(() => props.frameImage, (newVal, oldVal) => {
  }
})

// Auto-run when selection range changes (strip handle drag)
watch([() => props.selectionStart, () => props.selectionEnd], () => {
  if (autoApply.value) onSliderChange()
})

const numericFields = computed(() => fields.value.filter(f => f.type === 'int' || f.type === 'float'))
const boolFields = computed(() => fields.value.filter(f => f.type === 'bool'))
@@ -108,7 +122,8 @@ function onSliderChange() {
}

async function applyDetection() {
  if (!props.frameImage) {
  const hasFrames = props.frames && props.frames.length > 0
  if (!props.frameImage && !hasFrames) {
    error.value = 'No frame available'
    return
  }
@@ -130,14 +145,10 @@ async function applyDetection() {
    }
  }

/** Browser-side CV — no network, instant */
/** Browser-side CV — runs on current frame or selection range */
async function runLocal() {
  const t0 = performance.now()

  // Decode base64 JPEG → ImageData
  const imageData = await b64ToImageData(props.frameImage!)

  // Build params from slider values
  const params: Partial<EdgeDetectionParams> = {
    cannyLow: values.value['edge_canny_low'] as number,
    cannyHigh: values.value['edge_canny_high'] as number,
@@ -148,40 +159,51 @@ async function runLocal() {
    pairMinDistance: values.value['edge_pair_min_distance'] as number,
  }

  const frameKey = String(props.frameRef ?? 0)
  // Determine which frames to process
  const targetFrames = props.frames && props.frames.length > 0
    ? props.frames.slice(props.selectionStart ?? 0, (props.selectionEnd ?? props.frames.length - 1) + 1)
    : [{ seq: props.frameRef ?? 0, jpeg_b64: props.frameImage! }]

  const regions_by_frame: Record<string, unknown[]> = {}
  const debug: Record<string, { edge_overlay_b64: string; lines_overlay_b64: string; horizontal_count: number; pair_count: number }> = {}
  let totalRegions = 0
  let frameWidth = 0
  let frameHeight = 0

  for (let i = 0; i < targetFrames.length; i++) {
    processingIndex.value = i
    const frame = targetFrames[i]
    const imageData = await b64ToImageData(frame.jpeg_b64)
    frameWidth = imageData.width
    frameHeight = imageData.height
    const frameKey = String(frame.seq)

    if (debugEnabled.value) {
      const result = await runEdgeDetectionDebug(imageData, params)
      execTimeMs.value = Math.round(performance.now() - t0)
      regionCount.value = result.regions.length

      // Convert ImageData overlays to base64 for FrameRenderer
      totalRegions += result.regions.length
      const edgeB64 = await imageDataToB64(result.edgeImageData)
      const linesB64 = await imageDataToB64(result.linesImageData)

      emit('replay-result', {
        regions_by_frame: { [frameKey]: result.regions },
        debug: {
          [frameKey]: {
      regions_by_frame[frameKey] = result.regions
      debug[frameKey] = {
        edge_overlay_b64: edgeB64,
        lines_overlay_b64: linesB64,
        horizontal_count: result.horizontalCount,
        pair_count: result.pairCount,
          },
        },
      })
      }
    } else {
      const result = await runEdgeDetection(imageData, params)
      execTimeMs.value = Math.round(performance.now() - t0)
      regionCount.value = result.regions.length

      emit('replay-result', {
        regions_by_frame: { [frameKey]: result.regions },
        debug: {},
      })
      totalRegions += result.regions.length
      regions_by_frame[frameKey] = result.regions
    }
  }

  processingIndex.value = null
  execTimeMs.value = Math.round(performance.now() - t0)
  regionCount.value = totalRegions

  emit('replay-result', { regions_by_frame, debug, frameWidth, frameHeight })
}
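
// Design note (assumed intent): results are accumulated into
// regions_by_frame/debug and emitted once after the loop, so a long selection
// produces a single replay-result instead of one event per frame.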

/** Server-side CV — calls GPU box via proxy */
async function runServer() {
  const t0 = performance.now()
@@ -297,7 +319,10 @@ async function runServer() {
              <input type="checkbox" v-model="autoApply" />
              <span>Auto</span>
            </label>
            <span v-if="regionCount != null" class="region-count">
            <span v-if="processingIndex != null && frames && frames.length > 1" class="region-count">
              {{ processingIndex + 1 }}/{{ frames.length }}
            </span>
            <span v-else-if="regionCount != null" class="region-count">
              {{ regionCount }} regions
            </span>
            <span v-if="execTimeMs != null" class="exec-time">
@@ -16,6 +16,8 @@ import { detectEdges, detectEdgesDebug, type EdgeRegion, type EdgeDetectionParam

export type { EdgeRegion, EdgeDetectionParams } from './edges'
export type { EdgeDetectionResult, EdgeDetectionDebugResult } from './edges'
export { matchTracks, renderTracksToImageData } from './tracks'
export type { Track, TrackPoint } from './tracks'

/** Run edge detection. Returns bounding boxes. */
export async function runEdgeDetection(
@@ -59,6 +61,26 @@ export function b64ToImageData(b64: string): Promise<ImageData> {
  })
}

/**
 * Encode ImageData to base64 PNG string (preserves transparency).
 *
 * Used for overlays that need a transparent background (e.g. motion tracks).
 * Pair with srcFormat: 'png' on the FrameOverlay.
 */
export async function imageDataToPngB64(imageData: ImageData): Promise<string> {
  const canvas = new OffscreenCanvas(imageData.width, imageData.height)
  const ctx = canvas.getContext('2d')!
  ctx.putImageData(imageData, 0, 0)
  const blob = await canvas.convertToBlob({ type: 'image/png' })
  const buffer = await blob.arrayBuffer()
  const bytes = new Uint8Array(buffer)
  let binary = ''
  for (let i = 0; i < bytes.length; i++) {
    binary += String.fromCharCode(bytes[i])
  }
  return btoa(binary)
}
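
// Usage sketch (illustrative): pair the PNG data with srcFormat: 'png' so the
// renderer keeps the alpha channel —
//   const b64 = await imageDataToPngB64(renderTracksToImageData(tracks, w, h, seq))
//   overlays.push({ src: b64, label: 'Motion tracks', visible: true, srcFormat: 'png' })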

/**
 * Encode ImageData to base64 JPEG string.
 *
ui/framework/src/cv/tracks.ts (new file, 181 lines)
@@ -0,0 +1,181 @@
/**
 * Motion track matching and rendering.
 *
 * Matches bounding boxes across frames by IoU (Intersection over Union).
 * Uses greedy best-first assignment so each track takes its highest-IoU match.
 * Renders VFX-style tracks: × at start/end, dotted trace in between.
 * Returns ImageData (RGBA, transparent background) for use as a FrameOverlay.
 */

export type TrackPoint = { seq: number; cx: number; cy: number; w: number; h: number }
export type Track = { id: number; points: TrackPoint[] }

type Box = { x: number; y: number; w: number; h: number }

function iou(a: Box, b: Box): number {
  const x1 = Math.max(a.x, b.x)
  const y1 = Math.max(a.y, b.y)
  const x2 = Math.min(a.x + a.w, b.x + b.w)
  const y2 = Math.min(a.y + a.h, b.y + b.h)
  const inter = Math.max(0, x2 - x1) * Math.max(0, y2 - y1)
  if (inter === 0) return 0
  const union = a.w * a.h + b.w * b.h - inter
  return union > 0 ? inter / union : 0
}
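
// Sanity check (illustrative): two 10×10 boxes offset by 5px horizontally
// overlap in a 5×10 strip, so iou(...) = 50 / (100 + 100 - 50) = 1/3.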

/**
 * Match bounding boxes across frames by IoU.
 *
 * For each pair of consecutive frames, computes an IoU cost matrix and
 * assigns tracks to boxes using greedy-best-first on IoU score (descending).
 * minIoU: minimum overlap to consider a match (default 0.15).
 */
export function matchTracks(
  regionsByFrame: Record<number, Box[]>,
  minIoU = 0.15,
): Track[] {
  const seqs = Object.keys(regionsByFrame).map(Number).sort((a, b) => a - b)
  if (seqs.length === 0) return []

  let nextId = 0
  const active: Track[] = []
  const finished: Track[] = []

  for (const seq of seqs) {
    const boxes = regionsByFrame[seq]

    if (active.length === 0) {
      for (const b of boxes) {
        active.push({
          id: nextId++,
          points: [{ seq, cx: b.x + b.w / 2, cy: b.y + b.h / 2, w: b.w, h: b.h }],
        })
      }
      continue
    }

    // Build cost matrix: IoU between each active track's last box and each new box
    const costs: Array<{ trackIdx: number; boxIdx: number; score: number }> = []
    for (let t = 0; t < active.length; t++) {
      const last = active[t].points[active[t].points.length - 1]
      const trackBox: Box = { x: last.cx - last.w / 2, y: last.cy - last.h / 2, w: last.w, h: last.h }
      for (let b = 0; b < boxes.length; b++) {
        const score = iou(trackBox, boxes[b])
        if (score >= minIoU) {
          costs.push({ trackIdx: t, boxIdx: b, score })
        }
      }
    }

    // Greedy-best-first assignment (sorted by descending IoU)
    costs.sort((a, b) => b.score - a.score)
    const matchedTracks = new Set<number>()
    const matchedBoxes = new Set<number>()
    const next: Track[] = []

    for (const { trackIdx, boxIdx } of costs) {
      if (matchedTracks.has(trackIdx) || matchedBoxes.has(boxIdx)) continue
      matchedTracks.add(trackIdx)
      matchedBoxes.add(boxIdx)
      const b = boxes[boxIdx]
      active[trackIdx].points.push({
        seq, cx: b.x + b.w / 2, cy: b.y + b.h / 2, w: b.w, h: b.h,
      })
      next.push(active[trackIdx])
    }

    // Unmatched tracks → finished
    for (let t = 0; t < active.length; t++) {
      if (!matchedTracks.has(t)) finished.push(active[t])
    }

    // Unmatched boxes → new tracks
    for (let b = 0; b < boxes.length; b++) {
      if (!matchedBoxes.has(b)) {
        const box = boxes[b]
        next.push({
          id: nextId++,
          points: [{ seq, cx: box.x + box.w / 2, cy: box.y + box.h / 2, w: box.w, h: box.h }],
        })
      }
    }

    active.length = 0
    active.push(...next)
  }

  return [...finished, ...active]
}
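
// Usage sketch (hypothetical data): one detection drifting right across three
// frames yields a single three-point track —
//   matchTracks({
//     0: [{ x: 10, y: 10, w: 40, h: 20 }],
//     1: [{ x: 14, y: 10, w: 40, h: 20 }],
//     2: [{ x: 18, y: 10, w: 40, h: 20 }],
//   })  // → one Track with points at seq 0, 1, 2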

/**
 * Render tracks to ImageData (RGBA, transparent background).
 *
 * Only draws tracks that span ≥2 frames (single-frame detections are skipped).
 * × at start, × at end, dotted trace in between.
 * Filled dot at currentSeq.
 */
export function renderTracksToImageData(
  tracks: Track[],
  width: number,
  height: number,
  currentSeq: number,
  color = '#00e5ff',
): ImageData {
  const canvas = new OffscreenCanvas(width, height)
  const ctx = canvas.getContext('2d')!

  ctx.clearRect(0, 0, width, height)
  ctx.strokeStyle = color
  ctx.fillStyle = color

  for (const track of tracks) {
    const pts = track.points
    // Skip single-frame detections — no temporal signal
    if (pts.length < 2) continue

    // Dotted trace connecting all centers
    ctx.globalAlpha = 0.75
    ctx.lineWidth = 1.5
    ctx.setLineDash([4, 4])
    ctx.beginPath()
    ctx.moveTo(pts[0].cx, pts[0].cy)
    for (let i = 1; i < pts.length; i++) {
      ctx.lineTo(pts[i].cx, pts[i].cy)
    }
    ctx.stroke()
    ctx.setLineDash([])

    // × at first point
    ctx.globalAlpha = 0.9
    ctx.lineWidth = 1.5
    drawX(ctx, pts[0].cx, pts[0].cy, 7)

    // × at last point
    drawX(ctx, pts[pts.length - 1].cx, pts[pts.length - 1].cy, 7)

    // Filled dot at current frame
    const curr = pts.find(p => p.seq === currentSeq)
    if (curr) {
      ctx.globalAlpha = 1
      ctx.beginPath()
      ctx.arc(curr.cx, curr.cy, 4, 0, Math.PI * 2)
      ctx.fill()
    }
  }

  return ctx.getImageData(0, 0, width, height)
}
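
// Note: OffscreenCanvas with a 2D context is assumed available (Chromium 69+,
// Firefox 105+, Safari 16.4+); there is no DOM-canvas fallback here.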

function drawX(
  ctx: OffscreenCanvasRenderingContext2D,
  cx: number,
  cy: number,
  size: number,
): void {
  ctx.beginPath()
  ctx.moveTo(cx - size, cy - size)
  ctx.lineTo(cx + size, cy + size)
  ctx.moveTo(cx + size, cy - size)
  ctx.lineTo(cx - size, cy + size)
  ctx.stroke()
}
@@ -15,12 +15,14 @@ export interface FrameBBox {
}

export interface FrameOverlay {
  /** Base64 JPEG image (same dimensions as main image) */
  /** Base64 encoded image (same dimensions as main image) */
  src: string
  label: string
  visible: boolean
  /** Opacity 0-1, default 0.5 */
  opacity?: number
  /** Image format — 'jpeg' (default) or 'png' (supports transparency) */
  srcFormat?: 'jpeg' | 'png'
}
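
// Example (illustrative): a transparent track layer —
//   { src: pngB64, label: 'Motion tracks', visible: true, opacity: 0.9, srcFormat: 'png' }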

const props = defineProps<{
@@ -97,7 +99,7 @@ function drawOverlays(ctx: CanvasRenderingContext2D, dx: number, dy: number, dw:
      // Load async, redraw when ready
      const overlay = new window.Image()
      overlay.onload = () => draw()
      overlay.src = `data:image/jpeg;base64,${layer.src}`
      overlay.src = `data:image/${layer.srcFormat ?? 'jpeg'};base64,${layer.src}`
      overlayCache.set(layer.src, overlay)
    }
  }