This commit is contained in:
2026-03-28 09:40:07 -03:00
parent 0bd3888155
commit e46bbc419c
10 changed files with 508 additions and 49 deletions

View File

@@ -57,6 +57,13 @@ def write_config(update: ConfigUpdate):
return _runtime_config
@router.get("/config/profiles")
def list_profiles():
    """Return the name of every registered detection profile."""
    from detect.profiles import _PROFILES
    profile_names = list(_PROFILES)
    return [{"name": profile} for profile in profile_names]
@router.get("/config/stages", response_model=list[StageConfigInfo])
def list_stage_configs():
"""Return the stage palette with config field metadata for the editor."""

View File

@@ -1,9 +1,13 @@
"""
Pipeline run endpoints.
POST /detect/run — launch pipeline on selected source
POST /detect/stop/{job_id} — cancel a running pipeline
POST /detect/clear/{job_id} — clear events from Redis
POST /detect/run — launch pipeline on selected source
POST /detect/stop/{job_id} — cancel a running pipeline
POST /detect/pause/{job_id} — pause after current stage
POST /detect/resume/{job_id} — resume a paused pipeline
POST /detect/step/{job_id} — run one stage then pause
POST /detect/clear/{job_id} — clear events from Redis
GET /detect/status/{job_id} — pipeline run status
"""
from __future__ import annotations
@@ -33,6 +37,7 @@ class RunRequest(BaseModel):
skip_vlm: bool = False
skip_cloud: bool = False
log_level: str = "INFO" # INFO | DEBUG
pause_after_stage: bool = False
class RunResponse(BaseModel):
@@ -92,9 +97,13 @@ def run_pipeline(req: RunRequest):
source_asset_id=req.source_asset_id,
)
from detect.graph import PipelineCancelled, set_cancel_check, clear_cancel_check
from detect.graph import (
PipelineCancelled, set_cancel_check, clear_cancel_check,
init_pause, clear_pause,
)
set_cancel_check(job_id, lambda: job_id in _cancelled_jobs)
init_pause(job_id, pause_after_stage=req.pause_after_stage)
def _run():
try:
@@ -123,6 +132,7 @@ def run_pipeline(req: RunRequest):
_running_jobs.pop(job_id, None)
_cancelled_jobs.discard(job_id)
clear_cancel_check(job_id)
clear_pause(job_id)
emit.clear_run_context()
thread = threading.Thread(target=_run, daemon=True, name=f"pipeline-{job_id}")
@@ -145,6 +155,75 @@ def stop_pipeline(job_id: str):
return {"status": "stopping", "job_id": job_id}
@router.post("/pause/{job_id}")
def pause(job_id: str):
    """Pause a running pipeline after the current stage completes."""
    from detect.graph import pause_pipeline
    if job_id not in _running_jobs:
        detail = f"No running pipeline: {job_id}"
        raise HTTPException(status_code=404, detail=detail)
    pause_pipeline(job_id)
    return {"status": "pausing", "job_id": job_id}
@router.post("/resume/{job_id}")
def resume(job_id: str):
    """Resume a paused pipeline so it continues with the next stage."""
    from detect.graph import resume_pipeline
    if job_id not in _running_jobs:
        detail = f"No running pipeline: {job_id}"
        raise HTTPException(status_code=404, detail=detail)
    resume_pipeline(job_id)
    return {"status": "running", "job_id": job_id}
@router.post("/step/{job_id}")
def step(job_id: str):
    """Run one stage then pause again."""
    from detect.graph import step_pipeline
    if job_id not in _running_jobs:
        detail = f"No running pipeline: {job_id}"
        raise HTTPException(status_code=404, detail=detail)
    step_pipeline(job_id)
    return {"status": "stepping", "job_id": job_id}
@router.post("/pause-after-stage/{job_id}")
def toggle_pause_after_stage(job_id: str, enabled: bool = True):
    """Toggle pause-after-each-stage mode."""
    from detect.graph import set_pause_after_stage
    if job_id not in _running_jobs:
        detail = f"No running pipeline: {job_id}"
        raise HTTPException(status_code=404, detail=detail)
    set_pause_after_stage(job_id, enabled)
    return {"status": "ok", "pause_after_stage": enabled, "job_id": job_id}
@router.get("/status/{job_id}")
def pipeline_status(job_id: str):
    """Get pipeline run status.

    Precedence: cancelling > paused > running > idle.
    """
    from detect.graph import is_paused
    if job_id in _cancelled_jobs:
        status = "cancelling"
    elif is_paused(job_id):
        status = "paused"
    elif job_id in _running_jobs:
        status = "running"
    else:
        status = "idle"
    return {"status": status, "job_id": job_id}
@router.post("/clear/{job_id}")
def clear_pipeline(job_id: str):
"""Clear events for a job from Redis."""

View File

@@ -4,7 +4,7 @@ Detection pipeline graph.
detect/graph/
nodes.py — node functions (one per stage)
events.py — graph_update SSE emission
runner.py — pipeline execution (LangGraph wrapper, checkpoint, cancel)
runner.py — pipeline execution (LangGraph wrapper, checkpoint, cancel, pause)
"""
from .nodes import NODES, NODE_FUNCTIONS
@@ -12,8 +12,15 @@ from .runner import (
PipelineCancelled,
build_graph,
clear_cancel_check,
clear_pause,
get_pipeline,
init_pause,
is_paused,
pause_pipeline,
resume_pipeline,
set_cancel_check,
set_pause_after_stage,
step_pipeline,
)
from .events import _node_states
@@ -25,5 +32,12 @@ __all__ = [
"get_pipeline",
"set_cancel_check",
"clear_cancel_check",
"init_pause",
"clear_pause",
"pause_pipeline",
"resume_pipeline",
"step_pipeline",
"set_pause_after_stage",
"is_paused",
"_node_states",
]

View File

@@ -7,13 +7,17 @@ custom runner in Phase 3, with an executor socket for distributed dispatch.
from __future__ import annotations
import logging
import os
import threading
from langgraph.graph import END, StateGraph
from detect.state import DetectState
from .nodes import NODES, NODE_FUNCTIONS
logger = logging.getLogger(__name__)
# --- Checkpoint wrapper ---
@@ -27,7 +31,15 @@ class PipelineCancelled(Exception):
pass
# Cancellation hook — set by the run endpoint, checked before each node
class PipelinePaused(Exception):
    """Raised when a pipeline is paused (internally, for flow control).

    NOTE(review): not raised anywhere in this chunk — presumably used by the
    runner's pause machinery elsewhere; confirm before removing.
    """
    pass
# ---------------------------------------------------------------------------
# Cancellation — checked before each node
# ---------------------------------------------------------------------------
_cancel_check: dict[str, callable] = {}
@@ -39,6 +51,92 @@ def clear_cancel_check(job_id: str):
_cancel_check.pop(job_id, None)
# ---------------------------------------------------------------------------
# Pause / Resume / Step — checked after each node completes
#
# _pause_gate: threading.Event per job. When cleared, the runner blocks.
# When set, the runner proceeds to the next node.
# _pause_after_stage: if True, automatically clear the gate after each node.
# ---------------------------------------------------------------------------
_pause_gate: dict[str, threading.Event] = {}
_pause_after_stage: dict[str, bool] = {}
def init_pause(job_id: str, pause_after_stage: bool = False):
    """Set up pause bookkeeping for a job; called when the pipeline starts."""
    event = threading.Event()
    event.set()  # begin in the running (unpaused) state
    _pause_gate[job_id] = event
    _pause_after_stage[job_id] = pause_after_stage
def clear_pause(job_id: str):
    """Drop pause bookkeeping for a job; called when the pipeline finishes."""
    for registry in (_pause_gate, _pause_after_stage):
        registry.pop(job_id, None)
def pause_pipeline(job_id: str):
    """Pause a running pipeline. It will block after the current stage completes."""
    if (gate := _pause_gate.get(job_id)) is not None:
        gate.clear()
        logger.info("Pipeline %s paused", job_id)
def resume_pipeline(job_id: str):
    """Resume a paused pipeline."""
    if (gate := _pause_gate.get(job_id)) is not None:
        gate.set()
        logger.info("Pipeline %s resumed", job_id)
def step_pipeline(job_id: str):
    """Run one stage then pause again.

    Re-arms pause-after-stage mode and opens the gate so exactly one node
    runs; _wait_if_paused re-closes the gate once that node completes.

    Guarded on gate existence: the original wrote _pause_after_stage
    unconditionally, which leaked a stale entry for unknown/finished jobs
    that clear_pause would never remove.
    """
    gate = _pause_gate.get(job_id)
    if gate is None:
        return  # unknown or already-finished job — nothing to step
    _pause_after_stage[job_id] = True
    gate.set()  # unblock for one stage, _pause_after_stage re-pauses after
    logger.info("Pipeline %s stepping", job_id)
def set_pause_after_stage(job_id: str, enabled: bool):
    """Toggle pause-after-each-stage mode.

    Guarded on gate existence: the original wrote _pause_after_stage
    unconditionally, leaking a stale entry for unknown/finished jobs that
    clear_pause would never remove.
    """
    gate = _pause_gate.get(job_id)
    if gate is None:
        return  # unknown or already-finished job — nothing to toggle
    _pause_after_stage[job_id] = enabled
    if not enabled:
        # If disabling, also resume in case we're currently paused
        gate.set()
def is_paused(job_id: str) -> bool:
    """Return True when the job has a pause gate and it is currently blocking."""
    gate = _pause_gate.get(job_id)
    if gate is None:
        return False
    return not gate.is_set()
def _wait_if_paused(job_id: str, node_name: str):
    """Block until resumed. Called after each node completes.

    If pause-after-stage mode is on, the gate is cleared here so the runner
    pauses immediately. While blocked, the job's cancel check is polled every
    0.5s so a paused pipeline can still be cancelled.

    Raises:
        PipelineCancelled: if the job is cancelled while waiting.
    """
    gate = _pause_gate.get(job_id)
    if gate is None:
        return
    # If pause-after-stage is on, pause now
    if _pause_after_stage.get(job_id, False):
        gate.clear()
        from detect import emit
        emit.log(job_id, "Pipeline", "INFO", f"Paused after {node_name}")
    # Block until gate is set (resume/step) or cancelled
    while not gate.wait(timeout=0.5):
        check = _cancel_check.get(job_id)
        if check and check():
            # fixed: was an f-string with no placeholders (ruff F541)
            raise PipelineCancelled("Cancelled while paused before next stage")
def _checkpointing_node(node_name: str, node_fn):
"""Wrap a node function to auto-checkpoint after completion."""
@@ -81,6 +179,10 @@ def _checkpointing_node(node_name: str, node_fn):
output_json=output_json,
)
_latest_checkpoint[job_id] = new_checkpoint_id
# Pause check — blocks if paused, respects cancel while waiting
_wait_if_paused(job_id, node_name)
return result
wrapper.__name__ = node_fn.__name__

View File

@@ -24,8 +24,10 @@ const logPanel = ref<{ clear: () => void } | null>(null)
// SSE connection + pipeline status
const {
jobId, stats, runContext, status, sseConnected, source,
stopPipeline, onJobStarted: sseJobStarted,
jobId, stats, runContext, status, paused, pauseAfterStage,
sseConnected, source,
stopPipeline, pausePipeline, resumePipeline, stepPipeline,
togglePauseAfterStage, onJobStarted: sseJobStarted,
} = useSSEConnection()
// Checkpoint frames + navigation
@@ -50,9 +52,9 @@ function setCheckpointFrame(index: number) {
}
// Wire job start to clear log panel
function onJobStarted(newJobId: string) {
function onJobStarted(newJobId: string, opts?: { pauseAfterStage?: boolean }) {
logPanel.value?.clear()
sseJobStarted(newJobId)
sseJobStarted(newJobId, opts)
}
</script>
@@ -62,20 +64,52 @@ function onJobStarted(newJobId: string) {
<header>
<h1>Detection Pipeline</h1>
<span class="status-badge" :class="status">{{ status }}</span>
<span v-if="runContext" class="run-info">
{{ runContext.run_type }} · run: {{ runContext.run_id }}
</span>
<button class="header-btn" title="Select source" @click="pipeline.openSourceSelector()">
<svg width="14" height="14" viewBox="0 0 16 16" fill="none" stroke="currentColor" stroke-width="1.5">
<path d="M2 4h4l2 2h6v8H2V4z"/><path d="M2 4V2h12v2"/>
</svg>
</button>
<button
v-if="sseConnected && (status === 'live' || status === 'processing')"
class="header-btn stop-btn"
title="Stop pipeline"
@click="stopPipeline"
></button>
<!-- Transport controls visible when a pipeline is running -->
<div v-if="sseConnected && (status === 'live' || status === 'processing' || paused)" class="transport">
<button
v-if="paused"
class="header-btn play-btn"
title="Resume"
@click="resumePipeline"
>
<svg width="12" height="12" viewBox="0 0 12 12" fill="currentColor"><polygon points="2,1 11,6 2,11"/></svg>
</button>
<button
v-else
class="header-btn pause-btn"
title="Pause after current stage"
@click="pausePipeline"
>
<svg width="12" height="12" viewBox="0 0 12 12" fill="currentColor"><rect x="2" y="1" width="3" height="10"/><rect x="7" y="1" width="3" height="10"/></svg>
</button>
<button
class="header-btn step-btn"
title="Run one stage"
@click="stepPipeline"
:disabled="!paused"
>
<svg width="12" height="12" viewBox="0 0 12 12" fill="currentColor"><polygon points="1,1 7,6 1,11"/><rect x="8" y="1" width="2.5" height="10"/></svg>
</button>
<button
class="header-btn stop-btn"
title="Stop pipeline"
@click="stopPipeline"
>
<svg width="10" height="10" viewBox="0 0 10 10" fill="currentColor"><rect width="10" height="10"/></svg>
</button>
<label class="pause-toggle" title="Pause after each stage">
<input type="checkbox" :checked="pauseAfterStage" @change="togglePauseAfterStage" />
<span>step</span>
</label>
</div>
<span v-if="paused" class="status-badge paused">PAUSED</span>
<span class="job-id">job: {{ jobId || '—' }}</span>
</header>
@@ -203,19 +237,14 @@ function onJobStarted(newJobId: string) {
</Panel>
<!-- === SOURCE SELECTOR MODE === -->
<SourceSelector v-else-if="pipeline.layoutMode === 'source_selector'" @job-started="onJobStarted" />
<SourceSelector v-else-if="pipeline.layoutMode === 'source_selector'" @job-started="(id: string, opts: any) => onJobStarted(id, opts)" />
</template>
</SplitPane>
<!-- Bottom bar: Log or Blob viewer depending on mode -->
<div class="log-row">
<template v-if="pipeline.layoutMode === 'source_selector'">
<!-- no log in source selector -->
</template>
<template v-else>
<LogPanel ref="logPanel" :source="source" :status="status" />
</template>
<!-- Bottom bar: Log (hidden in source selector) -->
<div v-if="pipeline.layoutMode !== 'source_selector'" class="log-row">
<LogPanel ref="logPanel" :source="source" :status="status" />
</div>
</div>
</template>
@@ -283,6 +312,16 @@ header h1 { font-size: var(--font-size-lg); font-weight: 600; }
background: var(--surface-3);
color: var(--text-primary);
}
.transport {
display: flex;
align-items: center;
gap: 2px;
}
.play-btn { color: var(--status-live); }
.pause-btn { color: var(--text-secondary); }
.step-btn { color: var(--text-secondary); }
.step-btn:disabled { opacity: 0.3; cursor: not-allowed; }
.stop-btn {
background: var(--status-error);
color: #000;
@@ -293,6 +332,19 @@ header h1 { font-size: var(--font-size-lg); font-weight: 600; }
opacity: 0.8;
}
.pause-toggle {
display: flex;
align-items: center;
gap: 4px;
font-size: 10px;
color: var(--text-dim);
cursor: pointer;
margin-left: 4px;
}
.pause-toggle input { accent-color: var(--status-processing); }
.status-badge.paused { background: var(--status-processing); color: #000; }
.job-id { color: var(--text-dim); font-size: var(--font-size-sm); margin-left: auto; }
.stats-col {

View File

@@ -68,6 +68,9 @@ export function useSSEConnection() {
sseConnected.value = true
}
const paused = ref(false)
const pauseAfterStage = ref(false)
async function stopPipeline() {
if (!jobId.value) return
try {
@@ -75,11 +78,70 @@ export function useSSEConnection() {
} catch { /* ignore — UI will see the cancel event via SSE */ }
}
function onJobStarted(newJobId: string) {
async function pausePipeline() {
  const id = jobId.value
  if (!id) return
  try {
    await fetch(`/api/detect/pause/${id}`, { method: 'POST' })
    paused.value = true
  } catch {
    // ignore — the status poll will reconcile UI state
  }
}
async function resumePipeline() {
  const id = jobId.value
  if (!id) return
  try {
    await fetch(`/api/detect/resume/${id}`, { method: 'POST' })
    paused.value = false
  } catch {
    // ignore — the status poll will reconcile UI state
  }
}
async function stepPipeline() {
  const id = jobId.value
  if (!id) return
  try {
    await fetch(`/api/detect/step/${id}`, { method: 'POST' })
    // briefly unpaused — the backend re-pauses after the next stage
    paused.value = false
  } catch {
    // ignore
  }
}
async function togglePauseAfterStage() {
  const id = jobId.value
  if (!id) return
  const enabled = !pauseAfterStage.value
  try {
    await fetch(`/api/detect/pause-after-stage/${id}?enabled=${enabled}`, { method: 'POST' })
    // only flip local state once the server accepted the toggle
    pauseAfterStage.value = enabled
  } catch {
    // ignore
  }
}
// Poll pipeline status to track paused state
// (SSE doesn't emit pause events — the thread is blocked)
let statusPoll: ReturnType<typeof setInterval> | null = null
function startStatusPoll() {
  if (statusPoll) return // already polling
  statusPoll = setInterval(async () => {
    const id = jobId.value
    if (!id) return
    try {
      const resp = await fetch(`/api/detect/status/${id}`)
      if (!resp.ok) return
      const body = await resp.json()
      paused.value = body.status === 'paused'
    } catch {
      // ignore transient fetch failures
    }
  }, 1000)
}
function stopStatusPoll() {
  if (!statusPoll) return
  clearInterval(statusPoll)
  statusPoll = null
}
function onJobStarted(newJobId: string, opts?: { pauseAfterStage?: boolean }) {
jobId.value = newJobId
stats.value = null
runContext.value = null
status.value = 'processing'
paused.value = false
pauseAfterStage.value = opts?.pauseAfterStage ?? false
pipeline.reset()
pipeline.setStatus('running')
// Update URL without reload
@@ -91,16 +153,34 @@ export function useSSEConnection() {
source.setUrl(`/api/detect/stream/${newJobId}`)
source.connect()
sseConnected.value = true
startStatusPoll()
}
// Start polling if we already have an active job
if (jobId.value && sseConnected.value) {
startStatusPoll()
}
// Stop polling when job completes
source.on<{ report?: { status?: string } }>('job_complete', () => {
stopStatusPoll()
paused.value = false
})
return {
jobId,
stats,
runContext,
status,
paused,
pauseAfterStage,
sseConnected,
source: source as DataSource,
stopPipeline,
pausePipeline,
resumePipeline,
stepPipeline,
togglePauseAfterStage,
onJobStarted,
}
}

View File

@@ -17,6 +17,7 @@ const nodes = ref<GraphNode[]>([])
// Derive graph mode from pipeline layout mode
const graphMode = computed<GraphMode>(() => {
if (pipeline.layoutMode === 'source_selector') return 'observe'
if (pipeline.layoutMode === 'bbox_editor') return 'edit-isolated'
if (pipeline.layoutMode === 'stage_editor') return 'edit-in-pipeline'
return 'observe'
@@ -29,6 +30,20 @@ watch(stageNames, (names) => {
}
}, { immediate: true })
// Source selector: placeholders until a chunk is selected, then real stage names
// Source selector: placeholder nodes until a chunk is selected, then the
// real stage names; any other layout mode passes the live nodes through.
const displayNodes = computed<GraphNode[]>(() => {
  if (pipeline.layoutMode !== 'source_selector') return nodes.value
  if (!pipeline.sourceHasSelection) {
    return Array.from({ length: 10 }, (_, i) => ({
      id: `_placeholder_${i}`,
      status: 'placeholder' as const,
    }))
  }
  return stageNames.value.map((id) => ({ id, status: 'pending' as const }))
})
props.source.on<{ nodes: GraphNode[] }>('graph_update', (e) => {
nodes.value = e.nodes
})
@@ -60,7 +75,7 @@ function onOpenStageEditor(stage: string) {
<template>
<Panel title="Pipeline" :status="status">
<GraphRenderer
:nodes="nodes"
:nodes="displayNodes"
:mode="graphMode"
:active-stage="pipeline.editorStage"
:region-stages="editableStages"

View File

@@ -25,15 +25,22 @@ const SOURCE_TYPE_LABELS: Record<string, string> = {
stream: 'STREAM',
}
interface ProfileInfo {
name: string
}
const sources = ref<SourceInfo[]>([])
const chunks = ref<ChunkInfo[]>([])
const profiles = ref<ProfileInfo[]>([])
const selectedSource = ref<string | null>(null)
const selectedChunk = ref<string | null>(null)
const selectedChunks = ref<Set<string>>(new Set())
const selectedProfile = ref('soccer_broadcast')
const loading = ref(false)
const running = ref(false)
const skipVlm = ref(false)
const skipCloud = ref(true)
const checkpoint = ref(true)
const pauseAfterStage = ref(false)
const logLevel = ref('INFO')
const error = ref<string | null>(null)
@@ -41,9 +48,19 @@ async function loadSources() {
loading.value = true
error.value = null
try {
const resp = await fetch('/api/detect/sources')
if (!resp.ok) throw new Error(`${resp.status} ${resp.statusText}`)
sources.value = await resp.json()
const [srcResp, profResp] = await Promise.all([
fetch('/api/detect/sources'),
fetch('/api/detect/config/profiles'),
])
if (!srcResp.ok) throw new Error(`${srcResp.status} ${srcResp.statusText}`)
sources.value = await srcResp.json()
if (profResp.ok) {
profiles.value = await profResp.json()
if (profiles.value.length > 0 && !profiles.value.find(p => p.name === selectedProfile.value)) {
selectedProfile.value = profiles.value[0].name
}
}
} catch (e: any) {
error.value = `Failed to load sources: ${e.message}`
} finally {
@@ -53,7 +70,7 @@ async function loadSources() {
async function loadChunks(jobId: string) {
selectedSource.value = jobId
selectedChunk.value = null
selectedChunks.value = new Set()
chunks.value = []
try {
const resp = await fetch(`/api/detect/sources/${jobId}/chunks`)
@@ -64,8 +81,25 @@ async function loadChunks(jobId: string) {
}
}
function selectChunk(chunk: ChunkInfo) {
selectedChunk.value = chunk.key
function toggleChunk(chunk: ChunkInfo) {
  // Copy-on-write so Vue reactivity sees a new Set instance.
  const next = new Set(selectedChunks.value)
  if (!next.delete(chunk.key)) {
    // delete() returns false when the key was absent — so add it instead
    next.add(chunk.key)
  }
  selectedChunks.value = next
  pipeline.sourceHasSelection = next.size > 0
}
function selectAllChunks() {
  const allKeys = chunks.value.map((c) => c.key)
  selectedChunks.value = new Set(allKeys)
  pipeline.sourceHasSelection = true
}
function clearSelection() {
  pipeline.sourceHasSelection = false
  selectedChunks.value = new Set()
}
async function openPreview(chunk: ChunkInfo) {
@@ -83,24 +117,29 @@ async function openPreview(chunk: ChunkInfo) {
}
const emit = defineEmits<{
(e: 'job-started', jobId: string): void
(e: 'job-started', jobId: string, opts: { pauseAfterStage: boolean }): void
}>()
async function runPipeline() {
if (!selectedChunk.value) return
if (selectedChunks.value.size === 0) return
running.value = true
error.value = null
// Run first selected chunk (multi-run queuing is future work)
const videoPath = [...selectedChunks.value][0]
try {
const resp = await fetch('/api/detect/run', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
video_path: selectedChunk.value,
video_path: videoPath,
profile_name: selectedProfile.value,
checkpoint: checkpoint.value,
skip_vlm: skipVlm.value,
skip_cloud: skipCloud.value,
log_level: logLevel.value,
pause_after_stage: pauseAfterStage.value,
}),
})
if (!resp.ok) {
@@ -109,7 +148,7 @@ async function runPipeline() {
}
const data = await resp.json()
emit('job-started', data.job_id)
emit('job-started', data.job_id, { pauseAfterStage: pauseAfterStage.value })
} catch (e: any) {
error.value = `Failed to start pipeline: ${e.message}`
running.value = false
@@ -160,13 +199,20 @@ onMounted(loadSources)
<!-- Chunk list -->
<div class="source-section" v-if="chunks.length > 0">
<h3>Chunks</h3>
<div class="chunk-header">
<h3>Chunks</h3>
<span class="chunk-actions">
<button class="link-btn" @click="selectAllChunks">all</button>
<button class="link-btn" @click="clearSelection">none</button>
<span v-if="selectedChunks.size > 0" class="selection-count">{{ selectedChunks.size }} selected</span>
</span>
</div>
<div class="chunk-list">
<div
v-for="chunk in chunks"
:key="chunk.key"
:class="['chunk-item', { selected: selectedChunk === chunk.key }]"
@click="selectChunk(chunk)"
:class="['chunk-item', { selected: selectedChunks.has(chunk.key) }]"
@click="toggleChunk(chunk)"
>
<span class="chunk-name">{{ chunk.filename }}</span>
<span class="chunk-meta">
@@ -182,11 +228,18 @@ onMounted(loadSources)
</div>
<!-- Run options -->
<div class="run-options" v-if="selectedChunk">
<div class="run-options" v-if="selectedChunks.size > 0">
<h3>Run Options</h3>
<label>
Profile
<select v-model="selectedProfile" class="log-level-select">
<option v-for="p in profiles" :key="p.name" :value="p.name">{{ p.name.replace(/_/g, ' ') }}</option>
</select>
</label>
<label><input type="checkbox" v-model="checkpoint"> Checkpointing</label>
<label><input type="checkbox" v-model="skipVlm"> Skip VLM</label>
<label><input type="checkbox" v-model="skipCloud"> Skip Cloud</label>
<label><input type="checkbox" v-model="pauseAfterStage"> Pause after each stage</label>
<label>
Log level
<select v-model="logLevel" class="log-level-select">
@@ -195,10 +248,10 @@ onMounted(loadSources)
</select>
</label>
<div class="selected-path">{{ selectedChunk }}</div>
<div class="selected-path">{{ [...selectedChunks].join(', ') }}</div>
<button class="run-btn" @click="runPipeline" :disabled="running">
{{ running ? 'Starting...' : 'Run Pipeline' }}
{{ running ? 'Starting...' : selectedChunks.size === 1 ? 'Run Pipeline' : `Run Pipeline (${selectedChunks.size} chunks)` }}
</button>
</div>
@@ -239,6 +292,35 @@ onMounted(loadSources)
letter-spacing: 0.05em;
}
.chunk-header {
display: flex;
align-items: center;
justify-content: space-between;
}
.chunk-actions {
display: flex;
align-items: center;
gap: var(--space-2);
}
.link-btn {
background: none;
border: none;
color: var(--text-dim);
font-family: var(--font-mono);
font-size: 10px;
cursor: pointer;
text-decoration: underline;
padding: 0;
}
.link-btn:hover { color: var(--text-primary); }
.selection-count {
font-size: 10px;
color: var(--text-secondary);
}
.source-list, .chunk-list {
max-height: 200px;
overflow-y: auto;

View File

@@ -24,6 +24,8 @@ export const usePipelineStore = defineStore('pipeline', () => {
const layoutMode = ref<string>('normal')
const editorStage = ref<string | null>(null)
const sourceHasSelection = ref(false)
const isRunning = computed(() => status.value === 'running')
const isPaused = computed(() => status.value === 'paused')
const canReplay = computed(() => checkpoints.value.length > 0)
@@ -76,12 +78,14 @@ export const usePipelineStore = defineStore('pipeline', () => {
function closeEditor() {
  // Back to the normal layout; drop any editor / source-selector state.
  sourceHasSelection.value = false
  editorStage.value = null
  layoutMode.value = 'normal'
}
function reset() {
status.value = 'idle'
layoutMode.value = 'normal'
editorStage.value = null
sourceHasSelection.value = false
nodes.value = []
currentStage.value = null
runId.value = null
@@ -92,7 +96,7 @@ export const usePipelineStore = defineStore('pipeline', () => {
return {
jobId, status, nodes, currentStage, runId, parentJobId, runType,
checkpoints, error, layoutMode, editorStage,
checkpoints, error, layoutMode, editorStage, sourceHasSelection,
isRunning, isPaused, canReplay, isEditing,
setJob, setStatus, updateNodes, setRunContext, setCheckpoints, setError,
openSourceSelector, openBBoxEditor, openStageEditor, closeEditor, reset,

View File

@@ -6,7 +6,7 @@ import '@vue-flow/core/dist/theme-default.css'
export interface GraphNode {
id: string
status: 'pending' | 'running' | 'done' | 'error' | 'skipped'
status: 'pending' | 'running' | 'done' | 'error' | 'skipped' | 'placeholder'
/** Whether a checkpoint exists at this stage */
hasCheckpoint?: boolean
/** Stage category (e.g. 'cv', 'ai', 'preprocessing') */
@@ -44,6 +44,7 @@ const STATUS_COLORS: Record<string, string> = {
done: 'var(--status-live)',
error: 'var(--status-error)',
skipped: '#4a6fa5',
placeholder: 'transparent',
}
function nodeAppearance(node: GraphNode) {
@@ -84,6 +85,16 @@ function nodeAppearance(node: GraphNode) {
}
}
// Placeholder: hollow, no text
if (node.status === 'placeholder') {
return {
color: 'transparent',
textColor: 'transparent',
opacity: 0.6,
outline: false,
}
}
// Default: observe mode or downstream in edit-in-pipeline
return {
color: STATUS_COLORS[node.status] ?? STATUS_COLORS.pending,
@@ -158,6 +169,7 @@ function onNodeClick(id: string) {
active: data.isActive,
outline: data.outline,
dimmed: data.opacity < 1,
placeholder: data.status === 'placeholder',
}"
:style="{
background: data.color,
@@ -248,6 +260,18 @@ function onNodeClick(id: string) {
pointer-events: none;
}
.stage-node.placeholder {
border: 1px dashed var(--text-secondary);
background: transparent;
color: transparent;
pointer-events: none;
}
.stage-node.placeholder .stage-actions,
.stage-node.placeholder .checkpoint-badge {
display: none;
}
.stage-label {
flex: 1;
}