This commit is contained in:
2026-03-23 15:52:03 -03:00
parent b57da622cb
commit 4fdbdfc6d3
11 changed files with 599 additions and 5 deletions

View File

@@ -35,6 +35,14 @@ def stats(job_id: str | None, **kwargs) -> None:
push_detect_event(job_id, "stats_update", dataclasses.asdict(s))
def graph_update(job_id: str | None, nodes: list[dict]) -> None:
    """Publish the current node-state list as a ``graph_update`` event.

    No-op when *job_id* is missing or empty (no event channel to write to).
    """
    if not job_id:
        return
    push_detect_event(job_id, "graph_update", {"nodes": nodes})
def detection(
job_id: str | None,
brand: str,

175
detect/graph.py Normal file
View File

@@ -0,0 +1,175 @@
"""
LangGraph pipeline graph for brand detection.
Nodes execute real logic for extract+filter, stubs for the rest.
Each node emits graph_update events so the UI can visualize transitions.
"""
from __future__ import annotations
from langgraph.graph import END, StateGraph
from detect import emit
from detect.models import PipelineStats
from detect.profiles import SoccerBroadcastProfile
from detect.state import DetectState
from detect.stages.frame_extractor import extract_frames
from detect.stages.scene_filter import scene_filter
# Ordered list of pipeline stages. Doubles as the node ids broadcast to the
# UI graph and the set of statuses tracked by _emit_transition, so edge order
# in the built graph follows this list.
NODES = [
    "extract_frames",
    "filter_scenes",
    "detect_objects",
    "run_ocr",
    "match_brands",
    "escalate_vlm",
    "escalate_cloud",
    "compile_report",
]
def _get_profile(state: DetectState):
    """Resolve the content profile named in *state*.

    Defaults to ``soccer_broadcast``; raises ``ValueError`` for any other
    profile name.
    """
    profile_name = state.get("profile_name", "soccer_broadcast")
    if profile_name != "soccer_broadcast":
        raise ValueError(f"Unknown profile: {profile_name}")
    return SoccerBroadcastProfile()
# Per-job node status map: job_id -> {node_name: status}.
# NOTE(review): entries are never evicted, so this grows with the number of
# distinct jobs seen by the process — confirm jobs are short-lived.
_node_states: dict[str, dict[str, str]] = {}


def _emit_transition(state: DetectState, node: str, status: str):
    """Record *node* entering *status* and broadcast the full node map."""
    job_id = state.get("job_id")
    if not job_id:
        return
    # Lazily create the tracking map, starting every node as "pending".
    statuses = _node_states.setdefault(job_id, {n: "pending" for n in NODES})
    statuses[node] = status
    emit.graph_update(job_id, [{"id": n, "status": statuses[n]} for n in NODES])
# --- Node functions ---

def node_extract_frames(state: DetectState) -> dict:
    """Extract frames from the input video per the active profile's config."""
    _emit_transition(state, "extract_frames", "running")
    config = _get_profile(state).frame_extraction_config()
    frames = extract_frames(state["video_path"], config, job_id=state.get("job_id"))
    _emit_transition(state, "extract_frames", "done")
    return {
        "frames": frames,
        "stats": PipelineStats(frames_extracted=len(frames)),
    }
def node_filter_scenes(state: DetectState) -> dict:
    """Drop redundant frames via scene filtering; update the running stats."""
    _emit_transition(state, "filter_scenes", "running")
    config = _get_profile(state).scene_filter_config()
    kept = scene_filter(state.get("frames", []), config, job_id=state.get("job_id"))
    # NOTE: updates the stats object carried in state in place, then returns it.
    stats = state.get("stats", PipelineStats())
    stats.frames_after_scene_filter = len(kept)
    _emit_transition(state, "filter_scenes", "done")
    return {"filtered_frames": kept, "stats": stats}
def _run_stub(state: DetectState, node: str, component: str, message: str) -> dict:
    """Shared body for not-yet-implemented stages.

    Marks *node* as running, logs *message* under *component*, marks it done,
    and returns an empty state delta.
    """
    _emit_transition(state, node, "running")
    emit.log(state.get("job_id"), component, "INFO", message)
    _emit_transition(state, node, "done")
    return {}


def node_detect_objects(state: DetectState) -> dict:
    """Stub stage: object detection."""
    return _run_stub(state, "detect_objects", "YOLODetector",
                     "Stub: object detection not yet implemented")


def node_run_ocr(state: DetectState) -> dict:
    """Stub stage: OCR."""
    return _run_stub(state, "run_ocr", "OCRStage",
                     "Stub: OCR not yet implemented")


def node_match_brands(state: DetectState) -> dict:
    """Stub stage: brand matching.

    Returns an empty detections list so downstream nodes (compile_report)
    can rely on the key being present.
    """
    _run_stub(state, "match_brands", "BrandResolver",
              "Stub: brand matching not yet implemented")
    return {"detections": []}


def node_escalate_vlm(state: DetectState) -> dict:
    """Stub stage: local VLM escalation."""
    return _run_stub(state, "escalate_vlm", "VLMLocal",
                     "Stub: VLM escalation not yet implemented")


def node_escalate_cloud(state: DetectState) -> dict:
    """Stub stage: cloud LLM escalation."""
    return _run_stub(state, "escalate_cloud", "CloudLLM",
                     "Stub: cloud LLM escalation not yet implemented")
def node_compile_report(state: DetectState) -> dict:
    """Aggregate detections into the final report and signal job completion."""
    _emit_transition(state, "compile_report", "running")
    job_id = state.get("job_id")
    profile = _get_profile(state)
    detections = state.get("detections", [])
    # Profile-specific aggregation turns raw detections into the report object.
    report = profile.aggregate(detections)
    report.video_source = state.get("video_path", "")
    emit.log(job_id, "Aggregator", "INFO",
             f"Report: {len(report.brands)} brands, {len(report.timeline)} detections")
    # job_complete carries only the summary fields, not the full report.
    emit.job_complete(job_id, {
        "video_source": report.video_source,
        "content_type": report.content_type,
        "brands": {k: {"total_appearances": v.total_appearances} for k, v in report.brands.items()},
    })
    _emit_transition(state, "compile_report", "done")
    return {"report": report}
# --- Graph construction ---

# Stage name -> implementation. Keyed by NODES entries so the graph wiring,
# the UI node list, and the transition tracker cannot drift apart.
_NODE_IMPL = {
    "extract_frames": node_extract_frames,
    "filter_scenes": node_filter_scenes,
    "detect_objects": node_detect_objects,
    "run_ocr": node_run_ocr,
    "match_brands": node_match_brands,
    "escalate_vlm": node_escalate_vlm,
    "escalate_cloud": node_escalate_cloud,
    "compile_report": node_compile_report,
}


def build_graph() -> StateGraph:
    """Assemble the linear detection pipeline.

    Nodes are added and chained in NODES order: entry point is the first
    stage, each stage feeds the next, and the last stage flows to END.
    """
    graph = StateGraph(DetectState)
    for name in NODES:
        graph.add_node(name, _NODE_IMPL[name])
    graph.set_entry_point(NODES[0])
    for src, dst in zip(NODES, NODES[1:]):
        graph.add_edge(src, dst)
    graph.add_edge(NODES[-1], END)
    return graph
def get_pipeline():
    """Build the graph and compile it into an invokable pipeline."""
    graph = build_graph()
    return graph.compile()

28
detect/state.py Normal file
View File

@@ -0,0 +1,28 @@
"""
LangGraph state definition for the detection pipeline.
This TypedDict flows through all graph nodes. Each node reads what
it needs and writes its outputs. LangGraph manages the state transitions.
"""
from __future__ import annotations
from typing import TypedDict
from detect.models import BrandDetection, DetectionReport, Frame, PipelineStats
class DetectState(TypedDict, total=False):
    """State dict threaded through every graph node (all keys optional)."""

    # Input
    video_path: str    # path to the source video file
    job_id: str        # event-channel id; empty/missing disables event emission
    profile_name: str  # content profile selector, e.g. "soccer_broadcast"

    # Stage outputs
    frames: list[Frame]            # written by extract_frames
    filtered_frames: list[Frame]   # written by filter_scenes
    detections: list[BrandDetection]  # written by match_brands
    report: DetectionReport        # written by compile_report

    # Running stats (updated by each stage)
    stats: PipelineStats

View File

@@ -0,0 +1,62 @@
#!/usr/bin/env python3
"""
Run the full LangGraph detection pipeline on a test video.
Usage:
python tests/detect/manual/run_graph.py [--job JOB_ID] [--port PORT]
Opens: http://mpr.local.ar/detection/?job=<JOB_ID>
"""
import argparse
import logging
import os
import sys
# CLI flags: --job names the event channel, --port selects the local Redis.
parser = argparse.ArgumentParser()
parser.add_argument("--job", default="graph-test")
parser.add_argument("--port", type=int, default=6382)
args = parser.parse_args()

# Point the event bus at the chosen Redis instance — set before the detect
# imports below, presumably because they read it at import time (confirm).
os.environ["REDIS_URL"] = f"redis://localhost:{args.port}/0"

# Fix: the original format string ("%(name)s%(message)s") fused the logger
# name and the message together; add the missing separator.
logging.basicConfig(level=logging.INFO,
                    format="%(levelname)-7s %(name)s %(message)s")

# Allow running from the repo root without installing the package.
sys.path.insert(0, ".")

from detect.graph import get_pipeline
from detect.state import DetectState

logger = logging.getLogger(__name__)

# Manual fixture: a pre-chunked test video that must exist locally.
VIDEO = "media/out/chunks/95043d50-4df6-4ac8-bbd5-2ba873117c6e/chunk_0000.mp4"
def main():
    """Run the pipeline once on the fixture video and log a summary."""
    logger.info("Job: %s", args.job)
    logger.info("Open: http://mpr.local.ar/detection/?job=%s", args.job)
    # Give the operator time to open the dashboard before events start.
    input("\nPress Enter to start...")

    pipeline = get_pipeline()
    state = DetectState(
        video_path=VIDEO,
        job_id=args.job,
        profile_name="soccer_broadcast",
    )
    logger.info("Running pipeline...")
    result = pipeline.invoke(state)

    logger.info("Frames extracted: %d", len(result.get("frames", [])))
    logger.info("Frames after filter: %d", len(result.get("filtered_frames", [])))
    report = result.get("report")
    if report:
        logger.info("Brands found: %d", len(report.brands))
    logger.info("Done.")


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,79 @@
"""Tests for the LangGraph detection pipeline."""
import pytest
from detect.graph import NODES, build_graph, get_pipeline
from detect.models import PipelineStats
from detect.state import DetectState
VIDEO = "media/out/chunks/95043d50-4df6-4ac8-bbd5-2ba873117c6e/chunk_0000.mp4"
def test_graph_compiles():
    """The graph assembles and compiles without raising."""
    assert get_pipeline() is not None
def test_graph_has_all_nodes():
    """Every stage listed in NODES is registered on the graph."""
    registered = build_graph().nodes
    missing = [name for name in NODES if name not in registered]
    assert not missing
def test_graph_runs_end_to_end(monkeypatch):
    """Run the full graph with mocked event emission."""
    # NOTE(review): depends on the chunk fixture at VIDEO existing on disk —
    # confirm it is available wherever this suite runs.
    events = []
    # Capture events in-process instead of pushing them to the event bus.
    monkeypatch.setattr("detect.emit.push_detect_event",
                        lambda job_id, etype, data: events.append((etype, data)))
    pipeline = get_pipeline()
    initial_state = DetectState(
        video_path=VIDEO,
        job_id="test-graph",
        profile_name="soccer_broadcast",
    )
    result = pipeline.invoke(initial_state)
    # All nodes should have transitioned
    graph_events = [e for e in events if e[0] == "graph_update"]
    assert len(graph_events) > 0
    # Should have frames
    assert len(result["frames"]) > 0
    assert len(result["filtered_frames"]) > 0
    # Report should exist
    assert result["report"] is not None
    assert result["report"].content_type == "soccer_broadcast"
    # job_complete should have been emitted
    complete_events = [e for e in events if e[0] == "job_complete"]
    assert len(complete_events) == 1
def test_graph_node_transitions(monkeypatch):
    """Verify each node emits running → done transitions."""
    captured = []
    # Capture events in-process instead of pushing them to the event bus.
    monkeypatch.setattr("detect.emit.push_detect_event",
                        lambda job_id, etype, data: captured.append((etype, data)))
    get_pipeline().invoke(DetectState(
        video_path=VIDEO,
        job_id="test-transitions",
        profile_name="soccer_broadcast",
    ))
    # One status snapshot (full node list) per graph_update event.
    snapshots = [payload["nodes"] for etype, payload in captured
                 if etype == "graph_update"]
    for node_name in NODES:
        seen = {entry["status"] for snap in snapshots
                for entry in snap if entry["id"] == node_name}
        assert "running" in seen, f"{node_name} never entered 'running'"
        assert "done" in seen, f"{node_name} never reached 'done'"

View File

@@ -4,6 +4,7 @@ import { SSEDataSource, Panel, LayoutGrid } from 'mpr-ui-framework'
import 'mpr-ui-framework/src/tokens.css'
import LogPanel from './panels/LogPanel.vue'
import FunnelPanel from './panels/FunnelPanel.vue'
import PipelineGraphPanel from './panels/PipelineGraphPanel.vue'
import type { StatsUpdate } from './types/sse-contract'
const jobId = ref(new URLSearchParams(window.location.search).get('job') || 'test-job')
@@ -60,6 +61,8 @@ source.connect()
<FunnelPanel :source="source" :status="status" />
<PipelineGraphPanel :source="source" :status="status" />
<LogPanel :source="source" :status="status" />
</LayoutGrid>
</div>

View File

@@ -0,0 +1,31 @@
<script setup lang="ts">
// Panel that visualizes pipeline node states from `graph_update` SSE events.
import { ref } from 'vue'
import { Panel } from 'mpr-ui-framework'
import GraphRenderer from 'mpr-ui-framework/src/renderers/GraphRenderer.vue'
import type { GraphNode } from 'mpr-ui-framework/src/renderers/GraphRenderer.vue'
import type { DataSource } from 'mpr-ui-framework'

// NOTE(review): duplicates NODES in detect/graph.py — confirm there is a
// contract test (or single source of truth) keeping the two lists in sync.
const PIPELINE_NODES = [
  'extract_frames', 'filter_scenes', 'detect_objects', 'run_ocr',
  'match_brands', 'escalate_vlm', 'escalate_cloud', 'compile_report',
]

const props = defineProps<{
  source: DataSource
  status?: 'idle' | 'live' | 'processing' | 'error'
}>()

// Show every node as "pending" until the backend reports transitions.
const nodes = ref<GraphNode[]>(
  PIPELINE_NODES.map((id) => ({ id, status: 'pending' }))
)

// Each graph_update event carries the full node list, so replace wholesale.
props.source.on<{ nodes: GraphNode[] }>('graph_update', (e) => {
  nodes.value = e.nodes
})
</script>

<template>
  <Panel title="Pipeline" :status="status">
    <GraphRenderer :nodes="nodes" />
  </Panel>
</template>

View File

@@ -10,15 +10,16 @@
"typecheck": "vue-tsc --noEmit"
},
"dependencies": {
"vue": "^3.5",
"@vue-flow/core": "^1.48.2",
"pinia": "^2.2",
"uplot": "^1.6"
"uplot": "^1.6",
"vue": "^3.5"
},
"devDependencies": {
"@vitejs/plugin-vue": "^5",
"typescript": "^5.6",
"vitest": "^2",
"vue-tsc": "^2",
"vite": "^6",
"@vitejs/plugin-vue": "^5"
"vitest": "^2",
"vue-tsc": "^2"
}
}

View File

@@ -8,6 +8,9 @@ importers:
.:
dependencies:
'@vue-flow/core':
specifier: ^1.48.2
version: 1.48.2(vue@3.5.30(typescript@5.9.3))
pinia:
specifier: ^2.2
version: 2.3.1(typescript@5.9.3)(vue@3.5.30(typescript@5.9.3))
@@ -478,6 +481,9 @@ packages:
'@types/estree@1.0.8':
resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==}
'@types/web-bluetooth@0.0.20':
resolution: {integrity: sha512-g9gZnnXVq7gM7v3tJCWV/qw7w+KeOlSHAhgF9RytFyifW6AF61hdT2ucrYhPq9hLs5JIryeupHV3qGk95dH9ow==}
'@vitejs/plugin-vue@5.2.4':
resolution: {integrity: sha512-7Yx/SXSOcQq5HiiV3orevHUFn+pmMB4cgbEkDYgnkUWb0WfeQ/wa2yFv6D5ICiCQOVpjA7vYDXrC7AGO8yjDHA==}
engines: {node: ^18.0.0 || >=20.0.0}
@@ -523,6 +529,11 @@ packages:
'@volar/typescript@2.4.15':
resolution: {integrity: sha512-2aZ8i0cqPGjXb4BhkMsPYDkkuc2ZQ6yOpqwAuNwUoncELqoy5fRgOQtLR9gB0g902iS0NAkvpIzs27geVyVdPg==}
'@vue-flow/core@1.48.2':
resolution: {integrity: sha512-raxhgKWE+G/mcEvXJjGFUDYW9rAI3GOtiHR3ZkNpwBWuIaCC1EYiBmKGwJOoNzVFgwO7COgErnK7i08i287AFA==}
peerDependencies:
vue: ^3.3.0
'@vue/compiler-core@3.5.30':
resolution: {integrity: sha512-s3DfdZkcu/qExZ+td75015ljzHc6vE+30cFMGRPROYjqkroYI5NV2X1yAMX9UeyBNWB9MxCfPcsjpLS11nzkkw==}
@@ -566,6 +577,15 @@ packages:
'@vue/shared@3.5.30':
resolution: {integrity: sha512-YXgQ7JjaO18NeK2K9VTbDHaFy62WrObMa6XERNfNOkAhD1F1oDSf3ZJ7K6GqabZ0BvSDHajp8qfS5Sa2I9n8uQ==}
'@vueuse/core@10.11.1':
resolution: {integrity: sha512-guoy26JQktXPcz+0n3GukWIy/JDNKti9v6VEMu6kV2sYBsWuGiTU8OWdg+ADfUbHg3/3DlqySDe7JmdHrktiww==}
'@vueuse/metadata@10.11.1':
resolution: {integrity: sha512-IGa5FXd003Ug1qAZmyE8wF3sJ81xGLSqTqtQ6jaVfkeZ4i5kS2mwQF61yhVqojRnenVew5PldLyRgvdl4YYuSw==}
'@vueuse/shared@10.11.1':
resolution: {integrity: sha512-LHpC8711VFZlDaYUXEBbFBCQ7GS3dVU9mjOhhMhXP6txTV4EhYQg/KGnQuvt/sPAtoUKq7VVUnL6mVtFoL42sA==}
alien-signals@1.0.13:
resolution: {integrity: sha512-OGj9yyTnJEttvzhTUWuscOvtqxq5vrhF7vL9oS0xJ2mK0ItPYP1/y+vCFebfxoEyAz0++1AIwJ5CMr+Fk3nDmg==}
@@ -594,6 +614,44 @@ packages:
csstype@3.2.3:
resolution: {integrity: sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==}
d3-color@3.1.0:
resolution: {integrity: sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==}
engines: {node: '>=12'}
d3-dispatch@3.0.1:
resolution: {integrity: sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==}
engines: {node: '>=12'}
d3-drag@3.0.0:
resolution: {integrity: sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==}
engines: {node: '>=12'}
d3-ease@3.0.1:
resolution: {integrity: sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==}
engines: {node: '>=12'}
d3-interpolate@3.0.1:
resolution: {integrity: sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==}
engines: {node: '>=12'}
d3-selection@3.0.0:
resolution: {integrity: sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==}
engines: {node: '>=12'}
d3-timer@3.0.1:
resolution: {integrity: sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==}
engines: {node: '>=12'}
d3-transition@3.0.1:
resolution: {integrity: sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==}
engines: {node: '>=12'}
peerDependencies:
d3-selection: 2 - 3
d3-zoom@3.0.0:
resolution: {integrity: sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==}
engines: {node: '>=12'}
de-indent@1.0.2:
resolution: {integrity: sha512-e/1zu3xH5MQryN2zdVaF0OrdNLUbvWxzMbi+iNA6Bky7l1RoP8a2fIbRocyHclXt/arDrrR6lL3TqFD9pMQTsg==}
@@ -1129,6 +1187,8 @@ snapshots:
'@types/estree@1.0.8': {}
'@types/web-bluetooth@0.0.20': {}
'@vitejs/plugin-vue@5.2.4(vite@6.4.1)(vue@3.5.30(typescript@5.9.3))':
dependencies:
vite: 6.4.1
@@ -1186,6 +1246,17 @@ snapshots:
path-browserify: 1.0.1
vscode-uri: 3.1.0
'@vue-flow/core@1.48.2(vue@3.5.30(typescript@5.9.3))':
dependencies:
'@vueuse/core': 10.11.1(vue@3.5.30(typescript@5.9.3))
d3-drag: 3.0.0
d3-interpolate: 3.0.1
d3-selection: 3.0.0
d3-zoom: 3.0.0
vue: 3.5.30(typescript@5.9.3)
transitivePeerDependencies:
- '@vue/composition-api'
'@vue/compiler-core@3.5.30':
dependencies:
'@babel/parser': 7.29.2
@@ -1260,6 +1331,25 @@ snapshots:
'@vue/shared@3.5.30': {}
'@vueuse/core@10.11.1(vue@3.5.30(typescript@5.9.3))':
dependencies:
'@types/web-bluetooth': 0.0.20
'@vueuse/metadata': 10.11.1
'@vueuse/shared': 10.11.1(vue@3.5.30(typescript@5.9.3))
vue-demi: 0.14.10(vue@3.5.30(typescript@5.9.3))
transitivePeerDependencies:
- '@vue/composition-api'
- vue
'@vueuse/metadata@10.11.1': {}
'@vueuse/shared@10.11.1(vue@3.5.30(typescript@5.9.3))':
dependencies:
vue-demi: 0.14.10(vue@3.5.30(typescript@5.9.3))
transitivePeerDependencies:
- '@vue/composition-api'
- vue
alien-signals@1.0.13: {}
assertion-error@2.0.1: {}
@@ -1284,6 +1374,42 @@ snapshots:
csstype@3.2.3: {}
d3-color@3.1.0: {}
d3-dispatch@3.0.1: {}
d3-drag@3.0.0:
dependencies:
d3-dispatch: 3.0.1
d3-selection: 3.0.0
d3-ease@3.0.1: {}
d3-interpolate@3.0.1:
dependencies:
d3-color: 3.1.0
d3-selection@3.0.0: {}
d3-timer@3.0.1: {}
d3-transition@3.0.1(d3-selection@3.0.0):
dependencies:
d3-color: 3.1.0
d3-dispatch: 3.0.1
d3-ease: 3.0.1
d3-interpolate: 3.0.1
d3-selection: 3.0.0
d3-timer: 3.0.1
d3-zoom@3.0.0:
dependencies:
d3-dispatch: 3.0.1
d3-drag: 3.0.0
d3-interpolate: 3.0.1
d3-selection: 3.0.0
d3-transition: 3.0.1(d3-selection@3.0.0)
de-indent@1.0.2: {}
debug@4.4.3:

View File

@@ -11,3 +11,4 @@ export { default as LayoutGrid } from './components/LayoutGrid.vue'
// Renderers
export { default as LogRenderer } from './renderers/LogRenderer.vue'
export { default as TimeSeriesRenderer } from './renderers/TimeSeriesRenderer.vue'
export { default as GraphRenderer } from './renderers/GraphRenderer.vue'

View File

@@ -0,0 +1,80 @@
<script setup lang="ts">
// Renders an ordered list of pipeline nodes as a vertical VueFlow graph,
// coloring each node by its status token.
import { computed } from 'vue'
import { VueFlow } from '@vue-flow/core'
import '@vue-flow/core/dist/style.css'
import '@vue-flow/core/dist/theme-default.css'

// Node contract shared with panels that feed this renderer.
export interface GraphNode {
  id: string
  status: 'pending' | 'running' | 'done' | 'error'
}

const props = defineProps<{
  nodes: GraphNode[]
}>()

// Status -> design-token color (tokens defined in the framework's CSS).
const statusColors: Record<string, string> = {
  pending: 'var(--status-idle)',
  running: 'var(--status-processing)',
  done: 'var(--status-live)',
  error: 'var(--status-error)',
}

// Fixed single-column layout: node i sits at y = i * 70; fit-view scales it.
const flowNodes = computed(() =>
  props.nodes.map((n, i) => ({
    id: n.id,
    label: n.id.replace(/_/g, ' '),
    position: { x: 20, y: i * 70 },
    style: {
      // Unknown statuses fall back to the "pending" color.
      background: statusColors[n.status] ?? statusColors.pending,
      color: n.status === 'pending' ? '#ccc' : '#000',
      border: 'none',
      borderRadius: 'var(--panel-radius)',
      fontFamily: 'var(--font-mono)',
      fontSize: 'var(--font-size-sm)',
      fontWeight: '600',
      padding: '8px 16px',
    },
  }))
)

// Connects consecutive nodes — assumes props.nodes is already in execution
// order. The edge leaving a currently-running node is animated.
const flowEdges = computed(() => {
  const edges = []
  for (let i = 0; i < props.nodes.length - 1; i++) {
    edges.push({
      id: `${props.nodes[i].id}->${props.nodes[i + 1].id}`,
      source: props.nodes[i].id,
      target: props.nodes[i + 1].id,
      animated: props.nodes[i].status === 'running',
      style: { stroke: '#555568' },
    })
  }
  return edges
})
</script>

<template>
  <!-- Static, read-only view: dragging, connecting, zoom and pan disabled. -->
  <div class="graph-renderer">
    <VueFlow
      :nodes="flowNodes"
      :edges="flowEdges"
      :fit-view-on-init="true"
      :nodes-draggable="false"
      :nodes-connectable="false"
      :zoom-on-scroll="false"
      :pan-on-scroll="false"
    />
  </div>
</template>

<style scoped>
.graph-renderer {
  width: 100%;
  height: 100%;
  min-height: 200px;
}
/* Let the host panel's background show through the flow canvas. */
.graph-renderer :deep(.vue-flow__background) {
  background: transparent;
}
</style>