updated modelgen, decoupling tester

This commit is contained in:
2026-04-12 03:07:25 -03:00
parent af06309dad
commit 85a856b7ac
58 changed files with 4770 additions and 625 deletions

View File

@@ -28,8 +28,8 @@ DB_DUMP=dev.sql
# =============================================================================
# PORTS
# =============================================================================
BACKEND_PORT=8000
FRONTEND_PORT=3000
BACKEND_PORT=8010
FRONTEND_PORT=3010
# =============================================================================
# BACKEND SERVER (Uvicorn)

View File

@@ -9,29 +9,42 @@
# ./start.sh --build # Rebuild images
set -e
cd "$(dirname "$0")/.."
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
CFG_DIR="$(dirname "$SCRIPT_DIR")"
GEN_DIR="$CFG_DIR/../../gen/amar"
SPR_DIR="$GEN_DIR/soleprint"
BUILD=""
DETACH=""
TARGET="all"
NGINX="-f $CFG_DIR/soleprint/docker-compose.nginx.yml"
for arg in "$@"; do
case $arg in
-d|--detached) DETACH="-d" ;;
--build) BUILD="--build" ;;
--no-nginx) NGINX="" ;;
amar) TARGET="amar" ;;
soleprint) TARGET="soleprint" ;;
esac
done
if [ "$TARGET" = "all" ] || [ "$TARGET" = "amar" ]; then
if [ "$TARGET" = "all" ]; then
echo "Starting amar + soleprint..."
(cd "$CFG_DIR" && docker compose up $BUILD -d)
(cd "$SPR_DIR" && docker compose -f docker-compose.yml $NGINX up $BUILD -d)
if [ -z "$DETACH" ]; then
(cd "$CFG_DIR" && docker compose logs -f) &
(cd "$SPR_DIR" && docker compose logs -f) &
wait
fi
elif [ "$TARGET" = "amar" ]; then
echo "Starting amar..."
docker compose up $DETACH $BUILD
fi
if [ "$TARGET" = "all" ] || [ "$TARGET" = "soleprint" ]; then
(cd "$CFG_DIR" && docker compose up $DETACH $BUILD)
elif [ "$TARGET" = "soleprint" ]; then
echo "Starting soleprint..."
(cd soleprint && docker compose up $DETACH $BUILD)
(cd "$SPR_DIR" && docker compose -f docker-compose.yml $NGINX up $DETACH $BUILD)
fi
if [ -n "$DETACH" ]; then

View File

@@ -7,19 +7,23 @@
# ./stop.sh soleprint # Stop only soleprint
set -e
cd "$(dirname "$0")/.."
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
CFG_DIR="$(dirname "$SCRIPT_DIR")"
GEN_DIR="$CFG_DIR/../../gen/amar"
SPR_DIR="$GEN_DIR/soleprint"
TARGET="all"
[ -n "$1" ] && TARGET="$1"
if [ "$TARGET" = "all" ] || [ "$TARGET" = "soleprint" ]; then
echo "Stopping soleprint..."
(cd soleprint && docker compose down)
(cd "$SPR_DIR" && docker compose -f docker-compose.yml -f "$CFG_DIR/soleprint/docker-compose.nginx.yml" down 2>/dev/null || true)
fi
if [ "$TARGET" = "all" ] || [ "$TARGET" = "amar" ]; then
echo "Stopping amar..."
docker compose down
(cd "$CFG_DIR" && docker compose down)
fi
echo "Done."

View File

@@ -13,14 +13,18 @@ DEPLOYMENT_NAME=amar_soleprint
NETWORK_NAME=soleprint_network
# =============================================================================
# PATHS
# PATHS (bare-metal only, not used by docker)
# =============================================================================
SOLEPRINT_BARE_PATH=/home/mariano/wdir/spr/gen
SOLEPRINT_BARE_PATH=/home/mariano/wdir/spr/gen/amar/soleprint
# =============================================================================
# PORTS
# =============================================================================
SOLEPRINT_PORT=12000
NGINX_PORT=8030
ROOM_NAME=amar
MANAGED_DOMAIN=amar.local.ar
SOLEPRINT_DOMAIN=spr.local.ar
# =============================================================================
# DATABASE (amar's DB for station tools)

View File

@@ -20,12 +20,10 @@ services:
image: nginx:alpine
container_name: ${DEPLOYMENT_NAME}_nginx
ports:
- "80:80"
- "${NGINX_PORT:-8030}:80"
volumes:
# Mount template that will be processed with envsubst
- ../ctrl/server/nginx/docker-local.conf:/etc/nginx/templates/default.conf.template:ro
# Mount wrapper files for serving
- ../wrapper:/app/wrapper:ro
- ./nginx/local.conf:/etc/nginx/conf.d/default.conf:ro
env_file:
- .env
environment:

View File

@@ -1,34 +1,34 @@
# Soleprint Services - Docker Compose
# Soleprint Services - Amar Room
#
# Runs soleprint hub as a single service
# Artery, atlas, station are accessed via path-based routing
#
# Usage:
# cd mainroom/soleprint && docker compose up -d
# cd gen/amar/soleprint && docker compose up
name: ${DEPLOYMENT_NAME}
services:
soleprint:
build:
context: ${SOLEPRINT_BARE_PATH}
context: .
dockerfile: Dockerfile
container_name: ${DEPLOYMENT_NAME}_soleprint
user: "${UID:-1000}:${GID:-1000}"
volumes:
- ${SOLEPRINT_BARE_PATH}:/app
- .:/app
ports:
- "${SOLEPRINT_PORT}:8000"
env_file:
- .env
environment:
# For single-port mode, all subsystems are internal routes
- ARTERY_EXTERNAL_URL=/artery
- ATLAS_EXTERNAL_URL=/atlas
- STATION_EXTERNAL_URL=/station
networks:
- default
# Use run.py for single-port bare-metal mode
command: uvicorn run:app --host 0.0.0.0 --port 8000 --reload
networks:
default:
external: true
name: ${NETWORK_NAME}

View File

@@ -33,4 +33,5 @@ services:
networks:
default:
external: true
name: ${NETWORK_NAME}

46
ctrl/deploy.sh Executable file
View File

@@ -0,0 +1,46 @@
#!/bin/bash
# Deploy soleprint standalone to server
#
# Usage:
# ./ctrl/deploy.sh # Sync and restart
# ./ctrl/deploy.sh --build # Rebuild locally first, then sync and restart
# ./ctrl/deploy.sh --sync-only # Sync without restarting
set -e
# Run from the repo root regardless of where the script was invoked.
cd "$(dirname "$0")/.."
SERVER="mcrn.ar"
# NOTE: the '~' is expanded by the remote shell (rsync/ssh), not locally.
REMOTE_DIR="~/soleprint/gen/standalone"
BUILD=false
SYNC_ONLY=false
for arg in "$@"; do
case $arg in
--build) BUILD=true ;;
--sync-only) SYNC_ONLY=true ;;
esac
done
if [ "$BUILD" = true ]; then
echo "Building standalone..."
python build.py
fi
echo "Syncing gen/standalone/ to $SERVER:$REMOTE_DIR..."
# --delete mirrors removals; the gitignore filter also skips ignored files.
rsync -avz --delete \
--exclude='__pycache__' \
--exclude='.venv' \
--exclude='*.pyc' \
--exclude='.env' \
--filter=':- .gitignore' \
gen/standalone/ "$SERVER:$REMOTE_DIR/"
if [ "$SYNC_ONLY" = true ]; then
echo "Sync complete (restart skipped)"
exit 0
fi
echo "Restarting soleprint on server..."
# Rebuild the image remotely so code changes are picked up by the container.
ssh "$SERVER" "cd $REMOTE_DIR && docker compose up -d --build"
echo "Deploy complete"

330
ctrl/spr.py Executable file
View File

@@ -0,0 +1,330 @@
#!/usr/bin/env python3
"""
spr - Soleprint component manager
Manages distributable components from the spr repo into target folders.
Runs from spr/. Consuming projects have no awareness of spr — they just
commit whatever lands in the target folder.
Usage:
python ctrl/spr.py list
python ctrl/spr.py sync soleprint-ui ~/wdir/unt/ui/framework
python ctrl/spr.py watch soleprint-ui ~/wdir/unt/ui/framework # ctrl+c to stop
python ctrl/spr.py publish soleprint-ui ~/wdir/mpr/ui/framework
python ctrl/spr.py diff soleprint-ui ~/wdir/mpr/ui/framework
"""
import argparse
import fnmatch
import json
import logging
import shutil
import subprocess
import sys
import time
log = logging.getLogger("spr")
SPR_HOME = Path(__file__).resolve().parent.parent
REGISTRY_PATH = SPR_HOME / "registry.json"
EXCLUDE = {
"node_modules",
"__pycache__",
".venv",
"dist",
".git",
"*.egg-info",
".spr",
"pnpm-lock.yaml",
}
def load_registry():
    """Load and parse the component registry, exiting if it is missing."""
    try:
        return json.loads(REGISTRY_PATH.read_text())
    except FileNotFoundError:
        log.error("registry not found: %s", REGISTRY_PATH)
        sys.exit(1)
def resolve_component(registry, name):
    """Look up *name* in *registry*; return (type, source_path) or exit.

    Exits with an error when the component is unknown or its source
    directory does not exist on disk.
    """
    entry = registry.get(name)
    if entry is None:
        log.error("unknown component: %s", name)
        log.info("available: %s", ", ".join(registry))
        sys.exit(1)
    source = SPR_HOME / entry["path"]
    if not source.is_dir():
        log.error("source not found: %s", source)
        sys.exit(1)
    return entry["type"], source
def get_version(comp_type, source):
    """Read a component's version string from its packaging metadata.

    npm components read ``package.json``; pip components scan
    ``pyproject.toml`` for the first ``version`` line. Returns "?" when
    the metadata file is absent or carries no version.
    """
    try:
        if comp_type == "npm":
            manifest = json.loads((source / "package.json").read_text())
            return manifest.get("version", "?")
        if comp_type == "pip":
            for raw in (source / "pyproject.toml").read_text().splitlines():
                if raw.strip().startswith("version"):
                    return raw.split('"')[1]
    except FileNotFoundError:
        pass
    return "?"
def get_sha():
    """Return the short git SHA of the spr checkout, or "unknown".

    "unknown" covers both a missing git binary and a failed git command
    (e.g. SPR_HOME is not a repository).
    """
    try:
        proc = subprocess.run(
            ["git", "rev-parse", "--short", "HEAD"],
            cwd=SPR_HOME,
            capture_output=True,
            text=True,
        )
    except FileNotFoundError:
        # git itself is not installed
        return "unknown"
    if proc.returncode != 0:
        return "unknown"
    return proc.stdout.strip()
def should_exclude(path, base, patterns=None):
    """Return True if *path* (relative to *base*) matches an exclusion pattern.

    Every component of the relative path is tested against *patterns*
    (default: the module-level EXCLUDE set) using shell-style matching,
    so literal names ("node_modules") and globs ("*.egg-info") are
    handled uniformly — the original hand-rolled endswith() check only
    approximated glob semantics.

    :param path: absolute or base-relative Path being considered
    :param base: Path that *path* is resolved against
    :param patterns: optional iterable of names/globs; defaults to EXCLUDE
    """
    if patterns is None:
        patterns = EXCLUDE
    rel = path.relative_to(base)
    return any(
        fnmatch.fnmatch(part, pattern)
        for part in rel.parts
        for pattern in patterns
    )
def copy_tree(src, dst):
    """Recursively copy *src* into *dst*, skipping excluded paths.

    Metadata is preserved (copy2). Returns the number of files copied.
    """
    copied = 0
    dst.mkdir(parents=True, exist_ok=True)
    for entry in src.iterdir():
        # Exclusions are evaluated relative to the parent of the sync root.
        if should_exclude(entry, src.parent):
            continue
        dest = dst / entry.name
        if entry.is_dir():
            copied += copy_tree(entry, dest)
            continue
        dest.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy2(entry, dest)
        copied += 1
    return copied
def sync_tree(src, dst):
    """Bidirectional sync — newest file wins. Returns (fwd, rev) counts.

    NOTE(review): each one-way pass also deletes destination-side files
    that are absent from its source side, so a file newly created in
    *dst* is removed by the forward pass before the reverse pass could
    copy it back to *src* — confirm this "src owns the file set"
    behavior is intended for watch mode.
    """
    fwd = _sync_one_way(src, dst)
    rev = _sync_one_way(dst, src)
    return fwd, rev
def _sync_one_way(src, dst):
    """Copy files from src to dst only if src is newer.

    Also mirrors deletions: anything present in dst but absent from src
    is removed. Returns the combined count of files copied or removed.
    """
    count = 0
    if not src.is_dir():
        return count
    dst.mkdir(parents=True, exist_ok=True)
    for item in src.iterdir():
        if should_exclude(item, src.parent):
            continue
        target = dst / item.name
        if item.is_dir():
            count += _sync_one_way(item, target)
        else:
            # Copy only when the destination is missing or strictly older.
            if not target.exists() or item.stat().st_mtime > target.stat().st_mtime:
                target.parent.mkdir(parents=True, exist_ok=True)
                shutil.copy2(item, target)
                count += 1
    # Remove files in dst that don't exist in src
    if dst.is_dir():
        for item in dst.iterdir():
            if should_exclude(item, dst.parent):
                continue
            counterpart = src / item.name
            if not counterpart.exists():
                if item.is_dir():
                    shutil.rmtree(item)
                else:
                    item.unlink()
                count += 1
    return count
def write_stamp(dest, name, comp_type, source, mode):
    """Record provenance metadata in a ``.spr`` file inside *dest*.

    The stamp captures component name, version, type, repo SHA, the
    sync mode that produced it, a UTC timestamp, and the source path.
    """
    updated = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
    fields = [
        f"name={name}",
        f"version={get_version(comp_type, source)}",
        f"type={comp_type}",
        f"sha={get_sha()}",
        f"mode={mode}",
        f"updated={updated}",
        f"source={source}",
    ]
    (dest / ".spr").write_text("\n".join(fields) + "\n")
# ── commands ─────────────────────────────────────────────────
def cmd_list(args):
    """Print every registered component with its type, version, and path."""
    registry = load_registry()
    log.info("Components (%s):\n", REGISTRY_PATH)
    for name, entry in registry.items():
        kind = entry["type"]
        version = get_version(kind, SPR_HOME / entry["path"])
        log.info(" %s %s v%-10s %s", f"{name:<25}", f"{kind:<5}", version, entry["path"])
def cmd_publish(args):
    """Replace the target folder with a fresh copy of the component.

    Publish is a full replacement: any existing destination tree is
    removed first, then stamped with "published" provenance.
    """
    registry = load_registry()
    comp_type, source = resolve_component(registry, args.component)
    dest = Path(args.dest).resolve()
    if dest.exists():
        shutil.rmtree(dest)
    count = copy_tree(source, dest)
    write_stamp(dest, args.component, comp_type, source, "published")
    log.info(
        "%s v%s (%s) -> %s (%d files)",
        args.component,
        get_version(comp_type, source),
        get_sha(),
        dest,
        count,
    )
def cmd_sync(args):
    """Run one bidirectional sync pass and stamp the target as "synced"."""
    registry = load_registry()
    comp_type, source = resolve_component(registry, args.component)
    dest = Path(args.dest).resolve()
    dest.mkdir(parents=True, exist_ok=True)
    fwd, rev = sync_tree(source, dest)
    write_stamp(dest, args.component, comp_type, source, "synced")
    log.info("%s synced (%d fwd, %d rev)", args.component, fwd, rev)
def cmd_watch(args):
    """Continuously two-way sync a component until interrupted.

    Performs an initial sync, then polls every ``args.interval`` seconds.
    On Ctrl+C a final sync pass runs so a last-second edit is not lost.
    """
    registry = load_registry()
    comp_type, source = resolve_component(registry, args.component)
    dest = Path(args.dest).resolve()
    dest.mkdir(parents=True, exist_ok=True)
    interval = args.interval
    # Initial sync
    fwd, rev = sync_tree(source, dest)
    write_stamp(dest, args.component, comp_type, source, "watching")
    log.info("%s initial sync (%d fwd, %d rev)", args.component, fwd, rev)
    log.info(" source: %s", source)
    log.info(" dest: %s", dest)
    log.info(" watching every %ds — ctrl+c to stop", interval)
    try:
        while True:
            time.sleep(interval)
            fwd, rev = sync_tree(source, dest)
            # Only log when something actually moved, to keep output quiet.
            if fwd or rev:
                log.info("synced (%d fwd, %d rev)", fwd, rev)
    except KeyboardInterrupt:
        # Final sync
        fwd, rev = sync_tree(source, dest)
        if fwd or rev:
            log.info("final sync (%d fwd, %d rev)", fwd, rev)
        log.info("stopped")
def cmd_diff(args):
    """Show file-level differences between the repo source and a target copy.

    Shells out to ``diff -rq`` with the same build-artifact exclusions
    used by the sync machinery.
    """
    registry = load_registry()
    _, source = resolve_component(registry, args.component)
    dest = Path(args.dest).resolve()
    if not dest.exists():
        log.error("%s not found at %s", args.component, dest)
        sys.exit(1)
    skip = [
        "node_modules",
        "__pycache__",
        ".venv",
        "dist",
        "*.egg-info",
        ".git",
        ".spr",
        "pnpm-lock.yaml",
    ]
    argv = ["diff", "-rq"]
    argv += [f"--exclude={pattern}" for pattern in skip]
    argv += [str(source), str(dest)]
    result = subprocess.run(argv, capture_output=True, text=True)
    if result.stdout:
        log.info("\n%s", result.stdout.rstrip())
    else:
        log.info("%s is in sync", args.component)
# ── main ─────────────────────────────────────────────────────
def main():
    """CLI entry point: configure logging, parse args, dispatch to cmd_*."""
    # --debug is stripped before argparse runs so it works in any position,
    # including after a subcommand.
    logging.basicConfig(
        level=logging.DEBUG if "--debug" in sys.argv else logging.INFO,
        format="%(levelname)s %(message)s" if "--debug" in sys.argv else ":: %(message)s",
    )
    sys.argv = [a for a in sys.argv if a != "--debug"]
    parser = argparse.ArgumentParser(
        prog="spr",
        description="Soleprint component manager",
    )
    sub = parser.add_subparsers(dest="command")
    sub.add_parser("list", help="show available components")
    # publish / sync / diff share the same positional signature.
    for cmd in ("publish", "sync", "diff"):
        p = sub.add_parser(cmd)
        p.add_argument("component", help="component name")
        p.add_argument("dest", help="target folder path")
    p = sub.add_parser("watch", help="continuous two-way sync (foreground, ctrl+c to stop)")
    p.add_argument("component", help="component name")
    p.add_argument("dest", help="target folder path")
    p.add_argument("-i", "--interval", type=int, default=2, help="poll interval in seconds (default: 2)")
    args = parser.parse_args()
    if not args.command:
        parser.print_help()
        sys.exit(0)
    # Dispatch table: subcommand name -> handler function.
    {
        "list": cmd_list,
        "publish": cmd_publish,
        "sync": cmd_sync,
        "watch": cmd_watch,
        "diff": cmd_diff,
    }[args.command](args)

10
registry.json Normal file
View File

@@ -0,0 +1,10 @@
{
"soleprint-ui": {
"type": "npm",
"path": "soleprint/common/ui"
},
"soleprint-modelgen": {
"type": "pip",
"path": "soleprint/station/tools/modelgen"
}
}

View File

@@ -0,0 +1,25 @@
{
"name": "soleprint-ui",
"version": "0.1.0",
"private": true,
"type": "module",
"main": "src/index.ts",
"scripts": {
"test": "vitest run",
"test:watch": "vitest",
"typecheck": "vue-tsc --noEmit"
},
"dependencies": {
"@vue-flow/core": "^1.48.2",
"pinia": "^2.2",
"uplot": "^1.6",
"vue": "^3.5"
},
"devDependencies": {
"@vitejs/plugin-vue": "^5",
"typescript": "^5.6",
"vite": "^6",
"vitest": "^2",
"vue-tsc": "^2"
}
}

1692
soleprint/common/ui/pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,32 @@
<script setup lang="ts">
// Equal-cell CSS grid layout; columns/rows/gap are configurable props.
const props = withDefaults(defineProps<{
  columns?: number
  rows?: number
  gap?: string
}>(), {
  columns: 2,
  rows: 2,
  gap: 'var(--space-2)',
})
</script>
<template>
  <!-- Grid tracks are built inline so column/row counts stay reactive. -->
  <div
    class="layout-grid"
    :style="{
      gridTemplateColumns: `repeat(${props.columns}, 1fr)`,
      gridTemplateRows: `repeat(${props.rows}, 1fr)`,
      gap: props.gap,
    }"
  >
    <slot />
  </div>
</template>
<style scoped>
.layout-grid {
  display: grid;
  width: 100%;
  height: 100%;
}
</style>

View File

@@ -0,0 +1,87 @@
<script setup lang="ts">
// Titled panel chrome: header (title + actions slot + status dot),
// scrollable body slot, and an absolutely-positioned overlay slot.
defineProps<{
  title: string
  status?: 'idle' | 'live' | 'processing' | 'error'
}>()
</script>
<template>
  <div class="panel">
    <div class="panel-header">
      <span class="panel-title">{{ title }}</span>
      <span class="panel-actions"><slot name="actions" /></span>
      <!-- Status dot defaults to the 'idle' color when no status is given. -->
      <span class="panel-status" :class="status ?? 'idle'" />
    </div>
    <div class="panel-body">
      <slot />
    </div>
    <!-- Overlay covers the body area only (inset below the header). -->
    <div class="panel-overlay">
      <slot name="overlay" />
    </div>
  </div>
</template>
<style scoped>
.panel {
  position: relative;
  background: var(--surface-1);
  border: var(--panel-border);
  border-radius: var(--panel-radius);
  overflow: hidden;
  display: flex;
  flex-direction: column;
}
.panel-header {
  display: flex;
  align-items: center;
  gap: var(--space-2);
  height: var(--panel-header-height);
  padding: 0 var(--space-3);
  background: var(--surface-2);
  border-bottom: var(--panel-border);
  flex-shrink: 0;
}
.panel-title {
  font-family: var(--font-ui);
  font-size: var(--font-size-sm);
  font-weight: 600;
  color: var(--text-secondary);
  text-transform: uppercase;
  letter-spacing: 0.04em;
}
.panel-actions {
  margin-left: auto;
  display: flex;
  align-items: center;
  gap: var(--space-2);
}
.panel-status {
  width: 8px;
  height: 8px;
  border-radius: 50%;
}
.panel-status.idle { background: var(--status-idle); }
.panel-status.live { background: var(--status-live); }
.panel-status.processing { background: var(--status-processing); }
.panel-status.error { background: var(--status-error); }
.panel-body {
  flex: 1;
  overflow: hidden;
  padding: var(--space-2);
  min-height: 0;
}
/* Overlay itself ignores the pointer so the body stays interactive... */
.panel-overlay {
  position: absolute;
  inset: var(--panel-header-height) 0 0 0;
  pointer-events: none;
}
/* ...but anything slotted into it is clickable again. */
.panel-overlay > :deep(*) {
  pointer-events: auto;
}
</style>

View File

@@ -0,0 +1,145 @@
<script setup lang="ts">
// Schema-driven parameter editor: renders checkboxes for bool fields and
// range sliders for int/float fields; emits 'update' per change.
import { computed } from 'vue'
export interface ConfigField {
  name: string
  type: string
  default: unknown
  description: string
  min: number | null
  max: number | null
  options: string[] | null
}
const props = defineProps<{
  fields: ConfigField[]
  values: Record<string, unknown>
}>()
const emit = defineEmits<{
  'update': [name: string, value: unknown]
  'reset': []
}>()
// Field types other than int/float/bool are not rendered by this editor.
const numericFields = computed(() => props.fields.filter(f => f.type === 'int' || f.type === 'float'))
const boolFields = computed(() => props.fields.filter(f => f.type === 'bool'))
function onInput(name: string, value: unknown) {
  emit('update', name, value)
}
</script>
<template>
  <div class="param-editor">
    <!-- Boolean fields -->
    <label v-for="f in boolFields" :key="f.name" class="param-field bool-field">
      <input
        type="checkbox"
        :checked="!!values[f.name]"
        @change="(e) => onInput(f.name, (e.target as HTMLInputElement).checked)"
      />
      <span class="field-label" :title="f.description">{{ f.name.replace(/_/g, ' ') }}</span>
    </label>
    <!-- Numeric fields (range sliders) -->
    <div v-for="f in numericFields" :key="f.name" class="param-field">
      <div class="field-header">
        <!-- Label strips a leading "edge_" prefix for compact display. -->
        <span class="field-label" :title="f.description">{{ f.name.replace(/^edge_/, '').replace(/_/g, ' ') }}</span>
        <span class="field-value">{{ values[f.name] }}</span>
      </div>
      <!-- Missing min/max fall back to 0..500; floats step by 0.01. -->
      <input
        type="range"
        :min="f.min ?? 0"
        :max="f.max ?? 500"
        :step="f.type === 'float' ? 0.01 : 1"
        :value="values[f.name] as number"
        @input="(e) => onInput(f.name, Number((e.target as HTMLInputElement).value))"
      />
      <div class="field-range">
        <span>{{ f.min ?? 0 }}</span>
        <span>{{ f.max ?? 500 }}</span>
      </div>
    </div>
  </div>
</template>
<style scoped>
.param-editor {
  display: flex;
  flex-direction: column;
  gap: var(--space-2);
}
.param-field {
  display: flex;
  flex-direction: column;
  gap: 2px;
}
.bool-field {
  flex-direction: row;
  align-items: center;
  gap: 6px;
  cursor: pointer;
}
.field-header {
  display: flex;
  justify-content: space-between;
  align-items: center;
}
.field-label {
  color: var(--text-secondary);
  font-size: 10px;
  text-transform: capitalize;
}
.field-value {
  font-weight: 600;
  font-size: 10px;
  color: var(--text-primary);
  min-width: 30px;
  text-align: right;
}
.field-range {
  display: flex;
  justify-content: space-between;
  font-size: 9px;
  color: var(--text-dim);
}
input[type="range"] {
  -webkit-appearance: none;
  appearance: none;
  width: 100%;
  height: 4px;
  background: var(--surface-3);
  border-radius: 2px;
  outline: none;
}
input[type="range"]::-webkit-slider-thumb {
  -webkit-appearance: none;
  appearance: none;
  width: 12px;
  height: 12px;
  border-radius: 50%;
  background: var(--text-primary);
  cursor: pointer;
}
input[type="range"]::-moz-range-thumb {
  width: 12px;
  height: 12px;
  border-radius: 50%;
  background: var(--text-primary);
  cursor: pointer;
  border: none;
}
input[type="checkbox"] {
  accent-color: #00bcd4;
}
</style>

View File

@@ -0,0 +1,70 @@
<script setup lang="ts">
// Draggable divider that emits pixel deltas; the parent owns the sizing.
// Pointer capture keeps drags tracking even when the cursor leaves the bar.
import { ref } from 'vue'
const props = defineProps<{
  direction: 'horizontal' | 'vertical'
}>()
const emit = defineEmits<{
  resize: [delta: number]
}>()
const dragging = ref(false)
// Last pointer position along the drag axis (client coords).
let startPos = 0
function onPointerDown(e: PointerEvent) {
  dragging.value = true
  startPos = props.direction === 'horizontal' ? e.clientX : e.clientY
  const el = e.target as HTMLElement
  el.setPointerCapture(e.pointerId)
}
function onPointerMove(e: PointerEvent) {
  if (!dragging.value) return
  const currentPos = props.direction === 'horizontal' ? e.clientX : e.clientY
  // Emit incremental deltas; startPos advances each move.
  const delta = currentPos - startPos
  startPos = currentPos
  emit('resize', delta)
}
function onPointerUp() {
  // Pointer capture is released implicitly on pointerup.
  dragging.value = false
}
</script>
<template>
  <div
    class="resize-handle"
    :class="[direction, { dragging }]"
    @pointerdown="onPointerDown"
    @pointermove="onPointerMove"
    @pointerup="onPointerUp"
  />
</template>
<style scoped>
.resize-handle {
  flex-shrink: 0;
  background: transparent;
  transition: background 0.15s;
  touch-action: none;
  z-index: 10;
}
.resize-handle:hover,
.resize-handle.dragging {
  background: var(--text-dim);
}
/* Negative margins widen the hit area without shifting layout. */
.resize-handle.horizontal {
  width: 4px;
  cursor: col-resize;
  margin: 0 -2px;
}
.resize-handle.vertical {
  height: 4px;
  cursor: row-resize;
  margin: -2px 0;
}
</style>

View File

@@ -0,0 +1,157 @@
<script setup lang="ts">
// Two-pane split layout with an optional draggable divider.
// One pane is "sized" (px or flex ratio); the other flexes to fill.
import { ref, computed } from 'vue'
const props = withDefaults(defineProps<{
  /** Split direction */
  direction?: 'horizontal' | 'vertical'
  /** Initial size of the sized pane (px or flex ratio) */
  initialSize?: number
  /** Size mode: 'px' = sized pane fixed in pixels, 'ratio' = flex ratio */
  sizeMode?: 'px' | 'ratio'
  /** Which pane is sized: 'first' or 'second'. Default: 'first'. */
  anchor?: 'first' | 'second'
  /** Min size (px in px-mode, ratio in ratio-mode) */
  min?: number
  /** Max size (px in px-mode, ratio in ratio-mode) */
  max?: number
  /** Whether the divider is draggable */
  resizable?: boolean
}>(), {
  direction: 'horizontal',
  initialSize: 1,
  sizeMode: 'ratio',
  anchor: 'first',
  min: 0.1,
  max: 10,
  resizable: true,
})
const size = ref(props.initialSize)
const dragging = ref(false)
// Last pointer position along the drag axis.
let startPos = 0
function onPointerDown(e: PointerEvent) {
  if (!props.resizable) return
  dragging.value = true
  startPos = props.direction === 'horizontal' ? e.clientX : e.clientY
  const el = e.target as HTMLElement
  el.setPointerCapture(e.pointerId)
}
function onPointerMove(e: PointerEvent) {
  if (!dragging.value) return
  const currentPos = props.direction === 'horizontal' ? e.clientX : e.clientY
  let delta = currentPos - startPos
  startPos = currentPos
  // Dragging right/down grows first pane, shrinks second.
  // If anchor is 'second', invert so dragging grows the second pane.
  if (props.anchor === 'second') delta = -delta
  if (props.sizeMode === 'px') {
    size.value = Math.max(props.min, Math.min(props.max, size.value + delta))
  } else {
    // Ratio mode converts pixel deltas with a fixed factor.
    // NOTE(review): vertical uses 0.02 vs horizontal 0.01 — presumably to
    // compensate for shorter vertical extents; confirm this is intentional.
    const scale = props.direction === 'horizontal' ? 0.01 : 0.02
    size.value = Math.max(props.min, Math.min(props.max, size.value + delta * scale))
  }
}
function onPointerUp() {
  dragging.value = false
}
const isHorizontal = computed(() => props.direction === 'horizontal')
// Style for the sized pane: fixed px extent, or a flex-grow ratio.
const sizedStyle = computed(() => {
  if (props.sizeMode === 'px') {
    const sizeStr = size.value + 'px'
    const minStr = props.min + 'px'
    return isHorizontal.value
      ? { width: sizeStr, minWidth: minStr, flexShrink: '0' }
      : { height: sizeStr, minHeight: minStr, flexShrink: '0' }
  }
  return { flex: String(size.value) }
})
const flexStyle = computed(() => ({ flex: '1' }))
const firstStyle = computed(() => props.anchor === 'first' ? sizedStyle.value : flexStyle.value)
const secondStyle = computed(() => props.anchor === 'second' ? sizedStyle.value : flexStyle.value)
</script>
<template>
  <div class="split-pane" :class="[direction]">
    <div class="split-first" :style="firstStyle">
      <slot name="first" />
    </div>
    <div
      v-if="resizable"
      class="split-divider"
      :class="[direction, { dragging }]"
      @pointerdown="onPointerDown"
      @pointermove="onPointerMove"
      @pointerup="onPointerUp"
    />
    <div class="split-second" :style="secondStyle">
      <slot name="second" />
    </div>
  </div>
</template>
<style scoped>
.split-pane {
  display: flex;
  width: 100%;
  height: 100%;
  min-height: 0;
  min-width: 0;
  overflow: hidden;
}
.split-pane.horizontal {
  flex-direction: row;
}
.split-pane.vertical {
  flex-direction: column;
}
.split-first,
.split-second {
  min-height: 0;
  min-width: 0;
  overflow: hidden;
}
/* Children must fill their pane */
.split-first > :deep(*),
.split-second > :deep(*) {
  width: 100%;
  height: 100%;
}
.split-divider {
  flex-shrink: 0;
  background: transparent;
  transition: background 0.15s;
  touch-action: none;
  z-index: 10;
}
.split-divider:hover,
.split-divider.dragging {
  background: var(--text-dim);
}
/* Negative margins widen the hit area without shifting layout. */
.split-divider.horizontal {
  width: 4px;
  cursor: col-resize;
  margin: 0 -2px;
}
.split-divider.vertical {
  height: 4px;
  cursor: row-resize;
  margin: -2px 0;
}
</style>

View File

@@ -0,0 +1,23 @@
import { onMounted, onUnmounted, type Ref } from 'vue'
import { DataSource, type DataSourceStatus } from '../datasources/DataSource'
/**
 * Composable binding a component's lifecycle to a DataSource.
 *
 * The source connects when the component mounts and disconnects when it
 * unmounts; data/status/error are exposed as reactive refs.
 */
export function useDataSource<T = unknown>(source: DataSource<T>): {
  data: Ref<T | null>
  status: Ref<DataSourceStatus>
  error: Ref<string | null>
} {
  onMounted(() => {
    source.connect()
  })
  onUnmounted(() => {
    source.disconnect()
  })
  const data = source.data as Ref<T | null>
  const error = source.error as Ref<string | null>
  return { data, status: source.status, error }
}

View File

@@ -0,0 +1,57 @@
import { ref } from 'vue'
export interface EditorExecutionOptions {
/** Debounce delay in ms for auto-apply. Default: 150 */
debounceMs?: number
}
/**
 * Generic editor execution pattern — debounced apply with auto-apply toggle,
 * loading/error/timing state tracking.
 *
 * The caller supplies the actual execution function; this composable only
 * orchestrates debouncing, the auto-apply switch, and loading/timing state.
 */
export function useEditorExecution(
  executeFn: () => Promise<void>,
  options: EditorExecutionOptions = {},
) {
  const delay = options.debounceMs ?? 150
  const loading = ref(false)
  const error = ref<string | null>(null)
  const autoApply = ref(true)
  const execTimeMs = ref<number | null>(null)
  let pending: ReturnType<typeof setTimeout> | null = null

  /** Run executeFn once, tracking loading / error / elapsed milliseconds. */
  async function apply() {
    loading.value = true
    error.value = null
    execTimeMs.value = null
    const started = performance.now()
    try {
      await executeFn()
      execTimeMs.value = Math.round(performance.now() - started)
    } catch (e) {
      error.value = String(e)
    } finally {
      loading.value = false
    }
  }

  /** Debounced trigger for parameter edits; no-op when auto-apply is off. */
  function onParameterChange() {
    if (!autoApply.value) return
    if (pending) clearTimeout(pending)
    pending = setTimeout(() => apply(), delay)
  }

  return { loading, error, autoApply, execTimeMs, apply, onParameterChange }
}

View File

@@ -0,0 +1,77 @@
import { ref, type Ref } from 'vue'
/**
 * Generic registry composable — fetches typed data from a URL, caches it,
 * exposes it reactively.
 *
 * Use for any data that is loaded once at app init and rarely changes:
 * stage definitions, config schemas, available models, etc.
 *
 * The registry is shared across all consumers (singleton per URL).
 */
const cache = new Map<string, { data: Ref<any>; loading: Ref<boolean>; error: Ref<string | null>; promise: Promise<void> | null }>()

/**
 * Fetch *url* into a cache entry's refs. Errors (HTTP or network) land in
 * entry.error; loading is toggled around the request. Shared by the initial
 * load and refresh() — previously this logic was duplicated verbatim.
 */
async function fetchEntry(
  url: string,
  entry: { data: Ref<any>; loading: Ref<boolean>; error: Ref<string | null> },
): Promise<void> {
  entry.loading.value = true
  entry.error.value = null
  try {
    const resp = await fetch(url)
    if (!resp.ok) {
      entry.error.value = `Failed to fetch registry: ${resp.status}`
      return
    }
    entry.data.value = await resp.json()
  } catch (e) {
    entry.error.value = String(e)
  } finally {
    entry.loading.value = false
  }
}

export function useRegistry<T>(url: string): {
  data: Ref<T[]>
  loading: Ref<boolean>
  error: Ref<string | null>
  refresh: () => Promise<void>
} {
  if (!cache.has(url)) {
    const entry = {
      data: ref<T[]>([]) as Ref<T[]>,
      loading: ref(false),
      error: ref<string | null>(null),
      promise: null as Promise<void> | null,
    }
    cache.set(url, entry)
    // Kick off the initial load; consumers can await entry state reactively.
    entry.promise = fetchEntry(url, entry)
  }
  const entry = cache.get(url)!
  return {
    data: entry.data as Ref<T[]>,
    loading: entry.loading,
    error: entry.error,
    refresh: () => fetchEntry(url, entry),
  }
}

View File

@@ -0,0 +1,40 @@
import { type Ref, ref } from 'vue'
export type DataSourceStatus = 'idle' | 'connecting' | 'live' | 'error'
/**
 * Base class for all data sources.
 *
 * A DataSource connects to some event stream, exposes reactive state,
 * and lets consumers subscribe to typed events. Panels read from these
 * reactively — they never touch the transport layer directly.
 */
export abstract class DataSource<T = unknown> {
  readonly id: string
  readonly data: Ref<T | null> = ref(null) as Ref<T | null>
  readonly status: Ref<DataSourceStatus> = ref('idle')
  readonly error: Ref<string | null> = ref(null) as Ref<string | null>
  // eventType -> set of handler callbacks
  private handlers = new Map<string, Set<(payload: any) => void>>()

  constructor(id: string) {
    this.id = id
  }

  abstract connect(): void
  abstract disconnect(): void

  /** Subscribe to a specific event type; returns an unsubscribe function. */
  on<P = unknown>(eventType: string, handler: (payload: P) => void): () => void {
    let bucket = this.handlers.get(eventType)
    if (!bucket) {
      bucket = new Set()
      this.handlers.set(eventType, bucket)
    }
    bucket.add(handler)
    return () => this.handlers.get(eventType)?.delete(handler)
  }

  /** Emit an event to subscribers (called by subclasses) */
  protected emit(eventType: string, payload: unknown): void {
    for (const fn of this.handlers.get(eventType) ?? []) fn(payload)
  }
}

View File

@@ -0,0 +1,93 @@
import { DataSource } from './DataSource'
export interface SSEDataSourceOptions {
/** Unique identifier for this source */
id: string
/** SSE endpoint URL (e.g. '/api/detect/stream/job-123') */
url: string
/** Event types to listen for. Each is dispatched to subscribers via on(). */
eventTypes: string[]
/** Max reconnection attempts before giving up. Default: 10 */
maxRetries?: number
}
/**
 * DataSource backed by native EventSource (Server-Sent Events).
 *
 * Connects to a single SSE endpoint and demultiplexes events by type.
 * Multiple panels can subscribe to different event types from the same source.
 */
export class SSEDataSource extends DataSource {
  private es: EventSource | null = null
  private url: string
  private eventTypes: string[]
  private maxRetries: number
  private retryCount = 0

  constructor(opts: SSEDataSourceOptions) {
    super(opts.id)
    this.url = opts.url
    this.eventTypes = opts.eventTypes
    this.maxRetries = opts.maxRetries ?? 10
  }

  /** Open the stream; no-op if an EventSource already exists. */
  connect(): void {
    if (this.es) return
    this.status.value = 'connecting'
    this.error.value = null
    this.es = new EventSource(this.url)
    this.es.onopen = () => {
      this.status.value = 'live'
      this.retryCount = 0
    }
    // NOTE(review): the browser auto-reconnects on transient errors and only
    // reports readyState CLOSED when it has given up — confirm the retry
    // counter below increments the way the maxRetries option intends.
    this.es.onerror = () => {
      if (this.es?.readyState === EventSource.CLOSED) {
        this.retryCount++
        if (this.retryCount >= this.maxRetries) {
          this.status.value = 'error'
          this.error.value = `Connection lost after ${this.maxRetries} retries`
          this.disconnect()
        } else {
          this.status.value = 'connecting'
        }
      }
    }
    // Register a listener for each event type
    for (const eventType of this.eventTypes) {
      this.es.addEventListener(eventType, (e: MessageEvent) => {
        try {
          const parsed = JSON.parse(e.data)
          // data holds the latest payload across all event types
          this.data.value = parsed
          this.emit(eventType, parsed)
        } catch {
          // ignore malformed events
        }
      })
    }
    // Terminal event — pipeline finished (success, failure, or cancel)
    // NOTE(review): status returns to 'idle' but the EventSource stays open,
    // so a later setUrl() while idle will not reconnect — verify intended.
    this.es.addEventListener('done', () => {
      this.status.value = 'idle'
    })
  }

  /** Close and drop the underlying EventSource, if any. */
  disconnect(): void {
    if (this.es) {
      this.es.close()
      this.es = null
    }
  }

  /** Update the URL (e.g. when job ID changes) and reconnect */
  setUrl(url: string): void {
    this.url = url
    if (this.status.value === 'live' || this.status.value === 'connecting') {
      this.disconnect()
      this.connect()
    }
  }
}

View File

@@ -0,0 +1,45 @@
import { DataSource } from './DataSource'
export interface StaticEvent {
type: string
data: unknown
/** Delay in ms before emitting this event (relative to previous). Default: 0 */
delay?: number
}
/**
* DataSource that replays a fixture array of events.
*
* Used for development and testing without a running backend.
* Events are emitted in sequence with optional delays.
*/
export class StaticDataSource extends DataSource {
  // Fixture to replay, in order; delays are relative to the previous event.
  private fixture: StaticEvent[]
  // Timers scheduled by connect(); cleared on disconnect so no event fires late.
  private pending: ReturnType<typeof setTimeout>[] = []

  constructor(id: string, events: StaticEvent[]) {
    super(id)
    this.fixture = events
  }

  /** Begin replaying the fixture: schedule every event at its cumulative delay. */
  connect(): void {
    this.status.value = 'live'
    this.error.value = null
    let at = 0
    for (const evt of this.fixture) {
      at += evt.delay ?? 0
      const handle = setTimeout(() => {
        this.data.value = evt.data
        this.emit(evt.type, evt.data)
      }, at)
      this.pending.push(handle)
    }
  }

  /** Cancel all not-yet-fired events and return to idle. */
  disconnect(): void {
    this.pending.forEach(clearTimeout)
    this.pending = []
    this.status.value = 'idle'
  }
}

View File

@@ -0,0 +1,103 @@
import { describe, it, expect, vi, afterEach } from 'vitest'
import { StaticDataSource } from '../StaticDataSource'
describe('StaticDataSource', () => {
  // Restore any spied/mocked globals between tests.
  afterEach(() => {
    vi.restoreAllMocks()
  })
  it('emits events in order', async () => {
    const source = new StaticDataSource('test', [
      { type: 'log', data: { msg: 'first' } },
      { type: 'log', data: { msg: 'second' } },
      { type: 'stats', data: { count: 42 } },
    ])
    const received: { type: string; data: unknown }[] = []
    source.on('log', (d) => received.push({ type: 'log', data: d }))
    source.on('stats', (d) => received.push({ type: 'stats', data: d }))
    source.connect()
    // Events with delay=0 are scheduled via setTimeout(0), i.e. as macrotasks;
    // a short real wait lets them all flush.
    await new Promise((r) => setTimeout(r, 10))
    expect(source.status.value).toBe('live')
    expect(received).toHaveLength(3)
    expect(received[0]).toEqual({ type: 'log', data: { msg: 'first' } })
    expect(received[1]).toEqual({ type: 'log', data: { msg: 'second' } })
    expect(received[2]).toEqual({ type: 'stats', data: { count: 42 } })
    source.disconnect()
    expect(source.status.value).toBe('idle')
  })
  it('respects delays between events', async () => {
    const source = new StaticDataSource('test-delay', [
      { type: 'a', data: 1 },
      { type: 'b', data: 2, delay: 50 },
    ])
    const received: unknown[] = []
    source.on('a', (d) => received.push(d))
    source.on('b', (d) => received.push(d))
    source.connect()
    await new Promise((r) => setTimeout(r, 10))
    expect(received).toHaveLength(1) // only 'a' so far
    await new Promise((r) => setTimeout(r, 60))
    expect(received).toHaveLength(2) // 'b' arrived after delay
    source.disconnect()
  })
  it('updates data ref with latest event payload', async () => {
    const source = new StaticDataSource('test-data', [
      { type: 'x', data: { v: 1 } },
      { type: 'x', data: { v: 2 } },
    ])
    source.connect()
    await new Promise((r) => setTimeout(r, 10))
    // data holds whatever the most recent event carried
    expect(source.data.value).toEqual({ v: 2 })
    source.disconnect()
  })
  it('cleans up on disconnect', async () => {
    const source = new StaticDataSource('test-cleanup', [
      { type: 'a', data: 1 },
      { type: 'b', data: 2, delay: 100 },
    ])
    const received: unknown[] = []
    source.on('b', (d) => received.push(d))
    source.connect()
    await new Promise((r) => setTimeout(r, 10))
    source.disconnect()
    // 'b' should never fire since we disconnected before its delay
    await new Promise((r) => setTimeout(r, 150))
    expect(received).toHaveLength(0)
  })
  it('unsubscribe removes listener', async () => {
    const source = new StaticDataSource('test-unsub', [
      { type: 'x', data: 1 },
    ])
    const received: unknown[] = []
    const unsub = source.on('x', (d) => received.push(d))
    unsub()
    source.connect()
    await new Promise((r) => setTimeout(r, 10))
    expect(received).toHaveLength(0)
    source.disconnect()
  })
})

View File

@@ -0,0 +1,38 @@
// Framework public API — everything a consumer may import is re-exported here;
// modules not listed are internal and unsupported.
export { DataSource, type DataSourceStatus } from './datasources/DataSource'
export { SSEDataSource } from './datasources/SSEDataSource'
export { StaticDataSource } from './datasources/StaticDataSource'
export { useDataSource } from './composables/useDataSource'
export { useRegistry } from './composables/useRegistry'
export { useEditorExecution } from './composables/useEditorExecution'
export type { EditorExecutionOptions } from './composables/useEditorExecution'
// Components
export { default as Panel } from './components/Panel.vue'
export { default as LayoutGrid } from './components/LayoutGrid.vue'
export { default as ResizeHandle } from './components/ResizeHandle.vue'
export { default as SplitPane } from './components/SplitPane.vue'
export { default as ParameterEditor } from './components/ParameterEditor.vue'
export type { ConfigField } from './components/ParameterEditor.vue'
// Renderers
export { default as LogRenderer } from './renderers/LogRenderer.vue'
export { default as TimeSeriesRenderer } from './renderers/TimeSeriesRenderer.vue'
export { default as GraphRenderer } from './renderers/GraphRenderer.vue'
export { default as FrameRenderer } from './renderers/FrameRenderer.vue'
export { default as TableRenderer } from './renderers/TableRenderer.vue'
// Renderer types
export type { FrameBBox, FrameOverlay } from './renderers/FrameRenderer.vue'
export type { LogEntry } from './renderers/LogRenderer.vue'
export type { GraphNode, GraphMode } from './renderers/GraphRenderer.vue'
export type { TableColumn } from './renderers/TableRenderer.vue'
export type { TimeSeriesSeries } from './renderers/TimeSeriesRenderer.vue'
// Interaction plugins
export type { InteractionPlugin, PluginContext } from './plugins/InteractionPlugin'
export { BBoxDrawPlugin } from './plugins/BBoxDrawPlugin'
export type { BBoxResult, BBoxCallback } from './plugins/BBoxDrawPlugin'
export { CrosshairPlugin } from './plugins/CrosshairPlugin'
export type { CrosshairCallback } from './plugins/CrosshairPlugin'

View File

@@ -0,0 +1,88 @@
/**
* BBoxDrawPlugin — draw bounding boxes on the frame viewer.
*
* User drags on the canvas to draw a rectangle.
* On pointer up, emits the bbox coordinates via the callback.
* The frame viewer panel feeds this into the selection store.
*/
import type { InteractionPlugin, PluginContext } from './InteractionPlugin'
export interface BBoxResult {
x: number
y: number
w: number
h: number
}
export type BBoxCallback = (bbox: BBoxResult) => void
export class BBoxDrawPlugin implements InteractionPlugin {
  name = 'bbox-draw'
  private ctx: CanvasRenderingContext2D | null = null
  private drawing = false
  private startX = 0
  private startY = 0
  private currentBox: BBoxResult | null = null
  private callback: BBoxCallback
  // Minimum width/height (px) a drag must exceed to count as a real box.
  private minSize: number

  /**
   * @param callback invoked with the finished box on pointer up
   * @param minSize  drags whose width or height does not exceed this are
   *                 discarded as accidental clicks (default 5 px — the value
   *                 previously hard-coded, so existing callers are unaffected)
   */
  constructor(callback: BBoxCallback, minSize = 5) {
    this.callback = callback
    this.minSize = minSize
  }

  onMount(context: PluginContext): void {
    this.ctx = context.ctx
  }

  onUnmount(): void {
    this.ctx = null
    this.drawing = false
    this.currentBox = null
  }

  /** Anchor the drag at the pointer-down position. */
  onPointerDown(e: PointerEvent): void {
    this.drawing = true
    this.startX = e.offsetX
    this.startY = e.offsetY
    this.currentBox = null
  }

  /** Track the in-progress rectangle, normalized so w/h are non-negative. */
  onPointerMove(e: PointerEvent): void {
    if (!this.drawing) return
    const x = Math.min(this.startX, e.offsetX)
    const y = Math.min(this.startY, e.offsetY)
    const w = Math.abs(e.offsetX - this.startX)
    const h = Math.abs(e.offsetY - this.startY)
    this.currentBox = { x, y, w, h }
  }

  /** Finish the drag; emit the box only if it exceeds the minimum size. */
  onPointerUp(_e: PointerEvent): void {
    if (!this.drawing) return
    this.drawing = false
    const box = this.currentBox
    if (box && box.w > this.minSize && box.h > this.minSize) {
      this.callback(box)
    }
    this.currentBox = null
  }

  /** Draw the in-progress rectangle (dashed stroke + translucent fill). */
  render(ctx: CanvasRenderingContext2D): void {
    if (!this.currentBox) return
    const box = this.currentBox
    ctx.strokeStyle = '#4f9cf9'
    ctx.lineWidth = 2
    ctx.setLineDash([6, 3])
    ctx.strokeRect(box.x, box.y, box.w, box.h)
    ctx.setLineDash([])
    // Semi-transparent fill
    ctx.fillStyle = 'rgba(79, 156, 249, 0.1)'
    ctx.fillRect(box.x, box.y, box.w, box.h)
  }
}

View File

@@ -0,0 +1,60 @@
/**
* CrosshairPlugin — synchronized vertical crosshair across time-series panels.
*
 * When the user hovers on any panel with this plugin, the normalized
 * crosshair position (0-1 across the panel width) is passed to the callback
 * (the selection store presumably maps it to a timestamp). All panels with
 * this plugin render a vertical line at the corresponding position.
*/
import type { InteractionPlugin, PluginContext } from './InteractionPlugin'
export type CrosshairCallback = (timestamp: number | null) => void
export class CrosshairPlugin implements InteractionPlugin {
  name = 'crosshair'
  private width = 0
  private callback: CrosshairCallback
  /** Current crosshair X position (pixels), set externally from store */
  public crosshairX: number | null = null

  /** @param callback receives the normalized hover position, or null when hover ends */
  constructor(callback: CrosshairCallback) {
    this.callback = callback
  }

  onMount(context: PluginContext): void {
    this.width = context.width
  }

  onUnmount(): void {
    this.crosshairX = null
  }

  onPointerMove(e: PointerEvent): void {
    // Guard: before mount (or with a zero-width canvas) the division below
    // would yield Infinity/NaN — skip until we have a real width.
    if (this.width <= 0) return
    // Convert pixel X to normalized position, clamped to [0, 1].
    const normalized = Math.min(1, Math.max(0, e.offsetX / this.width))
    this.callback(normalized)
  }

  onPointerDown(_e: PointerEvent): void {
    // no-op for crosshair
  }

  onPointerUp(_e: PointerEvent): void {
    this.callback(null)
  }

  /** Draw the shared vertical line at crosshairX (set externally by the store). */
  render(ctx: CanvasRenderingContext2D): void {
    if (this.crosshairX === null) return
    ctx.strokeStyle = '#a78bfa'
    ctx.lineWidth = 1
    ctx.setLineDash([4, 4])
    ctx.beginPath()
    ctx.moveTo(this.crosshairX, 0)
    ctx.lineTo(this.crosshairX, ctx.canvas.height)
    ctx.stroke()
    ctx.setLineDash([])
  }
}

View File

@@ -0,0 +1,36 @@
/**
* Interaction plugin interface.
*
* Plugins attach to a Panel's overlay canvas. They receive pointer events
* and emit typed results via the callback. The panel handles rendering
* the overlay and routing events to the active plugin.
*/
/** Everything a plugin receives when attached to a panel's overlay canvas. */
export interface PluginContext {
  /** Canvas element for drawing overlays */
  canvas: HTMLCanvasElement
  /** 2D rendering context */
  ctx: CanvasRenderingContext2D
  /** Canvas dimensions (may differ from display size) */
  width: number
  height: number
}
/**
 * Contract for an overlay interaction plugin. The hosting panel routes
 * pointer events to the active plugin and calls render() to paint its overlay.
 */
export interface InteractionPlugin {
  /** Unique plugin name */
  name: string
  /** Called when the plugin is mounted on a panel */
  onMount(context: PluginContext): void
  /** Called when the plugin is unmounted */
  onUnmount(): void
  /** Pointer event handlers (optional) */
  onPointerDown?(e: PointerEvent): void
  onPointerMove?(e: PointerEvent): void
  onPointerUp?(e: PointerEvent): void
  /** Called each animation frame to render the overlay */
  render(ctx: CanvasRenderingContext2D): void
}

View File

@@ -0,0 +1,178 @@
<script setup lang="ts">
import { ref, watch, onMounted, onUnmounted, nextTick } from 'vue'
export interface FrameBBox {
x: number
y: number
w: number
h: number
confidence: number
label: string
resolved_brand?: string | null
source?: string | null
stage?: string | null
ocr_text?: string | null
}
export interface FrameOverlay {
/** Base64 encoded image (same dimensions as main image) */
src: string
label: string
visible: boolean
/** Opacity 0-1, default 0.5 */
opacity?: number
/** Image format — 'jpeg' (default) or 'png' (supports transparency) */
srcFormat?: 'jpeg' | 'png'
}
const props = defineProps<{
/** Base64 JPEG image */
imageSrc: string
/** Bounding boxes to overlay */
boxes: FrameBBox[]
/** Debug overlay layers (edge images, line visualizations, etc.) */
overlays?: FrameOverlay[]
}>()
const canvas = ref<HTMLCanvasElement | null>(null)
const container = ref<HTMLElement | null>(null)
// Repaint the frame: letterbox the image into the container, then draw debug
// overlays and bounding boxes scaled into the same coordinate space.
function draw() {
  const cvs = canvas.value
  const ctr = container.value
  if (!cvs || !ctr || !props.imageSrc) return
  const ctx = cvs.getContext('2d')
  if (!ctx) return
  const img = new window.Image()
  img.onload = () => {
    // Resize backing store to the container's current CSS size.
    cvs.width = ctr.clientWidth
    cvs.height = ctr.clientHeight
    // Uniform scale that fits the whole image; dx/dy center it (letterbox).
    const scale = Math.min(cvs.width / img.width, cvs.height / img.height)
    const dx = (cvs.width - img.width * scale) / 2
    const dy = (cvs.height - img.height * scale) / 2
    ctx.clearRect(0, 0, cvs.width, cvs.height)
    ctx.drawImage(img, dx, dy, img.width * scale, img.height * scale)
    // Draw debug overlays (edge images, line visualizations)
    drawOverlays(ctx, dx, dy, img.width * scale, img.height * scale)
    // Draw bounding boxes on top
    for (const box of props.boxes) {
      // Box coordinates are in image pixels — map through the same scale/offset.
      const bx = dx + box.x * scale
      const by = dy + box.y * scale
      const bw = box.w * scale
      const bh = box.h * scale
      const color = sourceColor(box)
      const resolved = box.resolved_brand || box.ocr_text
      ctx.strokeStyle = color
      ctx.lineWidth = 2
      // Dashed outline marks boxes with no resolved brand/OCR text yet.
      if (!resolved) {
        ctx.setLineDash([4, 3])
      }
      ctx.strokeRect(bx, by, bw, bh)
      ctx.setLineDash([])
    }
  }
  img.src = `data:image/jpeg;base64,${props.imageSrc}`
}
// Decoded overlay images keyed by their base64 payload.
// NOTE(review): never evicted — if overlay payloads churn per frame this grows
// without bound; consider capping or keying by layer label. TODO confirm churn.
const overlayCache = new Map<string, HTMLImageElement>()
// Composite visible overlay layers onto the already-drawn frame, using the
// frame's letterbox rectangle (dx, dy, dw, dh) so layers line up with the image.
function drawOverlays(ctx: CanvasRenderingContext2D, dx: number, dy: number, dw: number, dh: number) {
  const layers = props.overlays ?? []
  for (const layer of layers) {
    if (!layer.visible || !layer.src) continue
    const cached = overlayCache.get(layer.src)
    if (cached && cached.complete) {
      // Decoded and ready: blend at the layer's opacity, then restore alpha.
      ctx.globalAlpha = layer.opacity ?? 0.5
      ctx.drawImage(cached, dx, dy, dw, dh)
      ctx.globalAlpha = 1.0
    } else if (!cached) {
      // Load async, redraw when ready
      const overlay = new window.Image()
      overlay.onload = () => draw()
      overlay.src = `data:image/${layer.srcFormat ?? 'jpeg'};base64,${layer.src}`
      overlayCache.set(layer.src, overlay)
    }
  }
}
const SOURCE_COLORS: Record<string, string> = {
yolo: '#f5a623', // yellow — raw detection
ocr: '#ff8c42', // orange — text extracted
ocr_matched: '#3ecf8e', // green — brand resolved
local_vlm: '#4f9cf9', // blue — VLM resolved
cloud_llm: '#a78bfa', // purple — cloud resolved
unresolved: '#e05252', // red — nothing matched
}
// CV region labels — distinct from source-based colors
const REGION_COLORS: Record<string, string> = {
edge_region: '#00bcd4', // cyan
contour_region: '#ffd54f', // yellow
color_region: '#e040fb', // magenta
candidate: '#4caf50', // green — passed readability
rejected: '#e05252', // red — failed readability
}
// Pick a stroke color for a box: CV region labels win, then resolved brands,
// then the detection source, falling back to a confidence-based color.
function sourceColor(box: FrameBBox): string {
  const regionHit = REGION_COLORS[box.label]
  if (regionHit) return regionHit
  if (box.resolved_brand) return SOURCE_COLORS.ocr_matched
  const src = box.source
  if (src && SOURCE_COLORS[src]) return SOURCE_COLORS[src]
  return confidenceColor(box.confidence)
}
// Map a confidence score onto the three-step low/mid/high color scale.
function confidenceColor(conf: number): string {
  return conf >= 0.7
    ? 'var(--conf-high)'
    : conf >= 0.4
      ? 'var(--conf-mid)'
      : 'var(--conf-low)'
}
watch(() => [props.imageSrc, props.boxes, props.overlays], () => nextTick(draw), { deep: true })
onMounted(() => {
nextTick(draw)
const observer = new ResizeObserver(() => draw())
if (container.value) observer.observe(container.value)
onUnmounted(() => observer.disconnect())
})
</script>
<template>
<div ref="container" class="frame-renderer">
<canvas ref="canvas" />
<div v-if="!imageSrc" class="frame-empty">No frame</div>
</div>
</template>
<style scoped>
.frame-renderer {
width: 100%;
height: 100%;
min-height: 200px;
position: relative;
}
.frame-renderer canvas {
display: block;
width: 100%;
height: 100%;
}
.frame-empty {
position: absolute;
inset: 0;
display: flex;
align-items: center;
justify-content: center;
color: var(--text-dim);
}
</style>

View File

@@ -0,0 +1,317 @@
<script setup lang="ts">
import { computed } from 'vue'
import { VueFlow } from '@vue-flow/core'
import '@vue-flow/core/dist/style.css'
import '@vue-flow/core/dist/theme-default.css'
export interface GraphNode {
id: string
status: 'pending' | 'running' | 'done' | 'error' | 'skipped' | 'placeholder'
/** Whether a checkpoint exists at this stage */
hasCheckpoint?: boolean
/** Stage category (e.g. 'cv', 'ai', 'preprocessing') */
category?: string
/** Which editors are available for this stage */
availableEditors?: string[]
}
export type GraphMode = 'observe' | 'edit-in-pipeline' | 'edit-isolated'
const props = withDefaults(defineProps<{
nodes: GraphNode[]
/** Interaction mode — changes visual treatment and click behavior */
mode?: GraphMode
/** Currently edited stage (highlighted in edit modes) */
activeStage?: string | null
/** Stages that have a region editor (bbox/polygon) */
regionStages?: string[]
}>(), {
mode: 'observe',
activeStage: null,
})
const emit = defineEmits<{
'open-region-editor': [stage: string]
'open-stage-editor': [stage: string]
'node-click': [stage: string]
}>()
const regionStageSet = computed(() => new Set(props.regionStages ?? []))
const STATUS_COLORS: Record<string, string> = {
pending: 'var(--status-idle)',
running: 'var(--status-processing)',
done: 'var(--status-live)',
error: 'var(--status-error)',
skipped: '#4a6fa5',
placeholder: 'transparent',
}
// Compute the visual treatment (fill color, text color, opacity, outline) for
// one pipeline node, based on the current interaction mode and active stage.
function nodeAppearance(node: GraphNode) {
  const isActive = node.id === props.activeStage
  const mode = props.mode
  // Edit-isolated: only the active node is fully visible
  if (mode === 'edit-isolated' && !isActive) {
    return {
      color: 'var(--surface-3)',
      textColor: 'var(--text-dim)',
      opacity: 0.5,
      outline: false,
    }
  }
  // Edit-in-pipeline: active node highlighted, upstream dimmed, downstream normal
  if (mode === 'edit-in-pipeline' && props.activeStage) {
    // Upstream/downstream is determined by position in the linear nodes array.
    const activeIdx = props.nodes.findIndex(n => n.id === props.activeStage)
    const nodeIdx = props.nodes.findIndex(n => n.id === node.id)
    if (isActive) {
      return {
        color: 'var(--status-processing)',
        textColor: '#fff',
        opacity: 1,
        outline: true,
      }
    }
    if (nodeIdx < activeIdx) {
      // Upstream: frozen from checkpoint
      return {
        color: 'var(--surface-3)',
        textColor: 'var(--text-secondary)',
        opacity: 0.7,
        outline: false,
      }
    }
  }
  // Placeholder: hollow, no text
  if (node.status === 'placeholder') {
    return {
      color: 'transparent',
      textColor: 'transparent',
      opacity: 0.6,
      outline: false,
    }
  }
  // Default: observe mode or downstream in edit-in-pipeline
  return {
    color: STATUS_COLORS[node.status] ?? STATUS_COLORS.pending,
    textColor: '#fff',
    opacity: 1,
    outline: isActive,
  }
}
const flowNodes = computed(() =>
props.nodes.map((n, i) => {
const appearance = nodeAppearance(n)
return {
id: n.id,
type: 'stage',
position: { x: 20, y: i * 80 },
data: {
label: n.id.replace(/_/g, ' '),
status: n.status,
...appearance,
hasCheckpoint: n.hasCheckpoint ?? false,
hasStageEditor: regionStageSet.value.has(n.id),
isRunning: n.status === 'running',
isActive: n.id === props.activeStage,
},
}
})
)
// Build a linear chain of VueFlow edges between consecutive nodes.
// Edges upstream of the active stage are rendered dimmed/dashed in edit modes.
const flowEdges = computed(() => {
  const edges = []
  for (let i = 0; i < props.nodes.length - 1; i++) {
    // "Active" here means: we are in an edit mode and this edge lies strictly
    // before the active stage in the chain.
    const isActiveEdge = props.mode !== 'observe' && props.activeStage
      && props.nodes.findIndex(n => n.id === props.activeStage) > i
    edges.push({
      id: `${props.nodes[i].id}->${props.nodes[i + 1].id}`,
      source: props.nodes[i].id,
      target: props.nodes[i + 1].id,
      // Animate the edge leaving a currently-running stage.
      animated: props.nodes[i].status === 'running',
      style: {
        stroke: isActiveEdge ? 'var(--text-dim)' : '#555568',
        strokeDasharray: isActiveEdge ? '4 4' : undefined,
      },
    })
  }
  return edges
})
// Bubble a plain node click up to the host component.
function onNodeClick(id: string) {
  emit('node-click', id)
}
</script>
<template>
<div class="graph-renderer">
<VueFlow
:nodes="flowNodes"
:edges="flowEdges"
:fit-view-on-init="true"
:nodes-draggable="false"
:nodes-connectable="false"
:zoom-on-scroll="false"
:pan-on-scroll="false"
>
<template #node-stage="{ data, id }">
<div
class="stage-node"
:class="{
running: data.isRunning,
active: data.isActive,
outline: data.outline,
dimmed: data.opacity < 1,
placeholder: data.status === 'placeholder',
}"
:style="{
background: data.color,
color: data.textColor,
opacity: data.opacity,
}"
@click="onNodeClick(id)"
>
<span class="stage-label">{{ data.label }}</span>
<!-- Checkpoint indicator -->
<span v-if="data.hasCheckpoint" class="checkpoint-badge" title="Checkpoint available">
<svg width="10" height="10" viewBox="0 0 10 10" fill="currentColor">
<circle cx="5" cy="5" r="3" fill="none" stroke="currentColor" stroke-width="1.5"/>
<circle cx="5" cy="5" r="1.5"/>
</svg>
</span>
<span class="stage-actions">
<button
v-if="data.hasStageEditor"
class="stage-btn editor-btn"
title="Stage editor"
@click.stop="emit('open-region-editor', id)"
>
<svg width="12" height="12" viewBox="0 0 12 12" fill="none" stroke="currentColor" stroke-width="1.5">
<circle cx="5" cy="5" r="3.5"/><line x1="7.5" y1="7.5" x2="11" y2="11"/>
</svg>
</button>
<button
class="stage-btn config-btn"
title="Stage config"
@click.stop="emit('open-stage-editor', id)"
>
<svg width="12" height="12" viewBox="0 0 12 12" fill="none" stroke="currentColor" stroke-width="1.5">
<circle cx="6" cy="6" r="2"/><path d="M6 1v2M6 9v2M1 6h2M9 6h2M2.5 2.5l1.4 1.4M8.1 8.1l1.4 1.4M2.5 9.5l1.4-1.4M8.1 3.9l1.4-1.4"/>
</svg>
</button>
</span>
</div>
</template>
</VueFlow>
</div>
</template>
<style scoped>
.graph-renderer {
width: 100%;
height: 100%;
min-height: 200px;
}
.graph-renderer :deep(.vue-flow__background) {
background: transparent;
}
/* Hide default node styling — we use custom template */
.graph-renderer :deep(.vue-flow__node-stage) {
padding: 0;
border: none;
background: transparent;
border-radius: 0;
}
.stage-node {
display: flex;
align-items: center;
gap: 6px;
padding: 6px 10px;
border-radius: var(--panel-radius);
font-family: var(--font-mono);
font-size: var(--font-size-sm);
font-weight: 600;
min-width: 180px;
cursor: pointer;
transition: opacity 0.2s, box-shadow 0.2s;
}
.stage-node.running {
animation: node-pulse 1.5s infinite;
}
.stage-node.outline {
box-shadow: 0 0 0 2px var(--status-processing);
}
.stage-node.dimmed {
pointer-events: none;
}
.stage-node.placeholder {
border: 1px dashed var(--text-secondary);
background: transparent;
color: transparent;
pointer-events: none;
}
.stage-node.placeholder .stage-actions,
.stage-node.placeholder .checkpoint-badge {
display: none;
}
.stage-label {
flex: 1;
}
.checkpoint-badge {
opacity: 0.7;
display: flex;
align-items: center;
}
.stage-actions {
display: flex;
gap: 2px;
opacity: 0;
transition: opacity 0.15s;
}
.stage-node:hover .stage-actions {
opacity: 1;
}
.stage-btn {
background: rgba(0, 0, 0, 0.15);
border: none;
border-radius: 3px;
width: 20px;
height: 20px;
font-size: 11px;
cursor: pointer;
display: flex;
align-items: center;
justify-content: center;
color: inherit;
}
.stage-btn:hover {
background: rgba(0, 0, 0, 0.3);
}
@keyframes node-pulse {
0%, 100% { opacity: 1; }
50% { opacity: 0.7; }
}
</style>

View File

@@ -0,0 +1,143 @@
<script setup lang="ts">
import { ref, computed, onMounted, onUnmounted, watch, nextTick } from 'vue'
export interface LogEntry {
level: string
stage: string
msg: string
ts: string
}
const props = withDefaults(defineProps<{
entries: LogEntry[]
rowHeight?: number
autoScroll?: boolean
}>(), {
rowHeight: 24,
autoScroll: true,
})
const container = ref<HTMLElement | null>(null)
const scrollTop = ref(0)
const containerHeight = ref(0)
const userScrolled = ref(false)
// Virtual-scroll window: index range of rows that should be in the DOM,
// padded by one row above and two-rows-worth below to avoid blank flashes.
const visibleRange = computed(() => {
  const start = Math.floor(scrollTop.value / props.rowHeight)
  const visible = Math.ceil(containerHeight.value / props.rowHeight) + 2
  return {
    start: Math.max(0, start - 1),
    end: Math.min(props.entries.length, start + visible),
  }
})
// Full scrollable height as if every entry were rendered.
const totalHeight = computed(() => props.entries.length * props.rowHeight)
// The slice of entries actually rendered, each tagged with its absolute index
// (used as the row key so rows keep identity while the window slides).
const visibleEntries = computed(() =>
  props.entries.slice(visibleRange.value.start, visibleRange.value.end).map((entry, i) => ({
    ...entry,
    index: visibleRange.value.start + i,
  }))
)
// Track scroll position for the virtual window, and pause auto-scroll once
// the user moves more than ~two rows away from the bottom.
function onScroll(e: Event) {
  const el = e.target as HTMLElement
  scrollTop.value = el.scrollTop
  const distanceFromBottom = el.scrollHeight - el.scrollTop - el.clientHeight
  userScrolled.value = distanceFromBottom >= props.rowHeight * 2
}
// Follow the tail — but only while auto-scroll is enabled and the user has
// not scrolled away from the bottom.
function scrollToBottom() {
  const el = container.value
  if (!el || !props.autoScroll || userScrolled.value) return
  el.scrollTop = el.scrollHeight
}
watch(() => props.entries.length, () => {
nextTick(scrollToBottom)
})
onMounted(() => {
if (container.value) {
containerHeight.value = container.value.clientHeight
const observer = new ResizeObserver(([entry]) => {
containerHeight.value = entry.contentRect.height
})
observer.observe(container.value)
onUnmounted(() => observer.disconnect())
}
})
const levelClass = (level: string) => level.toLowerCase()
</script>
<template>
<div class="log-renderer" ref="container" @scroll="onScroll">
<div class="log-spacer" :style="{ height: totalHeight + 'px' }">
<div
class="log-viewport"
:style="{ transform: `translateY(${visibleRange.start * rowHeight}px)` }"
>
<div
v-for="entry in visibleEntries"
:key="entry.index"
class="log-row"
:class="levelClass(entry.level)"
:style="{ height: rowHeight + 'px' }"
>
<span class="log-ts">{{ entry.ts }}</span>
<span class="log-level">{{ entry.level }}</span>
<span class="log-stage">{{ entry.stage }}</span>
<span class="log-msg">{{ entry.msg }}</span>
</div>
</div>
</div>
<div v-if="entries.length === 0" class="log-empty">
Waiting for log events...
</div>
</div>
</template>
<style scoped>
.log-renderer {
overflow-y: auto;
height: 100%;
font-family: var(--font-mono);
font-size: 12px;
}
.log-spacer {
position: relative;
}
.log-viewport {
position: absolute;
left: 0;
right: 0;
}
.log-row {
display: flex;
align-items: center;
gap: var(--space-2);
padding: 0 var(--space-2);
line-height: 1;
}
.log-ts { color: var(--text-dim); min-width: 80px; flex-shrink: 0; }
.log-level { min-width: 56px; font-weight: 600; flex-shrink: 0; }
.log-stage { color: var(--status-processing); min-width: 120px; flex-shrink: 0; }
.log-msg { white-space: nowrap; overflow: hidden; text-overflow: ellipsis; }
.log-row.info .log-level { color: var(--status-live); }
.log-row.warning .log-level { color: var(--status-escalating); }
.log-row.error .log-level { color: var(--status-error); }
.log-row.debug .log-level { color: var(--text-dim); }
.log-empty {
color: var(--text-dim);
padding: var(--space-6);
text-align: center;
}
</style>

View File

@@ -0,0 +1,122 @@
<script setup lang="ts">
import { computed } from 'vue'
export interface TableColumn {
key: string
label: string
width?: string
}
const props = defineProps<{
columns: TableColumn[]
rows: Record<string, unknown>[]
sortKey?: string
sortDir?: 'asc' | 'desc'
}>()
const emits = defineEmits<{
sort: [key: string]
}>()
// Rows re-sorted by the active column, or the raw rows when no sort is set.
// Sorts a copy — the incoming prop array is never mutated.
const sorted = computed(() => {
  const key = props.sortKey
  if (!key) return props.rows
  const direction = props.sortDir === 'desc' ? -1 : 1
  const copy = [...props.rows]
  copy.sort((left, right) => {
    const lv = left[key] as number | string
    const rv = right[key] as number | string
    if (lv < rv) return -direction
    if (lv > rv) return direction
    return 0
  })
  return copy
})
</script>
<template>
<div class="table-renderer">
<table>
<thead>
<tr>
<th
v-for="col in columns"
:key="col.key"
:style="{ width: col.width }"
@click="emits('sort', col.key)"
class="sortable"
>
{{ col.label }}
<span v-if="sortKey === col.key" class="sort-indicator">
{{ sortDir === 'desc' ? '' : '' }}
</span>
</th>
</tr>
</thead>
<tbody>
<tr v-for="(row, i) in sorted" :key="i">
<td v-for="col in columns" :key="col.key">
{{ row[col.key] }}
</td>
</tr>
<tr v-if="rows.length === 0">
<td :colspan="columns.length" class="empty">No detections yet</td>
</tr>
</tbody>
</table>
</div>
</template>
<style scoped>
.table-renderer {
overflow: auto;
height: 100%;
font-family: var(--font-mono);
font-size: var(--font-size-sm);
}
table {
width: 100%;
border-collapse: collapse;
table-layout: fixed;
}
th {
position: sticky;
top: 0;
background: var(--surface-2);
color: var(--text-secondary);
font-weight: 600;
text-align: left;
padding: var(--space-2) var(--space-3);
border-bottom: var(--panel-border);
cursor: pointer;
user-select: none;
}
th:hover {
color: var(--text-primary);
}
.sort-indicator {
font-size: 9px;
margin-left: 4px;
}
td {
padding: var(--space-1) var(--space-3);
border-bottom: 1px solid var(--surface-3);
white-space: normal;
word-break: break-word;
overflow: hidden;
text-overflow: ellipsis;
}
tr:hover td {
background: var(--surface-3);
}
.empty {
color: var(--text-dim);
text-align: center;
padding: var(--space-6);
}
</style>

View File

@@ -0,0 +1,198 @@
<script setup lang="ts">
import { ref, onMounted, onUnmounted, watch, nextTick } from 'vue'
import uPlot from 'uplot'
import 'uplot/dist/uPlot.min.css'
export interface TimeSeriesSeries {
label: string
color: string
}
const props = withDefaults(defineProps<{
/** Array of series configs (label + color) */
series: TimeSeriesSeries[]
/** Data: [timestamps[], series1[], series2[], ...] */
data: uPlot.AlignedData
/** Chart title (optional) */
title?: string
/** Stacked area mode */
stacked?: boolean
}>(), {
stacked: false,
})
const container = ref<HTMLElement | null>(null)
const zoomed = ref(false)
let chart: uPlot | null = null
// Assemble the uPlot options object: one x series plus a styled series per
// configured line; sizes fall back to 400x200 until the container is measured.
function buildOpts(): uPlot.Options {
  const seriesOpts: uPlot.Series[] = [
    { label: 'Time' },
    ...props.series.map((s) => ({
      label: s.label,
      stroke: s.color,
      // '40' suffix = ~25% alpha hex, giving a translucent area fill when stacked.
      fill: props.stacked ? s.color + '40' : undefined,
      width: 2,
    })),
  ]
  return {
    width: container.value?.clientWidth ?? 400,
    height: container.value?.clientHeight ?? 200,
    series: seriesOpts,
    axes: [
      {
        stroke: '#555568',
        grid: { stroke: '#2e2e3822' },
        size: 40,
        font: '10px monospace',
        ticks: { size: 3 },
      },
      {
        stroke: '#555568',
        grid: { stroke: '#2e2e3822' },
        size: 35,
        font: '10px monospace',
        ticks: { size: 3 },
      },
    ],
    cursor: { show: true },
    legend: { show: true, live: false },
    padding: [8, 8, 0, 0],
    hooks: {
      // Any x-scale change (drag-zoom) flips the flag that shows the reset button.
      setScale: [(_self: uPlot, scaleKey: string) => {
        if (scaleKey === 'x') zoomed.value = true
      }],
    },
  }
}
// Restore the x scale to span the full data range and hide the reset button.
function resetZoom() {
  const u = chart
  if (!u) return
  const xs = u.data?.[0]
  if (xs && xs.length > 0) {
    u.setScale('x', { min: xs[0], max: xs[xs.length - 1] })
  }
  zoomed.value = false
}
// Measure the uPlot-generated legend (rendered below the canvas), 0 if absent.
function getLegendHeight(): number {
  const host = container.value
  if (!host) return 0
  const legendEl = host.querySelector('.u-legend') as HTMLElement | null
  return legendEl === null ? 0 : legendEl.offsetHeight
}
// (Re)create the uPlot instance inside the container, destroying any previous
// one, then resize on the next tick once the legend has rendered.
function createChart() {
  if (!container.value) return
  if (chart) chart.destroy()
  chart = new uPlot(buildOpts(), props.data, container.value)
  // Refit after legend renders
  nextTick(() => resize())
}
// Fit the chart to the container, reserving room for the legend below the
// canvas (uPlot's height option covers the canvas only).
function resize() {
  if (!chart || !container.value) return
  const legendH = getLegendHeight()
  const availableH = container.value.clientHeight
  // uPlot height = canvas height (chart sets total = canvas + legend)
  const chartH = Math.max(60, availableH - legendH)
  chart.setSize({
    width: container.value.clientWidth,
    height: chartH,
  })
}
watch(() => props.data, (newData) => {
if (chart) {
chart.setData(newData)
} else {
nextTick(createChart)
}
}, { deep: true })
onMounted(() => {
nextTick(createChart)
const observer = new ResizeObserver(resize)
if (container.value) observer.observe(container.value)
onUnmounted(() => {
observer.disconnect()
chart?.destroy()
chart = null
})
})
</script>
<template>
<div class="timeseries-wrapper">
<button v-if="zoomed" class="reset-zoom" @click="resetZoom" title="Reset zoom"></button>
<div ref="container" class="timeseries-renderer" />
</div>
</template>
<style scoped>
.timeseries-wrapper {
width: 100%;
height: 100%;
position: relative;
}
.reset-zoom {
position: absolute;
top: 4px;
right: 4px;
z-index: 20;
background: var(--surface-2);
border: 1px solid var(--surface-3);
border-radius: 4px;
color: var(--text-secondary);
font-size: 14px;
width: 24px;
height: 24px;
cursor: pointer;
display: flex;
align-items: center;
justify-content: center;
opacity: 0.7;
transition: opacity 0.15s;
}
.reset-zoom:hover {
opacity: 1;
color: var(--text-primary);
}
.timeseries-renderer {
width: 100%;
height: 100%;
display: flex;
flex-direction: column;
overflow: hidden;
}
/* uPlot creates a .u-wrap for canvas + a .u-legend below it */
.timeseries-renderer :deep(.u-wrap) {
flex: 1;
min-height: 0;
}
.timeseries-renderer :deep(.u-legend) {
flex-shrink: 0;
}
.timeseries-renderer :deep(.u-legend) {
font-family: var(--font-mono);
font-size: 10px;
color: var(--text-secondary);
padding: 2px 0;
display: flex;
flex-wrap: wrap;
gap: 0 8px;
}
.timeseries-renderer :deep(.u-legend .u-series) {
display: inline-flex;
padding: 0;
}
</style>

View File

@@ -0,0 +1,59 @@
/* Framework design tokens — retheme by replacing this file */
:root {
/* spacing scale (4px base) */
--space-1: 4px;
--space-2: 8px;
--space-3: 12px;
--space-4: 16px;
--space-6: 24px;
--space-8: 32px;
/* color — dark theme (observability UIs are always dark) */
--surface-0: #0d0d0f;
--surface-1: #16161a;
--surface-2: #1e1e24;
--surface-3: #26262f;
--border: #2e2e38;
--text-primary: #e8e8f0;
--text-secondary: #8888a0;
--text-dim: #555568;
/* status colors */
--status-idle: #555568;
--status-live: #3ecf8e;
--status-processing: #4f9cf9;
--status-escalating: #f5a623;
--status-error: #f06565;
/* confidence color scale (low → high) */
--conf-low: #f06565;
--conf-mid: #f5a623;
--conf-high: #3ecf8e;
/* typography */
--font-mono: 'JetBrains Mono', 'Fira Code', monospace;
--font-ui: 'Inter', system-ui, sans-serif;
--font-size-sm: 11px;
--font-size-base: 13px;
--font-size-lg: 15px;
/* panel chrome */
--panel-radius: 6px;
--panel-border: 1px solid var(--border);
--panel-header-height: 36px;
}
/* Animated gradient outline for buttons in a waiting state.
Usage: add class="waiting" to any button/element. */
@keyframes waiting-glow {
0% { box-shadow: 0 0 3px 1px var(--status-processing); }
33% { box-shadow: 0 0 3px 1px var(--status-live); }
66% { box-shadow: 0 0 3px 1px var(--status-escalating); }
100% { box-shadow: 0 0 3px 1px var(--status-processing); }
}
.waiting {
animation: waiting-glow 2s linear infinite;
outline: 1px solid transparent;
}

View File

@@ -0,0 +1,18 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "ESNext",
"moduleResolution": "bundler",
"strict": true,
"jsx": "preserve",
"noEmit": true,
"isolatedModules": true,
"esModuleInterop": true,
"skipLibCheck": true,
"baseUrl": ".",
"paths": {
"@/*": ["src/*"]
}
},
"include": ["src/**/*.ts", "src/**/*.vue"]
}

View File

@@ -0,0 +1,7 @@
import { defineConfig } from 'vitest/config'
export default defineConfig({
test: {
environment: 'node',
},
})

View File

@@ -610,13 +610,13 @@ def index(request: Request):
showcase_url = config.get("showcase_url")
return templates.TemplateResponse(
request,
"index.html",
{
"request": request,
context={
"artery": "/artery",
"atlas": "/atlas",
"station": "/station",
"managed": managed,
"managed": bool(managed),
"managed_url": managed_url,
"showcase_url": showcase_url,
},

View File

@@ -1,13 +0,0 @@
# Core Nest Ports
# Format: one port per line
# Comments allowed with #
# Amar
3000
8000
# Pawprint Services
13000
13001
13002
13003

View File

@@ -1,88 +0,0 @@
#!/bin/bash
# Update ports file from core_nest configuration
# Gathers ports from pawprint and amar .env files
#
# Usage: ./update-ports.sh

set -e

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PORTS_FILE="$SCRIPT_DIR/ports"

# TODO: Make these configurable or auto-detect
CORE_NEST_ROOT="${CORE_NEST_ROOT:-/home/mariano/core_nest}"
PAWPRINT_ENV="$CORE_NEST_ROOT/pawprint/.env"
AMAR_ENV="$CORE_NEST_ROOT/amar/.env"

# read_port FILE KEY DEFAULT
# Print KEY's value from FILE, or DEFAULT when the key is absent/empty.
# NOTE: the previous `grep ... | cut ... || echo DEFAULT` idiom was broken:
# a pipeline's exit status is the LAST command's (cut always succeeds on
# empty input), so the `|| echo` fallback never fired and a missing key
# produced an empty line in the ports file.
read_port() {
    local val
    val=$(grep "^${2}=" "$1" 2>/dev/null | cut -d'=' -f2)
    echo "${val:-$3}"
}

echo "=== Updating Core Nest Ports ==="
echo ""

# Backup existing ports file
if [ -f "$PORTS_FILE" ]; then
    cp "$PORTS_FILE" "$PORTS_FILE.bak"
    echo "  ✓ Backed up existing ports to ports.bak"
fi

# Start new ports file
cat > "$PORTS_FILE" <<'EOF'
# Core Nest Ports
# Auto-generated by update-ports.sh
# Format: one port per line
# Comments allowed with #
EOF

# Extract ports from amar .env
if [ -f "$AMAR_ENV" ]; then
    echo "  Reading amar ports..."
    echo "# Amar" >> "$PORTS_FILE"
    AMAR_FRONTEND_PORT=$(read_port "$AMAR_ENV" "AMAR_FRONTEND_PORT" "3000")
    echo "$AMAR_FRONTEND_PORT" >> "$PORTS_FILE"
    AMAR_BACKEND_PORT=$(read_port "$AMAR_ENV" "AMAR_BACKEND_PORT" "8000")
    echo "$AMAR_BACKEND_PORT" >> "$PORTS_FILE"
    echo "  ✓ Added amar ports: $AMAR_FRONTEND_PORT, $AMAR_BACKEND_PORT"
else
    echo "  ⚠ Amar .env not found, using defaults"
    echo "# Amar (defaults)" >> "$PORTS_FILE"
    echo "3000" >> "$PORTS_FILE"
    echo "8000" >> "$PORTS_FILE"
fi

echo "" >> "$PORTS_FILE"

# Extract ports from pawprint .env
if [ -f "$PAWPRINT_ENV" ]; then
    echo "  Reading pawprint ports..."
    echo "# Pawprint Services" >> "$PORTS_FILE"
    PAWPRINT_PORT=$(read_port "$PAWPRINT_ENV" "PAWPRINT_PORT" "13000")
    ARTERY_PORT=$(read_port "$PAWPRINT_ENV" "ARTERY_PORT" "13001")
    ALBUM_PORT=$(read_port "$PAWPRINT_ENV" "ALBUM_PORT" "13002")
    WARD_PORT=$(read_port "$PAWPRINT_ENV" "WARD_PORT" "13003")
    echo "$PAWPRINT_PORT" >> "$PORTS_FILE"
    echo "$ARTERY_PORT" >> "$PORTS_FILE"
    echo "$ALBUM_PORT" >> "$PORTS_FILE"
    echo "$WARD_PORT" >> "$PORTS_FILE"
    echo "  ✓ Added pawprint ports: $PAWPRINT_PORT, $ARTERY_PORT, $ALBUM_PORT, $WARD_PORT"
else
    echo "  ⚠ Pawprint .env not found, using defaults"
    echo "# Pawprint Services (defaults)" >> "$PORTS_FILE"
    echo "13000" >> "$PORTS_FILE"
    echo "13001" >> "$PORTS_FILE"
    echo "13002" >> "$PORTS_FILE"
    echo "13003" >> "$PORTS_FILE"
fi

echo ""
echo "=== Done ==="
echo ""
echo "Updated ports file: $PORTS_FILE"
echo ""
cat "$PORTS_FILE"

View File

@@ -7,36 +7,38 @@ Supported generators:
- TypeScriptGenerator: TypeScript interfaces
- ProtobufGenerator: Protocol Buffer definitions
- PrismaGenerator: Prisma schema
- GrapheneGenerator: Graphene ObjectType/InputObjectType classes
- StrawberryGenerator: Strawberry type/input/enum classes
"""
from typing import Dict, Type
from .base import BaseGenerator
from .django import DjangoGenerator
from .graphene import GrapheneGenerator
from .prisma import PrismaGenerator
from .protobuf import ProtobufGenerator
from .pydantic import PydanticGenerator
from .sqlmodel import SQLModelGenerator
from .strawberry import StrawberryGenerator
from .typescript import TypeScriptGenerator
# Registry of available generators
GENERATORS: Dict[str, Type[BaseGenerator]] = {
"pydantic": PydanticGenerator,
"django": DjangoGenerator,
"sqlmodel": SQLModelGenerator,
"typescript": TypeScriptGenerator,
"ts": TypeScriptGenerator, # Alias
"protobuf": ProtobufGenerator,
"proto": ProtobufGenerator, # Alias
"prisma": PrismaGenerator,
"graphene": GrapheneGenerator,
"strawberry": StrawberryGenerator,
}
__all__ = [
"BaseGenerator",
"PydanticGenerator",
"DjangoGenerator",
"GrapheneGenerator",
"StrawberryGenerator",
"TypeScriptGenerator",
"ProtobufGenerator",
"PrismaGenerator",

View File

@@ -12,7 +12,7 @@ from enum import Enum
from pathlib import Path
from typing import Any, List, get_type_hints
from ..helpers import get_origin_name, get_type_name, unwrap_optional
from ..helpers import get_origin_name, get_type_name, is_dataclass_type, unwrap_optional
from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
from ..types import PYDANTIC_RESOLVERS
from .base import BaseGenerator
@@ -54,8 +54,9 @@ class PydanticGenerator(BaseGenerator):
if hasattr(models, "get_shared_component"):
content = self._generate_from_config(models)
elif hasattr(models, "models"):
all_models = models.models + getattr(models, "api_models", [])
content = self._generate_from_definitions(
models.models, getattr(models, "enums", [])
all_models, getattr(models, "enums", [])
)
elif isinstance(models, tuple):
content = self._generate_from_definitions(models[0], models[1])
@@ -245,6 +246,7 @@ class PydanticGenerator(BaseGenerator):
"",
]
def _generate_enum(self, enum_def: EnumDefinition) -> List[str]:
lines = [f"class {enum_def.name}(str, Enum):"]
for name, value in enum_def.values:
@@ -307,6 +309,11 @@ class PydanticGenerator(BaseGenerator):
if isinstance(base, type) and issubclass(base, Enum)
else None
)
or (
PYDANTIC_RESOLVERS["dataclass"]
if is_dataclass_type(base)
else None
)
)
result = resolver(base) if resolver else "str"
return f"Optional[{result}]" if optional else result
@@ -321,7 +328,12 @@ class PydanticGenerator(BaseGenerator):
if isinstance(default, Enum):
return f" = {default.__class__.__name__}.{default.name}"
if callable(default):
return " = Field(default_factory=list)" if "list" in str(default) else ""
default_str = str(default)
if "list" in default_str:
return " = Field(default_factory=list)"
if "dict" in default_str:
return " = Field(default_factory=dict)"
return ""
return f" = {default!r}"
def _generate_from_config(self, config) -> str:

View File

@@ -0,0 +1,181 @@
"""
SQLModel Generator
Generates SQLModel table classes from model definitions.
Extends the Pydantic generator — SQLModel classes *are* Pydantic models
with table=True and SQLAlchemy column config for JSON fields.
"""
import dataclasses as dc
import re
from enum import Enum
from typing import Any, List, get_type_hints
from ..helpers import get_origin_name, get_type_name, unwrap_optional
from .pydantic import PydanticGenerator
# ---------------------------------------------------------------------------
# Field resolvers — each returns a Field() string or None to fall through
# ---------------------------------------------------------------------------
def _resolve_special(name, _base, _origin, _optional, _default):
"""id, created_at, updated_at get fixed Field() definitions."""
specials = {
"id": "Field(default_factory=uuid4, primary_key=True)",
"created_at": "Field(default_factory=datetime.utcnow)",
"updated_at": "Field(default_factory=datetime.utcnow)",
}
return specials.get(name)
def _resolve_json(name, _base, origin, _optional, _default):
"""Dict and List fields → sa_column=Column(JSON)."""
mapping = {
"dict": ("dict", "{}"),
"list": ("list", "[]"),
}
entry = mapping.get(origin)
if not entry:
return None
factory, server_default = entry
return (
f"Field(default_factory={factory}, "
f"sa_column=Column(JSON, nullable=False, server_default='{server_default}'))"
)
def _resolve_indexed(name, _base, _origin, optional, _default):
"""Known indexed fields."""
indexed = {"source_asset_id", "parent_job_id", "job_id", "canonical_name"}
if name not in indexed:
return None
if optional:
return "Field(default=None, index=True)"
return "Field(index=True)"
def _resolve_optional(_name, _base, _origin, optional, _default):
"""Optional fields default to None."""
if optional:
return "None"
return None
def _resolve_default(_name, _base, _origin, _optional, default):
"""Fields with explicit defaults. Enum before str (str enums are both)."""
if default is dc.MISSING or default is None:
return None
if isinstance(default, Enum):
return f'"{default.value}"'
if isinstance(default, bool):
return str(default)
if isinstance(default, (int, float)):
return str(default)
if isinstance(default, str):
return f'"{default}"'
return None
# Resolver chain — first non-None result wins. Order is significant:
# framework-managed columns shadow everything else, container columns
# shadow index/optional handling, and explicit defaults come last.
_FIELD_RESOLVERS = [
    _resolve_special,   # id / created_at / updated_at fixed definitions
    _resolve_json,      # dict / list → JSON column config
    _resolve_indexed,   # known indexed fields
    _resolve_optional,  # Optional[...] → default None
    _resolve_default,   # explicit scalar/enum defaults
]
def _resolve_field(name, type_hint, default):
    """Run the resolver chain for a field.

    Returns a ' = ...' suffix string, or '' when no resolver matched.
    """
    base, is_optional = unwrap_optional(type_hint)
    origin = get_origin_name(base)
    candidates = (
        fn(name, base, origin, is_optional, default) for fn in _FIELD_RESOLVERS
    )
    chosen = next((hit for hit in candidates if hit is not None), None)
    return "" if chosen is None else f" = {chosen}"
def _to_snake(name):
"""CamelCase → snake_case for table names."""
return re.sub(r"(?<=[a-z])(?=[A-Z])", "_", name).lower()
# Preamble emitted at the top of every generated SQLModel module:
# a generated-file warning docstring plus every import the emitted
# table classes can rely on (datetime/uuid defaults, typing, SQLModel
# Field/Column and the SQLAlchemy JSON column type).
_HEADER = [
    '"""',
    "SQLModel Table Models - GENERATED FILE",
    "",
    "Do not edit directly. Regenerate using modelgen.",
    '"""',
    "",
    "from datetime import datetime",
    "from enum import Enum",
    "from typing import Any, Dict, List, Optional",
    "from uuid import UUID, uuid4",
    "",
    "from sqlmodel import SQLModel, Field, Column",
    "from sqlalchemy import JSON",
    "",
]
class SQLModelGenerator(PydanticGenerator):
    """Generates SQLModel table classes.

    Reuses the Pydantic generator's traversal and type resolution
    (SQLModel classes are Pydantic models) and overrides only the
    header and per-model emission to produce table=True classes.
    """

    def _generate_header(self) -> List[str]:
        # Return a copy so callers can extend the result without
        # mutating the module-level template.
        return list(_HEADER)

    def _generate_model_from_dataclass(self, cls: type) -> List[str]:
        # Build one table class from a Python dataclass source model.
        return _build_table(
            cls.__name__,
            cls.__doc__ or cls.__name__,
            get_type_hints(cls),
            {f.name: f for f in dc.fields(cls)},
            self._resolve_type,
        )

    def _generate_model_from_definition(self, model_def) -> List[str]:
        # Build one table class from a parsed ModelDefinition.
        hints = {f.name: f.type_hint for f in model_def.fields}
        defaults = {f.name: f.default for f in model_def.fields}

        # _build_table only reads `.default` from each field object, so a
        # minimal stand-in mirrors the dataclasses.Field interface.
        class FakeField:
            def __init__(self, default):
                self.default = default

        fields = {name: FakeField(defaults.get(name, dc.MISSING)) for name in hints}
        return _build_table(
            model_def.name,
            model_def.docstring or model_def.name,
            hints,
            fields,
            self._resolve_type,
        )
def _build_table(name, docstring, hints, fields, resolve_type_fn):
    """Build a SQLModel table class from field data.

    Args:
        name: Model class name (also snake_cased into __tablename__).
        docstring: Source docstring; only its first line is emitted.
        hints: Mapping of field name -> type hint.
        fields: Mapping of field name -> object exposing `.default`
            (dataclasses.Field or a stand-in).
        resolve_type_fn: Callback resolving a type hint to source text.

    Returns:
        List of source lines for the generated class.
    """
    table_name = _to_snake(name)
    lines = [
        f"class {name}(SQLModel, table=True):",
        f' """{docstring.strip().split(chr(10))[0]}"""',  # chr(10) == newline
        f' __tablename__ = "{table_name}"',
        "",
    ]
    for field_name, type_hint in hints.items():
        # Private attributes are implementation detail, not columns.
        if field_name.startswith("_"):
            continue
        field = fields.get(field_name)
        default_val = dc.MISSING
        if field and field.default is not dc.MISSING:
            default_val = field.default
        py_type = resolve_type_fn(type_hint, False)
        # Resolver chain produces the ' = ...' suffix (may be empty).
        field_extra = _resolve_field(field_name, type_hint, default_val)
        lines.append(f" {field_name}: {py_type}{field_extra}")
    return lines

View File

@@ -1,28 +1,29 @@
"""
Graphene Generator
Strawberry Generator
Generates graphene ObjectType and InputObjectType classes from model definitions.
Generates strawberry type, input, and enum classes from model definitions.
Only generates type definitions — queries, mutations, and resolvers are hand-written.
"""
import dataclasses as dc
from enum import Enum
from pathlib import Path
from typing import Any, List, get_type_hints
from ..helpers import get_origin_name, get_type_name, unwrap_optional
from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
from ..types import GRAPHENE_RESOLVERS
from ..types import STRAWBERRY_RESOLVERS
from .base import BaseGenerator
class GrapheneGenerator(BaseGenerator):
"""Generates graphene type definition files."""
class StrawberryGenerator(BaseGenerator):
"""Generates strawberry type definition files."""
def file_extension(self) -> str:
return ".py"
def generate(self, models, output_path: Path) -> None:
"""Generate graphene types to output_path."""
"""Generate strawberry types to output_path."""
output_path.parent.mkdir(parents=True, exist_ok=True)
if hasattr(models, "models"):
@@ -47,22 +48,18 @@ class GrapheneGenerator(BaseGenerator):
enums: List[EnumDefinition],
api_models: List[ModelDefinition],
) -> str:
"""Generate from ModelDefinition objects."""
lines = self._generate_header()
# Generate enums as graphene.Enum
for enum_def in enums:
lines.extend(self._generate_enum(enum_def))
lines.append("")
lines.append("")
# Generate domain models as ObjectType
for model_def in models:
lines.extend(self._generate_object_type(model_def))
lines.append("")
lines.append("")
# Generate API models — request types as InputObjectType, others as ObjectType
for model_def in api_models:
if model_def.name.endswith("Request"):
lines.extend(self._generate_input_type(model_def))
@@ -74,7 +71,6 @@ class GrapheneGenerator(BaseGenerator):
return "\n".join(lines).rstrip() + "\n"
def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
"""Generate from Python dataclasses."""
lines = self._generate_header()
enums_generated = set()
@@ -99,37 +95,38 @@ class GrapheneGenerator(BaseGenerator):
def _generate_header(self) -> List[str]:
return [
'"""',
"Graphene Types - GENERATED FILE",
"Strawberry Types - GENERATED FILE",
"",
"Do not edit directly. Regenerate using modelgen.",
'"""',
"",
"import graphene",
"import strawberry",
"from enum import Enum",
"from typing import List, Optional",
"from uuid import UUID",
"from datetime import datetime",
"from strawberry.scalars import JSON",
"",
"",
]
def _generate_enum(self, enum_def: EnumDefinition) -> List[str]:
"""Generate graphene.Enum from EnumDefinition."""
lines = [f"class {enum_def.name}(graphene.Enum):"]
lines = ["@strawberry.enum", f"class {enum_def.name}(Enum):"]
for name, value in enum_def.values:
lines.append(f' {name} = "{value}"')
return lines
def _generate_enum_from_python(self, enum_cls: type) -> List[str]:
"""Generate graphene.Enum from Python Enum."""
lines = [f"class {enum_cls.__name__}(graphene.Enum):"]
lines = ["@strawberry.enum", f"class {enum_cls.__name__}(Enum):"]
for member in enum_cls:
lines.append(f' {member.name} = "{member.value}"')
return lines
def _generate_object_type(self, model_def: ModelDefinition) -> List[str]:
"""Generate graphene.ObjectType from ModelDefinition."""
name = model_def.name
# Append Type suffix if not already present
type_name = f"{name}Type" if not name.endswith("Type") else name
lines = [f"class {type_name}(graphene.ObjectType):"]
lines = ["@strawberry.type", f"class {type_name}:"]
if model_def.docstring:
doc = model_def.docstring.strip().split("\n")[0]
lines.append(f' """{doc}"""')
@@ -139,23 +136,19 @@ class GrapheneGenerator(BaseGenerator):
lines.append(" pass")
else:
for field in model_def.fields:
graphene_type = self._resolve_type(field.type_hint, field.optional)
lines.append(f" {field.name} = {graphene_type}")
type_str = self._resolve_type(field.type_hint, optional=True)
lines.append(f" {field.name}: {type_str} = None")
return lines
def _generate_input_type(self, model_def: ModelDefinition) -> List[str]:
"""Generate graphene.InputObjectType from ModelDefinition."""
import dataclasses as dc
name = model_def.name
# Convert FooRequest -> FooInput
if name.endswith("Request"):
input_name = name[: -len("Request")] + "Input"
else:
input_name = f"{name}Input"
lines = [f"class {input_name}(graphene.InputObjectType):"]
lines = ["@strawberry.input", f"class {input_name}:"]
if model_def.docstring:
doc = model_def.docstring.strip().split("\n")[0]
lines.append(f' """{doc}"""')
@@ -164,73 +157,64 @@ class GrapheneGenerator(BaseGenerator):
if not model_def.fields:
lines.append(" pass")
else:
# Required fields first, then optional/defaulted
required = []
optional = []
for field in model_def.fields:
graphene_type = self._resolve_type(field.type_hint, field.optional)
# Required only if not optional AND no default value
has_default = field.default is not dc.MISSING
if not field.optional and not has_default:
graphene_type = self._make_required(graphene_type)
elif has_default and not field.optional:
graphene_type = self._add_default(graphene_type, field.default)
lines.append(f" {field.name} = {graphene_type}")
required.append(field)
else:
optional.append(field)
for field in required:
type_str = self._resolve_type(field.type_hint, optional=False)
lines.append(f" {field.name}: {type_str}")
for field in optional:
has_default = field.default is not dc.MISSING
if has_default and not callable(field.default):
type_str = self._resolve_type(field.type_hint, optional=False)
lines.append(f" {field.name}: {type_str} = {field.default!r}")
else:
type_str = self._resolve_type(field.type_hint, optional=True)
lines.append(f" {field.name}: {type_str} = None")
return lines
def _generate_object_type_from_dataclass(self, cls: type) -> List[str]:
"""Generate graphene.ObjectType from a dataclass."""
import dataclasses as dc
type_name = f"{cls.__name__}Type"
lines = [f"class {type_name}(graphene.ObjectType):"]
lines = ["@strawberry.type", f"class {type_name}:"]
hints = get_type_hints(cls)
for name, type_hint in hints.items():
if name.startswith("_"):
continue
graphene_type = self._resolve_type(type_hint, False)
lines.append(f" {name} = {graphene_type}")
type_str = self._resolve_type(type_hint, optional=True)
lines.append(f" {name}: {type_str} = None")
return lines
def _resolve_type(self, type_hint: Any, optional: bool) -> str:
"""Resolve Python type to graphene field call string."""
"""Resolve Python type hint to a strawberry annotation string."""
base, is_optional = unwrap_optional(type_hint)
optional = optional or is_optional
origin = get_origin_name(base)
type_name = get_type_name(base)
# Look up resolver
resolver = (
GRAPHENE_RESOLVERS.get(origin)
or GRAPHENE_RESOLVERS.get(type_name)
or GRAPHENE_RESOLVERS.get(base)
STRAWBERRY_RESOLVERS.get(origin)
or STRAWBERRY_RESOLVERS.get(type_name)
or STRAWBERRY_RESOLVERS.get(base)
or (
GRAPHENE_RESOLVERS["enum"]
STRAWBERRY_RESOLVERS["enum"]
if isinstance(base, type) and issubclass(base, Enum)
else None
)
)
result = resolver(base) if resolver else "graphene.String"
inner = resolver(base) if resolver else "str"
# List types already have () syntax from resolver
if result.startswith("graphene.List("):
return result
# Scalar types: add () call
return f"{result}()"
def _make_required(self, field_str: str) -> str:
"""Add required=True to a graphene field."""
if field_str.endswith("()"):
return field_str[:-1] + "required=True)"
return field_str
def _add_default(self, field_str: str, default: Any) -> str:
"""Add default_value to a graphene field."""
if callable(default):
# default_factory — skip, graphene doesn't support factories
return field_str
if field_str.endswith("()"):
return field_str[:-1] + f"default_value={default!r})"
return field_str
if optional:
return f"Optional[{inner}]"
return inner

View File

@@ -8,7 +8,7 @@ from enum import Enum
from pathlib import Path
from typing import Any, List, get_type_hints
from ..helpers import get_origin_name, get_type_name, unwrap_optional
from ..helpers import get_origin_name, get_type_name, is_dataclass_type, unwrap_optional
from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
from ..types import TS_RESOLVERS
from .base import BaseGenerator
@@ -139,6 +139,11 @@ class TypeScriptGenerator(BaseGenerator):
if isinstance(base, type) and issubclass(base, Enum)
else None
)
or (
TS_RESOLVERS["dataclass"]
if is_dataclass_type(base)
else None
)
)
result = resolver(base) if resolver else "string"

View File

@@ -44,6 +44,17 @@ def get_list_inner(type_hint: Any) -> str:
return "str"
def is_dataclass_type(type_hint: Any) -> bool:
"""Check if type is a dataclass (nested model reference)."""
return isinstance(type_hint, type) and dc.is_dataclass(type_hint)
def get_list_inner_type(type_hint: Any) -> Any:
"""Get the raw inner type of List[T] (not stringified)."""
args = get_args(type_hint)
return args[0] if args else None
def get_field_default(field: dc.Field) -> Any:
"""Get default value from dataclass field."""
if field.default is not dc.MISSING:

View File

@@ -101,6 +101,12 @@ class SchemaLoader:
for enum_cls in enums:
self.enums.append(self._parse_enum(enum_cls))
# Extract VIEWS (view/event projections)
if load_all or "views" in include:
views = getattr(module, "VIEWS", [])
for cls in views:
self.api_models.append(self._parse_dataclass(cls))
# Extract GRPC_MESSAGES (optional)
if load_all or "grpc" in include:
grpc_messages = getattr(module, "GRPC_MESSAGES", [])
@@ -117,6 +123,20 @@ class SchemaLoader:
methods=grpc_service.get("methods", []),
)
# Generic group loader: any include group not handled above
# is looked up as UPPER_CASE attribute on the module.
# e.g. include "detect_views" → module.DETECT_VIEWS
if include:
known_groups = {"dataclasses", "enums", "api", "views", "grpc"}
for group in include - known_groups:
attr_name = group.upper()
items = getattr(module, attr_name, [])
for cls in items:
if isinstance(cls, type) and dc.is_dataclass(cls):
self.api_models.append(self._parse_dataclass(cls))
elif isinstance(cls, type) and issubclass(cls, Enum):
self.enums.append(self._parse_enum(cls))
return self
def _import_module(self, path: Path):

View File

@@ -0,0 +1,16 @@
[build-system]
requires = ["setuptools>=68.0"]
build-backend = "setuptools.build_meta"
[project]
name = "soleprint-modelgen"
version = "0.2.0"
description = "Multi-source, multi-target model code generator"
requires-python = ">=3.10"
dependencies = []
[project.scripts]
modelgen = "modelgen.__main__:main"
[tool.setuptools.packages.find]
include = ["modelgen*"]

View File

@@ -5,6 +5,7 @@ Type mappings for each output format.
Used by generators to convert Python types to target framework types.
"""
import dataclasses as dc
from typing import Any, Callable, get_args
# =============================================================================
@@ -39,8 +40,12 @@ DJANGO_SPECIAL: dict[str, str] = {
def _get_list_inner(type_hint: Any) -> str:
"""Get inner type of List[T] for Pydantic."""
args = get_args(type_hint)
if args and args[0] in (str, int, float, bool):
return {str: "str", int: "int", float: "float", bool: "bool"}[args[0]]
if args:
inner = args[0]
if inner in (str, int, float, bool):
return {str: "str", int: "int", float: "float", bool: "bool"}[inner]
if isinstance(inner, type) and dc.is_dataclass(inner):
return inner.__name__
return "str"
@@ -49,11 +54,13 @@ PYDANTIC_RESOLVERS: dict[Any, Callable[[Any], str]] = {
int: lambda _: "int",
float: lambda _: "float",
bool: lambda _: "bool",
Any: lambda _: "Any",
"UUID": lambda _: "UUID",
"datetime": lambda _: "datetime",
"dict": lambda _: "Dict[str, Any]",
"list": lambda base: f"List[{_get_list_inner(base)}]",
"enum": lambda base: base.__name__,
"dataclass": lambda base: base.__name__,
}
# =============================================================================
@@ -72,6 +79,8 @@ def _resolve_ts_list(base: Any) -> str:
return "number[]"
elif inner is bool:
return "boolean[]"
elif isinstance(inner, type) and dc.is_dataclass(inner):
return f"{inner.__name__}[]"
return "string[]"
@@ -85,6 +94,7 @@ TS_RESOLVERS: dict[Any, Callable[[Any], str]] = {
"dict": lambda _: "Record<string, unknown>",
"list": _resolve_ts_list,
"enum": lambda base: base.__name__,
"dataclass": lambda base: base.__name__,
}
# =============================================================================
@@ -139,34 +149,34 @@ PRISMA_SPECIAL: dict[str, str] = {
}
# =============================================================================
# Graphene Type Resolvers
# Strawberry Type Resolvers
# =============================================================================
def _resolve_graphene_list(base: Any) -> str:
"""Resolve graphene List type."""
def _resolve_strawberry_list(base: Any) -> str:
"""Resolve strawberry List type annotation."""
args = get_args(base)
if args:
inner = args[0]
if inner is str:
return "graphene.List(graphene.String)"
return "List[str]"
elif inner is int:
return "graphene.List(graphene.Int)"
return "List[int]"
elif inner is float:
return "graphene.List(graphene.Float)"
return "List[float]"
elif inner is bool:
return "graphene.List(graphene.Boolean)"
return "graphene.List(graphene.String)"
return "List[bool]"
return "List[str]"
GRAPHENE_RESOLVERS: dict[Any, Callable[[Any], str]] = {
str: lambda _: "graphene.String",
int: lambda _: "graphene.Int",
float: lambda _: "graphene.Float",
bool: lambda _: "graphene.Boolean",
"UUID": lambda _: "graphene.UUID",
"datetime": lambda _: "graphene.DateTime",
"dict": lambda _: "graphene.JSONString",
"list": _resolve_graphene_list,
"enum": lambda base: f"graphene.String", # Enums exposed as strings in GQL
STRAWBERRY_RESOLVERS: dict[Any, Callable[[Any], str]] = {
str: lambda _: "str",
int: lambda _: "int",
float: lambda _: "float",
bool: lambda _: "bool",
"UUID": lambda _: "UUID",
"datetime": lambda _: "datetime",
"dict": lambda _: "JSON",
"list": _resolve_strawberry_list,
"enum": lambda base: base.__name__,
}

View File

@@ -1,5 +1,5 @@
{
"room_name": "amar",
"room_name": "standalone",
"wrapper": {
"enabled": true,
"environment": {
@@ -10,31 +10,19 @@
{
"id": "admin",
"label": "Admin",
"username": "admin@test.com",
"password": "Amar2025!",
"username": "admin@example.com",
"password": "admin",
"icon": "👑",
"role": "ADMIN"
},
{
"id": "vet1",
"label": "Vet 1",
"username": "vet@test.com",
"password": "Amar2025!",
"icon": "🩺",
"role": "VET"
},
{
"id": "tutor1",
"label": "Tutor 1",
"username": "tutor@test.com",
"password": "Amar2025!",
"icon": "🐶",
"id": "user1",
"label": "User 1",
"username": "user@example.com",
"password": "user",
"icon": "👤",
"role": "USER"
}
],
"jira": {
"ticket_id": "VET-535",
"epic": "EPIC-51.3"
}
]
}
}

View File

@@ -2,12 +2,30 @@
Pure HTTP Contract Tests - Base Class
Framework-agnostic: works against ANY backend implementation.
Does NOT manage database - expects a ready environment.
Auth strategies (set CONTRACT_TEST_AUTH_TYPE env var):
- bearer (default): JWT token via CONTRACT_TEST_TOKEN or fetched from TOKEN_ENDPOINT
- api-key: API key via CONTRACT_TEST_API_KEY
- none: No authentication
Usage:
CONTRACT_TEST_URL=http://127.0.0.1:8000 pytest
CONTRACT_TEST_URL=http://127.0.0.1:8000 CONTRACT_TEST_TOKEN=your_jwt pytest
CONTRACT_TEST_URL=http://127.0.0.1:8000 CONTRACT_TEST_AUTH_TYPE=api-key CONTRACT_TEST_API_KEY=key pytest
"""
import os
import unittest
import httpx
from .config import config
def get_base_url():
    """Return the contract-test base URL from the environment.

    Raises ValueError when CONTRACT_TEST_URL is unset or empty; trailing
    slashes are stripped so callers can join paths safely.
    """
    url = os.environ.get("CONTRACT_TEST_URL")
    if url:
        return url.rstrip("/")
    raise ValueError("CONTRACT_TEST_URL environment variable required")
class ContractTestCase(unittest.TestCase):
@@ -18,35 +36,71 @@ class ContractTestCase(unittest.TestCase):
- Framework-agnostic (works with Django, FastAPI, Node, etc.)
- Pure HTTP via httpx library
- No database access - all data through API
- API Key authentication
- Configurable authentication (bearer, api-key, none)
"""
# Auth config - override via environment or subclass
AUTH_TYPE = os.environ.get("CONTRACT_TEST_AUTH_TYPE", "bearer")
TEST_USER_EMAIL = os.environ.get("CONTRACT_TEST_USER", "contract_test@example.com")
TEST_USER_PASSWORD = os.environ.get("CONTRACT_TEST_PASSWORD", "testpass123")
TOKEN_ENDPOINT = os.environ.get("CONTRACT_TEST_TOKEN_ENDPOINT", "/api/token/")
# Class-level cache
_base_url = None
_token = None
_api_key = None
@classmethod
def setUpClass(cls):
"""Set up once per test class"""
super().setUpClass()
cls._base_url = config.get("CONTRACT_TEST_URL", "").rstrip("/")
if not cls._base_url:
raise ValueError("CONTRACT_TEST_URL required in environment")
cls._base_url = get_base_url()
cls._api_key = config.get("CONTRACT_TEST_API_KEY", "")
if cls.AUTH_TYPE == "bearer":
cls._token = os.environ.get("CONTRACT_TEST_TOKEN", "")
if not cls._token:
cls._token = cls._fetch_token()
elif cls.AUTH_TYPE == "api-key":
cls._api_key = os.environ.get("CONTRACT_TEST_API_KEY", "")
if not cls._api_key:
raise ValueError("CONTRACT_TEST_API_KEY required in environment")
raise ValueError("CONTRACT_TEST_API_KEY required for api-key auth")
    @classmethod
    def _fetch_token(cls):
        """Fetch a JWT access token from TOKEN_ENDPOINT.

        Posts the configured test credentials and returns the "access"
        token on HTTP 200. Any failure (non-200 or network error) warns
        and returns "" so the test run proceeds unauthenticated instead
        of erroring out during setup.
        """
        url = f"{cls._base_url}{cls.TOKEN_ENDPOINT}"
        try:
            response = httpx.post(url, json={
                "username": cls.TEST_USER_EMAIL,
                "password": cls.TEST_USER_PASSWORD,
            }, timeout=10)
            if response.status_code == 200:
                return response.json().get("access", "")
            else:
                # Best-effort: warn and fall through to the empty token.
                print(f"Warning: Token request failed with {response.status_code}")
        except httpx.RequestError as e:
            print(f"Warning: Token request failed: {e}")
        return ""
@property
def base_url(self):
return self._base_url
@property
def token(self):
return self._token
@property
def api_key(self):
return self._api_key
def _auth_headers(self):
"""Get authorization headers"""
return {"Authorization": f"Api-Key {self.api_key}"}
"""Get authorization headers based on auth type"""
if self.AUTH_TYPE == "bearer" and self._token:
return {"Authorization": f"Bearer {self._token}"}
elif self.AUTH_TYPE == "api-key" and self._api_key:
return {"Authorization": f"Api-Key {self._api_key}"}
return {}
# =========================================================================
# HTTP helpers
@@ -117,3 +171,6 @@ class ContractTestCase(unittest.TestCase):
"""Assert data is a list with minimum length"""
self.assertIsInstance(data, list)
self.assertGreaterEqual(len(data), min_length)
__all__ = ["ContractTestCase", "get_base_url"]

View File

@@ -53,7 +53,7 @@ def load_environments() -> list:
{
"id": "demo",
"name": "Demo",
"url": config.get("CONTRACT_TEST_URL", "https://demo.amarmascotas.ar"),
"url": config.get("CONTRACT_TEST_URL", "http://localhost:8000"),
"api_key": config.get("CONTRACT_TEST_API_KEY", ""),
"description": "Demo environment",
"default": True

View File

@@ -1,37 +0,0 @@
"""
API Endpoints - Single source of truth for contract tests.
If API paths or versioning changes, update here only.
"""
class Endpoints:
    """API endpoint paths.

    Single source of truth for contract-test URLs: when API paths or
    versioning change, update here only. `{id}` placeholders are filled
    via str.format() by the tests.
    """

    # ==========================================================================
    # Mascotas
    # ==========================================================================
    PET_OWNERS = "/mascotas/api/v1/pet-owners/"
    PET_OWNER_DETAIL = "/mascotas/api/v1/pet-owners/{id}/"
    PETS = "/mascotas/api/v1/pets/"
    PET_DETAIL = "/mascotas/api/v1/pets/{id}/"
    COVERAGE_CHECK = "/mascotas/api/v1/coverage/check/"

    # ==========================================================================
    # Productos
    # ==========================================================================
    SERVICES = "/productos/api/v1/services/"
    CART = "/productos/api/v1/cart/"
    CART_DETAIL = "/productos/api/v1/cart/{id}/"

    # ==========================================================================
    # Solicitudes
    # ==========================================================================
    SERVICE_REQUESTS = "/solicitudes/service-requests/"
    SERVICE_REQUEST_DETAIL = "/solicitudes/service-requests/{id}/"

    # ==========================================================================
    # Auth
    # ==========================================================================
    TOKEN = "/api/token/"
    TOKEN_REFRESH = "/api/token/refresh/"

View File

@@ -1,31 +1,10 @@
[
{
"id": "demo",
"name": "Demo",
"url": "https://demo.amarmascotas.ar",
"id": "local",
"name": "Local",
"url": "http://localhost:8000",
"api_key": "",
"description": "Demo environment for testing",
"description": "Local development server",
"default": true
},
{
"id": "dev",
"name": "Development",
"url": "https://dev.amarmascotas.ar",
"api_key": "",
"description": "Development environment"
},
{
"id": "stage",
"name": "Staging",
"url": "https://stage.amarmascotas.ar",
"api_key": "",
"description": "Staging environment"
},
{
"id": "prod",
"name": "Production",
"url": "https://amarmascotas.ar",
"api_key": "",
"description": "Production environment (use with caution!)"
}
]

View File

@@ -1,44 +1,17 @@
"""
Contract Tests - Shared test data helpers.
Contract Tests - Generic test data helpers.
Used across all endpoint tests to generate consistent test data.
Room-specific helpers belong in cfg/<room>/station/tools/tester/tests/helpers.py
"""
import time
def unique_email(prefix="test"):
"""Generate unique email for test data"""
"""Generate unique email for test data (avoids collisions across runs)"""
return f"{prefix}_{int(time.time() * 1000)}@contract-test.local"
def sample_pet_owner(email=None):
"""Generate sample pet owner data"""
return {
"first_name": "Test",
"last_name": "Usuario",
"email": email or unique_email("owner"),
"phone": "1155667788",
"address": "Av. Santa Fe 1234",
"geo_latitude": -34.5955,
"geo_longitude": -58.4166,
}
SAMPLE_CAT = {
"name": "TestCat",
"pet_type": "CAT",
"is_neutered": False,
}
SAMPLE_DOG = {
"name": "TestDog",
"pet_type": "DOG",
"is_neutered": False,
}
SAMPLE_NEUTERED_CAT = {
"name": "NeuteredCat",
"pet_type": "CAT",
"is_neutered": True,
}
def unique_id(prefix="test"):
"""Generate unique string identifier"""
return f"{prefix}_{int(time.time() * 1000)}"

View File

@@ -1,164 +1,4 @@
"""
Pure HTTP Contract Tests - Base Class
"""Re-export from parent for backward compatibility."""
from ..base import ContractTestCase, get_base_url
Framework-agnostic: works against ANY backend implementation.
Does NOT manage database - expects a ready environment.
Requirements:
- Server running at CONTRACT_TEST_URL
- Database migrated and seeded
- Test user exists OR CONTRACT_TEST_TOKEN provided
Usage:
CONTRACT_TEST_URL=http://127.0.0.1:8000 pytest
CONTRACT_TEST_TOKEN=your_jwt_token pytest
"""
import os
import unittest
import httpx
from .endpoints import Endpoints
def get_base_url():
"""Get base URL from environment (required)"""
url = os.environ.get("CONTRACT_TEST_URL", "")
if not url:
raise ValueError("CONTRACT_TEST_URL environment variable required")
return url.rstrip("/")
class ContractTestCase(unittest.TestCase):
"""
Base class for pure HTTP contract tests.
Features:
- Framework-agnostic (works with Django, FastAPI, Node, etc.)
- Pure HTTP via requests library
- No database access - all data through API
- JWT authentication
"""
# Auth credentials - override via environment
TEST_USER_EMAIL = os.environ.get("CONTRACT_TEST_USER", "contract_test@example.com")
TEST_USER_PASSWORD = os.environ.get("CONTRACT_TEST_PASSWORD", "testpass123")
# Class-level cache
_base_url = None
_token = None
@classmethod
def setUpClass(cls):
"""Set up once per test class"""
super().setUpClass()
cls._base_url = get_base_url()
# Use provided token or fetch one
cls._token = os.environ.get("CONTRACT_TEST_TOKEN", "")
if not cls._token:
cls._token = cls._fetch_token()
@classmethod
def _fetch_token(cls):
"""Get JWT token for authentication"""
url = f"{cls._base_url}{Endpoints.TOKEN}"
try:
response = httpx.post(url, json={
"username": cls.TEST_USER_EMAIL,
"password": cls.TEST_USER_PASSWORD,
}, timeout=10)
if response.status_code == 200:
return response.json().get("access", "")
else:
print(f"Warning: Token request failed with {response.status_code}")
except httpx.RequestError as e:
print(f"Warning: Token request failed: {e}")
return ""
@property
def base_url(self):
return self._base_url
@property
def token(self):
return self._token
def _auth_headers(self):
"""Get authorization headers"""
if self.token:
return {"Authorization": f"Bearer {self.token}"}
return {}
# =========================================================================
# HTTP helpers
# =========================================================================
def get(self, path: str, params: dict = None, **kwargs):
"""GET request"""
url = f"{self.base_url}{path}"
headers = {**self._auth_headers(), **kwargs.pop("headers", {})}
response = httpx.get(url, params=params, headers=headers, timeout=30, **kwargs)
return self._wrap_response(response)
def post(self, path: str, data: dict = None, **kwargs):
"""POST request with JSON"""
url = f"{self.base_url}{path}"
headers = {**self._auth_headers(), **kwargs.pop("headers", {})}
response = httpx.post(url, json=data, headers=headers, timeout=30, **kwargs)
return self._wrap_response(response)
def put(self, path: str, data: dict = None, **kwargs):
"""PUT request with JSON"""
url = f"{self.base_url}{path}"
headers = {**self._auth_headers(), **kwargs.pop("headers", {})}
response = httpx.put(url, json=data, headers=headers, timeout=30, **kwargs)
return self._wrap_response(response)
def patch(self, path: str, data: dict = None, **kwargs):
"""PATCH request with JSON"""
url = f"{self.base_url}{path}"
headers = {**self._auth_headers(), **kwargs.pop("headers", {})}
response = httpx.patch(url, json=data, headers=headers, timeout=30, **kwargs)
return self._wrap_response(response)
def delete(self, path: str, **kwargs):
"""DELETE request"""
url = f"{self.base_url}{path}"
headers = {**self._auth_headers(), **kwargs.pop("headers", {})}
response = httpx.delete(url, headers=headers, timeout=30, **kwargs)
return self._wrap_response(response)
def _wrap_response(self, response):
"""Add .data attribute for consistency with DRF responses"""
try:
response.data = response.json()
except Exception:
response.data = None
return response
# =========================================================================
# Assertion helpers
# =========================================================================
def assert_status(self, response, expected_status: int):
"""Assert response has expected status code"""
self.assertEqual(
response.status_code,
expected_status,
f"Expected {expected_status}, got {response.status_code}. "
f"Response: {response.data if hasattr(response, 'data') else response.content[:500]}"
)
def assert_has_fields(self, data: dict, *fields: str):
"""Assert dictionary has all specified fields"""
missing = [f for f in fields if f not in data]
self.assertEqual(missing, [], f"Missing fields: {missing}. Got: {list(data.keys())}")
def assert_is_list(self, data, min_length: int = 0):
"""Assert data is a list with minimum length"""
self.assertIsInstance(data, list)
self.assertGreaterEqual(len(data), min_length)
__all__ = ["ContractTestCase"]
__all__ = ["ContractTestCase", "get_base_url"]

View File

@@ -1,29 +0,0 @@
"""
Contract Tests Configuration
Supports two testing modes via CONTRACT_TEST_MODE environment variable:
# Fast mode (default) - Django test client, test DB
pytest tests/contracts/
# Live mode - Real HTTP with LiveServerTestCase, test DB
CONTRACT_TEST_MODE=live pytest tests/contracts/
"""
import os
import pytest
# Let pytest-django handle Django setup via pytest.ini DJANGO_SETTINGS_MODULE
def pytest_configure(config):
"""Register custom markers"""
config.addinivalue_line(
"markers", "workflow: marks test as a workflow/flow test (runs endpoint tests in sequence)"
)
@pytest.fixture(scope="session")
def contract_test_mode():
"""Return current test mode"""
return os.environ.get("CONTRACT_TEST_MODE", "api")

View File

@@ -1,38 +0,0 @@
"""
API Endpoints - Single source of truth for contract tests.
If API paths or versioning changes, update here only.
"""
class Endpoints:
"""API endpoint paths"""
# ==========================================================================
# Mascotas
# ==========================================================================
PET_OWNERS = "/mascotas/api/v1/pet-owners/"
PET_OWNER_DETAIL = "/mascotas/api/v1/pet-owners/{id}/"
PETS = "/mascotas/api/v1/pets/"
PET_DETAIL = "/mascotas/api/v1/pets/{id}/"
COVERAGE_CHECK = "/mascotas/api/v1/coverage/check/"
# ==========================================================================
# Productos
# ==========================================================================
SERVICES = "/productos/api/v1/services/"
CATEGORIES = "/productos/api/v1/categories/"
CART = "/productos/api/v1/cart/"
CART_DETAIL = "/productos/api/v1/cart/{id}/"
# ==========================================================================
# Solicitudes
# ==========================================================================
SERVICE_REQUESTS = "/solicitudes/service-requests/"
SERVICE_REQUEST_DETAIL = "/solicitudes/service-requests/{id}/"
# ==========================================================================
# Auth
# ==========================================================================
TOKEN = "/api/token/"
TOKEN_REFRESH = "/api/token/refresh/"

View File

@@ -1,44 +0,0 @@
"""
Contract Tests - Shared test data helpers.
Used across all endpoint tests to generate consistent test data.
"""
import time
def unique_email(prefix="test"):
"""Generate unique email for test data"""
return f"{prefix}_{int(time.time() * 1000)}@contract-test.local"
def sample_pet_owner(email=None):
"""Generate sample pet owner data"""
return {
"first_name": "Test",
"last_name": "Usuario",
"email": email or unique_email("owner"),
"phone": "1155667788",
"address": "Av. Santa Fe 1234",
"geo_latitude": -34.5955,
"geo_longitude": -58.4166,
}
SAMPLE_CAT = {
"name": "TestCat",
"pet_type": "CAT",
"is_neutered": False,
}
SAMPLE_DOG = {
"name": "TestDog",
"pet_type": "DOG",
"is_neutered": False,
}
SAMPLE_NEUTERED_CAT = {
"name": "NeuteredCat",
"pet_type": "CAT",
"is_neutered": True,
}