void>>()
+
+ constructor(id: string) {
+ this.id = id
+ }
+
+ abstract connect(): void
+ abstract disconnect(): void
+
+ /** Subscribe to a specific event type */
+ on(eventType: string, handler: (payload: P) => void): () => void {
+ if (!this.listeners.has(eventType)) {
+ this.listeners.set(eventType, new Set())
+ }
+ this.listeners.get(eventType)!.add(handler)
+ return () => this.listeners.get(eventType)?.delete(handler)
+ }
+
+ /** Emit an event to subscribers (called by subclasses) */
+ protected emit(eventType: string, payload: unknown): void {
+ this.listeners.get(eventType)?.forEach((fn) => fn(payload))
+ }
+}
diff --git a/soleprint/common/ui/src/datasources/SSEDataSource.ts b/soleprint/common/ui/src/datasources/SSEDataSource.ts
new file mode 100644
index 0000000..ed52527
--- /dev/null
+++ b/soleprint/common/ui/src/datasources/SSEDataSource.ts
@@ -0,0 +1,93 @@
+import { DataSource } from './DataSource'
+
+export interface SSEDataSourceOptions {
+ /** Unique identifier for this source */
+ id: string
+ /** SSE endpoint URL (e.g. '/api/detect/stream/job-123') */
+ url: string
+ /** Event types to listen for. Each is dispatched to subscribers via on(). */
+ eventTypes: string[]
+ /** Max reconnection attempts before giving up. Default: 10 */
+ maxRetries?: number
+}
+
+/**
+ * DataSource backed by native EventSource (Server-Sent Events).
+ *
+ * Connects to a single SSE endpoint and demultiplexes events by type.
+ * Multiple panels can subscribe to different event types from the same source.
+ */
+export class SSEDataSource extends DataSource {
+ private es: EventSource | null = null
+ private url: string
+ private eventTypes: string[]
+ private maxRetries: number
+ private retryCount = 0
+
+ constructor(opts: SSEDataSourceOptions) {
+ super(opts.id)
+ this.url = opts.url
+ this.eventTypes = opts.eventTypes
+ this.maxRetries = opts.maxRetries ?? 10
+ }
+
+ connect(): void {
+ if (this.es) return
+ this.status.value = 'connecting'
+ this.error.value = null
+
+ this.es = new EventSource(this.url)
+
+ this.es.onopen = () => {
+ this.status.value = 'live'
+ this.retryCount = 0
+ }
+
+ this.es.onerror = () => {
+ if (this.es?.readyState === EventSource.CLOSED) {
+ this.retryCount++
+ if (this.retryCount >= this.maxRetries) {
+ this.status.value = 'error'
+ this.error.value = `Connection lost after ${this.maxRetries} retries`
+ this.disconnect()
+ } else {
+ this.disconnect(); this.connect() // a CLOSED EventSource never auto-retries; recreate the connection
+ }
+ }
+ }
+
+ // Register a listener for each event type
+ for (const eventType of this.eventTypes) {
+ this.es.addEventListener(eventType, (e: MessageEvent) => {
+ try {
+ const parsed = JSON.parse(e.data)
+ this.data.value = parsed
+ this.emit(eventType, parsed)
+ } catch {
+ // ignore malformed events
+ }
+ })
+ }
+
+ // Terminal event — pipeline finished (success, failure, or cancel)
+ this.es.addEventListener('done', () => {
+ this.status.value = 'idle'; this.disconnect() // close so the browser doesn't auto-reconnect a finished stream
+ })
+ }
+
+ disconnect(): void {
+ if (this.es) {
+ this.es.close()
+ this.es = null
+ }
+ }
+
+ /** Update the URL (e.g. when job ID changes) and reconnect */
+ setUrl(url: string): void {
+ this.url = url
+ if (this.status.value === 'live' || this.status.value === 'connecting') {
+ this.disconnect()
+ this.connect()
+ }
+ }
+}
diff --git a/soleprint/common/ui/src/datasources/StaticDataSource.ts b/soleprint/common/ui/src/datasources/StaticDataSource.ts
new file mode 100644
index 0000000..a09dfa6
--- /dev/null
+++ b/soleprint/common/ui/src/datasources/StaticDataSource.ts
@@ -0,0 +1,45 @@
+import { DataSource } from './DataSource'
+
+export interface StaticEvent {
+ type: string
+ data: unknown
+ /** Delay in ms before emitting this event (relative to previous). Default: 0 */
+ delay?: number
+}
+
+/**
+ * DataSource that replays a fixture array of events.
+ *
+ * Used for development and testing without a running backend.
+ * Events are emitted in sequence with optional delays.
+ */
+export class StaticDataSource extends DataSource {
+ private events: StaticEvent[]
+ private timeouts: ReturnType<typeof setTimeout>[] = []
+
+ constructor(id: string, events: StaticEvent[]) {
+ super(id)
+ this.events = events
+ }
+
+ connect(): void {
+ this.status.value = 'live'
+ this.error.value = null
+
+ let cumDelay = 0
+ for (const event of this.events) {
+ cumDelay += event.delay ?? 0
+ const timeout = setTimeout(() => {
+ this.data.value = event.data
+ this.emit(event.type, event.data)
+ }, cumDelay)
+ this.timeouts.push(timeout)
+ }
+ }
+
+ disconnect(): void {
+ for (const t of this.timeouts) clearTimeout(t)
+ this.timeouts = []
+ this.status.value = 'idle'
+ }
+}
diff --git a/soleprint/common/ui/src/datasources/__tests__/StaticDataSource.test.ts b/soleprint/common/ui/src/datasources/__tests__/StaticDataSource.test.ts
new file mode 100644
index 0000000..9c4cb30
--- /dev/null
+++ b/soleprint/common/ui/src/datasources/__tests__/StaticDataSource.test.ts
@@ -0,0 +1,103 @@
+import { describe, it, expect, vi, afterEach } from 'vitest'
+import { StaticDataSource } from '../StaticDataSource'
+
+describe('StaticDataSource', () => {
+ afterEach(() => {
+ vi.restoreAllMocks()
+ })
+
+ it('emits events in order', async () => {
+ const source = new StaticDataSource('test', [
+ { type: 'log', data: { msg: 'first' } },
+ { type: 'log', data: { msg: 'second' } },
+ { type: 'stats', data: { count: 42 } },
+ ])
+
+ const received: { type: string; data: unknown }[] = []
+ source.on('log', (d) => received.push({ type: 'log', data: d }))
+ source.on('stats', (d) => received.push({ type: 'stats', data: d }))
+
+ source.connect()
+
+ // Events with delay=0 fire on the next macrotask via setTimeout(0)
+ await new Promise((r) => setTimeout(r, 10))
+
+ expect(source.status.value).toBe('live')
+ expect(received).toHaveLength(3)
+ expect(received[0]).toEqual({ type: 'log', data: { msg: 'first' } })
+ expect(received[1]).toEqual({ type: 'log', data: { msg: 'second' } })
+ expect(received[2]).toEqual({ type: 'stats', data: { count: 42 } })
+
+ source.disconnect()
+ expect(source.status.value).toBe('idle')
+ })
+
+ it('respects delays between events', async () => {
+ const source = new StaticDataSource('test-delay', [
+ { type: 'a', data: 1 },
+ { type: 'b', data: 2, delay: 50 },
+ ])
+
+ const received: unknown[] = []
+ source.on('a', (d) => received.push(d))
+ source.on('b', (d) => received.push(d))
+
+ source.connect()
+
+ await new Promise((r) => setTimeout(r, 10))
+ expect(received).toHaveLength(1) // only 'a' so far
+
+ await new Promise((r) => setTimeout(r, 60))
+ expect(received).toHaveLength(2) // 'b' arrived after delay
+
+ source.disconnect()
+ })
+
+ it('updates data ref with latest event payload', async () => {
+ const source = new StaticDataSource('test-data', [
+ { type: 'x', data: { v: 1 } },
+ { type: 'x', data: { v: 2 } },
+ ])
+
+ source.connect()
+ await new Promise((r) => setTimeout(r, 10))
+
+ expect(source.data.value).toEqual({ v: 2 })
+
+ source.disconnect()
+ })
+
+ it('cleans up on disconnect', async () => {
+ const source = new StaticDataSource('test-cleanup', [
+ { type: 'a', data: 1 },
+ { type: 'b', data: 2, delay: 100 },
+ ])
+
+ const received: unknown[] = []
+ source.on('b', (d) => received.push(d))
+
+ source.connect()
+ await new Promise((r) => setTimeout(r, 10))
+ source.disconnect()
+
+ // 'b' should never fire since we disconnected before its delay
+ await new Promise((r) => setTimeout(r, 150))
+ expect(received).toHaveLength(0)
+ })
+
+ it('unsubscribe removes listener', async () => {
+ const source = new StaticDataSource('test-unsub', [
+ { type: 'x', data: 1 },
+ ])
+
+ const received: unknown[] = []
+ const unsub = source.on('x', (d) => received.push(d))
+ unsub()
+
+ source.connect()
+ await new Promise((r) => setTimeout(r, 10))
+
+ expect(received).toHaveLength(0)
+ source.disconnect()
+ })
+})
diff --git a/soleprint/common/ui/src/index.ts b/soleprint/common/ui/src/index.ts
new file mode 100644
index 0000000..2ec6787
--- /dev/null
+++ b/soleprint/common/ui/src/index.ts
@@ -0,0 +1,38 @@
+// Framework public API
+export { DataSource, type DataSourceStatus } from './datasources/DataSource'
+export { SSEDataSource } from './datasources/SSEDataSource'
+export { StaticDataSource } from './datasources/StaticDataSource'
+export { useDataSource } from './composables/useDataSource'
+export { useRegistry } from './composables/useRegistry'
+export { useEditorExecution } from './composables/useEditorExecution'
+export type { EditorExecutionOptions } from './composables/useEditorExecution'
+
+// Components
+export { default as Panel } from './components/Panel.vue'
+export { default as LayoutGrid } from './components/LayoutGrid.vue'
+export { default as ResizeHandle } from './components/ResizeHandle.vue'
+export { default as SplitPane } from './components/SplitPane.vue'
+export { default as ParameterEditor } from './components/ParameterEditor.vue'
+export type { ConfigField } from './components/ParameterEditor.vue'
+
+// Renderers
+export { default as LogRenderer } from './renderers/LogRenderer.vue'
+export { default as TimeSeriesRenderer } from './renderers/TimeSeriesRenderer.vue'
+export { default as GraphRenderer } from './renderers/GraphRenderer.vue'
+export { default as FrameRenderer } from './renderers/FrameRenderer.vue'
+export { default as TableRenderer } from './renderers/TableRenderer.vue'
+
+// Renderer types
+export type { FrameBBox, FrameOverlay } from './renderers/FrameRenderer.vue'
+export type { LogEntry } from './renderers/LogRenderer.vue'
+export type { GraphNode, GraphMode } from './renderers/GraphRenderer.vue'
+export type { TableColumn } from './renderers/TableRenderer.vue'
+export type { TimeSeriesSeries } from './renderers/TimeSeriesRenderer.vue'
+
+// Interaction plugins
+export type { InteractionPlugin, PluginContext } from './plugins/InteractionPlugin'
+export { BBoxDrawPlugin } from './plugins/BBoxDrawPlugin'
+export type { BBoxResult, BBoxCallback } from './plugins/BBoxDrawPlugin'
+export { CrosshairPlugin } from './plugins/CrosshairPlugin'
+export type { CrosshairCallback } from './plugins/CrosshairPlugin'
+
diff --git a/soleprint/common/ui/src/plugins/BBoxDrawPlugin.ts b/soleprint/common/ui/src/plugins/BBoxDrawPlugin.ts
new file mode 100644
index 0000000..064ef6d
--- /dev/null
+++ b/soleprint/common/ui/src/plugins/BBoxDrawPlugin.ts
@@ -0,0 +1,88 @@
+/**
+ * BBoxDrawPlugin — draw bounding boxes on the frame viewer.
+ *
+ * User drags on the canvas to draw a rectangle.
+ * On pointer up, emits the bbox coordinates via the callback.
+ * The frame viewer panel feeds this into the selection store.
+ */
+
+import type { InteractionPlugin, PluginContext } from './InteractionPlugin'
+
+export interface BBoxResult {
+ x: number
+ y: number
+ w: number
+ h: number
+}
+
+export type BBoxCallback = (bbox: BBoxResult) => void
+
+export class BBoxDrawPlugin implements InteractionPlugin {
+ name = 'bbox-draw'
+
+ private ctx: CanvasRenderingContext2D | null = null
+ private drawing = false
+ private startX = 0
+ private startY = 0
+ private currentBox: BBoxResult | null = null
+ private callback: BBoxCallback
+
+ constructor(callback: BBoxCallback) {
+ this.callback = callback
+ }
+
+ onMount(context: PluginContext): void {
+ this.ctx = context.ctx
+ }
+
+ onUnmount(): void {
+ this.ctx = null
+ this.drawing = false
+ this.currentBox = null
+ }
+
+ onPointerDown(e: PointerEvent): void {
+ this.drawing = true
+ this.startX = e.offsetX
+ this.startY = e.offsetY
+ this.currentBox = null
+ }
+
+ onPointerMove(e: PointerEvent): void {
+ if (!this.drawing) return
+
+ const x = Math.min(this.startX, e.offsetX)
+ const y = Math.min(this.startY, e.offsetY)
+ const w = Math.abs(e.offsetX - this.startX)
+ const h = Math.abs(e.offsetY - this.startY)
+
+ this.currentBox = { x, y, w, h }
+ }
+
+ onPointerUp(_e: PointerEvent): void {
+ if (!this.drawing) return
+ this.drawing = false
+
+ if (this.currentBox && this.currentBox.w > 5 && this.currentBox.h > 5) {
+ this.callback(this.currentBox)
+ }
+
+ this.currentBox = null
+ }
+
+ render(ctx: CanvasRenderingContext2D): void {
+ if (!this.currentBox) return
+
+ const box = this.currentBox
+
+ ctx.strokeStyle = '#4f9cf9'
+ ctx.lineWidth = 2
+ ctx.setLineDash([6, 3])
+ ctx.strokeRect(box.x, box.y, box.w, box.h)
+ ctx.setLineDash([])
+
+ // Semi-transparent fill
+ ctx.fillStyle = 'rgba(79, 156, 249, 0.1)'
+ ctx.fillRect(box.x, box.y, box.w, box.h)
+ }
+}
diff --git a/soleprint/common/ui/src/plugins/CrosshairPlugin.ts b/soleprint/common/ui/src/plugins/CrosshairPlugin.ts
new file mode 100644
index 0000000..0011b5d
--- /dev/null
+++ b/soleprint/common/ui/src/plugins/CrosshairPlugin.ts
@@ -0,0 +1,60 @@
+/**
+ * CrosshairPlugin — synchronized vertical crosshair across time-series panels.
+ *
+ * When the user hovers on any panel with this plugin, the crosshair
+ * position (normalized 0-1 across the panel width) is written to the selection store.
+ * All panels with this plugin render a vertical line at that timestamp.
+ */
+
+import type { InteractionPlugin, PluginContext } from './InteractionPlugin'
+
+export type CrosshairCallback = (position: number | null) => void
+
+export class CrosshairPlugin implements InteractionPlugin {
+ name = 'crosshair'
+
+ private width = 0
+ private callback: CrosshairCallback
+
+ /** Current crosshair X position (pixels), set externally from store */
+ public crosshairX: number | null = null
+
+ constructor(callback: CrosshairCallback) {
+ this.callback = callback
+ }
+
+ onMount(context: PluginContext): void {
+ this.width = context.width
+ }
+
+ onUnmount(): void {
+ this.crosshairX = null
+ }
+
+ onPointerMove(e: PointerEvent): void {
+ // Report pixel X as a normalized 0-1 position (the store maps it to a timestamp)
+ const normalized = e.offsetX / this.width
+ this.callback(normalized)
+ }
+
+ onPointerDown(_e: PointerEvent): void {
+ // no-op for crosshair
+ }
+
+ onPointerUp(_e: PointerEvent): void {
+ this.callback(null)
+ }
+
+ render(ctx: CanvasRenderingContext2D): void {
+ if (this.crosshairX === null) return
+
+ ctx.strokeStyle = '#a78bfa'
+ ctx.lineWidth = 1
+ ctx.setLineDash([4, 4])
+ ctx.beginPath()
+ ctx.moveTo(this.crosshairX, 0)
+ ctx.lineTo(this.crosshairX, ctx.canvas.height)
+ ctx.stroke()
+ ctx.setLineDash([])
+ }
+}
diff --git a/soleprint/common/ui/src/plugins/InteractionPlugin.ts b/soleprint/common/ui/src/plugins/InteractionPlugin.ts
new file mode 100644
index 0000000..82fd944
--- /dev/null
+++ b/soleprint/common/ui/src/plugins/InteractionPlugin.ts
@@ -0,0 +1,36 @@
+/**
+ * Interaction plugin interface.
+ *
+ * Plugins attach to a Panel's overlay canvas. They receive pointer events
+ * and emit typed results via the callback. The panel handles rendering
+ * the overlay and routing events to the active plugin.
+ */
+
+export interface PluginContext {
+ /** Canvas element for drawing overlays */
+ canvas: HTMLCanvasElement
+ /** 2D rendering context */
+ ctx: CanvasRenderingContext2D
+ /** Canvas dimensions (may differ from display size) */
+ width: number
+ height: number
+}
+
+export interface InteractionPlugin {
+ /** Unique plugin name */
+ name: string
+
+ /** Called when the plugin is mounted on a panel */
+ onMount(context: PluginContext): void
+
+ /** Called when the plugin is unmounted */
+ onUnmount(): void
+
+ /** Pointer event handlers (optional) */
+ onPointerDown?(e: PointerEvent): void
+ onPointerMove?(e: PointerEvent): void
+ onPointerUp?(e: PointerEvent): void
+
+ /** Called each animation frame to render the overlay */
+ render(ctx: CanvasRenderingContext2D): void
+}
diff --git a/soleprint/common/ui/src/renderers/FrameRenderer.vue b/soleprint/common/ui/src/renderers/FrameRenderer.vue
new file mode 100644
index 0000000..50118c0
--- /dev/null
+++ b/soleprint/common/ui/src/renderers/FrameRenderer.vue
@@ -0,0 +1,178 @@
+
+
+
+
+
+
+
diff --git a/soleprint/common/ui/src/renderers/GraphRenderer.vue b/soleprint/common/ui/src/renderers/GraphRenderer.vue
new file mode 100644
index 0000000..53017cb
--- /dev/null
+++ b/soleprint/common/ui/src/renderers/GraphRenderer.vue
@@ -0,0 +1,317 @@
+
+
+
+
+
+
+
+
{{ data.label }}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/soleprint/common/ui/src/renderers/LogRenderer.vue b/soleprint/common/ui/src/renderers/LogRenderer.vue
new file mode 100644
index 0000000..2974909
--- /dev/null
+++ b/soleprint/common/ui/src/renderers/LogRenderer.vue
@@ -0,0 +1,143 @@
+
+
+
+
+
+
+
+ {{ entry.ts }}
+ {{ entry.level }}
+ {{ entry.stage }}
+ {{ entry.msg }}
+
+
+
+
+ Waiting for log events...
+
+
+
+
+
diff --git a/soleprint/common/ui/src/renderers/TableRenderer.vue b/soleprint/common/ui/src/renderers/TableRenderer.vue
new file mode 100644
index 0000000..d4c3d69
--- /dev/null
+++ b/soleprint/common/ui/src/renderers/TableRenderer.vue
@@ -0,0 +1,122 @@
+
+
+
+
+
+
+
+ |
+ {{ col.label }}
+
+ {{ sortDir === 'desc' ? '▼' : '▲' }}
+
+ |
+
+
+
+
+ |
+ {{ row[col.key] }}
+ |
+
+
+ | No detections yet |
+
+
+
+
+
+
+
diff --git a/soleprint/common/ui/src/renderers/TimeSeriesRenderer.vue b/soleprint/common/ui/src/renderers/TimeSeriesRenderer.vue
new file mode 100644
index 0000000..c664ffb
--- /dev/null
+++ b/soleprint/common/ui/src/renderers/TimeSeriesRenderer.vue
@@ -0,0 +1,198 @@
+
+
+
+
+
+
+
diff --git a/soleprint/common/ui/src/tokens.css b/soleprint/common/ui/src/tokens.css
new file mode 100644
index 0000000..4858ec3
--- /dev/null
+++ b/soleprint/common/ui/src/tokens.css
@@ -0,0 +1,59 @@
+/* Framework design tokens — retheme by replacing this file */
+:root {
+ /* spacing scale (4px base) */
+ --space-1: 4px;
+ --space-2: 8px;
+ --space-3: 12px;
+ --space-4: 16px;
+ --space-6: 24px;
+ --space-8: 32px;
+
+ /* color — dark theme (observability UIs are always dark) */
+ --surface-0: #0d0d0f;
+ --surface-1: #16161a;
+ --surface-2: #1e1e24;
+ --surface-3: #26262f;
+ --border: #2e2e38;
+
+ --text-primary: #e8e8f0;
+ --text-secondary: #8888a0;
+ --text-dim: #555568;
+
+ /* status colors */
+ --status-idle: #555568;
+ --status-live: #3ecf8e;
+ --status-processing: #4f9cf9;
+ --status-escalating: #f5a623;
+ --status-error: #f06565;
+
+ /* confidence color scale (low → high) */
+ --conf-low: #f06565;
+ --conf-mid: #f5a623;
+ --conf-high: #3ecf8e;
+
+ /* typography */
+ --font-mono: 'JetBrains Mono', 'Fira Code', monospace;
+ --font-ui: 'Inter', system-ui, sans-serif;
+ --font-size-sm: 11px;
+ --font-size-base: 13px;
+ --font-size-lg: 15px;
+
+ /* panel chrome */
+ --panel-radius: 6px;
+ --panel-border: 1px solid var(--border);
+ --panel-header-height: 36px;
+}
+
+/* Animated gradient outline for buttons in a waiting state.
+ Usage: add class="waiting" to any button/element. */
+@keyframes waiting-glow {
+ 0% { box-shadow: 0 0 3px 1px var(--status-processing); }
+ 33% { box-shadow: 0 0 3px 1px var(--status-live); }
+ 66% { box-shadow: 0 0 3px 1px var(--status-escalating); }
+ 100% { box-shadow: 0 0 3px 1px var(--status-processing); }
+}
+
+.waiting {
+ animation: waiting-glow 2s linear infinite;
+ outline: 1px solid transparent;
+}
diff --git a/soleprint/common/ui/tsconfig.json b/soleprint/common/ui/tsconfig.json
new file mode 100644
index 0000000..cee8317
--- /dev/null
+++ b/soleprint/common/ui/tsconfig.json
@@ -0,0 +1,18 @@
+{
+ "compilerOptions": {
+ "target": "ES2022",
+ "module": "ESNext",
+ "moduleResolution": "bundler",
+ "strict": true,
+ "jsx": "preserve",
+ "noEmit": true,
+ "isolatedModules": true,
+ "esModuleInterop": true,
+ "skipLibCheck": true,
+ "baseUrl": ".",
+ "paths": {
+ "@/*": ["src/*"]
+ }
+ },
+ "include": ["src/**/*.ts", "src/**/*.vue"]
+}
diff --git a/soleprint/common/ui/vitest.config.ts b/soleprint/common/ui/vitest.config.ts
new file mode 100644
index 0000000..2b1c323
--- /dev/null
+++ b/soleprint/common/ui/vitest.config.ts
@@ -0,0 +1,7 @@
+import { defineConfig } from 'vitest/config'
+
+export default defineConfig({
+ test: {
+ environment: 'node',
+ },
+})
diff --git a/soleprint/run.py b/soleprint/run.py
index da8aba0..2a3540c 100644
--- a/soleprint/run.py
+++ b/soleprint/run.py
@@ -610,13 +610,13 @@ def index(request: Request):
showcase_url = config.get("showcase_url")
return templates.TemplateResponse(
+ request,
"index.html",
- {
- "request": request,
+ context={
"artery": "/artery",
"atlas": "/atlas",
"station": "/station",
- "managed": managed,
+ "managed": bool(managed),
"managed_url": managed_url,
"showcase_url": showcase_url,
},
diff --git a/soleprint/station/tools/hub/ports b/soleprint/station/tools/hub/ports
deleted file mode 100644
index 8ae23fd..0000000
--- a/soleprint/station/tools/hub/ports
+++ /dev/null
@@ -1,13 +0,0 @@
-# Core Nest Ports
-# Format: one port per line
-# Comments allowed with #
-
-# Amar
-3000
-8000
-
-# Pawprint Services
-13000
-13001
-13002
-13003
diff --git a/soleprint/station/tools/hub/update-ports.sh b/soleprint/station/tools/hub/update-ports.sh
deleted file mode 100755
index 360f0d8..0000000
--- a/soleprint/station/tools/hub/update-ports.sh
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/bin/bash
-# Update ports file from core_nest configuration
-# Gathers ports from pawprint and amar .env files
-#
-# Usage: ./update-ports.sh
-
-set -e
-
-SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
-PORTS_FILE="$SCRIPT_DIR/ports"
-
-# TODO: Make these configurable or auto-detect
-CORE_NEST_ROOT="${CORE_NEST_ROOT:-/home/mariano/core_nest}"
-PAWPRINT_ENV="$CORE_NEST_ROOT/pawprint/.env"
-AMAR_ENV="$CORE_NEST_ROOT/amar/.env"
-
-echo "=== Updating Core Nest Ports ==="
-echo ""
-
-# Backup existing ports file
-if [ -f "$PORTS_FILE" ]; then
- cp "$PORTS_FILE" "$PORTS_FILE.bak"
- echo " ✓ Backed up existing ports to ports.bak"
-fi
-
-# Start new ports file
-cat > "$PORTS_FILE" <<'EOF'
-# Core Nest Ports
-# Auto-generated by update-ports.sh
-# Format: one port per line
-# Comments allowed with #
-
-EOF
-
-# Extract ports from amar .env
-if [ -f "$AMAR_ENV" ]; then
- echo " Reading amar ports..."
- echo "# Amar" >> "$PORTS_FILE"
-
- # Frontend port (default 3000)
- AMAR_FRONTEND_PORT=$(grep "^AMAR_FRONTEND_PORT=" "$AMAR_ENV" 2>/dev/null | cut -d'=' -f2 || echo "3000")
- echo "$AMAR_FRONTEND_PORT" >> "$PORTS_FILE"
-
- # Backend port (default 8000)
- AMAR_BACKEND_PORT=$(grep "^AMAR_BACKEND_PORT=" "$AMAR_ENV" 2>/dev/null | cut -d'=' -f2 || echo "8000")
- echo "$AMAR_BACKEND_PORT" >> "$PORTS_FILE"
-
- echo " ✓ Added amar ports: $AMAR_FRONTEND_PORT, $AMAR_BACKEND_PORT"
-else
- echo " ⚠ Amar .env not found, using defaults"
- echo "# Amar (defaults)" >> "$PORTS_FILE"
- echo "3000" >> "$PORTS_FILE"
- echo "8000" >> "$PORTS_FILE"
-fi
-
-echo "" >> "$PORTS_FILE"
-
-# Extract ports from pawprint .env
-if [ -f "$PAWPRINT_ENV" ]; then
- echo " Reading pawprint ports..."
- echo "# Pawprint Services" >> "$PORTS_FILE"
-
- PAWPRINT_PORT=$(grep "^PAWPRINT_PORT=" "$PAWPRINT_ENV" 2>/dev/null | cut -d'=' -f2 || echo "13000")
- ARTERY_PORT=$(grep "^ARTERY_PORT=" "$PAWPRINT_ENV" 2>/dev/null | cut -d'=' -f2 || echo "13001")
- ALBUM_PORT=$(grep "^ALBUM_PORT=" "$PAWPRINT_ENV" 2>/dev/null | cut -d'=' -f2 || echo "13002")
- WARD_PORT=$(grep "^WARD_PORT=" "$PAWPRINT_ENV" 2>/dev/null | cut -d'=' -f2 || echo "13003")
-
- echo "$PAWPRINT_PORT" >> "$PORTS_FILE"
- echo "$ARTERY_PORT" >> "$PORTS_FILE"
- echo "$ALBUM_PORT" >> "$PORTS_FILE"
- echo "$WARD_PORT" >> "$PORTS_FILE"
-
- echo " ✓ Added pawprint ports: $PAWPRINT_PORT, $ARTERY_PORT, $ALBUM_PORT, $WARD_PORT"
-else
- echo " ⚠ Pawprint .env not found, using defaults"
- echo "# Pawprint Services (defaults)" >> "$PORTS_FILE"
- echo "13000" >> "$PORTS_FILE"
- echo "13001" >> "$PORTS_FILE"
- echo "13002" >> "$PORTS_FILE"
- echo "13003" >> "$PORTS_FILE"
-fi
-
-echo ""
-echo "=== Done ==="
-echo ""
-echo "Updated ports file: $PORTS_FILE"
-echo ""
-cat "$PORTS_FILE"
diff --git a/soleprint/station/tools/modelgen/generator/__init__.py b/soleprint/station/tools/modelgen/generator/__init__.py
index 5abc0ac..7e1b55c 100644
--- a/soleprint/station/tools/modelgen/generator/__init__.py
+++ b/soleprint/station/tools/modelgen/generator/__init__.py
@@ -7,36 +7,38 @@ Supported generators:
- TypeScriptGenerator: TypeScript interfaces
- ProtobufGenerator: Protocol Buffer definitions
- PrismaGenerator: Prisma schema
-- GrapheneGenerator: Graphene ObjectType/InputObjectType classes
+- StrawberryGenerator: Strawberry type/input/enum classes
"""
from typing import Dict, Type
from .base import BaseGenerator
from .django import DjangoGenerator
-from .graphene import GrapheneGenerator
from .prisma import PrismaGenerator
from .protobuf import ProtobufGenerator
from .pydantic import PydanticGenerator
+from .sqlmodel import SQLModelGenerator
+from .strawberry import StrawberryGenerator
from .typescript import TypeScriptGenerator
# Registry of available generators
GENERATORS: Dict[str, Type[BaseGenerator]] = {
"pydantic": PydanticGenerator,
"django": DjangoGenerator,
+ "sqlmodel": SQLModelGenerator,
"typescript": TypeScriptGenerator,
"ts": TypeScriptGenerator, # Alias
"protobuf": ProtobufGenerator,
"proto": ProtobufGenerator, # Alias
"prisma": PrismaGenerator,
- "graphene": GrapheneGenerator,
+ "strawberry": StrawberryGenerator,
}
__all__ = [
"BaseGenerator",
"PydanticGenerator",
"DjangoGenerator",
- "GrapheneGenerator",
+ "StrawberryGenerator",
"TypeScriptGenerator",
"ProtobufGenerator",
"PrismaGenerator",
diff --git a/soleprint/station/tools/modelgen/generator/pydantic.py b/soleprint/station/tools/modelgen/generator/pydantic.py
index c2676f2..866f3b9 100644
--- a/soleprint/station/tools/modelgen/generator/pydantic.py
+++ b/soleprint/station/tools/modelgen/generator/pydantic.py
@@ -12,7 +12,7 @@ from enum import Enum
from pathlib import Path
from typing import Any, List, get_type_hints
-from ..helpers import get_origin_name, get_type_name, unwrap_optional
+from ..helpers import get_origin_name, get_type_name, is_dataclass_type, unwrap_optional
from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
from ..types import PYDANTIC_RESOLVERS
from .base import BaseGenerator
@@ -54,8 +54,9 @@ class PydanticGenerator(BaseGenerator):
if hasattr(models, "get_shared_component"):
content = self._generate_from_config(models)
elif hasattr(models, "models"):
+ all_models = models.models + getattr(models, "api_models", [])
content = self._generate_from_definitions(
- models.models, getattr(models, "enums", [])
+ all_models, getattr(models, "enums", [])
)
elif isinstance(models, tuple):
content = self._generate_from_definitions(models[0], models[1])
@@ -245,6 +246,7 @@ class PydanticGenerator(BaseGenerator):
"",
]
+
def _generate_enum(self, enum_def: EnumDefinition) -> List[str]:
lines = [f"class {enum_def.name}(str, Enum):"]
for name, value in enum_def.values:
@@ -307,6 +309,11 @@ class PydanticGenerator(BaseGenerator):
if isinstance(base, type) and issubclass(base, Enum)
else None
)
+ or (
+ PYDANTIC_RESOLVERS["dataclass"]
+ if is_dataclass_type(base)
+ else None
+ )
)
result = resolver(base) if resolver else "str"
return f"Optional[{result}]" if optional else result
@@ -321,7 +328,12 @@ class PydanticGenerator(BaseGenerator):
if isinstance(default, Enum):
return f" = {default.__class__.__name__}.{default.name}"
if callable(default):
- return " = Field(default_factory=list)" if "list" in str(default) else ""
+ default_str = str(default)
+ if "list" in default_str:
+ return " = Field(default_factory=list)"
+ if "dict" in default_str:
+ return " = Field(default_factory=dict)"
+ return ""
return f" = {default!r}"
def _generate_from_config(self, config) -> str:
diff --git a/soleprint/station/tools/modelgen/generator/sqlmodel.py b/soleprint/station/tools/modelgen/generator/sqlmodel.py
new file mode 100644
index 0000000..40c574b
--- /dev/null
+++ b/soleprint/station/tools/modelgen/generator/sqlmodel.py
@@ -0,0 +1,181 @@
+"""
+SQLModel Generator
+
+Generates SQLModel table classes from model definitions.
+Extends the Pydantic generator — SQLModel classes *are* Pydantic models
+with table=True and SQLAlchemy column config for JSON fields.
+"""
+
+import dataclasses as dc
+import re
+from enum import Enum
+from typing import Any, List, get_type_hints
+
+from ..helpers import get_origin_name, get_type_name, unwrap_optional
+from .pydantic import PydanticGenerator
+
+
+# ---------------------------------------------------------------------------
+# Field resolvers — each returns a Field() string or None to fall through
+# ---------------------------------------------------------------------------
+
+def _resolve_special(name, _base, _origin, _optional, _default):
+ """id, created_at, updated_at get fixed Field() definitions."""
+ specials = {
+ "id": "Field(default_factory=uuid4, primary_key=True)",
+ "created_at": "Field(default_factory=datetime.utcnow)",
+ "updated_at": "Field(default_factory=datetime.utcnow)",
+ }
+ return specials.get(name)
+
+
+def _resolve_json(_name, _base, origin, _optional, _default):
+ """Dict and List fields → sa_column=Column(JSON)."""
+ mapping = {
+ "dict": ("dict", "{}"),
+ "list": ("list", "[]"),
+ }
+ entry = mapping.get(origin)
+ if not entry:
+ return None
+ factory, server_default = entry
+ return (
+ f"Field(default_factory={factory}, "
+ f"sa_column=Column(JSON, nullable=False, server_default='{server_default}'))"
+ )
+
+
+def _resolve_indexed(name, _base, _origin, optional, _default):
+ """Known indexed fields."""
+ indexed = {"source_asset_id", "parent_job_id", "job_id", "canonical_name"}
+ if name not in indexed:
+ return None
+ if optional:
+ return "Field(default=None, index=True)"
+ return "Field(index=True)"
+
+
+def _resolve_optional(_name, _base, _origin, optional, _default):
+ """Optional fields default to None."""
+ if optional:
+ return "None"
+ return None
+
+
+def _resolve_default(_name, _base, _origin, _optional, default):
+ """Fields with explicit defaults. Enum before str (str enums are both)."""
+ if default is dc.MISSING or default is None:
+ return None
+ if isinstance(default, Enum):
+ return f'"{default.value}"'
+ if isinstance(default, bool):
+ return str(default)
+ if isinstance(default, (int, float)):
+ return str(default)
+ if isinstance(default, str):
+ return f'"{default}"'
+ return None
+
+
+# Resolver chain — first non-None result wins
+_FIELD_RESOLVERS = [
+ _resolve_special,
+ _resolve_json,
+ _resolve_indexed,
+ _resolve_optional,
+ _resolve_default,
+]
+
+
+def _resolve_field(name, type_hint, default):
+ """Run the resolver chain for a field. Returns ' = ...' string."""
+ base, is_optional = unwrap_optional(type_hint)
+ origin = get_origin_name(base)
+
+ for resolver in _FIELD_RESOLVERS:
+ result = resolver(name, base, origin, is_optional, default)
+ if result is not None:
+ return f" = {result}"
+ return ""
+
+
+def _to_snake(name):
+ """CamelCase → snake_case for table names."""
+ return re.sub(r"(?<=[a-z])(?=[A-Z])", "_", name).lower()
+
+
+_HEADER = [
+ '"""',
+ "SQLModel Table Models - GENERATED FILE",
+ "",
+ "Do not edit directly. Regenerate using modelgen.",
+ '"""',
+ "",
+ "from datetime import datetime",
+ "from enum import Enum",
+ "from typing import Any, Dict, List, Optional",
+ "from uuid import UUID, uuid4",
+ "",
+ "from sqlmodel import SQLModel, Field, Column",
+ "from sqlalchemy import JSON",
+ "",
+]
+
+
+class SQLModelGenerator(PydanticGenerator):
+ """Generates SQLModel table classes."""
+
+ def _generate_header(self) -> List[str]:
+ return list(_HEADER)
+
+ def _generate_model_from_dataclass(self, cls: type) -> List[str]:
+ return _build_table(
+ cls.__name__,
+ cls.__doc__ or cls.__name__,
+ get_type_hints(cls),
+ {f.name: f for f in dc.fields(cls)},
+ self._resolve_type,
+ )
+
+ def _generate_model_from_definition(self, model_def) -> List[str]:
+ hints = {f.name: f.type_hint for f in model_def.fields}
+ defaults = {f.name: f.default for f in model_def.fields}
+
+ class FakeField:
+ def __init__(self, default):
+ self.default = default
+
+ fields = {name: FakeField(defaults.get(name, dc.MISSING)) for name in hints}
+ return _build_table(
+ model_def.name,
+ model_def.docstring or model_def.name,
+ hints,
+ fields,
+ self._resolve_type,
+ )
+
+
+def _build_table(name, docstring, hints, fields, resolve_type_fn):
+ """Build a SQLModel table class from field data."""
+ table_name = _to_snake(name)
+ lines = [
+ f"class {name}(SQLModel, table=True):",
+ f' """{docstring.strip().split(chr(10))[0]}"""',
+ f' __tablename__ = "{table_name}"',
+ "",
+ ]
+
+ for field_name, type_hint in hints.items():
+ if field_name.startswith("_"):
+ continue
+
+ field = fields.get(field_name)
+ default_val = dc.MISSING
+ if field and field.default is not dc.MISSING:
+ default_val = field.default
+
+ py_type = resolve_type_fn(type_hint, False)
+ field_extra = _resolve_field(field_name, type_hint, default_val)
+ lines.append(f" {field_name}: {py_type}{field_extra}")
+
+ return lines
diff --git a/soleprint/station/tools/modelgen/generator/graphene.py b/soleprint/station/tools/modelgen/generator/strawberry.py
similarity index 62%
rename from soleprint/station/tools/modelgen/generator/graphene.py
rename to soleprint/station/tools/modelgen/generator/strawberry.py
index 503bbba..14ed78e 100644
--- a/soleprint/station/tools/modelgen/generator/graphene.py
+++ b/soleprint/station/tools/modelgen/generator/strawberry.py
@@ -1,28 +1,29 @@
"""
-Graphene Generator
+Strawberry Generator
-Generates graphene ObjectType and InputObjectType classes from model definitions.
+Generates strawberry type, input, and enum classes from model definitions.
Only generates type definitions — queries, mutations, and resolvers are hand-written.
"""
+import dataclasses as dc
from enum import Enum
from pathlib import Path
from typing import Any, List, get_type_hints
from ..helpers import get_origin_name, get_type_name, unwrap_optional
from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
-from ..types import GRAPHENE_RESOLVERS
+from ..types import STRAWBERRY_RESOLVERS
from .base import BaseGenerator
-class GrapheneGenerator(BaseGenerator):
- """Generates graphene type definition files."""
+class StrawberryGenerator(BaseGenerator):
+ """Generates strawberry type definition files."""
def file_extension(self) -> str:
return ".py"
def generate(self, models, output_path: Path) -> None:
- """Generate graphene types to output_path."""
+ """Generate strawberry types to output_path."""
output_path.parent.mkdir(parents=True, exist_ok=True)
if hasattr(models, "models"):
@@ -47,22 +48,18 @@ class GrapheneGenerator(BaseGenerator):
enums: List[EnumDefinition],
api_models: List[ModelDefinition],
) -> str:
- """Generate from ModelDefinition objects."""
lines = self._generate_header()
- # Generate enums as graphene.Enum
for enum_def in enums:
lines.extend(self._generate_enum(enum_def))
lines.append("")
lines.append("")
- # Generate domain models as ObjectType
for model_def in models:
lines.extend(self._generate_object_type(model_def))
lines.append("")
lines.append("")
- # Generate API models — request types as InputObjectType, others as ObjectType
for model_def in api_models:
if model_def.name.endswith("Request"):
lines.extend(self._generate_input_type(model_def))
@@ -74,7 +71,6 @@ class GrapheneGenerator(BaseGenerator):
return "\n".join(lines).rstrip() + "\n"
def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
- """Generate from Python dataclasses."""
lines = self._generate_header()
enums_generated = set()
@@ -99,37 +95,38 @@ class GrapheneGenerator(BaseGenerator):
def _generate_header(self) -> List[str]:
return [
'"""',
- "Graphene Types - GENERATED FILE",
+ "Strawberry Types - GENERATED FILE",
"",
"Do not edit directly. Regenerate using modelgen.",
'"""',
"",
- "import graphene",
+ "import strawberry",
+ "from enum import Enum",
+ "from typing import List, Optional",
+ "from uuid import UUID",
+ "from datetime import datetime",
+ "from strawberry.scalars import JSON",
"",
"",
]
def _generate_enum(self, enum_def: EnumDefinition) -> List[str]:
- """Generate graphene.Enum from EnumDefinition."""
- lines = [f"class {enum_def.name}(graphene.Enum):"]
+ lines = ["@strawberry.enum", f"class {enum_def.name}(Enum):"]
for name, value in enum_def.values:
lines.append(f' {name} = "{value}"')
return lines
def _generate_enum_from_python(self, enum_cls: type) -> List[str]:
- """Generate graphene.Enum from Python Enum."""
- lines = [f"class {enum_cls.__name__}(graphene.Enum):"]
+ lines = ["@strawberry.enum", f"class {enum_cls.__name__}(Enum):"]
for member in enum_cls:
lines.append(f' {member.name} = "{member.value}"')
return lines
def _generate_object_type(self, model_def: ModelDefinition) -> List[str]:
- """Generate graphene.ObjectType from ModelDefinition."""
name = model_def.name
- # Append Type suffix if not already present
type_name = f"{name}Type" if not name.endswith("Type") else name
- lines = [f"class {type_name}(graphene.ObjectType):"]
+ lines = ["@strawberry.type", f"class {type_name}:"]
if model_def.docstring:
doc = model_def.docstring.strip().split("\n")[0]
lines.append(f' """{doc}"""')
@@ -139,23 +136,19 @@ class GrapheneGenerator(BaseGenerator):
lines.append(" pass")
else:
for field in model_def.fields:
- graphene_type = self._resolve_type(field.type_hint, field.optional)
- lines.append(f" {field.name} = {graphene_type}")
+ type_str = self._resolve_type(field.type_hint, optional=True)
+ lines.append(f" {field.name}: {type_str} = None")
return lines
def _generate_input_type(self, model_def: ModelDefinition) -> List[str]:
- """Generate graphene.InputObjectType from ModelDefinition."""
- import dataclasses as dc
-
name = model_def.name
- # Convert FooRequest -> FooInput
if name.endswith("Request"):
input_name = name[: -len("Request")] + "Input"
else:
input_name = f"{name}Input"
- lines = [f"class {input_name}(graphene.InputObjectType):"]
+ lines = ["@strawberry.input", f"class {input_name}:"]
if model_def.docstring:
doc = model_def.docstring.strip().split("\n")[0]
lines.append(f' """{doc}"""')
@@ -164,73 +157,64 @@ class GrapheneGenerator(BaseGenerator):
if not model_def.fields:
lines.append(" pass")
else:
+ # Required fields first, then optional/defaulted
+ required = []
+ optional = []
for field in model_def.fields:
- graphene_type = self._resolve_type(field.type_hint, field.optional)
- # Required only if not optional AND no default value
has_default = field.default is not dc.MISSING
if not field.optional and not has_default:
- graphene_type = self._make_required(graphene_type)
- elif has_default and not field.optional:
- graphene_type = self._add_default(graphene_type, field.default)
- lines.append(f" {field.name} = {graphene_type}")
+ required.append(field)
+ else:
+ optional.append(field)
+
+ for field in required:
+ type_str = self._resolve_type(field.type_hint, optional=False)
+ lines.append(f" {field.name}: {type_str}")
+
+ for field in optional:
+ has_default = field.default is not dc.MISSING
+ if has_default and not callable(field.default):
+ type_str = self._resolve_type(field.type_hint, optional=False)
+ lines.append(f" {field.name}: {type_str} = {field.default!r}")
+ else:
+ type_str = self._resolve_type(field.type_hint, optional=True)
+ lines.append(f" {field.name}: {type_str} = None")
return lines
def _generate_object_type_from_dataclass(self, cls: type) -> List[str]:
- """Generate graphene.ObjectType from a dataclass."""
- import dataclasses as dc
-
type_name = f"{cls.__name__}Type"
- lines = [f"class {type_name}(graphene.ObjectType):"]
+ lines = ["@strawberry.type", f"class {type_name}:"]
hints = get_type_hints(cls)
for name, type_hint in hints.items():
if name.startswith("_"):
continue
- graphene_type = self._resolve_type(type_hint, False)
- lines.append(f" {name} = {graphene_type}")
+ type_str = self._resolve_type(type_hint, optional=True)
+ lines.append(f" {name}: {type_str} = None")
return lines
def _resolve_type(self, type_hint: Any, optional: bool) -> str:
- """Resolve Python type to graphene field call string."""
+ """Resolve Python type hint to a strawberry annotation string."""
base, is_optional = unwrap_optional(type_hint)
optional = optional or is_optional
origin = get_origin_name(base)
type_name = get_type_name(base)
- # Look up resolver
resolver = (
- GRAPHENE_RESOLVERS.get(origin)
- or GRAPHENE_RESOLVERS.get(type_name)
- or GRAPHENE_RESOLVERS.get(base)
+ STRAWBERRY_RESOLVERS.get(origin)
+ or STRAWBERRY_RESOLVERS.get(type_name)
+ or STRAWBERRY_RESOLVERS.get(base)
or (
- GRAPHENE_RESOLVERS["enum"]
+ STRAWBERRY_RESOLVERS["enum"]
if isinstance(base, type) and issubclass(base, Enum)
else None
)
)
- result = resolver(base) if resolver else "graphene.String"
+ inner = resolver(base) if resolver else "str"
- # List types already have () syntax from resolver
- if result.startswith("graphene.List("):
- return result
-
- # Scalar types: add () call
- return f"{result}()"
-
- def _make_required(self, field_str: str) -> str:
- """Add required=True to a graphene field."""
- if field_str.endswith("()"):
- return field_str[:-1] + "required=True)"
- return field_str
-
- def _add_default(self, field_str: str, default: Any) -> str:
- """Add default_value to a graphene field."""
- if callable(default):
- # default_factory — skip, graphene doesn't support factories
- return field_str
- if field_str.endswith("()"):
- return field_str[:-1] + f"default_value={default!r})"
- return field_str
+ if optional:
+ return f"Optional[{inner}]"
+ return inner
diff --git a/soleprint/station/tools/modelgen/generator/typescript.py b/soleprint/station/tools/modelgen/generator/typescript.py
index e1cc5f8..09d796f 100644
--- a/soleprint/station/tools/modelgen/generator/typescript.py
+++ b/soleprint/station/tools/modelgen/generator/typescript.py
@@ -8,7 +8,7 @@ from enum import Enum
from pathlib import Path
from typing import Any, List, get_type_hints
-from ..helpers import get_origin_name, get_type_name, unwrap_optional
+from ..helpers import get_origin_name, get_type_name, is_dataclass_type, unwrap_optional
from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
from ..types import TS_RESOLVERS
from .base import BaseGenerator
@@ -139,6 +139,11 @@ class TypeScriptGenerator(BaseGenerator):
if isinstance(base, type) and issubclass(base, Enum)
else None
)
+ or (
+ TS_RESOLVERS["dataclass"]
+ if is_dataclass_type(base)
+ else None
+ )
)
result = resolver(base) if resolver else "string"
diff --git a/soleprint/station/tools/modelgen/helpers.py b/soleprint/station/tools/modelgen/helpers.py
index 60d6c51..d445447 100644
--- a/soleprint/station/tools/modelgen/helpers.py
+++ b/soleprint/station/tools/modelgen/helpers.py
@@ -44,6 +44,17 @@ def get_list_inner(type_hint: Any) -> str:
return "str"
+def is_dataclass_type(type_hint: Any) -> bool:
+ """Check if type is a dataclass (nested model reference)."""
+ return isinstance(type_hint, type) and dc.is_dataclass(type_hint)
+
+
+def get_list_inner_type(type_hint: Any) -> Any:
+ """Get the raw inner type of List[T] (not stringified)."""
+ args = get_args(type_hint)
+ return args[0] if args else None
+
+
def get_field_default(field: dc.Field) -> Any:
"""Get default value from dataclass field."""
if field.default is not dc.MISSING:
diff --git a/soleprint/station/tools/modelgen/loader/schema.py b/soleprint/station/tools/modelgen/loader/schema.py
index 78833f3..10f461b 100644
--- a/soleprint/station/tools/modelgen/loader/schema.py
+++ b/soleprint/station/tools/modelgen/loader/schema.py
@@ -101,6 +101,12 @@ class SchemaLoader:
for enum_cls in enums:
self.enums.append(self._parse_enum(enum_cls))
+ # Extract VIEWS (view/event projections)
+ if load_all or "views" in include:
+ views = getattr(module, "VIEWS", [])
+ for cls in views:
+ self.api_models.append(self._parse_dataclass(cls))
+
# Extract GRPC_MESSAGES (optional)
if load_all or "grpc" in include:
grpc_messages = getattr(module, "GRPC_MESSAGES", [])
@@ -117,6 +123,20 @@ class SchemaLoader:
methods=grpc_service.get("methods", []),
)
+ # Generic group loader: any include group not handled above
+ # is looked up as UPPER_CASE attribute on the module.
+ # e.g. include "detect_views" → module.DETECT_VIEWS
+ if include:
+ known_groups = {"dataclasses", "enums", "api", "views", "grpc"}
+ for group in include - known_groups:
+ attr_name = group.upper()
+ items = getattr(module, attr_name, [])
+ for cls in items:
+ if isinstance(cls, type) and dc.is_dataclass(cls):
+ self.api_models.append(self._parse_dataclass(cls))
+ elif isinstance(cls, type) and issubclass(cls, Enum):
+ self.enums.append(self._parse_enum(cls))
+
return self
def _import_module(self, path: Path):
diff --git a/soleprint/station/tools/modelgen/pyproject.toml b/soleprint/station/tools/modelgen/pyproject.toml
new file mode 100644
index 0000000..4f30d69
--- /dev/null
+++ b/soleprint/station/tools/modelgen/pyproject.toml
@@ -0,0 +1,16 @@
+[build-system]
+requires = ["setuptools>=68.0"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "soleprint-modelgen"
+version = "0.2.0"
+description = "Multi-source, multi-target model code generator"
+requires-python = ">=3.10"
+dependencies = []
+
+[project.scripts]
+modelgen = "modelgen.__main__:main"
+
+[tool.setuptools.packages.find]
+include = ["modelgen*"]
diff --git a/soleprint/station/tools/modelgen/types.py b/soleprint/station/tools/modelgen/types.py
index cf35e48..274d14a 100644
--- a/soleprint/station/tools/modelgen/types.py
+++ b/soleprint/station/tools/modelgen/types.py
@@ -5,6 +5,7 @@ Type mappings for each output format.
Used by generators to convert Python types to target framework types.
"""
+import dataclasses as dc
from typing import Any, Callable, get_args
# =============================================================================
@@ -39,8 +40,12 @@ DJANGO_SPECIAL: dict[str, str] = {
def _get_list_inner(type_hint: Any) -> str:
"""Get inner type of List[T] for Pydantic."""
args = get_args(type_hint)
- if args and args[0] in (str, int, float, bool):
- return {str: "str", int: "int", float: "float", bool: "bool"}[args[0]]
+ if args:
+ inner = args[0]
+ if inner in (str, int, float, bool):
+ return {str: "str", int: "int", float: "float", bool: "bool"}[inner]
+ if isinstance(inner, type) and dc.is_dataclass(inner):
+ return inner.__name__
return "str"
@@ -49,11 +54,13 @@ PYDANTIC_RESOLVERS: dict[Any, Callable[[Any], str]] = {
int: lambda _: "int",
float: lambda _: "float",
bool: lambda _: "bool",
+ Any: lambda _: "Any",
"UUID": lambda _: "UUID",
"datetime": lambda _: "datetime",
"dict": lambda _: "Dict[str, Any]",
"list": lambda base: f"List[{_get_list_inner(base)}]",
"enum": lambda base: base.__name__,
+ "dataclass": lambda base: base.__name__,
}
# =============================================================================
@@ -72,6 +79,8 @@ def _resolve_ts_list(base: Any) -> str:
return "number[]"
elif inner is bool:
return "boolean[]"
+ elif isinstance(inner, type) and dc.is_dataclass(inner):
+ return f"{inner.__name__}[]"
return "string[]"
@@ -85,6 +94,7 @@ TS_RESOLVERS: dict[Any, Callable[[Any], str]] = {
"dict": lambda _: "Record",
"list": _resolve_ts_list,
"enum": lambda base: base.__name__,
+ "dataclass": lambda base: base.__name__,
}
# =============================================================================
@@ -139,34 +149,34 @@ PRISMA_SPECIAL: dict[str, str] = {
}
# =============================================================================
-# Graphene Type Resolvers
+# Strawberry Type Resolvers
# =============================================================================
-def _resolve_graphene_list(base: Any) -> str:
- """Resolve graphene List type."""
+def _resolve_strawberry_list(base: Any) -> str:
+ """Resolve strawberry List type annotation."""
args = get_args(base)
if args:
inner = args[0]
if inner is str:
- return "graphene.List(graphene.String)"
+ return "List[str]"
elif inner is int:
- return "graphene.List(graphene.Int)"
+ return "List[int]"
elif inner is float:
- return "graphene.List(graphene.Float)"
+ return "List[float]"
elif inner is bool:
- return "graphene.List(graphene.Boolean)"
- return "graphene.List(graphene.String)"
+ return "List[bool]"
+ return "List[str]"
-GRAPHENE_RESOLVERS: dict[Any, Callable[[Any], str]] = {
- str: lambda _: "graphene.String",
- int: lambda _: "graphene.Int",
- float: lambda _: "graphene.Float",
- bool: lambda _: "graphene.Boolean",
- "UUID": lambda _: "graphene.UUID",
- "datetime": lambda _: "graphene.DateTime",
- "dict": lambda _: "graphene.JSONString",
- "list": _resolve_graphene_list,
- "enum": lambda base: f"graphene.String", # Enums exposed as strings in GQL
+STRAWBERRY_RESOLVERS: dict[Any, Callable[[Any], str]] = {
+ str: lambda _: "str",
+ int: lambda _: "int",
+ float: lambda _: "float",
+ bool: lambda _: "bool",
+ "UUID": lambda _: "UUID",
+ "datetime": lambda _: "datetime",
+ "dict": lambda _: "JSON",
+ "list": _resolve_strawberry_list,
+ "enum": lambda base: base.__name__,
}
diff --git a/soleprint/station/tools/sbwrapper/config.json b/soleprint/station/tools/sbwrapper/config.json
index 1250aa8..3a8bfed 100755
--- a/soleprint/station/tools/sbwrapper/config.json
+++ b/soleprint/station/tools/sbwrapper/config.json
@@ -1,5 +1,5 @@
{
- "room_name": "amar",
+ "room_name": "standalone",
"wrapper": {
"enabled": true,
"environment": {
@@ -10,31 +10,19 @@
{
"id": "admin",
"label": "Admin",
- "username": "admin@test.com",
- "password": "Amar2025!",
+ "username": "admin@example.com",
+ "password": "admin",
"icon": "👑",
"role": "ADMIN"
},
{
- "id": "vet1",
- "label": "Vet 1",
- "username": "vet@test.com",
- "password": "Amar2025!",
- "icon": "🩺",
- "role": "VET"
- },
- {
- "id": "tutor1",
- "label": "Tutor 1",
- "username": "tutor@test.com",
- "password": "Amar2025!",
- "icon": "🐶",
+ "id": "user1",
+ "label": "User 1",
+ "username": "user@example.com",
+ "password": "user",
+ "icon": "👤",
"role": "USER"
}
- ],
- "jira": {
- "ticket_id": "VET-535",
- "epic": "EPIC-51.3"
- }
+ ]
}
}
diff --git a/soleprint/station/tools/tester/base.py b/soleprint/station/tools/tester/base.py
index 048c1cd..0cc6a62 100644
--- a/soleprint/station/tools/tester/base.py
+++ b/soleprint/station/tools/tester/base.py
@@ -2,12 +2,30 @@
Pure HTTP Contract Tests - Base Class
Framework-agnostic: works against ANY backend implementation.
+Does NOT manage database - expects a ready environment.
+
+Auth strategies (set CONTRACT_TEST_AUTH_TYPE env var):
+ - bearer (default): JWT token via CONTRACT_TEST_TOKEN or fetched from TOKEN_ENDPOINT
+ - api-key: API key via CONTRACT_TEST_API_KEY
+ - none: No authentication
+
+Usage:
+ CONTRACT_TEST_URL=http://127.0.0.1:8000 pytest
+ CONTRACT_TEST_URL=http://127.0.0.1:8000 CONTRACT_TEST_TOKEN=your_jwt pytest
+ CONTRACT_TEST_URL=http://127.0.0.1:8000 CONTRACT_TEST_AUTH_TYPE=api-key CONTRACT_TEST_API_KEY=key pytest
"""
+import os
import unittest
import httpx
-from .config import config
+
+def get_base_url():
+ """Get base URL from environment (required)"""
+ url = os.environ.get("CONTRACT_TEST_URL", "")
+ if not url:
+ raise ValueError("CONTRACT_TEST_URL environment variable required")
+ return url.rstrip("/")
class ContractTestCase(unittest.TestCase):
@@ -18,35 +36,71 @@ class ContractTestCase(unittest.TestCase):
- Framework-agnostic (works with Django, FastAPI, Node, etc.)
- Pure HTTP via httpx library
- No database access - all data through API
- - API Key authentication
+ - Configurable authentication (bearer, api-key, none)
"""
+ # Auth config - override via environment or subclass
+ AUTH_TYPE = os.environ.get("CONTRACT_TEST_AUTH_TYPE", "bearer")
+ TEST_USER_EMAIL = os.environ.get("CONTRACT_TEST_USER", "contract_test@example.com")
+ TEST_USER_PASSWORD = os.environ.get("CONTRACT_TEST_PASSWORD", "testpass123")
+ TOKEN_ENDPOINT = os.environ.get("CONTRACT_TEST_TOKEN_ENDPOINT", "/api/token/")
+
+ # Class-level cache
_base_url = None
+ _token = None
_api_key = None
@classmethod
def setUpClass(cls):
"""Set up once per test class"""
super().setUpClass()
- cls._base_url = config.get("CONTRACT_TEST_URL", "").rstrip("/")
- if not cls._base_url:
- raise ValueError("CONTRACT_TEST_URL required in environment")
+ cls._base_url = get_base_url()
- cls._api_key = config.get("CONTRACT_TEST_API_KEY", "")
- if not cls._api_key:
- raise ValueError("CONTRACT_TEST_API_KEY required in environment")
+ if cls.AUTH_TYPE == "bearer":
+ cls._token = os.environ.get("CONTRACT_TEST_TOKEN", "")
+ if not cls._token:
+ cls._token = cls._fetch_token()
+ elif cls.AUTH_TYPE == "api-key":
+ cls._api_key = os.environ.get("CONTRACT_TEST_API_KEY", "")
+ if not cls._api_key:
+ raise ValueError("CONTRACT_TEST_API_KEY required for api-key auth")
+
+ @classmethod
+ def _fetch_token(cls):
+ """Get JWT token for authentication"""
+ url = f"{cls._base_url}{cls.TOKEN_ENDPOINT}"
+ try:
+ response = httpx.post(url, json={
+ "username": cls.TEST_USER_EMAIL,
+ "password": cls.TEST_USER_PASSWORD,
+ }, timeout=10)
+ if response.status_code == 200:
+ return response.json().get("access", "")
+ else:
+ print(f"Warning: Token request failed with {response.status_code}")
+ except httpx.RequestError as e:
+ print(f"Warning: Token request failed: {e}")
+ return ""
@property
def base_url(self):
return self._base_url
+ @property
+ def token(self):
+ return self._token
+
@property
def api_key(self):
return self._api_key
def _auth_headers(self):
- """Get authorization headers"""
- return {"Authorization": f"Api-Key {self.api_key}"}
+ """Get authorization headers based on auth type"""
+ if self.AUTH_TYPE == "bearer" and self._token:
+ return {"Authorization": f"Bearer {self._token}"}
+ elif self.AUTH_TYPE == "api-key" and self._api_key:
+ return {"Authorization": f"Api-Key {self._api_key}"}
+ return {}
# =========================================================================
# HTTP helpers
@@ -117,3 +171,6 @@ class ContractTestCase(unittest.TestCase):
"""Assert data is a list with minimum length"""
self.assertIsInstance(data, list)
self.assertGreaterEqual(len(data), min_length)
+
+
+__all__ = ["ContractTestCase", "get_base_url"]
diff --git a/soleprint/station/tools/tester/config.py b/soleprint/station/tools/tester/config.py
index a477830..3059b77 100644
--- a/soleprint/station/tools/tester/config.py
+++ b/soleprint/station/tools/tester/config.py
@@ -53,7 +53,7 @@ def load_environments() -> list:
{
"id": "demo",
"name": "Demo",
- "url": config.get("CONTRACT_TEST_URL", "https://demo.amarmascotas.ar"),
+ "url": config.get("CONTRACT_TEST_URL", "http://localhost:8000"),
"api_key": config.get("CONTRACT_TEST_API_KEY", ""),
"description": "Demo environment",
"default": True
diff --git a/soleprint/station/tools/tester/endpoints.py b/soleprint/station/tools/tester/endpoints.py
deleted file mode 100644
index 35a4dd5..0000000
--- a/soleprint/station/tools/tester/endpoints.py
+++ /dev/null
@@ -1,37 +0,0 @@
-"""
-API Endpoints - Single source of truth for contract tests.
-
-If API paths or versioning changes, update here only.
-"""
-
-
-class Endpoints:
- """API endpoint paths"""
-
- # ==========================================================================
- # Mascotas
- # ==========================================================================
- PET_OWNERS = "/mascotas/api/v1/pet-owners/"
- PET_OWNER_DETAIL = "/mascotas/api/v1/pet-owners/{id}/"
- PETS = "/mascotas/api/v1/pets/"
- PET_DETAIL = "/mascotas/api/v1/pets/{id}/"
- COVERAGE_CHECK = "/mascotas/api/v1/coverage/check/"
-
- # ==========================================================================
- # Productos
- # ==========================================================================
- SERVICES = "/productos/api/v1/services/"
- CART = "/productos/api/v1/cart/"
- CART_DETAIL = "/productos/api/v1/cart/{id}/"
-
- # ==========================================================================
- # Solicitudes
- # ==========================================================================
- SERVICE_REQUESTS = "/solicitudes/service-requests/"
- SERVICE_REQUEST_DETAIL = "/solicitudes/service-requests/{id}/"
-
- # ==========================================================================
- # Auth
- # ==========================================================================
- TOKEN = "/api/token/"
- TOKEN_REFRESH = "/api/token/refresh/"
diff --git a/soleprint/station/tools/tester/environments.json b/soleprint/station/tools/tester/environments.json
index 075ebeb..f63ebcc 100644
--- a/soleprint/station/tools/tester/environments.json
+++ b/soleprint/station/tools/tester/environments.json
@@ -1,31 +1,10 @@
[
{
- "id": "demo",
- "name": "Demo",
- "url": "https://demo.amarmascotas.ar",
+ "id": "local",
+ "name": "Local",
+ "url": "http://localhost:8000",
"api_key": "",
- "description": "Demo environment for testing",
+ "description": "Local development server",
"default": true
- },
- {
- "id": "dev",
- "name": "Development",
- "url": "https://dev.amarmascotas.ar",
- "api_key": "",
- "description": "Development environment"
- },
- {
- "id": "stage",
- "name": "Staging",
- "url": "https://stage.amarmascotas.ar",
- "api_key": "",
- "description": "Staging environment"
- },
- {
- "id": "prod",
- "name": "Production",
- "url": "https://amarmascotas.ar",
- "api_key": "",
- "description": "Production environment (use with caution!)"
}
]
diff --git a/soleprint/station/tools/tester/helpers.py b/soleprint/station/tools/tester/helpers.py
index 4fa5b0b..3393908 100644
--- a/soleprint/station/tools/tester/helpers.py
+++ b/soleprint/station/tools/tester/helpers.py
@@ -1,44 +1,17 @@
"""
-Contract Tests - Shared test data helpers.
+Contract Tests - Generic test data helpers.
-Used across all endpoint tests to generate consistent test data.
+Room-specific helpers belong in cfg/<room>/station/tools/tester/tests/helpers.py
"""
import time
def unique_email(prefix="test"):
- """Generate unique email for test data"""
+ """Generate unique email for test data (avoids collisions across runs)"""
return f"{prefix}_{int(time.time() * 1000)}@contract-test.local"
-def sample_pet_owner(email=None):
- """Generate sample pet owner data"""
- return {
- "first_name": "Test",
- "last_name": "Usuario",
- "email": email or unique_email("owner"),
- "phone": "1155667788",
- "address": "Av. Santa Fe 1234",
- "geo_latitude": -34.5955,
- "geo_longitude": -58.4166,
- }
-
-
-SAMPLE_CAT = {
- "name": "TestCat",
- "pet_type": "CAT",
- "is_neutered": False,
-}
-
-SAMPLE_DOG = {
- "name": "TestDog",
- "pet_type": "DOG",
- "is_neutered": False,
-}
-
-SAMPLE_NEUTERED_CAT = {
- "name": "NeuteredCat",
- "pet_type": "CAT",
- "is_neutered": True,
-}
+def unique_id(prefix="test"):
+ """Generate unique string identifier"""
+ return f"{prefix}_{int(time.time() * 1000)}"
diff --git a/soleprint/station/tools/tester/tests/base.py b/soleprint/station/tools/tester/tests/base.py
index 3120c06..1756c18 100644
--- a/soleprint/station/tools/tester/tests/base.py
+++ b/soleprint/station/tools/tester/tests/base.py
@@ -1,164 +1,4 @@
-"""
-Pure HTTP Contract Tests - Base Class
+"""Re-export from parent for backward compatibility."""
+from ..base import ContractTestCase, get_base_url
-Framework-agnostic: works against ANY backend implementation.
-Does NOT manage database - expects a ready environment.
-
-Requirements:
- - Server running at CONTRACT_TEST_URL
- - Database migrated and seeded
- - Test user exists OR CONTRACT_TEST_TOKEN provided
-
-Usage:
- CONTRACT_TEST_URL=http://127.0.0.1:8000 pytest
- CONTRACT_TEST_TOKEN=your_jwt_token pytest
-"""
-
-import os
-import unittest
-import httpx
-
-from .endpoints import Endpoints
-
-
-def get_base_url():
- """Get base URL from environment (required)"""
- url = os.environ.get("CONTRACT_TEST_URL", "")
- if not url:
- raise ValueError("CONTRACT_TEST_URL environment variable required")
- return url.rstrip("/")
-
-
-class ContractTestCase(unittest.TestCase):
- """
- Base class for pure HTTP contract tests.
-
- Features:
- - Framework-agnostic (works with Django, FastAPI, Node, etc.)
- - Pure HTTP via requests library
- - No database access - all data through API
- - JWT authentication
- """
-
- # Auth credentials - override via environment
- TEST_USER_EMAIL = os.environ.get("CONTRACT_TEST_USER", "contract_test@example.com")
- TEST_USER_PASSWORD = os.environ.get("CONTRACT_TEST_PASSWORD", "testpass123")
-
- # Class-level cache
- _base_url = None
- _token = None
-
- @classmethod
- def setUpClass(cls):
- """Set up once per test class"""
- super().setUpClass()
- cls._base_url = get_base_url()
-
- # Use provided token or fetch one
- cls._token = os.environ.get("CONTRACT_TEST_TOKEN", "")
- if not cls._token:
- cls._token = cls._fetch_token()
-
- @classmethod
- def _fetch_token(cls):
- """Get JWT token for authentication"""
- url = f"{cls._base_url}{Endpoints.TOKEN}"
- try:
- response = httpx.post(url, json={
- "username": cls.TEST_USER_EMAIL,
- "password": cls.TEST_USER_PASSWORD,
- }, timeout=10)
- if response.status_code == 200:
- return response.json().get("access", "")
- else:
- print(f"Warning: Token request failed with {response.status_code}")
- except httpx.RequestError as e:
- print(f"Warning: Token request failed: {e}")
- return ""
-
- @property
- def base_url(self):
- return self._base_url
-
- @property
- def token(self):
- return self._token
-
- def _auth_headers(self):
- """Get authorization headers"""
- if self.token:
- return {"Authorization": f"Bearer {self.token}"}
- return {}
-
- # =========================================================================
- # HTTP helpers
- # =========================================================================
-
- def get(self, path: str, params: dict = None, **kwargs):
- """GET request"""
- url = f"{self.base_url}{path}"
- headers = {**self._auth_headers(), **kwargs.pop("headers", {})}
- response = httpx.get(url, params=params, headers=headers, timeout=30, **kwargs)
- return self._wrap_response(response)
-
- def post(self, path: str, data: dict = None, **kwargs):
- """POST request with JSON"""
- url = f"{self.base_url}{path}"
- headers = {**self._auth_headers(), **kwargs.pop("headers", {})}
- response = httpx.post(url, json=data, headers=headers, timeout=30, **kwargs)
- return self._wrap_response(response)
-
- def put(self, path: str, data: dict = None, **kwargs):
- """PUT request with JSON"""
- url = f"{self.base_url}{path}"
- headers = {**self._auth_headers(), **kwargs.pop("headers", {})}
- response = httpx.put(url, json=data, headers=headers, timeout=30, **kwargs)
- return self._wrap_response(response)
-
- def patch(self, path: str, data: dict = None, **kwargs):
- """PATCH request with JSON"""
- url = f"{self.base_url}{path}"
- headers = {**self._auth_headers(), **kwargs.pop("headers", {})}
- response = httpx.patch(url, json=data, headers=headers, timeout=30, **kwargs)
- return self._wrap_response(response)
-
- def delete(self, path: str, **kwargs):
- """DELETE request"""
- url = f"{self.base_url}{path}"
- headers = {**self._auth_headers(), **kwargs.pop("headers", {})}
- response = httpx.delete(url, headers=headers, timeout=30, **kwargs)
- return self._wrap_response(response)
-
- def _wrap_response(self, response):
- """Add .data attribute for consistency with DRF responses"""
- try:
- response.data = response.json()
- except Exception:
- response.data = None
- return response
-
- # =========================================================================
- # Assertion helpers
- # =========================================================================
-
- def assert_status(self, response, expected_status: int):
- """Assert response has expected status code"""
- self.assertEqual(
- response.status_code,
- expected_status,
- f"Expected {expected_status}, got {response.status_code}. "
- f"Response: {response.data if hasattr(response, 'data') else response.content[:500]}"
- )
-
- def assert_has_fields(self, data: dict, *fields: str):
- """Assert dictionary has all specified fields"""
- missing = [f for f in fields if f not in data]
- self.assertEqual(missing, [], f"Missing fields: {missing}. Got: {list(data.keys())}")
-
- def assert_is_list(self, data, min_length: int = 0):
- """Assert data is a list with minimum length"""
- self.assertIsInstance(data, list)
- self.assertGreaterEqual(len(data), min_length)
-
-
-__all__ = ["ContractTestCase"]
+__all__ = ["ContractTestCase", "get_base_url"]
diff --git a/soleprint/station/tools/tester/tests/conftest.py b/soleprint/station/tools/tester/tests/conftest.py
deleted file mode 100644
index cfbc6dd..0000000
--- a/soleprint/station/tools/tester/tests/conftest.py
+++ /dev/null
@@ -1,29 +0,0 @@
-"""
-Contract Tests Configuration
-
-Supports two testing modes via CONTRACT_TEST_MODE environment variable:
-
- # Fast mode (default) - Django test client, test DB
- pytest tests/contracts/
-
- # Live mode - Real HTTP with LiveServerTestCase, test DB
- CONTRACT_TEST_MODE=live pytest tests/contracts/
-"""
-
-import os
-import pytest
-
-# Let pytest-django handle Django setup via pytest.ini DJANGO_SETTINGS_MODULE
-
-
-def pytest_configure(config):
- """Register custom markers"""
- config.addinivalue_line(
- "markers", "workflow: marks test as a workflow/flow test (runs endpoint tests in sequence)"
- )
-
-
-@pytest.fixture(scope="session")
-def contract_test_mode():
- """Return current test mode"""
- return os.environ.get("CONTRACT_TEST_MODE", "api")
diff --git a/soleprint/station/tools/tester/tests/endpoints.py b/soleprint/station/tools/tester/tests/endpoints.py
deleted file mode 100644
index 7af2031..0000000
--- a/soleprint/station/tools/tester/tests/endpoints.py
+++ /dev/null
@@ -1,38 +0,0 @@
-"""
-API Endpoints - Single source of truth for contract tests.
-
-If API paths or versioning changes, update here only.
-"""
-
-
-class Endpoints:
- """API endpoint paths"""
-
- # ==========================================================================
- # Mascotas
- # ==========================================================================
- PET_OWNERS = "/mascotas/api/v1/pet-owners/"
- PET_OWNER_DETAIL = "/mascotas/api/v1/pet-owners/{id}/"
- PETS = "/mascotas/api/v1/pets/"
- PET_DETAIL = "/mascotas/api/v1/pets/{id}/"
- COVERAGE_CHECK = "/mascotas/api/v1/coverage/check/"
-
- # ==========================================================================
- # Productos
- # ==========================================================================
- SERVICES = "/productos/api/v1/services/"
- CATEGORIES = "/productos/api/v1/categories/"
- CART = "/productos/api/v1/cart/"
- CART_DETAIL = "/productos/api/v1/cart/{id}/"
-
- # ==========================================================================
- # Solicitudes
- # ==========================================================================
- SERVICE_REQUESTS = "/solicitudes/service-requests/"
- SERVICE_REQUEST_DETAIL = "/solicitudes/service-requests/{id}/"
-
- # ==========================================================================
- # Auth
- # ==========================================================================
- TOKEN = "/api/token/"
- TOKEN_REFRESH = "/api/token/refresh/"
diff --git a/soleprint/station/tools/tester/tests/helpers.py b/soleprint/station/tools/tester/tests/helpers.py
deleted file mode 100644
index 4fa5b0b..0000000
--- a/soleprint/station/tools/tester/tests/helpers.py
+++ /dev/null
@@ -1,44 +0,0 @@
-"""
-Contract Tests - Shared test data helpers.
-
-Used across all endpoint tests to generate consistent test data.
-"""
-
-import time
-
-
-def unique_email(prefix="test"):
- """Generate unique email for test data"""
- return f"{prefix}_{int(time.time() * 1000)}@contract-test.local"
-
-
-def sample_pet_owner(email=None):
- """Generate sample pet owner data"""
- return {
- "first_name": "Test",
- "last_name": "Usuario",
- "email": email or unique_email("owner"),
- "phone": "1155667788",
- "address": "Av. Santa Fe 1234",
- "geo_latitude": -34.5955,
- "geo_longitude": -58.4166,
- }
-
-
-SAMPLE_CAT = {
- "name": "TestCat",
- "pet_type": "CAT",
- "is_neutered": False,
-}
-
-SAMPLE_DOG = {
- "name": "TestDog",
- "pet_type": "DOG",
- "is_neutered": False,
-}
-
-SAMPLE_NEUTERED_CAT = {
- "name": "NeuteredCat",
- "pet_type": "CAT",
- "is_neutered": True,
-}