almost there isolating soleprint
This commit is contained in:
4
.gitignore
vendored
4
.gitignore
vendored
@@ -9,7 +9,5 @@ __pycache__/
|
||||
.venv/
|
||||
venv/
|
||||
|
||||
# Generated runnable instance (symlinks + generated)
|
||||
# Generated runnable instance (entirely gitignored - regenerate with build.py)
|
||||
gen/
|
||||
# But track the generated models (one-time per client)
|
||||
!gen/models/
|
||||
|
||||
96
CLAUDE.md
96
CLAUDE.md
@@ -104,8 +104,12 @@ A **Room** is an environment with soleprint context, features, and conventions:
|
||||
The **mainroom** orchestrates interaction between soleprint and managed rooms:
|
||||
- `sbwrapper/` - Sidebar UI overlay for any managed app (quick login, Jira info, etc.)
|
||||
- `link/` - Adapters to connect soleprint to managed app data WITHOUT modifying either
|
||||
- `soleprint/` - Docker configs for running soleprint services
|
||||
- `ctrl/` - Commands for orchestration (sets env vars, starts services)
|
||||
- `soleprint/` - Docker configs + ctrl scripts for running soleprint services
|
||||
- `ctrl/local/` - Local deployment scripts (push.sh, deploy.sh)
|
||||
- `ctrl/server/` - Server setup scripts
|
||||
- `ctrl/` - Mainroom-level orchestration commands
|
||||
|
||||
Soleprint can run without a managed room (for testing veins, etc.) but is always initiated from mainroom.
|
||||
|
||||
### Hub vs Gen
|
||||
- `hub/` = Versioned base files (main.py, dataloader, index.html)
|
||||
@@ -127,19 +131,59 @@ Code inside soleprint should NOT have imports too tied to system names. Display
|
||||
|
||||
## Development Workflow
|
||||
|
||||
### Running Locally
|
||||
### Build Tool
|
||||
|
||||
The build script at spr root handles both development and deployment builds:
|
||||
|
||||
```bash
|
||||
cd spr/gen
|
||||
pip install -r requirements.txt
|
||||
python main.py # Hub on :12000
|
||||
# From spr/
|
||||
python build.py --help
|
||||
python build.py dev # Build with symlinks
|
||||
python build.py deploy --output /path/ # Build for production
|
||||
python build.py models # Only regenerate models
|
||||
```
|
||||
|
||||
### Regenerating Models (one-time / rare)
|
||||
### Modelgen (Generic Tool)
|
||||
|
||||
Modelgen is a generic model generation tool in `station/tools/modelgen/`:
|
||||
|
||||
```bash
|
||||
cd spr/station/tools/generator
|
||||
python -m generators.orchestrator \
|
||||
--config ../../../config/soleprint.config.json \
|
||||
--output ../../../gen
|
||||
# Generate models from config
|
||||
python -m station.tools.modelgen from-config --config config/soleprint.config.json --output gen/models/
|
||||
|
||||
# Future: extract models from codebases (for databrowse)
|
||||
python -m station.tools.modelgen extract --source /path/to/django/project --output models/
|
||||
```
|
||||
|
||||
### Setting Up Dev Environment
|
||||
```bash
|
||||
cd spr/
|
||||
python build.py dev # Creates gen/ with symlinks
|
||||
|
||||
cd gen/
|
||||
python3 -m venv .venv
|
||||
.venv/bin/pip install -r requirements.txt
|
||||
.venv/bin/python main.py # Hub on :12000
|
||||
```
|
||||
|
||||
### Building for Deployment
|
||||
```bash
|
||||
cd spr/
|
||||
python build.py deploy --output ../deploy/soleprint/
|
||||
|
||||
# Then deploy:
|
||||
rsync -av ../deploy/soleprint/ server:/app/soleprint/
|
||||
ssh server 'cd /app/soleprint && ./run.sh'
|
||||
|
||||
# Or use mainroom ctrl scripts:
|
||||
cd mainroom/soleprint/ctrl/local
|
||||
./deploy.sh
|
||||
```
|
||||
|
||||
### Regenerating Models Only (rare)
|
||||
```bash
|
||||
cd spr/
|
||||
python build.py models
|
||||
```
|
||||
|
||||
### Orchestrating with Managed Room
|
||||
@@ -168,7 +212,7 @@ Planned:
|
||||
|
||||
| Tool | Location | Status | Notes |
|
||||
|------|----------|--------|-------|
|
||||
| generator | station/tools/generator | Working | Refactor file IO pending |
|
||||
| modelgen | station/tools/modelgen | Working | Generic model generation (used by build.py, databrowse) |
|
||||
| datagen | station/tools/datagen | Working | Test data generation |
|
||||
| tester | station/tools/tester | Advanced | Full BDD/playwright |
|
||||
| hub | station/tools/hub | Idea | Port management |
|
||||
@@ -194,34 +238,44 @@ Planned:
|
||||
- [x] Mainroom structure from core_nest
|
||||
- [x] Docker configs updated to soleprint naming
|
||||
- [x] Tools consolidated from pawprint
|
||||
- [x] Build tool with dev/deploy modes
|
||||
- [x] gen/ runs correctly
|
||||
|
||||
**Next:**
|
||||
1. [ ] Test gen/ runs correctly
|
||||
2. [ ] Create spr/ctrl/ scripts
|
||||
3. [ ] Complete mainroom/ctrl/ orchestration scripts
|
||||
4. [ ] Worktree for databrowse
|
||||
5. [ ] Worktree for sbwrapper
|
||||
1. [ ] Test mainroom/soleprint/ctrl scripts
|
||||
2. [ ] Test mainroom with managed room (amar)
|
||||
3. [ ] Worktree for databrowse (uses modelgen extract)
|
||||
4. [ ] Worktree for sbwrapper
|
||||
|
||||
## Files Ignored (gitignore)
|
||||
|
||||
- `fails/` - Previous attempts, reference only
|
||||
- `def/` - Definition drafts
|
||||
- `gen/` - Runnable instance (except gen/models/)
|
||||
- `gen/` - Entire folder gitignored (regenerate with `python build.py dev`)
|
||||
- `__pycache__/`, `*.pyc`
|
||||
- `venv/`, `.venv/`
|
||||
|
||||
## Quick Reference
|
||||
|
||||
```bash
|
||||
# Build for dev (from spr/)
|
||||
python build.py dev
|
||||
|
||||
# Start dev server
|
||||
cd gen && python main.py
|
||||
cd gen && .venv/bin/python main.py
|
||||
|
||||
# Health check
|
||||
curl localhost:12000/health
|
||||
|
||||
# View landing
|
||||
open http://localhost:12000
|
||||
# Build for deployment
|
||||
python build.py deploy --output /path/to/deploy/
|
||||
|
||||
# Deploy via ctrl scripts
|
||||
cd mainroom/soleprint/ctrl/local && ./deploy.sh
|
||||
|
||||
# Docker (via mainroom)
|
||||
cd mainroom/soleprint && docker compose up -d
|
||||
|
||||
# Modelgen (generic tool)
|
||||
python -m station.tools.modelgen from-config -c config/soleprint.config.json -o gen/models/
|
||||
```
|
||||
|
||||
304
build.py
Normal file
304
build.py
Normal file
@@ -0,0 +1,304 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Soleprint Build Tool
|
||||
|
||||
Builds the soleprint instance using modelgen for model generation.
|
||||
|
||||
Modes:
|
||||
- dev: Uses symlinks for quick development (edit source, run from gen/)
|
||||
- deploy: Copies everything for production deployment (no symlinks)
|
||||
|
||||
Usage:
|
||||
python build.py dev
|
||||
python build.py deploy --output /path/to/deploy/
|
||||
python build.py models
|
||||
|
||||
Examples:
|
||||
# Set up dev environment
|
||||
python build.py dev
|
||||
|
||||
# Build for deployment
|
||||
python build.py deploy --output ../deploy/soleprint/
|
||||
|
||||
# Only regenerate models
|
||||
python build.py models
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
# SPR root is where this script lives
|
||||
SPR_ROOT = Path(__file__).resolve().parent
|
||||
|
||||
|
||||
def ensure_dir(path: Path):
    """Make sure *path* exists as a directory, creating parent directories as needed."""
    # exist_ok makes repeated calls harmless; parents covers nested paths.
    path.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
def create_symlink(source: Path, target: Path):
    """Point *target* at *source* via a relative symlink, replacing whatever was there."""
    # Clear out whatever currently occupies the target path.
    # is_symlink() is checked first because exists() follows links
    # (a dangling symlink reports exists() == False).
    if target.is_symlink():
        target.unlink()
    elif target.is_dir():
        shutil.rmtree(target)
    elif target.exists():
        target.unlink()

    # Relative links keep the tree relocatable as a whole.
    rel_source = os.path.relpath(source, target.parent)
    target.symlink_to(rel_source)
    print(f" Linked: {target.name} -> {rel_source}")
|
||||
|
||||
|
||||
def copy_path(source: Path, target: Path):
    """Copy *source* (file or tree) to *target*, replacing any existing entry.

    Symlinks in the source are resolved into real files (symlinks=False),
    so the result is fully self-contained.
    """
    # Remove whatever currently occupies the target path.
    if target.is_dir():
        shutil.rmtree(target)
    elif target.exists():
        target.unlink()

    if source.is_dir():
        shutil.copytree(source, target, symlinks=False)
        print(f" Copied: {target.name}/ ({count_files(target)} files)")
    else:
        shutil.copy2(source, target)
        print(f" Copied: {target.name}")
|
||||
|
||||
|
||||
def count_files(path: Path) -> int:
    """Return how many regular files exist anywhere under *path*."""
    total = 0
    for entry in path.rglob("*"):
        if entry.is_file():
            total += 1
    return total
|
||||
|
||||
|
||||
def generate_models(output_dir: Path):
    """Run the modelgen tool to write generated models under *output_dir*.

    Args:
        output_dir: Directory where models/pydantic/__init__.py will be created

    Returns:
        True when modelgen exits successfully, False when the config file is
        missing or the subprocess reports a non-zero exit code.
    """
    config_path = SPR_ROOT / "config" / "soleprint.config.json"

    if not config_path.exists():
        print(f"Warning: Config not found at {config_path}")
        return False

    # Soleprint convention: generated models live at models/pydantic/__init__.py.
    models_file = output_dir / "models" / "pydantic" / "__init__.py"
    models_file.parent.mkdir(parents=True, exist_ok=True)

    # Shell out with the same interpreter, rooted at SPR_ROOT so the
    # station.tools.modelgen package resolves.
    cmd = [
        sys.executable, "-m", "station.tools.modelgen",
        "from-config",
        "--config", str(config_path),
        "--output", str(models_file),
        "--format", "pydantic",
    ]
    return subprocess.run(cmd, cwd=SPR_ROOT).returncode == 0
|
||||
|
||||
|
||||
def build_dev(output_dir: Path):
    """Assemble a runnable dev instance in *output_dir*.

    Everything except the generated models is symlinked back into the source
    tree, so edits to source files show up immediately when running from the
    build output.
    """
    print(f"\n=== Building DEV environment ===")
    print(f"SPR root: {SPR_ROOT}")
    print(f"Output: {output_dir}")

    ensure_dir(output_dir)

    # Hub files: linked individually into the output root.
    print("\nLinking hub files...")
    hub = SPR_ROOT / "hub"
    for entry in ("main.py", "index.html", "requirements.txt", "dataloader"):
        create_symlink(hub / entry, output_dir / entry)

    # System trees: one directory symlink each, skipping absent ones.
    print("\nLinking systems...")
    for name in ("artery", "atlas", "station"):
        system_dir = SPR_ROOT / name
        if system_dir.exists():
            create_symlink(system_dir, output_dir / name)

    print("\nLinking data...")
    create_symlink(SPR_ROOT / "data", output_dir / "data")

    # Models are the only generated (non-linked) content in a dev build;
    # modelgen appends models/pydantic under output_dir itself.
    print("\nGenerating models...")
    if not generate_models(output_dir):
        print(" Warning: Model generation failed, you may need to run it manually")

    print("\n✓ Dev build complete!")
    print(f"\nTo run:")
    print(f" cd {output_dir}")
    print(f" python3 -m venv .venv")
    print(f" .venv/bin/pip install -r requirements.txt")
    print(f" .venv/bin/python main.py")
|
||||
|
||||
|
||||
def build_deploy(output_dir: Path):
    """Produce a self-contained deployment package in *output_dir*.

    Unlike the dev build, everything is copied (no symlinks), models are
    generated fresh, and a bootstrap run.sh is emitted so the package can be
    rsynced to a server and started directly.
    """
    print(f"\n=== Building DEPLOY package ===")
    print(f"SPR root: {SPR_ROOT}")
    print(f"Output: {output_dir}")

    # Refuse to clobber an existing output tree without confirmation.
    if output_dir.exists():
        response = input(f"\nOutput directory exists. Overwrite? [y/N] ")
        if response.lower() != "y":
            print("Aborted.")
            return
        shutil.rmtree(output_dir)

    ensure_dir(output_dir)

    # Hub files: copied individually into the output root.
    print("\nCopying hub files...")
    hub = SPR_ROOT / "hub"
    for entry in ("main.py", "index.html", "requirements.txt", "dataloader"):
        copy_path(hub / entry, output_dir / entry)

    # System trees: full copies, skipping absent ones.
    print("\nCopying systems...")
    for name in ("artery", "atlas", "station"):
        system_dir = SPR_ROOT / name
        if system_dir.exists():
            copy_path(system_dir, output_dir / name)

    print("\nCopying data...")
    copy_path(SPR_ROOT / "data", output_dir / "data")

    # Generate models fresh; fall back to a previously generated set if modelgen fails.
    print("\nGenerating models...")
    if not generate_models(output_dir):
        existing = SPR_ROOT / "gen" / "models"
        if existing.exists():
            print(" Using existing models from gen/")
            copy_path(existing, output_dir / "models")

    # schema.json travels with the package for reference.
    print("\nCopying schema...")
    copy_path(SPR_ROOT / "schema.json", output_dir / "schema.json")

    # Ship a self-bootstrapping runner: creates its venv on first use.
    run_script = output_dir / "run.sh"
    run_script.write_text("""#!/bin/bash
# Soleprint runner
cd "$(dirname "$0")"

if [ ! -d ".venv" ]; then
echo "Creating virtual environment..."
python3 -m venv .venv
.venv/bin/pip install -r requirements.txt
fi

echo "Starting soleprint on http://localhost:12000"
.venv/bin/python main.py
""")
    run_script.chmod(0o755)
    print(" Created: run.sh")

    total_files = count_files(output_dir)
    print(f"\n✓ Deploy build complete! ({total_files} files)")
    print(f"\nTo run:")
    print(f" cd {output_dir}")
    print(f" ./run.sh")
    print(f"\nOr deploy to server:")
    print(f" rsync -av {output_dir}/ server:/app/soleprint/")
    print(f" ssh server 'cd /app/soleprint && ./run.sh'")
|
||||
|
||||
|
||||
def build_models():
    """Regenerate models under gen/ without touching anything else.

    Exits the process with status 1 if model generation fails.
    """
    print(f"\n=== Generating models only ===")

    target = SPR_ROOT / "gen"
    ensure_dir(target)

    if not generate_models(target):
        print("\nError: Model generation failed", file=sys.stderr)
        sys.exit(1)
    print("\n✓ Models generated!")
|
||||
|
||||
|
||||
def main():
    """Parse the command line and dispatch to the requested build mode."""
    parser = argparse.ArgumentParser(
        description="Soleprint Build Tool",
        # RawDescription keeps the module docstring's formatting in --help.
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=__doc__,
    )
    commands = parser.add_subparsers(dest="command", required=True)

    # dev: symlink-based build, defaults to gen/ next to this script.
    dev = commands.add_parser("dev", help="Build for development (symlinks)")
    dev.add_argument(
        "--output", "-o",
        type=Path,
        default=SPR_ROOT / "gen",
        help="Output directory (default: gen/)",
    )

    # deploy: copy-based build, output location is mandatory.
    deploy = commands.add_parser(
        "deploy", help="Build for deployment (copies)"
    )
    deploy.add_argument(
        "--output", "-o",
        type=Path,
        required=True,
        help="Output directory for deployment package",
    )

    # models: no options, just regenerates models under gen/.
    commands.add_parser("models", help="Only regenerate models")

    args = parser.parse_args()

    if args.command == "dev":
        build_dev(args.output.resolve())
    elif args.command == "deploy":
        build_deploy(args.output.resolve())
    elif args.command == "models":
        build_models()
|
||||
|
||||
|
||||
# Entry point: run the CLI only when executed directly, not on import.
if __name__ == "__main__":
    main()
|
||||
@@ -6,16 +6,16 @@
|
||||
"title": "Architecture Model",
|
||||
"status": "ready",
|
||||
"template": null,
|
||||
"larder": {
|
||||
"depot": {
|
||||
"name": "arch-model",
|
||||
"slug": "arch-model",
|
||||
"title": "Architecture Model",
|
||||
"status": "ready",
|
||||
"source_template": null,
|
||||
"data_path": "album/book/arch-model"
|
||||
"data_path": "atlas/book/arch-model"
|
||||
},
|
||||
"output_larder": null,
|
||||
"system": "album"
|
||||
"output_depot": null,
|
||||
"system": "atlas"
|
||||
},
|
||||
{
|
||||
"name": "feature-flow",
|
||||
@@ -23,16 +23,16 @@
|
||||
"title": "Feature Flow Pipeline",
|
||||
"status": "ready",
|
||||
"template": null,
|
||||
"larder": {
|
||||
"depot": {
|
||||
"name": "feature-flow",
|
||||
"slug": "feature-flow",
|
||||
"title": "Feature Flow Pipeline",
|
||||
"status": "ready",
|
||||
"source_template": null,
|
||||
"data_path": "album/book/feature-flow"
|
||||
"data_path": "atlas/book/feature-flow"
|
||||
},
|
||||
"output_larder": null,
|
||||
"system": "album"
|
||||
"output_depot": null,
|
||||
"system": "atlas"
|
||||
},
|
||||
{
|
||||
"name": "gherkin-samples",
|
||||
@@ -40,16 +40,16 @@
|
||||
"title": "Gherkin Samples",
|
||||
"status": "ready",
|
||||
"template": null,
|
||||
"larder": {
|
||||
"depot": {
|
||||
"name": "gherkin-samples",
|
||||
"slug": "gherkin-samples",
|
||||
"title": "Gherkin Samples",
|
||||
"status": "ready",
|
||||
"source_template": null,
|
||||
"data_path": "album/book/gherkin-samples"
|
||||
"data_path": "atlas/book/gherkin-samples"
|
||||
},
|
||||
"output_larder": null,
|
||||
"system": "album"
|
||||
"output_depot": null,
|
||||
"system": "atlas"
|
||||
},
|
||||
{
|
||||
"name": "feature-form-samples",
|
||||
@@ -61,19 +61,19 @@
|
||||
"slug": "feature-form",
|
||||
"title": "Feature Form Template",
|
||||
"status": "ready",
|
||||
"template_path": "album/template/feature-form",
|
||||
"system": "album"
|
||||
"template_path": "atlas/template/feature-form",
|
||||
"system": "atlas"
|
||||
},
|
||||
"larder": {
|
||||
"depot": {
|
||||
"name": "feature-form",
|
||||
"slug": "feature-form",
|
||||
"title": "Feature Forms",
|
||||
"status": "ready",
|
||||
"source_template": "feature-form",
|
||||
"data_path": "album/book/feature-form-samples/feature-form"
|
||||
"data_path": "atlas/book/feature-form-samples/feature-form"
|
||||
},
|
||||
"output_larder": null,
|
||||
"system": "album"
|
||||
"output_depot": null,
|
||||
"system": "atlas"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
"title": "Feature Forms",
|
||||
"status": "ready",
|
||||
"source_template": "feature-form",
|
||||
"data_path": "album/book/feature-form-samples/feature-form"
|
||||
"data_path": "atlas/book/feature-form-samples/feature-form"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -5,18 +5,18 @@
|
||||
"slug": "turnos",
|
||||
"title": "Turnos Monitor",
|
||||
"status": "dev",
|
||||
"system": "ward",
|
||||
"system": "station",
|
||||
"description": "Pipeline view of requests → turnos. Shows vet-petowner at a glance.",
|
||||
"path": "ward/monitor/turnos"
|
||||
"path": "station/monitor/turnos"
|
||||
},
|
||||
{
|
||||
"name": "data_browse",
|
||||
"slug": "data-browse",
|
||||
"title": "Data Browse",
|
||||
"status": "ready",
|
||||
"system": "ward",
|
||||
"description": "Quick navigation to test users and data states. Book/larder pattern with SQL mode for manual testing workflows.",
|
||||
"path": "ward/monitor/data_browse"
|
||||
"system": "station",
|
||||
"description": "Quick navigation to test users and data states. Book/depot pattern with SQL mode for manual testing workflows.",
|
||||
"path": "station/monitor/data_browse"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,5 +1,11 @@
|
||||
{
|
||||
"items": [
|
||||
{"name": "pawprint-local", "slug": "pawprint-local", "title": "Pawprint Local", "status": "dev", "config_path": "deploy/pawprint-local"}
|
||||
{
|
||||
"name": "soleprint-local",
|
||||
"slug": "soleprint-local",
|
||||
"title": "Soleprint Local",
|
||||
"status": "dev",
|
||||
"config_path": "mainroom/soleprint"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
"title": "Feature Form Template",
|
||||
"status": "ready",
|
||||
"template_path": "data/template/feature-form",
|
||||
"system": "album"
|
||||
"system": "atlas"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -5,10 +5,10 @@
|
||||
"slug": "tester",
|
||||
"title": "Contract Tests",
|
||||
"status": "live",
|
||||
"system": "ward",
|
||||
"system": "station",
|
||||
"type": "app",
|
||||
"description": "HTTP contract test runner with multi-environment support. Filter, run, and track tests against dev/stage/prod.",
|
||||
"path": "ward/tools/tester",
|
||||
"path": "station/tools/tester",
|
||||
"url": "/tools/tester/"
|
||||
},
|
||||
{
|
||||
@@ -16,33 +16,22 @@
|
||||
"slug": "datagen",
|
||||
"title": "Test Data Generator",
|
||||
"status": "live",
|
||||
"system": "ward",
|
||||
"system": "station",
|
||||
"type": "cli",
|
||||
"description": "Generate realistic test data for Amar domain (users, pets, services) and MercadoPago API responses. Used by mock veins and test seeders.",
|
||||
"path": "ward/tools/datagen",
|
||||
"path": "station/tools/datagen",
|
||||
"cli": "python -m datagen"
|
||||
},
|
||||
{
|
||||
"name": "generate_test_data",
|
||||
"slug": "generate-test-data",
|
||||
"title": "DB Test Data Extractor",
|
||||
"status": "dev",
|
||||
"system": "ward",
|
||||
"type": "cli",
|
||||
"description": "Extract representative subsets from PostgreSQL dumps for testing/development.",
|
||||
"path": "ward/tools/generate_test_data",
|
||||
"cli": "python -m generate_test_data"
|
||||
},
|
||||
{
|
||||
"name": "modelgen",
|
||||
"slug": "modelgen",
|
||||
"title": "Model Generator",
|
||||
"status": "dev",
|
||||
"system": "ward",
|
||||
"status": "live",
|
||||
"system": "station",
|
||||
"type": "cli",
|
||||
"description": "Generate platform-specific models (Pydantic, Django, Prisma) from JSON Schema.",
|
||||
"path": "ward/tools/modelgen",
|
||||
"cli": "python -m modelgen"
|
||||
"description": "Generic model generation tool. Generates typed models (Pydantic, etc.) from config files, JSON Schema, or by extracting from existing codebases (Django, SQLAlchemy). Used by build.py and databrowse.",
|
||||
"path": "station/tools/modelgen",
|
||||
"cli": "python -m station.tools.modelgen"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
157
mainroom/soleprint/ctrl/README.md
Normal file
157
mainroom/soleprint/ctrl/README.md
Normal file
@@ -0,0 +1,157 @@
|
||||
# Pawprint Control Scripts
|
||||
|
||||
Control scripts for managing pawprint services via systemd (alternative to Docker deployment).
|
||||
|
||||
## Structure
|
||||
|
||||
```
|
||||
ctrl/
|
||||
├── .env.pawprint # Shared configuration
|
||||
├── local/ # Scripts run from developer machine
|
||||
│ ├── commit.sh # Commit changes across all repos
|
||||
│ ├── deploy.sh # Full deployment workflow
|
||||
│ ├── init.sh # Initial sync to server
|
||||
│ ├── push.sh # Deploy to server (all by default)
|
||||
│ └── status.sh # Git status of all repos
|
||||
└── server/ # Scripts run on server
|
||||
├── install-deps.sh # Install Python deps (all by default)
|
||||
├── restart.sh # Restart services (all by default)
|
||||
├── setup-cert.sh # Setup SSL certificate
|
||||
├── setup-nginx.sh # Create nginx config
|
||||
└── setup-service.sh # Create systemd service
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
Edit `.env.pawprint` to configure:
|
||||
|
||||
```bash
|
||||
# Deployment
|
||||
DEPLOY_SERVER=mariano@mcrn.ar
|
||||
DEPLOY_REMOTE_PATH=~/pawprint
|
||||
|
||||
# Local paths
|
||||
PAWPRINT_BARE_PATH=/home/mariano/pawprint
|
||||
|
||||
# Server paths
|
||||
SERVER_USER=mariano
|
||||
SERVER_PAWPRINT_PATH=/home/mariano/pawprint
|
||||
SERVER_VENV_BASE=/home/mariano/venvs
|
||||
```
|
||||
|
||||
## Design Principle
|
||||
|
||||
**All services are the default.** No flags needed for common operations.
|
||||
|
||||
```bash
|
||||
./push.sh # Deploys all (default)
|
||||
./push.sh artery # Deploy only artery (when needed)
|
||||
```
|
||||
|
||||
See `DESIGN_PAWPRINT.md` for detailed philosophy.
|
||||
|
||||
## Local Scripts
|
||||
|
||||
### commit.sh
|
||||
```bash
|
||||
./local/commit.sh "Your commit message"
|
||||
```
|
||||
|
||||
### status.sh
|
||||
```bash
|
||||
./local/status.sh
|
||||
```
|
||||
|
||||
### push.sh
|
||||
```bash
|
||||
./local/push.sh # Push all services (default)
|
||||
./local/push.sh artery # Push only artery
|
||||
```
|
||||
|
||||
### deploy.sh
|
||||
```bash
|
||||
./local/deploy.sh
|
||||
# Then restart on server:
|
||||
# ssh mariano@mcrn.ar 'bash ~/pawprint/ctrl/server/restart.sh'
|
||||
```
|
||||
|
||||
### init.sh
|
||||
```bash
|
||||
./local/init.sh # Initial full sync (run once)
|
||||
```
|
||||
|
||||
## Server Scripts
|
||||
|
||||
### restart.sh
|
||||
```bash
|
||||
sudo ./server/restart.sh # Restart all (default)
|
||||
sudo ./server/restart.sh artery # Restart only artery
|
||||
```
|
||||
|
||||
### install-deps.sh
|
||||
```bash
|
||||
./server/install-deps.sh # Install all (default)
|
||||
./server/install-deps.sh artery # Install only artery
|
||||
```
|
||||
|
||||
### setup-service.sh
|
||||
```bash
|
||||
sudo ./server/setup-service.sh pawprint 12000 main:app
|
||||
sudo ./server/setup-service.sh artery 12001 main:app
|
||||
```
|
||||
|
||||
### setup-nginx.sh
|
||||
```bash
|
||||
sudo ./server/setup-nginx.sh artery artery.mcrn.ar 12001
|
||||
```
|
||||
|
||||
### setup-cert.sh
|
||||
```bash
|
||||
sudo ./server/setup-cert.sh artery.mcrn.ar
|
||||
```
|
||||
|
||||
## Deployment Workflow
|
||||
|
||||
### Initial Setup (once)
|
||||
|
||||
Local:
|
||||
```bash
|
||||
cd ctrl/local
|
||||
./init.sh
|
||||
```
|
||||
|
||||
Server:
|
||||
```bash
|
||||
cd ~/pawprint/ctrl/server
|
||||
./install-deps.sh
|
||||
sudo ./setup-service.sh pawprint 12000 main:app
|
||||
sudo ./setup-service.sh artery 12001 main:app
|
||||
sudo ./setup-service.sh album 12002 main:app
|
||||
sudo ./setup-service.sh ward 12003 main:app
|
||||
sudo ./setup-nginx.sh pawprint pawprint.mcrn.ar 12000
|
||||
sudo ./setup-nginx.sh artery artery.mcrn.ar 12001
|
||||
sudo ./setup-nginx.sh album album.mcrn.ar 12002
|
||||
sudo ./setup-nginx.sh ward ward.mcrn.ar 12003
|
||||
```
|
||||
|
||||
### Regular Updates
|
||||
|
||||
Local:
|
||||
```bash
|
||||
cd ctrl/local
|
||||
./commit.sh "Update feature X"
|
||||
./deploy.sh
|
||||
```
|
||||
|
||||
Server:
|
||||
```bash
|
||||
sudo ~/pawprint/ctrl/server/restart.sh
|
||||
```
|
||||
|
||||
## Nest vs Pawprint Control
|
||||
|
||||
- **core_nest/ctrl/** - Manages full nest (amar + pawprint) via Docker
|
||||
- **pawprint/ctrl/** - Manages pawprint services via systemd
|
||||
|
||||
This directory provides systemd-based deployment as an alternative to Docker.
|
||||
For full nest orchestration with Docker, use `core_nest/ctrl/`.
|
||||
34
mainroom/soleprint/ctrl/local/commit.sh
Executable file
34
mainroom/soleprint/ctrl/local/commit.sh
Executable file
@@ -0,0 +1,34 @@
|
||||
#!/bin/bash
# Commit pending changes in the pawprint root repo and each sub-repo,
# all with one shared message.
# Usage: ./commit.sh "commit message"

set -e

MSG="${1:?Usage: $0 \"commit message\"}"

# Root of the pawprint checkout; sub-repos live directly underneath it.
PAWPRINT_DIR="${PAWPRINT_BARE_PATH:-/home/mariano/pawprint}"
REPOS=("$PAWPRINT_DIR" "$PAWPRINT_DIR/artery" "$PAWPRINT_DIR/album" "$PAWPRINT_DIR/ward")

for repo in "${REPOS[@]}"; do
    # The root repo is labeled "pawprint"; sub-repos use their directory name.
    if [ "$repo" = "$PAWPRINT_DIR" ]; then
        name="pawprint"
    else
        name=$(basename "$repo")
    fi

    if [ ! -d "$repo/.git" ]; then
        echo "=== $name: not a git repo, skipping ==="
        continue
    fi

    cd "$repo"

    # Skip repos with no staged, unstaged, or untracked changes.
    if git diff --quiet && git diff --cached --quiet && [ -z "$(git ls-files --others --exclude-standard)" ]; then
        echo "=== $name: nothing to commit ==="
        continue
    fi

    echo "=== $name ==="
    git add -A
    git commit -m "$MSG"
done

echo "Done!"
|
||||
24
mainroom/soleprint/ctrl/local/deploy.sh
Executable file
24
mainroom/soleprint/ctrl/local/deploy.sh
Executable file
@@ -0,0 +1,24 @@
|
||||
#!/bin/bash
# Deploy everything to the server from the local machine.
# Usage: ./deploy.sh
# Afterwards, restart the services on the server (needs admin rights).

set -e

# Resolve this script's directory and the ctrl root two levels up.
here="$(cd "$(dirname "$0")" && pwd)"
ctrl_root="$(cd "$here/../.." && pwd)"

# Pull in deployment settings; a missing env file is tolerated.
source "$ctrl_root/.env.soleprint" 2>/dev/null || true
server="${DEPLOY_SERVER:-mariano@mcrn.ar}"

echo "=== Pushing all ==="
"$here/push.sh"

echo ""
echo "=== Push complete ==="
echo "Now restart services on server:"
echo " ssh $server 'sudo systemctl restart soleprint artery atlas station'"
echo ""
echo "# Or restart specific service:"
echo "# ssh $server 'sudo systemctl restart artery'"
|
||||
28
mainroom/soleprint/ctrl/local/init.sh
Executable file
28
mainroom/soleprint/ctrl/local/init.sh
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/bin/bash
# Initial full sync of pawprint to server
# Run once to setup, then use push.sh for updates

set -e

# Load configuration.
# FIX: the original line was a corrupted merge of two command substitutions
# ("$(cd ... SCRIPT_DIR=... pwd)") and did not compute a usable path.
# ctrl/ is two levels above this script (ctrl/local/init.sh).
CTRL_DIR="$(cd "$(dirname "$0")/../.." && pwd)"
# NOTE(review): sibling scripts (push.sh, deploy.sh) source .env.soleprint —
# confirm whether .env.pawprint here is intentional or a leftover of the rename.
source "$CTRL_DIR/.env.pawprint" 2>/dev/null || true

PAWPRINT_DIR="${PAWPRINT_BARE_PATH:-/home/mariano/pawprint}"
REMOTE="${DEPLOY_SERVER:-mariano@mcrn.ar}"
REMOTE_DIR="${DEPLOY_REMOTE_PATH:-~/pawprint}"

echo "=== Initial sync of pawprint ==="
echo "From: $PAWPRINT_DIR"
echo "To: $REMOTE:$REMOTE_DIR"

# Respect .gitignore so build artifacts and venvs are not shipped;
# never copy VCS metadata or secrets.
rsync -avz \
    --filter=':- .gitignore' \
    --exclude '.git' \
    --exclude '.env' \
    "$PAWPRINT_DIR/" "$REMOTE:$REMOTE_DIR/"

echo ""
echo "Done! Now on server run:"
echo " cd ~/pawprint"
echo " # Use core_nest/pawprint/tools/server/setup-*.sh scripts for initial setup"
|
||||
66
mainroom/soleprint/ctrl/local/push.sh
Executable file
66
mainroom/soleprint/ctrl/local/push.sh
Executable file
@@ -0,0 +1,66 @@
|
||||
#!/bin/bash
# Rsync-based deployment of soleprint repos.
# Usage: ./push.sh [target]
#   ./push.sh            -> all of: soleprint, artery, atlas, station
#   ./push.sh artery     -> just artery
#   ./push.sh soleprint  -> just the soleprint root (sub-repos excluded)

set -e

target="${1:-all}"

# Load deployment settings from the ctrl root; a missing env file is tolerated.
script_dir="$(cd "$(dirname "$0")" && pwd)"
ctrl_dir="$(cd "$script_dir/../.." && pwd)"
source "$ctrl_dir/.env.soleprint" 2>/dev/null || true

src_root="${SOLEPRINT_BARE_PATH:-/home/mariano/wdir/spr/gen}"
remote="${DEPLOY_SERVER:-mariano@mcrn.ar}"
remote_base="${DEPLOY_REMOTE_PATH:-~/soleprint}"

case "$target" in
all)
    # Fan out by re-invoking this script once per service.
    echo "=== Deploying all services ==="
    for svc in soleprint artery atlas station; do
        "$0" "$svc"
        echo ""
    done
    echo "=== All done ==="
    exit 0
    ;;
soleprint)
    # Root files only; sub-repos are excluded here and pushed individually.
    echo "=== Deploying soleprint (root only) ==="
    rsync -avz \
        --filter=':- .gitignore' \
        --exclude '.git' \
        --exclude '.env' \
        --exclude '.venv' \
        --exclude 'artery/' \
        --exclude 'atlas/' \
        --exclude 'station/' \
        "$src_root/" "$remote:$remote_base/"
    echo "Done!"
    exit 0
    ;;
esac

# Any other target is treated as one sub-repo directory under the root.
local_dir="$src_root/$target"
remote_dir="$remote_base/$target"

if [ ! -d "$local_dir" ]; then
    echo "Error: $local_dir does not exist"
    exit 1
fi

echo "=== Deploying $target ==="
echo "From: $local_dir"
echo "To: $remote:$remote_dir"

rsync -avz \
    --filter=':- .gitignore' \
    --exclude '.git' \
    --exclude '.env' \
    "$local_dir/" "$remote:$remote_dir/"

echo "Done!"
|
||||
33
mainroom/soleprint/ctrl/local/status.sh
Executable file
33
mainroom/soleprint/ctrl/local/status.sh
Executable file
@@ -0,0 +1,33 @@
|
||||
#!/bin/bash
# Show git status of all repos
# Usage: ./status.sh

# Find pawprint bare metal directory from PAWPRINT_BARE_PATH or default
PAWPRINT_DIR="${PAWPRINT_BARE_PATH:-/home/mariano/pawprint}"
REPOS=("$PAWPRINT_DIR" "$PAWPRINT_DIR/artery" "$PAWPRINT_DIR/album" "$PAWPRINT_DIR/ward")

for repo in "${REPOS[@]}"; do
    name=$(basename "$repo")
    [ "$repo" = "$PAWPRINT_DIR" ] && name="pawprint"

    if [ ! -d "$repo/.git" ]; then
        echo "=== $name: not a git repo ==="
        continue
    fi

    # Run each repo's checks in a subshell: the script has no `set -e`,
    # so an unchecked `cd` failure would silently leave us running git
    # commands inside the PREVIOUS repo for all later iterations.
    (
        cd "$repo" || { echo "=== $name: cannot cd ==="; exit 0; }
        branch=$(git branch --show-current)

        # Count changed paths in each state (one numstat line per path)
        staged=$(git diff --cached --numstat | wc -l)
        unstaged=$(git diff --numstat | wc -l)
        untracked=$(git ls-files --others --exclude-standard | wc -l)

        if [ "$staged" -eq 0 ] && [ "$unstaged" -eq 0 ] && [ "$untracked" -eq 0 ]; then
            echo "=== $name ($branch): clean ==="
        else
            echo "=== $name ($branch): +$staged staged, ~$unstaged modified, ?$untracked untracked ==="
            git status --short
        fi
    )
    echo
done
|
||||
55
mainroom/soleprint/ctrl/server/install-deps.sh
Executable file
55
mainroom/soleprint/ctrl/server/install-deps.sh
Executable file
@@ -0,0 +1,55 @@
|
||||
#!/bin/bash
# Install/update dependencies for apps
# Usage: ./install-deps.sh [app-name]
# Example: ./install-deps.sh (installs deps for all services)
#          ./install-deps.sh artery (installs deps for artery only)

set -e

APP_NAME="${1:-all}"

# Load configuration
# BUG FIX: this line was corrupted (SCRIPT_DIR assignments pasted inside the
# CTRL_DIR command substitution). Resolve the script's own directory first,
# then go two levels up — same pattern as push.sh in ctrl/local/.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
CTRL_DIR="$(cd "$SCRIPT_DIR/../.." && pwd)"
source "$CTRL_DIR/.env.pawprint" 2>/dev/null || true

APP_USER="${SERVER_USER:-mariano}"
PAWPRINT_PATH="${SERVER_PAWPRINT_PATH:-/home/mariano/pawprint}"
VENV_BASE="${SERVER_VENV_BASE:-/home/mariano/venvs}"

# Handle all (default): re-invoke this script once per service
if [ "$APP_NAME" = "all" ]; then
    echo "=== Installing deps for all services ==="
    for app in pawprint artery album ward; do
        echo ""
        echo "--- $app ---"
        "$0" "$app"
    done
    echo ""
    echo "=== All done ==="
    exit 0
fi

VENV_DIR="$VENV_BASE/$APP_NAME"

# pawprint root keeps requirements.txt at the repo root; sub-apps in their dir
if [ "$APP_NAME" = "pawprint" ]; then
    REQ_FILE="$PAWPRINT_PATH/requirements.txt"
else
    REQ_FILE="$PAWPRINT_PATH/$APP_NAME/requirements.txt"
fi

if [ ! -f "$REQ_FILE" ]; then
    echo "Error: $REQ_FILE not found"
    exit 1
fi

# Create the venv on first run only
if [ ! -d "$VENV_DIR" ]; then
    echo "Creating venv: $VENV_DIR"
    python3 -m venv "$VENV_DIR"
fi

echo "Installing deps from $REQ_FILE"
source "$VENV_DIR/bin/activate"
pip install -r "$REQ_FILE"
deactivate

echo "Done!"
|
||||
24
mainroom/soleprint/ctrl/server/restart.sh
Executable file
24
mainroom/soleprint/ctrl/server/restart.sh
Executable file
@@ -0,0 +1,24 @@
|
||||
#!/bin/bash
# Restart pawprint services
# Usage: ./restart.sh [service]
# Example: ./restart.sh (restarts all services)
#          ./restart.sh artery (restarts only artery)

set -e

TARGET="${1:-all}"

# Single-service path vs. the default "bounce everything" path
if [ "$TARGET" != "all" ]; then
    echo "Restarting $TARGET..."
    systemctl restart "$TARGET"

    echo "Status:"
    systemctl status "$TARGET" --no-pager | grep -E "●|Active:"
else
    echo "Restarting all services..."
    systemctl restart pawprint artery album ward
    echo "Status:"
    systemctl status pawprint artery album ward --no-pager | grep -E "●|Active:"
fi
|
||||
24
mainroom/soleprint/ctrl/server/setup-cert.sh
Executable file
24
mainroom/soleprint/ctrl/server/setup-cert.sh
Executable file
@@ -0,0 +1,24 @@
|
||||
#!/bin/bash
# Install/update SSL certificate for a subdomain
# Usage: ./setup-cert.sh <subdomain>
# Example: ./setup-cert.sh pawprint.mcrn.ar

set -e

# ${1:?...} aborts with the usage message when no argument is given
SUBDOMAIN="${1:?Usage: $0 <subdomain>}"

echo "=== Setting up SSL cert for $SUBDOMAIN ==="

# Check if certbot is installed; bootstrap it (plus the nginx plugin) on first use
if ! command -v certbot &> /dev/null; then
    echo "Installing certbot..."
    apt update
    apt install -y certbot python3-certbot-nginx
fi

# Get/renew certificate; --nginx also edits the site config to use it.
# NOTE(review): apt/certbot need root — assumes this runs as root or via sudo; confirm.
certbot --nginx -d "$SUBDOMAIN" --non-interactive --agree-tos --register-unsafely-without-email

echo ""
echo "Done! Certificate installed for $SUBDOMAIN"
echo "Auto-renewal is enabled via systemd timer"
|
||||
54
mainroom/soleprint/ctrl/server/setup-nginx.sh
Executable file
54
mainroom/soleprint/ctrl/server/setup-nginx.sh
Executable file
@@ -0,0 +1,54 @@
|
||||
#!/bin/bash
# Creates nginx config for FastAPI app
# Usage: ./setup-nginx.sh <app-name> <subdomain> <port>
# Example: ./setup-nginx.sh artery artery.mcrn.ar 12001

set -e

# All three arguments are required; ${n:?...} aborts with the usage string
APP_NAME="${1:?Usage: $0 <app-name> <subdomain> <port>}"
SUBDOMAIN="${2:?Usage: $0 <app-name> <subdomain> <port>}"
PORT="${3:?Usage: $0 <app-name> <subdomain> <port>}"

NGINX_CONF="/etc/nginx/sites-available/$APP_NAME"

echo "Creating nginx config: $NGINX_CONF"

# Unquoted EOF heredoc: $SUBDOMAIN/$PORT expand NOW; nginx runtime variables
# ($host, $request_uri, ...) are escaped as \$ so they reach the file intact.
# NOTE(review): cert paths are hard-coded to /etc/letsencrypt/live/mcrn.ar —
# assumes one certificate covers every subdomain; confirm before reuse.
sudo tee "$NGINX_CONF" > /dev/null << EOF
server {
    listen 80;
    server_name $SUBDOMAIN;
    return 301 https://\$host\$request_uri;
}

server {
    listen 443 ssl;
    server_name $SUBDOMAIN;

    ssl_certificate /etc/letsencrypt/live/mcrn.ar/fullchain.pem;
    ssl_certificate_key /etc/letsencrypt/live/mcrn.ar/privkey.pem;
    include /etc/letsencrypt/options-ssl-nginx.conf;
    ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;

    location / {
        proxy_pass http://127.0.0.1:$PORT;
        proxy_set_header Host \$host;
        proxy_set_header X-Real-IP \$remote_addr;
        proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto \$scheme;
        proxy_read_timeout 300;
    }
}
EOF

echo "Enabling site..."
sudo ln -sf "$NGINX_CONF" /etc/nginx/sites-enabled/

echo "Testing nginx config..."
sudo nginx -t

echo "Reloading nginx..."
sudo systemctl reload nginx

echo ""
echo "Done! Site available at https://$SUBDOMAIN"
echo "Note: Make sure DNS points $SUBDOMAIN to this server"
|
||||
54
mainroom/soleprint/ctrl/server/setup-service.sh
Executable file
54
mainroom/soleprint/ctrl/server/setup-service.sh
Executable file
@@ -0,0 +1,54 @@
|
||||
#!/bin/bash
# Creates systemd service for FastAPI app
# Usage: ./setup-service.sh <app-name> <port> <app-module>
# Example: ./setup-service.sh artery 12001 main:app

set -e

APP_NAME="${1:?Usage: $0 <app-name> <port> <app-module>}"
PORT="${2:?Usage: $0 <app-name> <port> <app-module>}"
APP_MODULE="${3:-main:app}"

APP_USER="mariano"
VENV_DIR="/home/$APP_USER/venvs/$APP_NAME"
# BUG FIX: $PAWPRINT_PATH was interpolated into EnvironmentFile= below but
# never defined, so the unit file got "EnvironmentFile=/.env". Define it
# explicitly and reuse it for the working directory.
PAWPRINT_PATH="/home/$APP_USER/pawprint"

# pawprint root is special case: it runs from the repo root itself
if [ "$APP_NAME" = "pawprint" ]; then
    WORK_DIR="$PAWPRINT_PATH"
else
    WORK_DIR="$PAWPRINT_PATH/$APP_NAME"
fi
SERVICE_FILE="/etc/systemd/system/${APP_NAME}.service"

echo "Creating systemd service: $SERVICE_FILE"

# Unquoted EOF heredoc: all $VARS expand now, producing a static unit file
sudo tee "$SERVICE_FILE" > /dev/null << EOF
[Unit]
Description=$APP_NAME FastAPI service
After=network.target

[Service]
User=$APP_USER
Group=$APP_USER
WorkingDirectory=$WORK_DIR
Environment="PATH=$VENV_DIR/bin"
EnvironmentFile=$PAWPRINT_PATH/.env
ExecStart=$VENV_DIR/bin/uvicorn $APP_MODULE --host 127.0.0.1 --port $PORT
Restart=always
RestartSec=5

[Install]
WantedBy=multi-user.target
EOF

echo "Reloading systemd..."
sudo systemctl daemon-reload

echo "Enabling service..."
sudo systemctl enable "$APP_NAME"

echo ""
echo "Done! Service commands:"
echo "  sudo systemctl start $APP_NAME"
echo "  sudo systemctl status $APP_NAME"
echo "  sudo journalctl -u $APP_NAME -f"
|
||||
82
mainroom/soleprint/ctrl/sync-tests.sh
Executable file
82
mainroom/soleprint/ctrl/sync-tests.sh
Executable file
@@ -0,0 +1,82 @@
|
||||
#!/bin/bash
#
# Sync contract tests from amar_django_back_contracts to ward/tools/tester
#
# Usage: ./sync-tests.sh

set -e

# Paths (hard-coded to this machine's checkout layout)
SOURCE_REPO="/home/mariano/wdir/ama/amar_django_back_contracts"
DEST_DIR="/home/mariano/wdir/ama/pawprint/ward/tools/tester/tests"

# Colors
GREEN='\033[0;32m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

echo -e "${BLUE}=== Syncing Contract Tests ===${NC}"
echo "Source: $SOURCE_REPO/tests/contracts"
echo "Dest: $DEST_DIR"
echo

# Check source exists
if [ ! -d "$SOURCE_REPO/tests/contracts" ]; then
    echo "Error: Source directory not found: $SOURCE_REPO/tests/contracts"
    exit 1
fi

# Create destination if it doesn't exist
mkdir -p "$DEST_DIR"

# Sync test files (preserve structure)
echo -e "${BLUE}Copying test files...${NC}"

# Copy the contract test structure.
# Whitelist-style rsync filters: keep all directories plus the named
# test/support files, drop everything else (trailing --exclude="*");
# --delete also removes files that disappeared from the source.
rsync -av --delete \
    --include="*/" \
    --include="test_*.py" \
    --include="__init__.py" \
    --include="base*.py" \
    --include="conftest.py" \
    --include="endpoints.py" \
    --include="helpers.py" \
    --exclude="*" \
    "$SOURCE_REPO/tests/contracts/" \
    "$DEST_DIR/"

# Remove base_api.py and base_live.py (we only need pure HTTP base.py)
rm -f "$DEST_DIR/base_api.py" "$DEST_DIR/base_live.py"

# Create a simple base.py that uses tester's base class.
# Quoted 'EOF' heredoc: content is written literally, no shell expansion.
cat > "$DEST_DIR/base.py" << 'EOF'
"""
Contract Tests - Base Class

Uses tester's HTTP base class for framework-agnostic testing.
"""

# Import from tester's base
import sys
from pathlib import Path

# Add tester to path if needed
tester_path = Path(__file__).parent.parent
if str(tester_path) not in sys.path:
    sys.path.insert(0, str(tester_path))

from base import ContractTestCase

__all__ = ["ContractTestCase"]
EOF

echo
echo -e "${GREEN}✓ Tests synced successfully${NC}"
echo
echo "Test structure:"
find "$DEST_DIR" -name "test_*.py" -type f | sed 's|'"$DEST_DIR"'||' | sort

echo
echo -e "${BLUE}Next steps:${NC}"
echo "1. Run tester locally: cd /home/mariano/wdir/ama/pawprint/ward && python -m tools.tester"
echo "2. Deploy to server: cd /home/mariano/wdir/ama/pawprint/deploy && ./deploy.sh"
|
||||
202
station/tools/modelgen/__main__.py
Normal file
202
station/tools/modelgen/__main__.py
Normal file
@@ -0,0 +1,202 @@
|
||||
"""
|
||||
Modelgen - Generic Model Generation Tool
|
||||
|
||||
Generates typed models from various sources to various formats.
|
||||
|
||||
Input sources:
|
||||
- Configuration files (soleprint.config.json style)
|
||||
- JSON Schema (planned)
|
||||
- Existing codebases: Django, SQLAlchemy, Prisma (planned - for databrowse)
|
||||
|
||||
Output formats:
|
||||
- pydantic: Pydantic BaseModel classes
|
||||
- django: Django ORM models (planned)
|
||||
- prisma: Prisma schema (planned)
|
||||
- sqlalchemy: SQLAlchemy models (planned)
|
||||
|
||||
Usage:
|
||||
python -m station.tools.modelgen --help
|
||||
python -m station.tools.modelgen from-config -c config.json -o models/ -f pydantic
|
||||
python -m station.tools.modelgen from-schema -s schema.json -o models/ -f pydantic
|
||||
python -m station.tools.modelgen extract -s /path/to/django/app -o models/ -f pydantic
|
||||
|
||||
This is a GENERIC tool. For soleprint-specific builds, use:
|
||||
python build.py dev|deploy
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def cmd_from_config(args):
    """Generate models from a configuration file (soleprint.config.json style).

    Validates that args.config exists, loads it, and delegates writing of
    args.format models under args.output to ModelGenerator.
    """
    from .config_loader import load_config
    from .model_generator import ModelGenerator

    cfg_file = Path(args.config)
    if not cfg_file.exists():
        print(f"Error: Config file not found: {cfg_file}", file=sys.stderr)
        sys.exit(1)

    dest = Path(args.output)

    print(f"Loading config: {cfg_file}")
    loaded = load_config(cfg_file)

    print(f"Generating {args.format} models to: {dest}")
    result_path = ModelGenerator(
        config=loaded,
        output_path=dest,
        output_format=args.format,
    ).generate()

    print(f"✓ Models generated: {result_path}")
|
||||
|
||||
|
||||
def cmd_from_schema(args):
    """Generate models from JSON Schema (placeholder: prints an error, exits 1)."""
    for msg in (
        "Error: from-schema not yet implemented",
        "Use from-config with a soleprint.config.json file for now",
    ):
        print(msg, file=sys.stderr)
    sys.exit(1)
|
||||
|
||||
|
||||
def cmd_extract(args):
    """Extract models from existing codebase (placeholder for databrowse).

    Currently always prints an explanation to stderr and exits 1.
    """
    err = sys.stderr
    print("Error: extract not yet implemented", file=err)
    print(
        "This will extract models from Django/SQLAlchemy/Prisma codebases.",
        file=err,
    )
    print("Use cases:", file=err)
    print(" - Generate browsable graphs for databrowse tool", file=err)
    print(" - Convert between ORM formats", file=err)
    sys.exit(1)
|
||||
|
||||
|
||||
def cmd_list_formats(args):
    """Print every output format registered with ModelGenerator."""
    from .model_generator import ModelGenerator

    print("Available output formats:")
    for name in ModelGenerator.available_formats():
        print(f" - {name}")
|
||||
|
||||
|
||||
def main():
    """Build the modelgen CLI and dispatch to the selected subcommand."""
    parser = argparse.ArgumentParser(
        description="Modelgen - Generic Model Generation Tool",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=__doc__,
    )

    subparsers = parser.add_subparsers(dest="command", required=True)

    format_choices = ["pydantic", "django", "prisma", "sqlalchemy"]

    def add_output_arg(sub):
        # --output/-o is shared by every generating subcommand
        sub.add_argument(
            "--output",
            "-o",
            type=str,
            required=True,
            help="Output path (file or directory)",
        )

    def add_format_arg(sub):
        # --format/-f is shared by every generating subcommand
        sub.add_argument(
            "--format",
            "-f",
            type=str,
            default="pydantic",
            choices=format_choices,
            help="Output format (default: pydantic)",
        )

    # from-config command
    config_parser = subparsers.add_parser(
        "from-config",
        help="Generate models from configuration file",
    )
    config_parser.add_argument(
        "--config",
        "-c",
        type=str,
        required=True,
        help="Path to configuration file (e.g., soleprint.config.json)",
    )
    add_output_arg(config_parser)
    add_format_arg(config_parser)
    config_parser.set_defaults(func=cmd_from_config)

    # from-schema command (placeholder)
    schema_parser = subparsers.add_parser(
        "from-schema",
        help="Generate models from JSON Schema (not yet implemented)",
    )
    schema_parser.add_argument(
        "--schema",
        "-s",
        type=str,
        required=True,
        help="Path to JSON Schema file",
    )
    add_output_arg(schema_parser)
    add_format_arg(schema_parser)
    schema_parser.set_defaults(func=cmd_from_schema)

    # extract command (placeholder for databrowse)
    extract_parser = subparsers.add_parser(
        "extract",
        help="Extract models from existing codebase (not yet implemented)",
    )
    extract_parser.add_argument(
        "--source",
        "-s",
        type=str,
        required=True,
        help="Path to source codebase",
    )
    extract_parser.add_argument(
        "--framework",
        type=str,
        choices=["django", "sqlalchemy", "prisma", "auto"],
        default="auto",
        help="Source framework to extract from (default: auto-detect)",
    )
    add_output_arg(extract_parser)
    add_format_arg(extract_parser)
    extract_parser.set_defaults(func=cmd_extract)

    # list-formats command
    formats_parser = subparsers.add_parser(
        "list-formats",
        help="List available output formats",
    )
    formats_parser.set_defaults(func=cmd_list_formats)

    args = parser.parse_args()
    args.func(args)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -1,63 +1,82 @@
|
||||
"""
|
||||
Model Generator
|
||||
|
||||
Generates Pydantic models from framework configuration.
|
||||
Generic model generation from configuration files.
|
||||
Supports multiple output formats and is extensible for bidirectional conversion.
|
||||
|
||||
Output formats:
|
||||
- pydantic: Pydantic BaseModel classes
|
||||
- django: Django ORM models (planned)
|
||||
- prisma: Prisma schema (planned)
|
||||
- sqlalchemy: SQLAlchemy models (planned)
|
||||
|
||||
Future: Extract models FROM existing codebases (reverse direction)
|
||||
"""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
from typing import Dict, Type
|
||||
|
||||
from .config_loader import ConfigLoader
|
||||
|
||||
|
||||
class ModelGenerator:
|
||||
"""Generates Pydantic model files from configuration"""
|
||||
class BaseModelWriter(ABC):
|
||||
"""Abstract base for model output writers."""
|
||||
|
||||
def __init__(self, config: ConfigLoader, output_dir: Path):
|
||||
self.config = config
|
||||
self.output_dir = Path(output_dir)
|
||||
@abstractmethod
|
||||
def write(self, config: ConfigLoader, output_path: Path) -> None:
|
||||
"""Write models to the specified path."""
|
||||
pass
|
||||
|
||||
def generate(self):
|
||||
"""Generate all model files"""
|
||||
models_dir = self.output_dir / "models" / "pydantic"
|
||||
models_dir.mkdir(parents=True, exist_ok=True)
|
||||
@abstractmethod
|
||||
def file_extension(self) -> str:
|
||||
"""Return the file extension for this format."""
|
||||
pass
|
||||
|
||||
# Generate __init__.py with all models
|
||||
self._generate_models_file(models_dir / "__init__.py")
|
||||
|
||||
print(f"Generated models in {models_dir}")
|
||||
class PydanticWriter(BaseModelWriter):
|
||||
"""Generates Pydantic model files."""
|
||||
|
||||
def _generate_models_file(self, output_path: Path):
|
||||
"""Generate the main models file"""
|
||||
def file_extension(self) -> str:
|
||||
return ".py"
|
||||
|
||||
def write(self, config: ConfigLoader, output_path: Path) -> None:
|
||||
"""Write Pydantic models to output_path."""
|
||||
output_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
content = self._generate_content(config)
|
||||
output_path.write_text(content)
|
||||
|
||||
def _generate_content(self, config: ConfigLoader) -> str:
|
||||
"""Generate the Pydantic models file content."""
|
||||
|
||||
# Get component names from config
|
||||
config_comp = self.config.get_shared_component('config')
|
||||
data_comp = self.config.get_shared_component('data')
|
||||
config_comp = config.get_shared_component("config")
|
||||
data_comp = config.get_shared_component("data")
|
||||
|
||||
data_flow_sys = self.config.get_system('data_flow')
|
||||
doc_sys = self.config.get_system('documentation')
|
||||
exec_sys = self.config.get_system('execution')
|
||||
data_flow_sys = config.get_system("data_flow")
|
||||
doc_sys = config.get_system("documentation")
|
||||
exec_sys = config.get_system("execution")
|
||||
|
||||
connector_comp = self.config.get_component('data_flow', 'connector')
|
||||
pulse_comp = self.config.get_component('data_flow', 'composed')
|
||||
connector_comp = config.get_component("data_flow", "connector")
|
||||
pulse_comp = config.get_component("data_flow", "composed")
|
||||
|
||||
pattern_comp = self.config.get_component('documentation', 'pattern')
|
||||
maps_comp = self.config.get_component('documentation', 'library')
|
||||
doc_composed = self.config.get_component('documentation', 'composed')
|
||||
pattern_comp = config.get_component("documentation", "pattern")
|
||||
doc_composed = config.get_component("documentation", "composed")
|
||||
|
||||
tool_comp = self.config.get_component('execution', 'utility')
|
||||
monitor_comp = self.config.get_component('execution', 'watcher')
|
||||
cabinet_comp = self.config.get_component('execution', 'container')
|
||||
exec_composed = self.config.get_component('execution', 'composed')
|
||||
tool_comp = config.get_component("execution", "utility")
|
||||
monitor_comp = config.get_component("execution", "watcher")
|
||||
cabinet_comp = config.get_component("execution", "container")
|
||||
exec_composed = config.get_component("execution", "composed")
|
||||
|
||||
# Build the template
|
||||
content = f'''"""
|
||||
Pydantic models - Generated from {self.config.framework.name}.config.json
|
||||
return f'''"""
|
||||
Pydantic models - Generated from {config.framework.name}.config.json
|
||||
|
||||
DO NOT EDIT MANUALLY - Regenerate from config
|
||||
"""
|
||||
|
||||
from enum import Enum
|
||||
from typing import Optional, List, Literal
|
||||
from typing import List, Literal, Optional
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
@@ -83,8 +102,10 @@ class ToolType(str, Enum):
|
||||
|
||||
# === Shared Components ===
|
||||
|
||||
|
||||
class {config_comp.title}(BaseModel):
|
||||
"""{config_comp.description}. Shared across {data_flow_sys.name}, {exec_sys.name}."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
@@ -94,6 +115,7 @@ class {config_comp.title}(BaseModel):
|
||||
|
||||
class {data_comp.title}(BaseModel):
|
||||
"""{data_comp.description}. Shared across all systems."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
@@ -104,8 +126,10 @@ class {data_comp.title}(BaseModel):
|
||||
|
||||
# === System-Specific Components ===
|
||||
|
||||
|
||||
class {connector_comp.title}(BaseModel):
|
||||
"""{connector_comp.description} ({data_flow_sys.name})."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
@@ -117,6 +141,7 @@ class {connector_comp.title}(BaseModel):
|
||||
|
||||
class {pattern_comp.title}(BaseModel):
|
||||
"""{pattern_comp.description} ({doc_sys.name})."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
@@ -127,6 +152,7 @@ class {pattern_comp.title}(BaseModel):
|
||||
|
||||
class {tool_comp.title}(BaseModel):
|
||||
"""{tool_comp.description} ({exec_sys.name})."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
@@ -141,6 +167,7 @@ class {tool_comp.title}(BaseModel):
|
||||
|
||||
class {monitor_comp.title}(BaseModel):
|
||||
"""{monitor_comp.description} ({exec_sys.name})."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
@@ -150,6 +177,7 @@ class {monitor_comp.title}(BaseModel):
|
||||
|
||||
class {cabinet_comp.title}(BaseModel):
|
||||
"""{cabinet_comp.description} ({exec_sys.name})."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
@@ -160,8 +188,10 @@ class {cabinet_comp.title}(BaseModel):
|
||||
|
||||
# === Composed Types ===
|
||||
|
||||
|
||||
class {pulse_comp.title}(BaseModel):
|
||||
"""{pulse_comp.description} ({data_flow_sys.name}). Formula: {pulse_comp.formula}."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
@@ -174,6 +204,7 @@ class {pulse_comp.title}(BaseModel):
|
||||
|
||||
class {doc_composed.title}(BaseModel):
|
||||
"""{doc_composed.description} ({doc_sys.name}). Formula: {doc_composed.formula}."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
@@ -186,6 +217,7 @@ class {doc_composed.title}(BaseModel):
|
||||
|
||||
class {exec_composed.title}(BaseModel):
|
||||
"""{exec_composed.description} ({exec_sys.name}). Formula: {exec_composed.formula}."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
@@ -198,6 +230,7 @@ class {exec_composed.title}(BaseModel):
|
||||
|
||||
# === Collection wrappers for JSON files ===
|
||||
|
||||
|
||||
class {config_comp.title}Collection(BaseModel):
|
||||
items: List[{config_comp.title}] = Field(default_factory=list)
|
||||
|
||||
@@ -238,18 +271,100 @@ class {exec_composed.title}Collection(BaseModel):
|
||||
items: List[{exec_composed.title}] = Field(default_factory=list)
|
||||
'''
|
||||
|
||||
output_path.write_text(content)
|
||||
|
||||
class DjangoWriter(BaseModelWriter):
    """Placeholder writer for Django ORM model output (not implemented yet)."""

    def write(self, config: ConfigLoader, output_path: Path) -> None:
        # Reserved for future Django support; fails loudly if selected.
        raise NotImplementedError("Django model generation not yet implemented")

    def file_extension(self) -> str:
        # Django models are plain Python modules.
        return ".py"
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
from .config_loader import load_config
|
||||
class PrismaWriter(BaseModelWriter):
|
||||
"""Generates Prisma schema files (placeholder)."""
|
||||
|
||||
# Test with soleprint config
|
||||
config_path = Path(__file__).parent.parent / "soleprint.config.json"
|
||||
config = load_config(config_path)
|
||||
def file_extension(self) -> str:
|
||||
return ".prisma"
|
||||
|
||||
output_dir = Path(__file__).parent.parent / "soleprint-room"
|
||||
generator = ModelGenerator(config, output_dir)
|
||||
generator.generate()
|
||||
def write(self, config: ConfigLoader, output_path: Path) -> None:
|
||||
raise NotImplementedError("Prisma schema generation not yet implemented")
|
||||
|
||||
print("Models generated successfully!")
|
||||
|
||||
class SQLAlchemyWriter(BaseModelWriter):
    """Placeholder writer for SQLAlchemy model output (not implemented yet)."""

    def write(self, config: ConfigLoader, output_path: Path) -> None:
        # Reserved for future SQLAlchemy support; fails loudly if selected.
        raise NotImplementedError("SQLAlchemy model generation not yet implemented")

    def file_extension(self) -> str:
        # SQLAlchemy models are plain Python modules.
        return ".py"
|
||||
|
||||
|
||||
# Registry of available writers, keyed by the CLI's --format value.
# Only "pydantic" is implemented; the other writers' write() methods
# raise NotImplementedError when selected.
WRITERS: Dict[str, Type[BaseModelWriter]] = {
    "pydantic": PydanticWriter,
    "django": DjangoWriter,
    "prisma": PrismaWriter,
    "sqlalchemy": SQLAlchemyWriter,
}
|
||||
|
||||
|
||||
class ModelGenerator:
    """
    Generates typed models from configuration.

    Main entry point for model generation; all format-specific work is
    delegated to the writer registered in WRITERS for the chosen format.
    """

    def __init__(
        self,
        config: ConfigLoader,
        output_path: Path,
        output_format: str = "pydantic",
    ):
        """
        Initialize the generator.

        Args:
            config: Loaded configuration
            output_path: Exact path where to write (file or directory depending on format)
            output_format: Output format (pydantic, django, prisma, sqlalchemy)

        Raises:
            ValueError: if output_format has no registered writer.
        """
        if output_format not in WRITERS:
            raise ValueError(
                f"Unknown output format: {output_format}. "
                f"Available: {list(WRITERS.keys())}"
            )

        self.config = config
        self.output_path = Path(output_path)
        self.output_format = output_format
        self.writer = WRITERS[output_format]()

    def generate(self) -> Path:
        """
        Generate models to the specified output path.

        Returns:
            Path to the generated file/directory
        """
        # A suffix means the caller named a concrete file; otherwise treat
        # the path as a directory and use the writer's default filename.
        target = self.output_path
        if not target.suffix:
            target = target / f"__init__{self.writer.file_extension()}"

        self.writer.write(self.config, target)
        print(f"Generated {self.output_format} models: {target}")
        return target

    @classmethod
    def available_formats(cls) -> list:
        """Return list of available output formats."""
        return list(WRITERS.keys())
|
||||
Reference in New Issue
Block a user