Files
soleprint/build.py
buenosairesam c4e1f240b8
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
Remove dev subcommand, add standalone docker-compose with external URLs
2026-01-02 23:35:19 -03:00

423 lines
13 KiB
Python

#!/usr/bin/env python3
"""
Soleprint Build Tool
Builds the soleprint instance using modelgen for model generation.
All modes copy files (no symlinks) for Docker compatibility.
After editing source files, re-run `python build.py` to update gen/.
Usage:
python build.py # Build gen/standalone/
python build.py --cfg <room> # Build gen/<room>/
python build.py --all # Build all (standalone + rooms)
python build.py --deploy --output /path/ # Build for production
python build.py --models # Only regenerate models
Examples:
# Build standalone
python build.py
cd gen/standalone && .venv/bin/python run.py
# With room config
python build.py --cfg myroom
cd gen/myroom && .venv/bin/python run.py
# Build all targets
python build.py --all
# Build for deployment
python build.py --deploy --output ../deploy/soleprint/
"""
import argparse
import logging
import os
import shutil
import subprocess
import sys
from pathlib import Path
# SPR root is the directory containing this build script; all source and
# cfg/ lookups below are resolved relative to it.
SPR_ROOT = Path(__file__).resolve().parent
# Configure logging: message-only format so build output reads as plain text
logging.basicConfig(
level=logging.INFO,
format="%(message)s",
)
# Module-level logger used by every build step below
log = logging.getLogger(__name__)
def ensure_dir(path: Path):
    """Create *path* (and any missing parents); no-op when it already exists."""
    path.mkdir(parents=True, exist_ok=True)
def copy_path(source: Path, target: Path):
    """Copy a file or directory tree from *source* to *target*.

    Whatever currently occupies *target* (symlink, directory, or file) is
    removed first, and symlinks under *source* are dereferenced so the
    result contains only real files (Docker-friendly).
    """
    # Clear the destination, handling each possible kind of occupant.
    if target.is_symlink():
        target.unlink()
    elif target.is_dir():
        shutil.rmtree(target)
    elif target.exists():
        target.unlink()

    if source.is_dir():
        shutil.copytree(source, target, symlinks=False)
        log.info(f" Copied: {target.name}/ ({count_files(target)} files)")
    else:
        shutil.copy2(source, target)
        log.info(f" Copied: {target.name}")
def count_files(path: Path) -> int:
    """Return the number of regular files anywhere beneath *path*."""
    total = 0
    for entry in path.rglob("*"):
        if entry.is_file():
            total += 1
    return total
def generate_models(output_dir: Path, cfg_name: str | None = None):
    """Generate pydantic models by running the modelgen tool.

    Args:
        output_dir: Directory where models/pydantic/__init__.py will be created.
        cfg_name: Room config name (e.g., 'amar'), or None for standalone.

    Returns:
        True when modelgen exits successfully; False when the room's
        config.json is missing or modelgen reports a failure.
    """
    room = cfg_name if cfg_name else "standalone"
    config_path = SPR_ROOT / "cfg" / room / "config.json"
    if not config_path.exists():
        log.warning(f"Config not found at {config_path}")
        return False

    # Soleprint convention: generated models live in models/pydantic/__init__.py.
    models_file = output_dir / "models" / "pydantic" / "__init__.py"
    models_file.parent.mkdir(parents=True, exist_ok=True)

    # Invoke modelgen with the current interpreter, from the SPR root so the
    # station package resolves.
    cmd = [
        sys.executable, "-m", "station.tools.modelgen",
        "from-config",
        "--config", str(config_path),
        "--output", str(models_file),
        "--format", "pydantic",
    ]
    return subprocess.run(cmd, cwd=SPR_ROOT).returncode == 0
def copy_cfg(output_dir: Path, cfg_name: str | None):
    """Copy a room's configuration files into the build output.

    Besides cfg/config.json this pulls in the room's optional data/,
    .env.example, databrowse depot, tester tests, monitors, models, and
    root-level soleprint files (docker-compose.yml, etc.) when present.

    Args:
        output_dir: Target build directory.
        cfg_name: Name of room config (e.g., 'amar'), or None for standalone.
    """
    room = cfg_name if cfg_name else "standalone"
    room_cfg = SPR_ROOT / "cfg" / room
    if not room_cfg.exists():
        log.warning(f"Room config '{room}' not found at {room_cfg}")
        return

    log.info(f"\nCopying {room} room config...")

    # config.json -> <output>/cfg/config.json
    cfg_dir = output_dir / "cfg"
    ensure_dir(cfg_dir)
    src_config = room_cfg / "config.json"
    if src_config.exists():
        copy_path(src_config, cfg_dir / "config.json")

    # data/ -> <output>/data/
    src_data = room_cfg / "data"
    if src_data.exists():
        log.info(f" Copying {room} data files...")
        copy_path(src_data, output_dir / "data")

    # .env.example goes to the output root.
    src_env = room_cfg / ".env.example"
    if src_env.exists():
        copy_path(src_env, output_dir / ".env.example")

    # Optional room-specific databrowse depot override.
    src_depot = room_cfg / "databrowse" / "depot"
    if src_depot.exists():
        log.info(f" Copying {room} databrowse depot...")
        copy_path(src_depot, output_dir / "station" / "monitors" / "databrowse" / "depot")

    # Optional room-specific tester tests override.
    src_tests = room_cfg / "tester" / "tests"
    if src_tests.exists():
        log.info(f" Copying {room} tester tests...")
        copy_path(src_tests, output_dir / "station" / "tools" / "tester" / "tests")

    # Optional room-specific monitor directories.
    src_monitors = room_cfg / "monitors"
    if src_monitors.exists():
        log.info(f" Copying {room} monitors...")
        for entry in src_monitors.iterdir():
            if entry.is_dir():
                copy_path(entry, output_dir / "station" / "monitors" / entry.name)

    # Optional room-specific model overrides.
    src_models = room_cfg / "models"
    if src_models.exists():
        log.info(f" Copying {room} models...")
        copy_path(src_models, output_dir / "models" / room)

    # Room-level soleprint files (docker-compose.yml, etc.) land at the root.
    src_soleprint = room_cfg / "soleprint"
    if src_soleprint.exists():
        log.info(f" Copying {room} soleprint config...")
        for entry in src_soleprint.iterdir():
            copy_path(entry, output_dir / entry.name)
def build_dev(output_dir: Path, cfg_name: str | None = None):
    """Build a development tree using plain copies (Docker-compatible).

    Produces gen/standalone/ or gen/<room>/ containing main.py, run.py,
    index.html, requirements.txt, Dockerfile (when present), dataloader/,
    the artery/atlas/station systems, the room's config and data, an
    .env.example, and generated models/. After editing source files,
    re-run `python build.py` to refresh the tree.

    Args:
        output_dir: Target build directory.
        cfg_name: Room config name, or None for standalone.
    """
    log.info("\n=== Building DEV environment ===")
    log.info(f"SPR root: {SPR_ROOT}")
    log.info(f"Output: {output_dir}")
    if cfg_name:
        log.info(f"Room cfg: {cfg_name}")

    ensure_dir(output_dir)

    # Core soleprint entry points and assets.
    log.info("\nCopying soleprint files...")
    soleprint = SPR_ROOT / "soleprint"
    for fname in ("main.py", "run.py", "index.html", "requirements.txt"):
        copy_path(soleprint / fname, output_dir / fname)
    copy_path(soleprint / "dataloader", output_dir / "dataloader")
    dockerfile = soleprint / "Dockerfile"
    if dockerfile.exists():
        copy_path(dockerfile, output_dir / "Dockerfile")

    # System packages.
    log.info("\nCopying systems...")
    for system in ("artery", "atlas", "station"):
        src = SPR_ROOT / system
        if src.exists():
            copy_path(src, output_dir / system)

    # Room configuration (also brings in the room's data/).
    log.info("\nCopying config...")
    copy_cfg(output_dir, cfg_name)

    # Generated pydantic models.
    log.info("\nGenerating models...")
    if not generate_models(output_dir, cfg_name):
        log.warning("Model generation failed, you may need to run it manually")

    log.info("\n✓ Dev build complete!")
    log.info("\nTo run:")
    log.info(f" cd {output_dir}")
    log.info(" python3 -m venv .venv")
    log.info(" .venv/bin/pip install -r requirements.txt")
    log.info(" .venv/bin/python run.py # Single-port bare-metal dev")
    if cfg_name:
        log.info(
            f"\nAfter editing source, rebuild with: python build.py --cfg {cfg_name}"
        )
    else:
        log.info("\nAfter editing source, rebuild with: python build.py")
def build_deploy(output_dir: Path, cfg_name: str | None = None):
    """Build a self-contained deployment package (all files copied, no symlinks).

    Prompts before overwriting an existing *output_dir*, copies the soleprint
    core, system packages, and room config, generates models (falling back to
    a previous gen/ build on failure), and writes an executable start.sh
    launcher.

    Args:
        output_dir: Target directory for the deploy package.
        cfg_name: Room config name, or None for standalone.
    """
    log.info("\n=== Building DEPLOY package ===")
    log.info(f"SPR root: {SPR_ROOT}")
    log.info(f"Output: {output_dir}")
    if cfg_name:
        log.info(f"Room cfg: {cfg_name}")

    # Refuse to clobber an existing package without confirmation.
    if output_dir.exists():
        response = input("\nOutput directory exists. Overwrite? [y/N] ")
        if response.lower() != "y":
            log.info("Aborted.")
            return
        shutil.rmtree(output_dir)
    ensure_dir(output_dir)

    # Soleprint core files (copy).
    log.info("\nCopying soleprint files...")
    soleprint = SPR_ROOT / "soleprint"
    copy_path(soleprint / "main.py", output_dir / "main.py")
    copy_path(soleprint / "run.py", output_dir / "run.py")
    copy_path(soleprint / "index.html", output_dir / "index.html")
    copy_path(soleprint / "requirements.txt", output_dir / "requirements.txt")
    copy_path(soleprint / "dataloader", output_dir / "dataloader")
    if (soleprint / "Dockerfile").exists():
        copy_path(soleprint / "Dockerfile", output_dir / "Dockerfile")

    # System directories (copy).
    log.info("\nCopying systems...")
    for system in ["artery", "atlas", "station"]:
        source = SPR_ROOT / system
        if source.exists():
            copy_path(source, output_dir / system)

    # Config (includes data/ from room).
    log.info("\nCopying config...")
    copy_cfg(output_dir, cfg_name)

    # Models (generate fresh) - pass output_dir, modelgen adds models/pydantic.
    log.info("\nGenerating models...")
    if not generate_models(output_dir, cfg_name):
        # Fallback: reuse models from a previous dev build if one exists.
        room = cfg_name or "standalone"
        existing = SPR_ROOT / "gen" / room / "models"
        if existing.exists():
            log.info(" Using existing models from gen/")
            copy_path(existing, output_dir / "models")

    # Copy schema.json for reference. Guarded with exists(): previously an
    # absent schema.json raised FileNotFoundError and aborted the build after
    # all the copying work was already done.
    log.info("\nCopying schema...")
    schema = SPR_ROOT / "schema.json"
    if schema.exists():
        copy_path(schema, output_dir / "schema.json")
    else:
        log.warning(f"schema.json not found at {schema}, skipping")

    # Create run script.
    run_script = output_dir / "start.sh"
    run_script.write_text("""#!/bin/bash
# Soleprint runner
cd "$(dirname "$0")"
if [ ! -d ".venv" ]; then
echo "Creating virtual environment..."
python3 -m venv .venv
.venv/bin/pip install -r requirements.txt
fi
echo "Starting soleprint on http://localhost:12000"
.venv/bin/python main.py
""")
    run_script.chmod(0o755)
    log.info(" Created: start.sh")

    total_files = count_files(output_dir)
    log.info(f"\n✓ Deploy build complete! ({total_files} files)")
    log.info("\nTo run:")
    log.info(f" cd {output_dir}")
    log.info(" ./start.sh")
    log.info("\nOr deploy to server:")
    log.info(f" rsync -av {output_dir}/ server:/app/soleprint/")
    log.info(" ssh server 'cd /app/soleprint && ./start.sh'")
def build_models():
    """Regenerate models only (into gen/), exiting non-zero on failure."""
    log.info("\n=== Generating models only ===")
    gen_dir = SPR_ROOT / "gen"
    ensure_dir(gen_dir)
    if not generate_models(gen_dir):
        log.error("Model generation failed")
        sys.exit(1)
    log.info("\n✓ Models generated!")
def main():
    """Parse command-line arguments and dispatch to the requested build mode."""
    parser = argparse.ArgumentParser(
        description="Soleprint Build Tool",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=__doc__,
    )
    parser.add_argument(
        "--output", "-o",
        type=Path,
        default=None,
        help="Output directory (default: gen/standalone/ or gen/<cfg>/)",
    )
    parser.add_argument(
        "--cfg", "-c",
        type=str,
        default=None,
        help="Room config to include (e.g., 'myroom')",
    )
    parser.add_argument(
        "--all",
        action="store_true",
        help="Build all configs (standalone + all rooms in cfg/)",
    )
    parser.add_argument(
        "--deploy",
        action="store_true",
        help="Build for deployment (creates start.sh, schema.json)",
    )
    parser.add_argument(
        "--models",
        action="store_true",
        help="Only regenerate models",
    )
    args = parser.parse_args()

    if args.models:
        build_models()
        return

    if args.deploy:
        if not args.output:
            log.error("--deploy requires --output")
            sys.exit(1)
        build_deploy(args.output.resolve(), args.cfg)
        return

    if args.all:
        # Standalone first, then every room directory found under cfg/.
        build_dev(SPR_ROOT / "gen" / "standalone", None)
        for room in (SPR_ROOT / "cfg").iterdir():
            if room.is_dir() and room.name not in ("__pycache__", "standalone"):
                build_dev(SPR_ROOT / "gen" / room.name, room.name)
        return

    # Default: a single dev build; pick the output directory.
    if args.output:
        target = args.output.resolve()
    else:
        target = SPR_ROOT / "gen" / (args.cfg or "standalone")
    build_dev(target, args.cfg)


if __name__ == "__main__":
    main()