#!/usr/bin/env python3
"""
Soleprint Build Tool

Generates soleprint instances from source + room config.

Usage:
    python build.py                    # Build gen/standalone/
    python build.py --cfg <room>       # Build gen/<room>/
    python build.py --all              # Build all rooms
    python build.py --output /path/    # Build to custom path
    python build.py --models           # Only regenerate models

Generated structure for standalone rooms:
    gen/standalone/
        artery/, atlas/, station/, main.py, ...

Generated structure for managed rooms:
    gen/<room>/
        <managed_name>/   # Copied repos + ctrl
        link/             # Bridge code
        soleprint/        # Soleprint instance
"""
import argparse
import json
import logging
import shutil
import sys
from pathlib import Path

# Repository root: the directory containing this build script.
SPR_ROOT = Path(__file__).resolve().parent

# Bare-message format: build output should read like progress lines, not logs.
logging.basicConfig(level=logging.INFO, format="%(message)s")
log = logging.getLogger(__name__)
def load_config(cfg_name: str | None) -> dict:
    """Load the room's config.json, or return {} if the file is absent."""
    # A missing cfg_name means the default "standalone" room.
    path = SPR_ROOT / "cfg" / (cfg_name or "standalone") / "config.json"
    if not path.exists():
        return {}
    return json.loads(path.read_text())
def ensure_dir(path: Path):
    """Make sure *path* exists as a directory, creating parents as needed."""
    Path(path).mkdir(parents=True, exist_ok=True)
def copy_path(source: Path, target: Path, quiet: bool = False):
    """Copy a file or directory to *target*, replacing whatever was there.

    A symlink at *target* is unlinked rather than followed; symlinks under
    *source* are resolved into real files in the copy. Logs the copied name
    unless *quiet* is set.
    """
    # Clear the destination first so a copy never merges with stale content.
    if target.is_symlink() or (target.exists() and not target.is_dir()):
        target.unlink()
    elif target.exists():
        shutil.rmtree(target)

    if source.is_dir():
        shutil.copytree(source, target, symlinks=False)
        if not quiet:
            log.info(f"  {target.name}/")
    else:
        shutil.copy2(source, target)
        if not quiet:
            log.info(f"  {target.name}")
def count_files(path: Path) -> int:
    """Return the number of regular files anywhere under *path*."""
    # The original bound the throwaway name "_" and then used it
    # (`_.is_file()`); give the live variable a real name.
    return sum(1 for entry in path.rglob("*") if entry.is_file())
def merge_into(source: Path, target: Path):
    """Recursively copy every file under *source* into *target*.

    Additive merge: files at the same relative path are overwritten, anything
    else already in *target* is left untouched. No-op when *source* is absent.
    """
    if not source.exists():
        return
    for src_file in (p for p in source.rglob("*") if p.is_file()):
        dest = target / src_file.relative_to(source)
        dest.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy2(src_file, dest)
def parse_gitignore(gitignore_path: Path) -> set[str]:
    """Return the set of ignore patterns read from a .gitignore file.

    Comments, blank lines, and negation ("!...") patterns are dropped, and a
    trailing "/" (directory marker) is stripped. A missing file yields an
    empty set.
    """
    if not gitignore_path.exists():
        return set()

    patterns: set[str] = set()
    for raw in gitignore_path.read_text().splitlines():
        entry = raw.strip()
        # Comments and blanks carry no pattern.
        if not entry or entry.startswith("#"):
            continue
        # Drop the directory marker before the negation check (original order).
        entry = entry.rstrip("/")
        # Negation patterns are unsupported; skip them entirely.
        if entry.startswith("!"):
            continue
        patterns.add(entry)
    return patterns
def copy_repo(source: Path, target: Path):
    """Copy a repo directory, excluding .git and gitignored files.

    Returns True on success, False (after a warning) when *source* does not
    exist. Only a simplified subset of gitignore semantics is honored:
    "*.ext" extension patterns, exact names, and root-relative paths.
    """
    if not source.exists():
        log.warning(f"Repo not found: {source}")
        return False

    # Always ignore these (VCS metadata, caches, local environments).
    always_ignore = {".git", "__pycache__", "node_modules", ".venv", "venv", ".env"}

    # Parse .gitignore from repo root
    gitignore_patterns = parse_gitignore(source / ".gitignore")

    def ignore_patterns(directory, files):
        # shutil.copytree ignore callback: return the subset of *files*
        # within *directory* that should be skipped.
        ignored = set()
        rel_dir = Path(directory).relative_to(source)

        for f in files:
            # Always ignore these
            if f in always_ignore:
                ignored.add(f)
                continue

            # Check gitignore patterns
            # Path of this entry relative to the repo root ("." means root).
            rel_path = rel_dir / f if str(rel_dir) != "." else Path(f)

            for pattern in gitignore_patterns:
                # Simple pattern matching (filename or extension)
                if pattern.startswith("*."):
                    # Extension pattern like *.pyc
                    if f.endswith(pattern[1:]):
                        ignored.add(f)
                        break
                elif pattern == f or pattern == str(rel_path):
                    # Exact match (bare name or root-relative path)
                    ignored.add(f)
                    break
                elif "/" not in pattern and f == pattern:
                    # Simple name match anywhere in the tree
                    ignored.add(f)
                    break

        return ignored

    # NOTE(review): assumes *target* does not already exist — copytree raises
    # otherwise; callers clean the output directory first. Confirm if reused.
    shutil.copytree(source, target, ignore=ignore_patterns, symlinks=False)
    return True
def build_managed(output_dir: Path, cfg_name: str, config: dict):
    """Build managed folder with repos + ctrl.

    Layout produced under *output_dir*:
      <managed_name>/   copied repos, Docker files, app config
      ctrl/             operator scripts (sibling of managed/link/soleprint)
    """
    managed = config.get("managed", {})
    # Folder name defaults to the room name when not set in config.
    managed_name = managed.get("name", cfg_name)
    repos = managed.get("repos", {})

    managed_dir = output_dir / managed_name
    ensure_dir(managed_dir)

    log.info(f"Building managed ({managed_name})...")

    # Copy repos (paths come straight from config; missing repos are warned
    # about by copy_repo and simply skipped here)
    for repo_name, repo_path in repos.items():
        source = Path(repo_path)
        target = managed_dir / repo_name
        if copy_repo(source, target):
            log.info(f"  {repo_name}/")

    room_cfg = SPR_ROOT / "cfg" / cfg_name

    # Docker files from room root -> managed root
    for item in room_cfg.iterdir():
        if item.is_file() and (
            item.name.startswith("Dockerfile") or item.name.startswith("docker-compose")
        ):
            copy_path(item, managed_dir / item.name)

    # Copy managed app config from cfg/<room>/<managed_name>/ (e.g., .env, dumps/)
    room_managed_cfg = room_cfg / managed_name
    if room_managed_cfg.exists():
        log.info(f"  Copying {managed_name} config...")
        for item in room_managed_cfg.iterdir():
            if item.is_file():
                copy_path(item, managed_dir / item.name, quiet=True)
            elif item.is_dir():
                target = managed_dir / item.name
                if target.exists():
                    # Merge into existing repo directory
                    merge_into(item, target)
                else:
                    copy_path(item, target)

    # Scripts from ctrl/ -> output_dir/ctrl/ (sibling of managed, link, soleprint)
    room_ctrl = room_cfg / "ctrl"
    if room_ctrl.exists():
        ctrl_dir = output_dir / "ctrl"
        ensure_dir(ctrl_dir)
        for item in room_ctrl.iterdir():
            if item.is_file():
                copy_path(item, ctrl_dir / item.name)
def build_link(output_dir: Path, cfg_name: str):
    """Copy cfg/<room>/link (bridge code) into the output, if present."""
    link_source = SPR_ROOT / "cfg" / cfg_name / "link"
    if not link_source.exists():
        return

    log.info("Building link...")
    copy_path(link_source, output_dir / "link")
def generate_models(output_dir: Path, room: str = "standalone"):
    """Generate pydantic models from the room config via the modelgen tool.

    Writes <output_dir>/models/pydantic/__init__.py. Returns True on success,
    False when the config is missing or generation raises.

    *room* now defaults to "standalone": build_models_only() calls this with
    only an output directory, which previously raised TypeError because the
    parameter had no default.
    """
    # Imported lazily: modelgen lives inside the soleprint package and is
    # only needed when models are actually regenerated.
    from soleprint.station.tools.modelgen import ModelGenerator, load_config

    config_path = SPR_ROOT / "cfg" / room / "config.json"

    if not config_path.exists():
        log.warning(f"Config not found: {config_path}")
        return False

    models_file = output_dir / "models" / "pydantic" / "__init__.py"
    models_file.parent.mkdir(parents=True, exist_ok=True)

    try:
        config = load_config(config_path)
        generator = ModelGenerator(
            config=config,
            output_path=models_file,
            output_format="pydantic",
        )
        generator.generate()
        return True
    except Exception as e:
        # Best-effort: a failed generation is reported, not fatal to the build.
        log.error(f"Model generation failed: {e}")
        return False
def copy_cfg(output_dir: Path, room: str):
    """Copy room configuration files into *output_dir*.

    config.json goes to cfg/, data/ is mirrored, .env.example is copied, any
    files in cfg/<room>/soleprint/ land at the output root, and room-specific
    artery/atlas/station overrides are merged over the copied systems.
    """
    room_cfg = SPR_ROOT / "cfg" / room
    if not room_cfg.exists():
        log.warning(f"Room config not found: {room}")
        return

    log.info(f"Copying {room} config...")

    # config.json -> cfg/
    cfg_dir = output_dir / "cfg"
    ensure_dir(cfg_dir)
    config_json = room_cfg / "config.json"
    if config_json.exists():
        copy_path(config_json, cfg_dir / "config.json")

    # data/ -> data/
    data_dir = room_cfg / "data"
    if data_dir.exists():
        copy_path(data_dir, output_dir / "data")

    # .env.example -> .env.example
    env_example = room_cfg / ".env.example"
    if env_example.exists():
        copy_path(env_example, output_dir / ".env.example")

    # Room-specific soleprint config (docker-compose.yml, etc) lives in
    # cfg/<room>/soleprint/; its top-level files go to the output root.
    room_soleprint = room_cfg / "soleprint"
    if room_soleprint.exists():
        for entry in room_soleprint.iterdir():
            if entry.is_file():
                copy_path(entry, output_dir / entry.name)

        # Merge room-specific system overrides onto the copied systems.
        for system in ("artery", "atlas", "station"):
            override = room_soleprint / system
            if override.exists():
                log.info(f"  Merging {room} {system}...")
                merge_into(override, output_dir / system)
def build_soleprint(output_dir: Path, room: str):
    """Build soleprint folder with core + room config merged.

    Copies the core entry points, the dataloader, the three system packages
    (artery/atlas/station), and common modules, then layers the room config
    on top and regenerates models.
    """
    soleprint = SPR_ROOT / "soleprint"

    # Soleprint core files (each is optional except dataloader, copied below)
    log.info("Copying soleprint core...")
    for name in [
        "main.py",
        "run.py",
        "index.html",
        "requirements.txt",
        "Dockerfile",
    ]:
        if (soleprint / name).exists():
            copy_path(soleprint / name, output_dir / name)
    # NOTE(review): unlike the files above, dataloader/ is copied without an
    # existence check — confirm it is guaranteed to be present.
    copy_path(soleprint / "dataloader", output_dir / "dataloader")

    # System directories
    log.info("Copying systems...")
    for system in ["artery", "atlas", "station"]:
        source = soleprint / system
        if source.exists():
            copy_path(source, output_dir / system)

    # Common modules (auth, etc)
    if (soleprint / "common").exists():
        copy_path(soleprint / "common", output_dir / "common")

    # Room config (includes merging room-specific artery/atlas/station)
    copy_cfg(output_dir, room)

    # Generate models (failure is logged but does not abort the build)
    log.info("Generating models...")
    if not generate_models(output_dir, room):
        log.warning("Model generation failed")
def build(output_dir: Path, cfg_name: str | None = None, clean: bool = True):
    """Build a complete room instance into *output_dir*.

    Managed rooms get the three-folder layout (<managed>/, link/, soleprint/);
    standalone rooms get soleprint directly in *output_dir*. With *clean*
    (the default) any existing output is removed first.
    """
    room = cfg_name or "standalone"
    config = load_config(cfg_name)

    log.info(f"\n=== Building {room} ===")

    # Start from a clean slate unless the caller opted out.
    if clean and output_dir.exists():
        log.info(f"Cleaning {output_dir}...")
        shutil.rmtree(output_dir)
    ensure_dir(output_dir)

    if config.get("managed"):
        # 3-folder structure: <managed>/, link/, soleprint/
        build_managed(output_dir, room, config)
        build_link(output_dir, room)

        # The soleprint folder name can be overridden via framework.name.
        framework_name = config.get("framework", {}).get("name", "soleprint")
        soleprint_dir = output_dir / framework_name
        ensure_dir(soleprint_dir)
        build_soleprint(soleprint_dir, room)
    else:
        # Standalone: everything in output_dir
        build_soleprint(output_dir, room)

    log.info(f"\n✓ Built: {output_dir}")
def build_models_only():
    """Only regenerate models (for the standalone room); exit 1 on failure.

    generate_models() takes (output_dir, room); the room argument was
    previously omitted here, which raised TypeError before any generation
    ran. Pass the standalone room explicitly.
    """
    log.info("Generating models...")
    if generate_models(SPR_ROOT / "gen", "standalone"):
        log.info("✓ Models generated")
    else:
        log.error("Model generation failed")
        sys.exit(1)
def main():
    """CLI entry point: parse arguments and dispatch the requested build."""
    parser = argparse.ArgumentParser(description="Soleprint Build Tool")
    parser.add_argument("--output", "-o", type=Path, help="Output directory")
    parser.add_argument("--cfg", "-c", type=str, help="Room config name")
    parser.add_argument("--all", action="store_true", help="Build all rooms")
    parser.add_argument("--models", action="store_true", help="Only regenerate models")
    args = parser.parse_args()

    if args.models:
        build_models_only()
        return

    if args.all:
        # Standalone first, then every configured room except standalone itself.
        build(SPR_ROOT / "gen" / "standalone", None)
        for room in (SPR_ROOT / "cfg").iterdir():
            if room.is_dir() and room.name not in ("__pycache__", "standalone"):
                build(SPR_ROOT / "gen" / room.name, room.name)
        return

    # Single build: explicit --output wins, then --cfg, then standalone.
    if args.output:
        target = args.output.resolve()
    elif args.cfg:
        target = SPR_ROOT / "gen" / args.cfg
    else:
        target = SPR_ROOT / "gen" / "standalone"
    build(target, args.cfg)
# Script entry point.
if __name__ == "__main__":
    main()