Files
soleprint/init/core.py
2026-04-12 12:34:25 -03:00

824 lines
23 KiB
Python

"""
init.core — Room generation logic
Templates + file generation for soleprint rooms.
No interactive I/O — called by CLI and web wizard with data already collected.
"""
import json
import logging
import shutil
from pathlib import Path
log = logging.getLogger("init")
# Repo layout: this file lives at soleprint/init/core.py, so two .parent
# hops land on the soleprint/ root.
SPR_ROOT = Path(__file__).resolve().parent.parent
# Each room is a subdirectory of cfg/ holding a config.json plus scaffolding.
CFG_DIR = SPR_ROOT / "cfg"
# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------
def write_file(path: Path, content: str) -> None:
    """Write *content* to *path*, creating parent directories as needed.

    UTF-8 is forced explicitly: ``Path.write_text`` otherwise uses the
    locale encoding (e.g. cp1252 on Windows), which would corrupt the
    non-ASCII characters some templates contain (em dashes, accents).
    """
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(content, encoding="utf-8")
    try:
        shown = path.relative_to(SPR_ROOT)
    except ValueError:
        # Target outside the soleprint tree — log the absolute path
        # instead of raising on the log line after a successful write.
        shown = path
    log.info(" + %s", shown)
def next_free_port() -> int:
    """Return the next hub port not claimed by any existing room.

    Ports start at 12000 and advance in steps of 10; every readable
    cfg/<room>/config.json contributes its framework.hub_port to the
    taken set.  Unreadable configs are skipped silently.
    """
    if not CFG_DIR.exists():
        return 12000
    taken: set[int] = set()
    for room_dir in CFG_DIR.iterdir():
        cfg_file = room_dir / "config.json"
        if not cfg_file.exists():
            continue
        try:
            data = json.loads(cfg_file.read_text())
            hub_port = data.get("framework", {}).get("hub_port")
            if hub_port:
                taken.add(int(hub_port))
        except (json.JSONDecodeError, ValueError):
            continue
    candidate = 12000
    while candidate in taken:
        candidate += 10
    return candidate
def list_rooms() -> list[str]:
    """Return the sorted names of rooms in cfg/ (dirs with a config.json)."""
    if not CFG_DIR.exists():
        return []
    names = [
        entry.name
        for entry in CFG_DIR.iterdir()
        if entry.is_dir() and (entry / "config.json").exists()
    ]
    names.sort()
    return names
# ---------------------------------------------------------------------------
# Constants
# ---------------------------------------------------------------------------
# The three soleprint systems.  "key" links each entry to its section in
# COMPONENTS below; "name"/"slug" are the runtime service identifiers.
SYSTEMS = [
    {"key": "data_flow", "name": "artery", "slug": "artery", "title": "Artery", "tagline": "Todo lo vital", "icon": ""},
    {"key": "documentation", "name": "atlas", "slug": "atlas", "title": "Atlas", "tagline": "Documentacion accionable", "icon": ""},
    {"key": "execution", "name": "station", "slug": "station", "title": "Station", "tagline": "Monitores, Entornos y Herramientas", "icon": ""},
]
# Component catalog.  "shared" entries apply to every system; the other
# top-level keys match SYSTEMS[*]["key"].  A "formula" describes how a
# composed component is assembled from simpler ones.
COMPONENTS = {
    "shared": {
        "config": {"name": "room", "title": "Room", "description": "Runtime environment configuration", "plural": "rooms"},
        "data": {"name": "depot", "title": "Depot", "description": "Data storage / provisions", "plural": "depots"},
    },
    "data_flow": {
        "connector": {"name": "vein", "title": "Vein", "description": "Stateless API connector", "plural": "veins"},
        "mock": {"name": "shunt", "title": "Shunt", "description": "Fake connector for testing", "plural": "shunts"},
        "composed": {"name": "pulse", "title": "Pulse", "description": "Composed data flow", "plural": "pulses", "formula": "Vein + Room + Depot"},
        "app": {"name": "plexus", "title": "Plexus", "description": "Full app with backend, frontend and DB", "plural": "plexus"},
    },
    "documentation": {
        "pattern": {"name": "template", "title": "Template", "description": "Documentation pattern", "plural": "templates"},
        "library": {"name": "book", "title": "Book", "description": "Documentation library"},
        "composed": {"name": "book", "title": "Book", "description": "Composed documentation", "plural": "books", "formula": "Template + Depot"},
    },
    "execution": {
        "utility": {"name": "tool", "title": "Tool", "description": "Execution utility", "plural": "tools"},
        "watcher": {"name": "monitor", "title": "Monitor", "description": "Service monitor", "plural": "monitors"},
        "container": {"name": "cabinet", "title": "Cabinet", "description": "Tool container", "plural": "cabinets"},
        "workspace": {"name": "desk", "title": "Desk", "description": "Execution workspace"},
        "composed": {"name": "desk", "title": "Desk", "description": "Composed execution bundle", "plural": "desks", "formula": "Cabinet + Room + Depots"},
    },
}
# Stems of the JSON files seeded into each room's data/ directory by
# generate_layer0.
# NOTE(review): "plexuses" differs from the COMPONENTS plural ("plexus"),
# and "tables" has no matching component entry — confirm both are intended.
DATA_FILES = [
    "books", "depots", "desks", "monitors", "plexuses",
    "pulses", "rooms", "shunts", "tables", "templates", "tools", "veins",
]
# Framework keys with ready-made Dockerfile templates (TPL_DOCKERFILE_*).
BACKEND_FRAMEWORKS = ["django", "fastapi", "express", "other"]
FRONTEND_FRAMEWORKS = ["nextjs", "react", "vue", "other"]
# ---------------------------------------------------------------------------
# Config builder
# ---------------------------------------------------------------------------
def make_config(room: str, port: int, managed: dict | None = None) -> dict:
    """Build the config.json payload for a room.

    *port* becomes framework.hub_port; a truthy *managed* dict is stored
    under "managed".  NOTE(review): *room* is currently unused here —
    callers pass it anyway, presumably for future use; confirm.
    """
    framework = {
        "name": "soleprint",
        "slug": "soleprint",
        "version": "0.1.0",
        "description": "Development workflow and documentation system",
        "tagline": "Mapping development footprints",
        "icon": "",
        "hub_port": port,
    }
    config: dict = {
        "framework": framework,
        "systems": SYSTEMS,
        "components": COMPONENTS,
    }
    if managed:
        config["managed"] = managed
    return config
# ---------------------------------------------------------------------------
# Templates
# ---------------------------------------------------------------------------
def tpl_soleprint_compose() -> str:
    """docker-compose.yml for the room's soleprint service.

    DEPLOYMENT_NAME / SOLEPRINT_PORT / NETWORK_NAME are interpolated by
    docker compose from the sibling .env file (tpl_soleprint_env); the
    network is external, i.e. expected to exist before `compose up`.
    """
    return """\
# Soleprint Services - Docker Compose
#
# Usage:
#   cd gen/<room>/soleprint && docker compose up -d
name: ${DEPLOYMENT_NAME}
services:
  soleprint:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: ${DEPLOYMENT_NAME}
    user: "${UID:-1000}:${GID:-1000}"
    volumes:
      - .:/app
    ports:
      - "${SOLEPRINT_PORT}:8000"
    env_file:
      - .env
    environment:
      - ARTERY_EXTERNAL_URL=/artery
      - ATLAS_EXTERNAL_URL=/atlas
      - STATION_EXTERNAL_URL=/station
    networks:
      - default
    command: uvicorn run:app --host 0.0.0.0 --port 8000 --reload
networks:
  default:
    external: true
    name: ${NETWORK_NAME}
"""
def tpl_soleprint_env(room: str, port: int) -> str:
    """.env consumed by tpl_soleprint_compose: deployment/network names,
    host port, and dev-only auth defaults for *room*."""
    return f"""\
# =============================================================================
# {room} — Soleprint Configuration
# =============================================================================
DEPLOYMENT_NAME={room}_spr
NETWORK_NAME={room}_network
SOLEPRINT_PORT={port}
# Google OAuth (configure if using Google vein)
# GOOGLE_CLIENT_ID=
# GOOGLE_CLIENT_SECRET=
# GOOGLE_REDIRECT_URI=http://{room}.spr.local.ar/artery/google/oauth/callback
AUTH_BYPASS=true
AUTH_SESSION_SECRET={room}-dev-secret-change-in-production
"""
def tpl_managed_compose(room: str, app_name: str, has_frontend: bool) -> str:
    """docker-compose.yml for the managed app: backend plus an optional
    frontend service, joined to the room's pre-existing external network.

    Note the room name is baked into container/network names here (unlike
    the soleprint compose file, which reads them from .env).
    """
    services = f"""\
# {room} — Managed App Services
name: {room}_app
services:
  backend:
    build:
      context: ./{app_name}/backend
      dockerfile: ../../Dockerfile.backend
    container_name: {room}_backend
    env_file:
      - .env
    ports:
      - "${{BACKEND_PORT}}:8000"
    networks:
      - default
"""
    if has_frontend:
        services += f"""
  frontend:
    build:
      context: ./{app_name}/frontend
      dockerfile: ../../Dockerfile.frontend
    container_name: {room}_frontend
    env_file:
      - .env
    ports:
      - "${{FRONTEND_PORT}}:3000"
    networks:
      - default
"""
    services += f"""
networks:
  default:
    external: true
    name: {room}_network
"""
    return services
# Backend Dockerfile templates, keyed by entries of BACKEND_FRAMEWORKS.
# All expose port 8000 (matched by tpl_managed_compose); "other" is a stub
# the user must customize by hand.
TPL_DOCKERFILE_BACKEND = {
    "django": """\
FROM python:3.12-slim
WORKDIR /app
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
EXPOSE 8000
CMD ["python", "manage.py", "runserver", "0.0.0.0:8000"]
""",
    "fastapi": """\
FROM python:3.12-slim
WORKDIR /app
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
EXPOSE 8000
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
""",
    "express": """\
FROM node:20-slim
WORKDIR /app
COPY package*.json ./
RUN npm ci
COPY . .
EXPOSE 8000
CMD ["node", "index.js"]
""",
    "other": """\
# Customize this Dockerfile for your backend framework
FROM python:3.12-slim
WORKDIR /app
COPY . .
EXPOSE 8000
CMD ["echo", "Configure your start command"]
""",
}
# Frontend Dockerfile templates, keyed by entries of FRONTEND_FRAMEWORKS.
# All expose port 3000 (matched by tpl_managed_compose); "other" is a stub
# the user must customize by hand.
TPL_DOCKERFILE_FRONTEND = {
    "nextjs": """\
FROM node:20-slim
WORKDIR /app
COPY package*.json ./
RUN npm ci
COPY . .
EXPOSE 3000
CMD ["npm", "run", "dev"]
""",
    "react": """\
FROM node:20-slim
WORKDIR /app
COPY package*.json ./
RUN npm ci
COPY . .
EXPOSE 3000
CMD ["npm", "start"]
""",
    "vue": """\
FROM node:20-slim
WORKDIR /app
COPY package*.json ./
RUN npm ci
COPY . .
EXPOSE 3000
CMD ["npm", "run", "dev"]
""",
    "other": """\
# Customize this Dockerfile for your frontend framework
FROM node:20-slim
WORKDIR /app
COPY . .
EXPOSE 3000
CMD ["echo", "Configure your start command"]
""",
}
def tpl_managed_env(room: str, backend_port: int, frontend_port: int | None) -> str:
    """.env for the managed app stack: host ports plus local Postgres defaults.

    A None *frontend_port* omits the FRONTEND_PORT line (backend-only rooms).
    NOTE(review): DB_PASSWORD is a fixed local-dev default — confirm these
    rooms are never deployed outside local environments.
    """
    lines = f"""\
# =============================================================================
# {room} — Managed App Configuration
# =============================================================================
BACKEND_PORT={backend_port}
"""
    if frontend_port:
        lines += f"FRONTEND_PORT={frontend_port}\n"
    lines += f"""
# Database
DB_HOST={room}_db
DB_PORT=5432
DB_NAME={room}
DB_USER=postgres
DB_PASSWORD=localdev123
"""
    return lines
def tpl_link_main() -> str:
    """main.py stub for the Link DB-bridge FastAPI service: a /health
    endpoint plus commented hooks for mounting framework adapters."""
    return """\
\"\"\"
Link — Database bridge for soleprint tools.
Provides read access to the managed app's database so soleprint tools
(databrowse, datagen, tester) can inspect and manipulate data without
touching the app's source code.
\"\"\"
from fastapi import FastAPI
app = FastAPI(title="Link - DB Bridge")
@app.get("/health")
def health():
    return {"status": "ok"}
# Import adapters here:
# from adapters.django import router as django_router
# app.include_router(django_router, prefix="/django")
"""
def tpl_link_adapter_django() -> str:
    """Commented-out Django adapter stub written by generate_layer3 when
    the managed backend is Django."""
    return """\
\"\"\"
Django adapter — reads Django ORM metadata to provide schema + data access.
\"\"\"
# from fastapi import APIRouter
# router = APIRouter()
# TODO: Implement adapter for your Django models
# See cfg/amar/link/adapters/django.py for a working example
"""
def tpl_link_adapter_fastapi() -> str:
    """Commented-out SQLAlchemy adapter stub written by generate_layer3
    when the managed backend is FastAPI or "other"."""
    return """\
\"\"\"
FastAPI/SQLAlchemy adapter — reads SQLAlchemy metadata for schema + data access.
\"\"\"
# from fastapi import APIRouter
# router = APIRouter()
# TODO: Implement adapter for your SQLAlchemy models
"""
def tpl_link_requirements() -> str:
    """requirements.txt for the Link service (FastAPI + Postgres/SQLAlchemy)."""
    return """\
fastapi>=0.100.0
uvicorn>=0.23.0
psycopg2-binary>=2.9.0
sqlalchemy>=2.0.0
"""
def tpl_link_dockerfile() -> str:
    """Dockerfile for the Link service (uvicorn on port 8000, reload mode)."""
    return """\
FROM python:3.12-slim
WORKDIR /app
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
EXPOSE 8000
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
"""
def tpl_link_compose(room: str) -> str:
    """docker-compose.yml for the Link service: reuses the soleprint .env
    and joins the room's external network (room name baked in)."""
    return f"""\
# Link — DB Bridge
name: {room}_link
services:
  link:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: {room}_link
    env_file:
      - ../soleprint/.env
    ports:
      - "${{LINK_PORT:-8001}}:8000"
    networks:
      - default
networks:
  default:
    external: true
    name: {room}_network
"""
def tpl_ctrl_start(room: str, app_name: str | None) -> str:
    """start.sh — launches the soleprint stack and, when *app_name* is
    given, the managed app stack; `-d` is forwarded to docker compose.

    Both `compose up` calls are backgrounded and joined with `wait` so the
    two stacks start in parallel.
    """
    lines = f"""\
#!/bin/bash
# Start all {room} services
# Usage: ./ctrl/start.sh [-d]
SCRIPT_DIR="$(cd "$(dirname "${{BASH_SOURCE[0]}}")" && pwd)"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
DETACH=""
if [[ "$1" == "-d" ]]; then
    DETACH="-d"
fi
echo "=== Starting {room} services ==="
# Start soleprint
echo "Starting soleprint..."
cd "$ROOT_DIR/soleprint"
docker compose up $DETACH &
"""
    if app_name:
        lines += f"""
# Start managed app
if [[ -f "$ROOT_DIR/{app_name}/docker-compose.yml" ]]; then
    echo "Starting {app_name}..."
    cd "$ROOT_DIR/{app_name}"
    docker compose up $DETACH &
fi
"""
    lines += """
wait
echo "=== All services started ==="
"""
    return lines
def tpl_ctrl_stop(room: str, app_name: str | None) -> str:
    """stop.sh — stops the soleprint stack and, when *app_name* is given
    and its compose file exists, the managed app stack."""
    lines = f"""\
#!/bin/bash
# Stop all {room} services
SCRIPT_DIR="$(cd "$(dirname "${{BASH_SOURCE[0]}}")" && pwd)"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
echo "=== Stopping {room} services ==="
cd "$ROOT_DIR/soleprint"
docker compose down
"""
    if app_name:
        lines += f"""
if [[ -f "$ROOT_DIR/{app_name}/docker-compose.yml" ]]; then
    cd "$ROOT_DIR/{app_name}"
    docker compose down
fi
"""
    lines += f"""
echo "=== All {room} services stopped ==="
"""
    return lines
def tpl_ctrl_status(room: str) -> str:
    """status.sh — runs `docker compose ps` in every service dir of the room.

    Fix: the generated script previously tested `-f "$d/docker-compose.yml"`
    relative to the current directory, but the loop body cd'd into each
    service dir without returning, so every directory after the first was
    checked against the wrong path and skipped.  The test is now anchored
    at $ROOT_DIR and the cd happens in a subshell, leaving the loop's CWD
    untouched.
    """
    return f"""\
#!/bin/bash
# Show status of {room} services
SCRIPT_DIR="$(cd "$(dirname "${{BASH_SOURCE[0]}}")" && pwd)"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
echo "=== {room} service status ==="
cd "$ROOT_DIR"
for d in */; do
    if [[ -f "$ROOT_DIR/$d/docker-compose.yml" ]]; then
        echo ""
        echo "--- $d ---"
        (cd "$ROOT_DIR/$d" && docker compose ps)
    fi
done
"""
def tpl_ctrl_logs(room: str) -> str:
    """logs.sh — follows the soleprint stack's compose logs, optionally
    narrowed to a single service passed as $1."""
    return f"""\
#!/bin/bash
# Show logs for {room} services
# Usage: ./ctrl/logs.sh [service_name]
SCRIPT_DIR="$(cd "$(dirname "${{BASH_SOURCE[0]}}")" && pwd)"
ROOT_DIR="$(dirname "$SCRIPT_DIR")"
cd "$ROOT_DIR/soleprint"
if [[ -n "$1" ]]; then
    docker compose logs -f "$1"
else
    docker compose logs -f
fi
"""
def tpl_tester_environments(url: str) -> str:
    """environments.json for the room's tester tool: a single unauthenticated
    "local" environment pointing at *url*."""
    payload = {
        "environments": [
            {"name": "local", "url": url, "auth_type": "none"},
        ],
    }
    return json.dumps(payload, indent=2) + "\n"
def tpl_tester_base() -> str:
    """tests/base.py stub re-exporting the core tester's base class so
    room tests can subclass it locally."""
    return """\
\"\"\"
Room-specific test base class.
Import from core tester and extend as needed.
\"\"\"
from station.tools.tester.base import ContractTestCase # noqa: F401
"""
def tpl_nginx_conf(room: str, app_name: str) -> str:
    """nginx config with two vhosts for the room: one that injects the
    soleprint sidebar into the app's HTML via sub_filter, and one that
    serves the app untouched.

    Accept-Encoding is blanked on the sidebar vhost so upstream responses
    arrive uncompressed and sub_filter can rewrite them.
    NOTE(review): *app_name* is unused in the template body — confirm
    whether it was meant to appear (e.g. in upstream names).
    """
    return f"""\
# {room} — Nginx Config for Docker
#
# Routes:
#   {room}.spr.local.ar — frontend with soleprint sidebar
#   {room}.local.ar — frontend without sidebar
# {room}.spr.local.ar - frontend with soleprint sidebar
server {{
    listen 80;
    server_name {room}.spr.local.ar;
    # Soleprint routes
    location /spr/ {{
        proxy_pass http://soleprint:8000/;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }}
    # Backend API
    location /api/ {{
        proxy_pass http://backend:8000/api/;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }}
    # Frontend with sidebar injection
    location / {{
        proxy_pass http://frontend:3000;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_set_header Accept-Encoding "";
        # Inject sidebar
        sub_filter '</head>' '<link rel="stylesheet" href="/spr/sidebar.css"><script src="/spr/sidebar.js" defer></script></head>';
        sub_filter_once off;
        sub_filter_types text/html;
    }}
}}
# {room}.local.ar - frontend without sidebar
server {{
    listen 80;
    server_name {room}.local.ar;
    location /api/ {{
        proxy_pass http://backend:8000/api/;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }}
    location / {{
        proxy_pass http://frontend:3000;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
    }}
}}
"""
def tpl_nginx_compose(room: str) -> str:
    """Overlay compose file adding an nginx reverse proxy in front of the
    soleprint service; meant to be merged with the base docker-compose.yml
    (see the Usage comment in the generated file)."""
    return f"""\
# Nginx Reverse Proxy for {room}
#
# Usage:
#   docker compose -f docker-compose.yml -f docker-compose.nginx.yml up -d
#
# Requires /etc/hosts entries:
#   127.0.0.1 {room}.spr.local.ar {room}.local.ar
name: ${{DEPLOYMENT_NAME}}_nginx
services:
  nginx:
    image: nginx:alpine
    container_name: ${{DEPLOYMENT_NAME}}_nginx
    ports:
      - "${{NGINX_PORT:-80}}:80"
    volumes:
      - ./nginx/local.conf:/etc/nginx/conf.d/default.conf:ro
    networks:
      - default
    depends_on:
      - soleprint
    restart: unless-stopped
networks:
  default:
    name: ${{NETWORK_NAME}}
"""
# ---------------------------------------------------------------------------
# Layer generators — pure functions, no I/O prompts
# ---------------------------------------------------------------------------
def generate_layer0(room_dir: Path, room: str, port: int, is_managed: bool) -> dict:
    """Layer 0: config.json + data/*.json. Returns the config dict."""
    log.info("-- Layer 0: Config + Data --")
    config = make_config(room, port, {"name": room} if is_managed else None)
    write_file(room_dir / "config.json", json.dumps(config, indent=2) + "\n")
    # Seed one empty collection file per DATA_FILES stem.
    data_dir = room_dir / "data"
    empty_collection = json.dumps({"items": []}, indent=2) + "\n"
    for stem in DATA_FILES:
        write_file(data_dir / f"{stem}.json", empty_collection)
    write_file(data_dir / "__init__.py", "")
    return config
def generate_layer1(room_dir: Path, room: str, port: int):
    """Layer 1: soleprint docker-compose + .env."""
    log.info("-- Layer 1: Docker --")
    soleprint_dir = room_dir / "soleprint"
    for filename, content in (
        ("docker-compose.yml", tpl_soleprint_compose()),
        (".env", tpl_soleprint_env(room, port)),
    ):
        write_file(soleprint_dir / filename, content)
def generate_layer2(
    room_dir: Path, room: str, config: dict,
    app_name: str, backend_path: str, backend_framework: str,
    frontend_path: str | None = None, frontend_framework: str | None = None,
) -> dict:
    """Layer 2: Managed app scaffolding. Returns app info dict."""
    log.info("-- Layer 2: Managed App --")
    has_frontend = bool(frontend_path)
    # Record the managed app in config.json (mutates the caller's dict).
    repos = {"backend": backend_path}
    if has_frontend:
        repos["frontend"] = frontend_path
    config["managed"] = {"name": app_name, "repos": repos}
    write_file(room_dir / "config.json", json.dumps(config, indent=2) + "\n")
    # Docker scaffolding: compose + per-framework Dockerfiles.
    write_file(room_dir / "docker-compose.yml",
               tpl_managed_compose(room, app_name, has_frontend))
    write_file(room_dir / "Dockerfile.backend",
               TPL_DOCKERFILE_BACKEND[backend_framework])
    if has_frontend and frontend_framework:
        write_file(room_dir / "Dockerfile.frontend",
                   TPL_DOCKERFILE_FRONTEND[frontend_framework])
    # Host ports: offset from the hub-port range; frontend sits 10 above.
    backend_port = next_free_port() + 100
    frontend_port = backend_port + 10 if has_frontend else None
    write_file(room_dir / ".env", tpl_managed_env(room, backend_port, frontend_port))
    # dumps/ directory for database snapshots, kept in git via .gitkeep.
    dumps_dir = room_dir / app_name / "dumps"
    dumps_dir.mkdir(parents=True, exist_ok=True)
    keep_file = dumps_dir / ".gitkeep"
    keep_file.touch()
    log.info(" + %s", keep_file.relative_to(SPR_ROOT))
    return {
        "app_name": app_name,
        "backend_framework": backend_framework,
        "has_frontend": has_frontend,
        "frontend_framework": frontend_framework,
    }
def generate_layer3(room_dir: Path, room: str, backend_framework: str):
    """Layer 3: Link (DB bridge) scaffolding."""
    log.info("-- Layer 3: Link --")
    link_dir = room_dir / "link"
    scaffold = (
        ("main.py", tpl_link_main()),
        ("adapters/__init__.py", ""),
        ("requirements.txt", tpl_link_requirements()),
        ("Dockerfile", tpl_link_dockerfile()),
        ("docker-compose.yml", tpl_link_compose(room)),
    )
    for rel_path, content in scaffold:
        write_file(link_dir / rel_path, content)
    # Framework-specific adapter stub ("express" gets none).
    if backend_framework == "django":
        write_file(link_dir / "adapters" / "django.py", tpl_link_adapter_django())
    elif backend_framework in ("fastapi", "other"):
        write_file(link_dir / "adapters" / "sqlalchemy.py", tpl_link_adapter_fastapi())
def generate_layer4(room_dir: Path, room: str, app_name: str | None):
    """Layer 4: ctrl/ helper scripts (start/stop/status/logs).

    Fix: the scripts are now marked executable after writing, so the
    documented ``./ctrl/start.sh`` invocation (see tpl_ctrl_start's usage
    comment) works without a manual chmod.
    """
    log.info("-- Layer 4: Scripts --")
    ctrl_dir = room_dir / "ctrl"
    scripts = {
        "start.sh": tpl_ctrl_start(room, app_name),
        "stop.sh": tpl_ctrl_stop(room, app_name),
        "status.sh": tpl_ctrl_status(room),
        "logs.sh": tpl_ctrl_logs(room),
    }
    for filename, content in scripts.items():
        script_path = ctrl_dir / filename
        write_file(script_path, content)
        # a+x on top of whatever mode write_file produced.
        script_path.chmod(script_path.stat().st_mode | 0o111)
def generate_layer5(room_dir: Path, room: str, test_url: str = "http://localhost:8000"):
    """Layer 5: Test suite scaffolding."""
    log.info("-- Layer 5: Test Suite --")
    tester_dir = room_dir / "soleprint" / "station" / "tools" / "tester"
    tests_dir = tester_dir / "tests"
    write_file(tester_dir / "environments.json", tpl_tester_environments(test_url))
    write_file(tests_dir / "__init__.py", "")
    write_file(tests_dir / "base.py", tpl_tester_base())
def generate_layer6(room_dir: Path, room: str, app_name: str):
    """Layer 6: Nginx sidebar injection."""
    log.info("-- Layer 6: Nginx --")
    soleprint_dir = room_dir / "soleprint"
    write_file(soleprint_dir / "nginx" / "local.conf", tpl_nginx_conf(room, app_name))
    write_file(soleprint_dir / "docker-compose.nginx.yml", tpl_nginx_compose(room))
    # Remind the operator of the hosts entries the nginx vhosts need.
    log.info(" /etc/hosts: 127.0.0.1 %s.spr.local.ar %s.local.ar", room, room)
# ---------------------------------------------------------------------------
# Clone
# ---------------------------------------------------------------------------
def clone_room(source_name: str, target_name: str):
    """Clone an existing room as a variant with new ports/names.

    Copies cfg/<source>/ to cfg/<target>/, assigns a fresh hub port, and
    rewrites the source room name inside .env, nginx *.conf AND
    docker-compose*.yml files.  Compose patching is a fix: the managed-app
    and link compose templates bake the room name into container/network
    names (see tpl_managed_compose / tpl_link_compose), so clones that
    kept the old names collided with the source room's containers.

    Returns True on success, False if the source is missing or the target
    already exists.
    """
    source_dir = CFG_DIR / source_name
    target_dir = CFG_DIR / target_name
    if not source_dir.exists():
        log.error("source room '%s' not found in cfg/", source_name)
        return False
    if target_dir.exists():
        log.error("'%s' already exists in cfg/", target_name)
        return False
    log.info("=== Cloning %s -> %s ===", source_name, target_name)
    shutil.copytree(source_dir, target_dir)
    log.info(" Copied cfg/%s/ -> cfg/%s/", source_name, target_name)
    # Patch config.json with a fresh hub port; a corrupt file is reported
    # instead of crashing mid-clone.
    config_path = target_dir / "config.json"
    if config_path.exists():
        try:
            config = json.loads(config_path.read_text())
        except json.JSONDecodeError:
            log.error(" config.json is not valid JSON; hub_port left unpatched")
        else:
            new_port = next_free_port()
            config["framework"]["hub_port"] = new_port
            config_path.write_text(json.dumps(config, indent=2) + "\n")
            log.info(" Patched config.json: hub_port=%d", new_port)
    # Rewrite the room name in every file type that embeds it.
    for pattern in (".env", "*.conf", "docker-compose*.yml"):
        for file in target_dir.rglob(pattern):
            content = file.read_text()
            if source_name in content:
                file.write_text(content.replace(source_name, target_name))
                log.info(" Patched %s", file.relative_to(SPR_ROOT))
    log.info("Done! Build with: python build.py --cfg %s", target_name)
    return True