- examples/fixture-invoicing/: FastAPI + Vue + Postgres demo (4-entity invoice fixture)
- cfg/sample/: wraps the fixture (managed.repos points at examples/)
- ctrl/kind-{up,down,status}.sh + per-room k8s render in soleprint/ctrl/k8s/
- build.py: relative repo paths, resilient rmtree, optional k8s render hook
- cfg/.gitignore: stop ignoring sample/ and standalone/ template rooms
Manifests render cleanly but kind cluster has not been run end-to-end yet.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
99 lines
3.5 KiB
Python
"""Render per-room manifests for deploy into the shared `spr` kind cluster.

The `spr` cluster itself is created via `ctrl/kind-up.sh` at the repo root
(one cluster, all rooms). Each room becomes a namespace inside it.

Called from build.py when a room opts in to k8s output. Emits:

    gen/<room>/ctrl/k8s/         base + overlays/dev manifests
    gen/<room>/ctrl/k8s-up.sh    apply this room's manifests into spr
    gen/<room>/ctrl/k8s-down.sh  delete this room's namespace from spr
    gen/<room>/ctrl/k8s-load.sh  build images and `kind load` into spr

No jinja2 dep — Python-string templates, matching init/core.py.
"""

from __future__ import annotations

import logging
from pathlib import Path

from . import templates as T

log = logging.getLogger(__name__)

# Name of the single shared kind cluster every room deploys into.
CLUSTER = "spr"
# NodePort used for a room's gateway when config omits `k8s.nodeport`.
DEFAULT_NODEPORT = 30080
|
|
|
|
|
|
def _write(path: Path, content: str, mode: int | None = None) -> None:
|
|
path.parent.mkdir(parents=True, exist_ok=True)
|
|
path.write_text(content)
|
|
if mode is not None:
|
|
path.chmod(mode)
|
|
|
|
|
|
def render_k8s(*, room: str, config: dict, gen_dir: Path) -> None:
    """Render one room's k8s manifests and lifecycle scripts under gen/<room>/ctrl/.

    Keyword-only args:
        room:    room name; also used as the namespace inside the shared cluster.
        config:  the room's config dict (reads the `managed` and `k8s` sections).
        gen_dir: the room's generated-output directory.
    """
    managed = config.get("managed") or {}
    managed_name = managed.get("name") or room
    has_managed = bool(managed)
    has_frontend = "frontend" in (managed.get("repos") or {})
    has_link = (gen_dir / "link").exists()

    nodeport = int((config.get("k8s") or {}).get("nodeport", DEFAULT_NODEPORT))

    ctrl_dir = gen_dir / "ctrl"
    base_dir = ctrl_dir / "k8s" / "base"
    dev_dir = ctrl_dir / "k8s" / "overlays" / "dev"

    log.info("Rendering k8s manifests (cluster=%s, namespace=%s, nodeport=%d)...",
             CLUSTER, room, nodeport)

    # (filename, rendered content) pairs, in apply order; the filenames double
    # as the kustomization `resources` list.
    manifests: list[tuple[str, str]] = [
        ("namespace.yaml", T.namespace(room=room)),
        ("configmap.yaml", T.configmap(room=room)),
    ]
    if has_managed:
        manifests.append(("postgres.yaml", T.postgres(room=room)))
        manifests.append(("backend.yaml", T.backend(room=room, managed_name=managed_name)))
        if has_frontend:
            manifests.append(("frontend.yaml", T.frontend(room=room, managed_name=managed_name)))
    if has_link:
        manifests.append(("link.yaml", T.link(room=room)))
    manifests.append(("soleprint.yaml", T.soleprint(room=room)))
    manifests.append(("gateway.yaml", T.gateway(room=room, nodeport=nodeport)))

    resources = [name for name, _ in manifests]
    for name, content in manifests:
        _write(base_dir / name, content)

    # NOTE(review): envoy.yaml is written but intentionally kept out of
    # `resources` (the final log's `+ 1` accounts for it) — presumably the
    # kustomization template references it itself; confirm before changing.
    _write(base_dir / "envoy.yaml", T.envoy(
        room=room, has_backend=has_managed, has_frontend=has_frontend,
    ))

    _write(base_dir / "kustomization.yaml", T.kustomization_base(resources=resources))
    _write(dev_dir / "kustomization.yaml", T.kustomization_dev(room=room))

    # Lifecycle scripts are executable (0o755).
    _write(ctrl_dir / "k8s-up.sh", T.k8s_up_sh(room=room, cluster=CLUSTER, nodeport=nodeport), mode=0o755)
    _write(ctrl_dir / "k8s-down.sh", T.k8s_down_sh(room=room, cluster=CLUSTER), mode=0o755)
    _write(ctrl_dir / "k8s-load.sh", T.k8s_load_sh(
        room=room, cluster=CLUSTER, managed_name=managed_name,
        has_managed=has_managed, has_frontend=has_frontend, has_link=has_link,
    ), mode=0o755)

    log.info("  %d manifests + 3 lifecycle scripts", len(resources) + 1)
|
|
|
|
|
|
def k8s_enabled(config: dict) -> bool:
    """Return True when the room's config opts in to k8s output."""
    k8s_cfg = config.get("k8s") or {}
    return bool(k8s_cfg.get("enabled"))
|