almosther isolating soleprint
This commit is contained in:
8
station/tools/modelgen/__init__.py
Normal file
8
station/tools/modelgen/__init__.py
Normal file
@@ -0,0 +1,8 @@
|
||||
"""
|
||||
Framework Generator System
|
||||
|
||||
Generates complete framework instances (pawprint, soleprint, etc.)
|
||||
from configuration files.
|
||||
"""
|
||||
|
||||
__version__ = "0.1.0"
|
||||
202
station/tools/modelgen/__main__.py
Normal file
202
station/tools/modelgen/__main__.py
Normal file
@@ -0,0 +1,202 @@
|
||||
"""
|
||||
Modelgen - Generic Model Generation Tool
|
||||
|
||||
Generates typed models from various sources to various formats.
|
||||
|
||||
Input sources:
|
||||
- Configuration files (soleprint.config.json style)
|
||||
- JSON Schema (planned)
|
||||
- Existing codebases: Django, SQLAlchemy, Prisma (planned - for databrowse)
|
||||
|
||||
Output formats:
|
||||
- pydantic: Pydantic BaseModel classes
|
||||
- django: Django ORM models (planned)
|
||||
- prisma: Prisma schema (planned)
|
||||
- sqlalchemy: SQLAlchemy models (planned)
|
||||
|
||||
Usage:
|
||||
python -m station.tools.modelgen --help
|
||||
python -m station.tools.modelgen from-config -c config.json -o models/ -f pydantic
|
||||
python -m station.tools.modelgen from-schema -s schema.json -o models/ -f pydantic
|
||||
python -m station.tools.modelgen extract -s /path/to/django/app -o models/ -f pydantic
|
||||
|
||||
This is a GENERIC tool. For soleprint-specific builds, use:
|
||||
python build.py dev|deploy
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def cmd_from_config(args):
    """Generate models from a configuration file (soleprint.config.json style).

    Loads the config named by ``args.config`` and writes ``args.format``
    models under ``args.output``. Exits with status 1 when the config
    file does not exist.
    """
    # Deferred imports, matching this CLI module's style elsewhere.
    from .config_loader import load_config
    from .model_generator import ModelGenerator

    config_path = Path(args.config)
    output_path = Path(args.output)

    # Guard clause: fail fast on a missing config file.
    if not config_path.exists():
        print(f"Error: Config file not found: {config_path}", file=sys.stderr)
        sys.exit(1)

    print(f"Loading config: {config_path}")
    config = load_config(config_path)

    print(f"Generating {args.format} models to: {output_path}")
    result_path = ModelGenerator(
        config=config,
        output_path=output_path,
        output_format=args.format,
    ).generate()

    print(f"✓ Models generated: {result_path}")
|
||||
|
||||
|
||||
def cmd_from_schema(args):
    """Generate models from JSON Schema.

    Placeholder: always reports "not implemented" on stderr and exits 1.
    """
    for line in (
        "Error: from-schema not yet implemented",
        "Use from-config with a soleprint.config.json file for now",
    ):
        print(line, file=sys.stderr)
    sys.exit(1)
|
||||
|
||||
|
||||
def cmd_extract(args):
    """Extract models from existing codebase (for databrowse graphs).

    Placeholder: always reports "not implemented" on stderr and exits 1.
    """
    messages = (
        "Error: extract not yet implemented",
        "This will extract models from Django/SQLAlchemy/Prisma codebases.",
        "Use cases:",
        "  - Generate browsable graphs for databrowse tool",
        "  - Convert between ORM formats",
    )
    for message in messages:
        print(message, file=sys.stderr)
    sys.exit(1)
|
||||
|
||||
|
||||
def cmd_list_formats(args):
    """Print every output format the generator supports, one per line."""
    # Deferred import, matching this CLI module's style elsewhere.
    from .model_generator import ModelGenerator

    print("Available output formats:")
    for fmt in ModelGenerator.available_formats():
        print(f"  - {fmt}")
|
||||
|
||||
|
||||
def main():
    """CLI entry point: build the argument parser and dispatch to a subcommand.

    Subcommands: from-config (working), from-schema and extract
    (placeholders), list-formats. Each subcommand stores its handler via
    ``set_defaults(func=...)`` and is invoked as ``args.func(args)``.
    """
    parser = argparse.ArgumentParser(
        description="Modelgen - Generic Model Generation Tool",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=__doc__,
    )

    subparsers = parser.add_subparsers(dest="command", required=True)

    def _add_output_options(sub):
        """Attach the --output/--format options shared by all generating subcommands.

        The original defined these two options verbatim on three subparsers;
        factoring them here keeps help text and choices in one place.
        """
        sub.add_argument(
            "--output",
            "-o",
            type=str,
            required=True,
            help="Output path (file or directory)",
        )
        sub.add_argument(
            "--format",
            "-f",
            type=str,
            default="pydantic",
            choices=["pydantic", "django", "prisma", "sqlalchemy"],
            help="Output format (default: pydantic)",
        )

    # from-config command
    config_parser = subparsers.add_parser(
        "from-config",
        help="Generate models from configuration file",
    )
    config_parser.add_argument(
        "--config",
        "-c",
        type=str,
        required=True,
        help="Path to configuration file (e.g., soleprint.config.json)",
    )
    _add_output_options(config_parser)
    config_parser.set_defaults(func=cmd_from_config)

    # from-schema command (placeholder)
    schema_parser = subparsers.add_parser(
        "from-schema",
        help="Generate models from JSON Schema (not yet implemented)",
    )
    schema_parser.add_argument(
        "--schema",
        "-s",
        type=str,
        required=True,
        help="Path to JSON Schema file",
    )
    _add_output_options(schema_parser)
    schema_parser.set_defaults(func=cmd_from_schema)

    # extract command (placeholder for databrowse)
    extract_parser = subparsers.add_parser(
        "extract",
        help="Extract models from existing codebase (not yet implemented)",
    )
    extract_parser.add_argument(
        "--source",
        "-s",
        type=str,
        required=True,
        help="Path to source codebase",
    )
    extract_parser.add_argument(
        "--framework",
        type=str,
        choices=["django", "sqlalchemy", "prisma", "auto"],
        default="auto",
        help="Source framework to extract from (default: auto-detect)",
    )
    _add_output_options(extract_parser)
    extract_parser.set_defaults(func=cmd_extract)

    # list-formats command
    formats_parser = subparsers.add_parser(
        "list-formats",
        help="List available output formats",
    )
    formats_parser.set_defaults(func=cmd_list_formats)

    args = parser.parse_args()
    args.func(args)
|
||||
|
||||
|
||||
# Script entry point: run as `python -m station.tools.modelgen ...`.
if __name__ == "__main__":
    main()
|
||||
418
station/tools/modelgen/code_generator.py
Normal file
418
station/tools/modelgen/code_generator.py
Normal file
@@ -0,0 +1,418 @@
|
||||
"""
|
||||
Code Generator
|
||||
|
||||
Generates Python code files (main.py, data layer, system main files).
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
from .config_loader import ConfigLoader
|
||||
|
||||
|
||||
class CodeGenerator:
    """Generates Python code from configuration.

    Emits three kinds of files under ``output_dir``:
      - a hub ``main.py`` (FastAPI app that aggregates and redirects),
      - a ``data/__init__.py`` data layer (JSON-file-backed loaders),
      - one ``<system>/main.py`` per configured system.

    NOTE(review): several templates index ``systems[0..2]`` directly, so the
    generator effectively assumes exactly three systems in the config —
    confirm before using configs with a different system count.
    """

    def __init__(self, config: ConfigLoader, output_dir: Path):
        # config: a loaded ConfigLoader (framework/systems/components parsed).
        self.config = config
        # Normalize to Path so callers may pass a plain string.
        self.output_dir = Path(output_dir)

    def generate(self) -> None:
        """Generate all code files"""

        # Generate hub main.py
        self._generate_hub_main()

        # Generate data layer
        self._generate_data_layer()

        # Generate system main files
        for system in self.config.systems:
            self._generate_system_main(system)

        print(f"Generated code in {self.output_dir}")

    def _generate_hub_main(self) -> None:
        """Generate hub main.py file.

        Builds several code fragments (env-var URL constants, health map,
        route table, redirect endpoints) then splices them into one large
        f-string template and writes it to ``<output_dir>/main.py``.
        """

        fw = self.config.framework
        systems = self.config.systems

        # Build system URL mappings — one `NAME_URL = os.getenv(...)` line
        # per system, defaulting to localhost on the system's port.
        system_urls = "\n".join([
            f'{s.name.upper()}_URL = os.getenv("{s.name.upper()}_URL", "http://localhost:{s.port}")'
            for s in systems
        ])

        # External URLs fall back to the internal URL constant (note: the
        # default here is the *variable name*, evaluated in generated code).
        system_external_urls = "\n".join([
            f'{s.name.upper()}_EXTERNAL_URL = os.getenv("{s.name.upper()}_EXTERNAL_URL", {s.name.upper()}_URL)'
            for s in systems
        ])

        # Entries of the "subsystems" dict in the generated /health response.
        system_health = ",\n            ".join([
            f'"{s.name}": {s.name.upper()}_URL'
            for s in systems
        ])

        # Entries of the dict passed to the generated index template.
        system_routes = "\n".join([
            f'        "{s.name}": {s.name.upper()}_EXTERNAL_URL,'
            for s in systems
        ])

        # One pair of redirect endpoints per system; doubled braces escape
        # literal braces so they survive into the generated source.
        system_redirects = "\n\n".join([
            f'''@app.get("/{s.name}")
@app.get("/{s.name}/{{path:path}}")
def {s.name}_redirect(path: str = ""):
    """Redirect to {s.name} service."""
    target = os.getenv("{s.name.upper()}_URL")
    if target:
        return RedirectResponse(url=f"{{target}}/{{path}}")
    return {{"error": "{s.name.upper()}_URL not configured"}}'''
            for s in systems
        ])

        # Module docstring of the generated hub file.
        content = f'''"""
{fw.name.capitalize()} - Overview and routing hub.

{fw.description}
{fw.icon} {fw.tagline}

Systems:
'''

        # Add system documentation
        for s in systems:
            content += f'    {s.icon} {s.title} ({s.name}) - {s.tagline}\n'

        content += f'''
Routes:
    / → index
    /health → health check
'''

        # Add data routes
        for s in systems:
            content += f'    /api/data/{s.name} → {s.name} data\n'

        # Add system redirects
        for s in systems:
            content += f'    /{s.name}/* → proxy to {s.name} service\n'

        # Body of the generated module. NOTE(review): the `from data import`
        # line hard-codes systems[0..2] — the 3-system assumption noted on
        # the class.
        content += f'''"""

import os
from pathlib import Path
from fastapi import FastAPI, Request
from fastapi.responses import RedirectResponse
from fastapi.templating import Jinja2Templates

# Import data functions
from data import get_{systems[0].name}_data, get_{systems[1].name}_data, get_{systems[2].name}_data

app = FastAPI(title="{fw.name.capitalize()}", version="{fw.version}")

templates = Jinja2Templates(directory=Path(__file__).parent)

# Service URLs (internal for API calls)
{system_urls}

# External URLs (for frontend links, falls back to internal)
{system_external_urls}


@app.get("/health")
def health():
    return {{
        "status": "ok",
        "service": "{fw.name}",
        "subsystems": {{
            {system_health},
        }}
    }}


# === Data API ===

@app.get("/api/data/{systems[0].name}")
def api_{systems[0].name}_data():
    """Data for {systems[0].name} service."""
    return get_{systems[0].name}_data()


@app.get("/api/data/{systems[1].name}")
def api_{systems[1].name}_data():
    """Data for {systems[1].name} service."""
    return get_{systems[1].name}_data()


@app.get("/api/data/{systems[2].name}")
def api_{systems[2].name}_data():
    """Data for {systems[2].name} service."""
    return get_{systems[2].name}_data()


@app.get("/")
def index(request: Request):
    return templates.TemplateResponse("index.html", {{
        "request": request,
{system_routes}
    }})


# === Cross-system redirects ===
# These allow {fw.name} to act as a hub, redirecting to subsystem routes

{system_redirects}


if __name__ == "__main__":
    import uvicorn
    uvicorn.run(
        "main:app",
        host="0.0.0.0",
        port=int(os.getenv("PORT", "{fw.hub_port}")),
        reload=os.getenv("DEV", "").lower() in ("1", "true"),
    )
'''

        (self.output_dir / "main.py").write_text(content)

    def _generate_data_layer(self) -> None:
        """Generate data/__init__.py file.

        Emits typed loader functions (one per component kind) plus one
        ``get_<system>_data()`` aggregator per system. The generated module
        imports models from ``models.pydantic``, which PydanticWriter emits.
        """

        # Get all component names for imports
        connector = self.config.get_component('data_flow', 'connector')
        pattern = self.config.get_component('documentation', 'pattern')
        tool = self.config.get_component('execution', 'utility')
        monitor = self.config.get_component('execution', 'watcher')
        cabinet = self.config.get_component('execution', 'container')
        config_comp = self.config.get_shared_component('config')
        data_comp = self.config.get_shared_component('data')

        pulse = self.config.get_component('data_flow', 'composed')
        doc_composed = self.config.get_component('documentation', 'composed')
        exec_composed = self.config.get_component('execution', 'composed')

        systems = self.config.systems

        # Build imports
        imports = f'''from models.pydantic import (
    {connector.title}, {config_comp.title}, {data_comp.title}, {pattern.title}, {tool.title},
    {pulse.title}, {doc_composed.title}, {exec_composed.title},
    {connector.title}Collection, {config_comp.title}Collection, {data_comp.title}Collection,
    {pattern.title}Collection, {tool.title}Collection,
    {pulse.title}Collection, {doc_composed.title}Collection, {exec_composed.title}Collection,
    Status
)'''

        # Build loader functions. Each reads `<plural>.json` and validates
        # through the matching pydantic Collection wrapper; cabinet/monitor
        # components are returned as raw dicts (no pydantic model yet).
        loaders = f'''
def get_{connector.plural}() -> List[{connector.title}]:
    data = _load_json("{connector.plural}.json")
    return {connector.title}Collection(**data).items


def get_{config_comp.plural}() -> List[{config_comp.title}]:
    data = _load_json("{config_comp.plural}.json")
    return {config_comp.title}Collection(**data).items


def get_{data_comp.plural}() -> List[{data_comp.title}]:
    data = _load_json("{data_comp.plural}.json")
    return {data_comp.title}Collection(**data).items


def get_{pattern.plural}() -> List[{pattern.title}]:
    data = _load_json("{pattern.plural}.json")
    return {pattern.title}Collection(**data).items


def get_{tool.plural}() -> List[{tool.title}]:
    data = _load_json("{tool.plural}.json")
    return {tool.title}Collection(**data).items


def get_{cabinet.plural}() -> list:
    """Load {cabinet.plural} (simple dict, no pydantic yet)."""
    data = _load_json("{cabinet.plural}.json")
    return data.get("items", [])


def get_{monitor.plural}() -> list:
    """Load {monitor.plural} (simple dict, no pydantic yet)."""
    data = _load_json("{monitor.plural}.json")
    return data.get("items", [])


def get_{pulse.plural}() -> List[{pulse.title}]:
    data = _load_json("{pulse.plural}.json")
    return {pulse.title}Collection(**data).items


def get_{doc_composed.plural}() -> List[{doc_composed.title}]:
    data = _load_json("{doc_composed.plural}.json")
    return {doc_composed.title}Collection(**data).items


def get_{exec_composed.plural}() -> List[{exec_composed.title}]:
    data = _load_json("{exec_composed.plural}.json")
    return {exec_composed.title}Collection(**data).items
'''

        # Build system data functions. NOTE(review): positional indexing
        # assumes systems are ordered data_flow, documentation, execution.
        data_flow_sys = systems[0]
        doc_sys = systems[1]
        exec_sys = systems[2]

        system_data = f'''
def get_{data_flow_sys.name}_data() -> dict:
    """Data for {data_flow_sys.name} frontend."""
    return {{
        "{connector.plural}": [v.model_dump() for v in get_{connector.plural}()],
        "{config_comp.plural}": [n.model_dump() for n in get_{config_comp.plural}()],
        "{data_comp.plural}": [l.model_dump() for l in get_{data_comp.plural}()],
        "{pulse.plural}": [p.model_dump() for p in get_{pulse.plural}()],
    }}


def get_{doc_sys.name}_data() -> dict:
    """Data for {doc_sys.name} frontend."""
    return {{
        "{pattern.plural}": [t.model_dump() for t in get_{pattern.plural}()],
        "{data_comp.plural}": [l.model_dump() for l in get_{data_comp.plural}()],
        "{doc_composed.plural}": [b.model_dump() for b in get_{doc_composed.plural}()],
    }}


def get_{exec_sys.name}_data() -> dict:
    """Data for {exec_sys.name} frontend."""
    return {{
        "{tool.plural}": [t.model_dump() for t in get_{tool.plural}()],
        "{monitor.plural}": get_{monitor.plural}(),
        "{cabinet.plural}": get_{cabinet.plural}(),
        "{config_comp.plural}": [n.model_dump() for n in get_{config_comp.plural}()],
        "{data_comp.plural}": [l.model_dump() for l in get_{data_comp.plural}()],
        "{exec_composed.plural}": [t.model_dump() for t in get_{exec_composed.plural}()],
    }}
'''

        content = f'''"""
{self.config.framework.name.capitalize()} Data Layer

JSON file storage (future: MongoDB)
"""

import json
from pathlib import Path
from typing import List, Optional

# Add parent to path for models import
import sys
sys.path.insert(0, str(Path(__file__).parent.parent))

{imports}

DATA_DIR = Path(__file__).parent.resolve()


def _load_json(filename: str) -> dict:
    filepath = DATA_DIR / filename
    if filepath.exists():
        with open(filepath) as f:
            return json.load(f)
    return {{"items": []}}


def _save_json(filename: str, data: dict):
    filepath = DATA_DIR / filename
    with open(filepath, 'w') as f:
        json.dump(data, f, indent=2)


# === Loaders ===
{loaders}

# === For frontend rendering ===
{system_data}
'''

        (self.output_dir / "data" / "__init__.py").write_text(content)

    def _generate_system_main(self, system) -> None:
        """Generate main.py for a system.

        The generated app fetches its data from the hub's /api/data/<name>
        endpoint at request time and renders a local Jinja2 template.
        Writes to ``<output_dir>/<system.name>/main.py``; the directory is
        expected to exist already.
        """

        fw = self.config.framework

        content = f'''"""
{system.title} - {system.tagline}
"""

import os
import httpx
from pathlib import Path
from fastapi import FastAPI, Request
from fastapi.templating import Jinja2Templates

app = FastAPI(title="{system.title}", version="{fw.version}")

templates = Jinja2Templates(directory=Path(__file__).parent)

# {fw.name.capitalize()} URL for data fetching
{fw.name.upper()}_URL = os.getenv("{fw.name.upper()}_URL", "http://localhost:{fw.hub_port}")


def get_data():
    """Fetch data from {fw.name} hub."""
    try:
        resp = httpx.get(f"{{{fw.name.upper()}_URL}}/api/data/{system.name}", timeout=5.0)
        if resp.status_code == 200:
            return resp.json()
    except Exception as e:
        print(f"Failed to fetch data from {fw.name}: {{e}}")
    return {{"items": []}}


@app.get("/health")
def health():
    return {{"status": "ok", "service": "{system.name}"}}


@app.get("/")
def index(request: Request):
    data = get_data()
    return templates.TemplateResponse("index.html", {{
        "request": request,
        "{fw.name}_url": os.getenv("{fw.name.upper()}_EXTERNAL_URL", {fw.name.upper()}_URL),
        **data,
    }})


@app.get("/api/data")
def api_data():
    """API endpoint for frontend data (proxied from {fw.name})."""
    return get_data()


if __name__ == "__main__":
    import uvicorn
    uvicorn.run(
        "main:app",
        host="0.0.0.0",
        port=int(os.getenv("PORT", "{system.port}")),
        reload=os.getenv("DEV", "").lower() in ("1", "true"),
    )
'''

        (self.output_dir / system.name / "main.py").write_text(content)
|
||||
|
||||
|
||||
# Smoke test: generate code from the soleprint config into the parent dir.
# NOTE(review): the relative import below means this must be run as a module
# (`python -m station.tools.modelgen.code_generator`), not as a plain script.
if __name__ == "__main__":
    from .config_loader import load_config

    # Test with soleprint config
    config_path = Path(__file__).parent.parent / "soleprint.config.json"
    config = load_config(config_path)

    output_dir = Path(__file__).parent.parent
    generator = CodeGenerator(config, output_dir)
    generator.generate()

    print("Code generated successfully!")
|
||||
130
station/tools/modelgen/config_loader.py
Normal file
130
station/tools/modelgen/config_loader.py
Normal file
@@ -0,0 +1,130 @@
|
||||
"""
|
||||
Configuration Loader
|
||||
|
||||
Loads and validates framework configuration files.
|
||||
"""
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
from typing import Dict, Any, List, Optional
|
||||
from dataclasses import dataclass
|
||||
|
||||
|
||||
@dataclass
class FrameworkConfig:
    """Framework metadata (the `framework` section of the config file)."""
    name: str  # framework identifier; also used for env-var prefixes in generated code
    slug: str  # URL-friendly identifier
    version: str  # version string embedded in generated apps
    description: str  # human-readable description
    tagline: str  # short one-line description
    icon: str  # icon/emoji shown in generated docs and UIs
    hub_port: int  # default port for the generated hub service
|
||||
|
||||
|
||||
@dataclass
class SystemConfig:
    """System configuration (one entry of the `systems` list in the config)."""
    key: str  # lookup key (e.g. 'data_flow') used by ConfigLoader.get_system
    name: str  # system identifier; used for routes, env vars, directory names
    slug: str  # URL-friendly identifier
    title: str  # display title for UI
    tagline: str  # short one-line description
    port: int  # default port for this system's generated service
    icon: str  # icon/emoji shown in generated docs and UIs
|
||||
|
||||
|
||||
@dataclass
class ComponentConfig:
    """Component configuration (entries under the `components` section)."""
    name: str  # component identifier (also used as a field name in generated models)
    title: str  # class name used for the generated model
    description: str  # docstring text for the generated model
    plural: Optional[str] = None  # plural form; used for JSON filenames and loader names
    formula: Optional[str] = None  # composition formula, for composed components only
|
||||
|
||||
|
||||
class ConfigLoader:
    """Loads and parses a framework configuration file.

    Usage: ``ConfigLoader(path).load()`` — ``load()`` returns ``self`` so
    the call can be chained (see ``load_config``). After loading, parsed
    data is available on ``framework``, ``systems`` and ``components``.

    Fix over the original: local variables named ``sys`` (which shadow the
    stdlib module name) are renamed to ``system``.
    """

    def __init__(self, config_path: Path):
        self.config_path = Path(config_path)
        self.raw_config: Dict[str, Any] = {}
        self.framework: Optional[FrameworkConfig] = None
        self.systems: List[SystemConfig] = []
        # Nested mapping: {system_key: {component_key: ComponentConfig}};
        # shared components live under the 'shared' key.
        self.components: Dict[str, Dict[str, ComponentConfig]] = {}

    def load(self) -> 'ConfigLoader':
        """Load configuration from file; returns self for chaining.

        Raises OSError if the file is missing and json.JSONDecodeError /
        KeyError / TypeError if the file is malformed (unchanged behavior).
        """
        with open(self.config_path) as f:
            self.raw_config = json.load(f)

        self._parse_framework()
        self._parse_systems()
        self._parse_components()

        return self

    def _parse_framework(self):
        """Parse framework metadata into a FrameworkConfig."""
        fw = self.raw_config['framework']
        self.framework = FrameworkConfig(**fw)

    def _parse_systems(self):
        """Parse system configurations in config-file order."""
        for system in self.raw_config['systems']:
            self.systems.append(SystemConfig(**system))

    def _parse_components(self):
        """Parse component configurations (shared plus per-system)."""
        comps = self.raw_config['components']

        # Shared components
        self.components['shared'] = {}
        for key, value in comps.get('shared', {}).items():
            self.components['shared'][key] = ComponentConfig(**value)

        # System-specific components. NOTE(review): system keys are
        # hard-coded here rather than derived from the `systems` list.
        for system_key in ['data_flow', 'documentation', 'execution']:
            self.components[system_key] = {}
            for comp_key, comp_value in comps.get(system_key, {}).items():
                self.components[system_key][comp_key] = ComponentConfig(**comp_value)

    def get_system(self, key: str) -> Optional[SystemConfig]:
        """Get system config by key, or None when no system matches."""
        for system in self.systems:
            if system.key == key:
                return system
        return None

    def get_component(self, system_key: str, component_key: str) -> Optional[ComponentConfig]:
        """Get a per-system component config, or None when missing."""
        return self.components.get(system_key, {}).get(component_key)

    def get_shared_component(self, key: str) -> Optional[ComponentConfig]:
        """Get a shared component config, or None when missing."""
        return self.components.get('shared', {}).get(key)
|
||||
|
||||
|
||||
def load_config(config_path: str | Path) -> ConfigLoader:
    """Load and validate configuration file.

    Convenience wrapper: constructs a ConfigLoader and immediately loads it.
    """
    return ConfigLoader(config_path).load()
|
||||
|
||||
|
||||
# Smoke test: load the pawprint config and print a summary.
# Fixes over the original: removed an unused `import sys` that the loop
# variable `sys` then shadowed; renamed that loop variable to `system`;
# dropped f-prefixes from strings without placeholders; iterate .values()
# since the key was unused.
if __name__ == "__main__":
    config_path = Path(__file__).parent.parent / "pawprint.config.json"

    loader = load_config(config_path)

    print(f"Framework: {loader.framework.name} v{loader.framework.version}")
    print(f"Tagline: {loader.framework.tagline}")
    print("\nSystems:")
    for system in loader.systems:
        print(f"  {system.icon} {system.title} ({system.name}) - {system.tagline}")

    print("\nShared Components:")
    for comp in loader.components['shared'].values():
        print(f"  {comp.name} - {comp.description}")
|
||||
370
station/tools/modelgen/model_generator.py
Normal file
370
station/tools/modelgen/model_generator.py
Normal file
@@ -0,0 +1,370 @@
|
||||
"""
|
||||
Model Generator
|
||||
|
||||
Generic model generation from configuration files.
|
||||
Supports multiple output formats and is extensible for bidirectional conversion.
|
||||
|
||||
Output formats:
|
||||
- pydantic: Pydantic BaseModel classes
|
||||
- django: Django ORM models (planned)
|
||||
- prisma: Prisma schema (planned)
|
||||
- sqlalchemy: SQLAlchemy models (planned)
|
||||
|
||||
Future: Extract models FROM existing codebases (reverse direction)
|
||||
"""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from pathlib import Path
|
||||
from typing import Dict, Type
|
||||
|
||||
from .config_loader import ConfigLoader
|
||||
|
||||
|
||||
class BaseModelWriter(ABC):
    """Abstract base for model output writers.

    Each subclass implements one output format (e.g. PydanticWriter) by
    providing ``write`` and ``file_extension``.
    """

    @abstractmethod
    def write(self, config: ConfigLoader, output_path: Path) -> None:
        """Write models derived from `config` to the specified path."""
        pass

    @abstractmethod
    def file_extension(self) -> str:
        """Return the file extension for this format (including the dot)."""
        pass
|
||||
|
||||
|
||||
class PydanticWriter(BaseModelWriter):
|
||||
"""Generates Pydantic model files."""
|
||||
|
||||
    def file_extension(self) -> str:
        """Return ".py": Pydantic models are emitted as a Python module."""
        return ".py"
|
||||
|
||||
    def write(self, config: ConfigLoader, output_path: Path) -> None:
        """Write Pydantic models to output_path.

        Creates missing parent directories and overwrites any existing file.
        """
        output_path.parent.mkdir(parents=True, exist_ok=True)
        content = self._generate_content(config)
        output_path.write_text(content)
|
||||
|
||||
def _generate_content(self, config: ConfigLoader) -> str:
|
||||
"""Generate the Pydantic models file content."""
|
||||
|
||||
# Get component names from config
|
||||
config_comp = config.get_shared_component("config")
|
||||
data_comp = config.get_shared_component("data")
|
||||
|
||||
data_flow_sys = config.get_system("data_flow")
|
||||
doc_sys = config.get_system("documentation")
|
||||
exec_sys = config.get_system("execution")
|
||||
|
||||
connector_comp = config.get_component("data_flow", "connector")
|
||||
pulse_comp = config.get_component("data_flow", "composed")
|
||||
|
||||
pattern_comp = config.get_component("documentation", "pattern")
|
||||
doc_composed = config.get_component("documentation", "composed")
|
||||
|
||||
tool_comp = config.get_component("execution", "utility")
|
||||
monitor_comp = config.get_component("execution", "watcher")
|
||||
cabinet_comp = config.get_component("execution", "container")
|
||||
exec_composed = config.get_component("execution", "composed")
|
||||
|
||||
return f'''"""
|
||||
Pydantic models - Generated from {config.framework.name}.config.json
|
||||
|
||||
DO NOT EDIT MANUALLY - Regenerate from config
|
||||
"""
|
||||
|
||||
from enum import Enum
|
||||
from typing import List, Literal, Optional
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class Status(str, Enum):
|
||||
PENDING = "pending"
|
||||
PLANNED = "planned"
|
||||
BUILDING = "building"
|
||||
DEV = "dev"
|
||||
LIVE = "live"
|
||||
READY = "ready"
|
||||
|
||||
|
||||
class System(str, Enum):
|
||||
{data_flow_sys.name.upper()} = "{data_flow_sys.name}"
|
||||
{doc_sys.name.upper()} = "{doc_sys.name}"
|
||||
{exec_sys.name.upper()} = "{exec_sys.name}"
|
||||
|
||||
|
||||
class ToolType(str, Enum):
|
||||
APP = "app"
|
||||
CLI = "cli"
|
||||
|
||||
|
||||
# === Shared Components ===
|
||||
|
||||
|
||||
class {config_comp.title}(BaseModel):
|
||||
"""{config_comp.description}. Shared across {data_flow_sys.name}, {exec_sys.name}."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
status: Optional[Status] = None
|
||||
config_path: Optional[str] = None
|
||||
|
||||
|
||||
class {data_comp.title}(BaseModel):
|
||||
"""{data_comp.description}. Shared across all systems."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
status: Optional[Status] = None
|
||||
source_template: Optional[str] = None
|
||||
data_path: Optional[str] = None
|
||||
|
||||
|
||||
# === System-Specific Components ===
|
||||
|
||||
|
||||
class {connector_comp.title}(BaseModel):
|
||||
"""{connector_comp.description} ({data_flow_sys.name})."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
status: Optional[Status] = None
|
||||
system: Literal["{data_flow_sys.name}"] = "{data_flow_sys.name}"
|
||||
mock: Optional[bool] = None
|
||||
description: Optional[str] = None
|
||||
|
||||
|
||||
class {pattern_comp.title}(BaseModel):
|
||||
"""{pattern_comp.description} ({doc_sys.name})."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
status: Optional[Status] = None
|
||||
template_path: Optional[str] = None
|
||||
system: Literal["{doc_sys.name}"] = "{doc_sys.name}"
|
||||
|
||||
|
||||
class {tool_comp.title}(BaseModel):
|
||||
"""{tool_comp.description} ({exec_sys.name})."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
status: Optional[Status] = None
|
||||
system: Literal["{exec_sys.name}"] = "{exec_sys.name}"
|
||||
type: Optional[ToolType] = None
|
||||
description: Optional[str] = None
|
||||
path: Optional[str] = None
|
||||
url: Optional[str] = None
|
||||
cli: Optional[str] = None
|
||||
|
||||
|
||||
class {monitor_comp.title}(BaseModel):
|
||||
"""{monitor_comp.description} ({exec_sys.name})."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
status: Optional[Status] = None
|
||||
system: Literal["{exec_sys.name}"] = "{exec_sys.name}"
|
||||
|
||||
|
||||
class {cabinet_comp.title}(BaseModel):
|
||||
"""{cabinet_comp.description} ({exec_sys.name})."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
status: Optional[Status] = None
|
||||
tools: List[{tool_comp.title}] = Field(default_factory=list)
|
||||
system: Literal["{exec_sys.name}"] = "{exec_sys.name}"
|
||||
|
||||
|
||||
# === Composed Types ===
|
||||
|
||||
|
||||
class {pulse_comp.title}(BaseModel):
|
||||
"""{pulse_comp.description} ({data_flow_sys.name}). Formula: {pulse_comp.formula}."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
status: Optional[Status] = None
|
||||
{connector_comp.name}: Optional[{connector_comp.title}] = None
|
||||
{config_comp.name}: Optional[{config_comp.title}] = None
|
||||
{data_comp.name}: Optional[{data_comp.title}] = None
|
||||
system: Literal["{data_flow_sys.name}"] = "{data_flow_sys.name}"
|
||||
|
||||
|
||||
class {doc_composed.title}(BaseModel):
|
||||
"""{doc_composed.description} ({doc_sys.name}). Formula: {doc_composed.formula}."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
status: Optional[Status] = None
|
||||
template: Optional[{pattern_comp.title}] = None
|
||||
{data_comp.name}: Optional[{data_comp.title}] = None
|
||||
output_{data_comp.name}: Optional[{data_comp.title}] = None
|
||||
system: Literal["{doc_sys.name}"] = "{doc_sys.name}"
|
||||
|
||||
|
||||
class {exec_composed.title}(BaseModel):
|
||||
"""{exec_composed.description} ({exec_sys.name}). Formula: {exec_composed.formula}."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
status: Optional[Status] = None
|
||||
cabinet: Optional[{cabinet_comp.title}] = None
|
||||
{config_comp.name}: Optional[{config_comp.title}] = None
|
||||
{data_comp.plural}: List[{data_comp.title}] = Field(default_factory=list)
|
||||
system: Literal["{exec_sys.name}"] = "{exec_sys.name}"
|
||||
|
||||
|
||||
# === Collection wrappers for JSON files ===
|
||||
|
||||
|
||||
class {config_comp.title}Collection(BaseModel):
|
||||
items: List[{config_comp.title}] = Field(default_factory=list)
|
||||
|
||||
|
||||
class {data_comp.title}Collection(BaseModel):
|
||||
items: List[{data_comp.title}] = Field(default_factory=list)
|
||||
|
||||
|
||||
class {connector_comp.title}Collection(BaseModel):
|
||||
items: List[{connector_comp.title}] = Field(default_factory=list)
|
||||
|
||||
|
||||
class {pattern_comp.title}Collection(BaseModel):
|
||||
items: List[{pattern_comp.title}] = Field(default_factory=list)
|
||||
|
||||
|
||||
class {tool_comp.title}Collection(BaseModel):
|
||||
items: List[{tool_comp.title}] = Field(default_factory=list)
|
||||
|
||||
|
||||
class {monitor_comp.title}Collection(BaseModel):
|
||||
items: List[{monitor_comp.title}] = Field(default_factory=list)
|
||||
|
||||
|
||||
class {cabinet_comp.title}Collection(BaseModel):
|
||||
items: List[{cabinet_comp.title}] = Field(default_factory=list)
|
||||
|
||||
|
||||
class {pulse_comp.title}Collection(BaseModel):
|
||||
items: List[{pulse_comp.title}] = Field(default_factory=list)
|
||||
|
||||
|
||||
class {doc_composed.title}Collection(BaseModel):
|
||||
items: List[{doc_composed.title}] = Field(default_factory=list)
|
||||
|
||||
|
||||
class {exec_composed.title}Collection(BaseModel):
|
||||
items: List[{exec_composed.title}] = Field(default_factory=list)
|
||||
'''
|
||||
|
||||
|
||||
class DjangoWriter(BaseModelWriter):
    """Placeholder writer for Django ORM model output.

    Registered in WRITERS so the CLI can list the format, but generation
    is not implemented yet: write() always raises NotImplementedError.
    """

    def file_extension(self) -> str:
        """Django models are plain Python modules."""
        return ".py"

    def write(self, config: ConfigLoader, output_path: Path) -> None:
        """Reserved for Django model generation; not implemented yet."""
        raise NotImplementedError("Django model generation not yet implemented")
|
||||
|
||||
|
||||
class PrismaWriter(BaseModelWriter):
    """Placeholder writer for Prisma schema output.

    Registered in WRITERS so the CLI can list the format, but generation
    is not implemented yet: write() always raises NotImplementedError.
    """

    def file_extension(self) -> str:
        """Prisma schemas use the .prisma extension."""
        return ".prisma"

    def write(self, config: ConfigLoader, output_path: Path) -> None:
        """Reserved for Prisma schema generation; not implemented yet."""
        raise NotImplementedError("Prisma schema generation not yet implemented")
|
||||
|
||||
|
||||
class SQLAlchemyWriter(BaseModelWriter):
    """Placeholder writer for SQLAlchemy model output.

    Registered in WRITERS so the CLI can list the format, but generation
    is not implemented yet: write() always raises NotImplementedError.
    """

    def file_extension(self) -> str:
        """SQLAlchemy models are plain Python modules."""
        return ".py"

    def write(self, config: ConfigLoader, output_path: Path) -> None:
        """Reserved for SQLAlchemy model generation; not implemented yet."""
        raise NotImplementedError("SQLAlchemy model generation not yet implemented")
|
||||
|
||||
|
||||
# Registry of available writers.
# Maps the user-facing format name (as passed on the CLI / to ModelGenerator)
# to the writer class that implements it. Only "pydantic" is functional
# today; the other writers raise NotImplementedError from write().
WRITERS: Dict[str, Type[BaseModelWriter]] = {
    "pydantic": PydanticWriter,
    "django": DjangoWriter,  # placeholder
    "prisma": PrismaWriter,  # placeholder
    "sqlalchemy": SQLAlchemyWriter,  # placeholder
}
|
||||
|
||||
|
||||
class ModelGenerator:
    """
    Turns a loaded configuration into typed model files.

    This is the main entry point for model generation; the actual
    serialization is delegated to a format-specific writer looked up
    in the WRITERS registry.
    """

    def __init__(
        self,
        config: ConfigLoader,
        output_path: Path,
        output_format: str = "pydantic",
    ):
        """
        Initialize the generator.

        Args:
            config: Loaded configuration
            output_path: Exact path where to write (file or directory depending on format)
            output_format: Output format (pydantic, django, prisma, sqlalchemy)

        Raises:
            ValueError: If output_format is not a registered writer name.
        """
        if output_format not in WRITERS:
            raise ValueError(
                f"Unknown output format: {output_format}. "
                f"Available: {list(WRITERS.keys())}"
            )

        self.config = config
        self.output_path = Path(output_path)
        self.output_format = output_format
        self.writer = WRITERS[output_format]()

    def generate(self) -> Path:
        """
        Generate models to the specified output path.

        Returns:
            Path to the generated file/directory
        """
        # A suffix means the caller named an exact file; otherwise treat the
        # path as a directory and use the writer's default package filename.
        if self.output_path.suffix:
            target = self.output_path
        else:
            target = self.output_path / f"__init__{self.writer.file_extension()}"

        self.writer.write(self.config, target)
        print(f"Generated {self.output_format} models: {target}")
        return target

    @classmethod
    def available_formats(cls) -> list:
        """Return the names of all registered output formats."""
        return [*WRITERS]
|
||||
128
station/tools/modelgen/orchestrator.py
Normal file
128
station/tools/modelgen/orchestrator.py
Normal file
@@ -0,0 +1,128 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Framework Generator Orchestrator
|
||||
|
||||
Generates complete framework from configuration file.
|
||||
|
||||
Usage:
|
||||
python generators/orchestrator.py [--config CONFIG_PATH]
|
||||
|
||||
Example:
|
||||
python generators/orchestrator.py
|
||||
python generators/orchestrator.py --config custom.config.json
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
from .config_loader import load_config
|
||||
from .structure_generator import StructureGenerator
|
||||
from .model_generator import ModelGenerator
|
||||
from .code_generator import CodeGenerator
|
||||
|
||||
# Setup logging
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format='%(message)s'
|
||||
)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def generate_framework(config_path: Path, output_dir: Path):
    """Generate a complete framework instance from a configuration file.

    Runs the four generation phases in order — folder structure, Pydantic
    models, Python code, static template copying — then logs follow-up
    steps for the user.

    Args:
        config_path: Path to the framework configuration JSON file.
        output_dir: Root directory the framework is generated into.
    """
    logger.info("=" * 60)
    logger.info("Framework Generator")
    logger.info("=" * 60)

    # Load configuration
    logger.info("Loading configuration from %s...", config_path)
    config = load_config(config_path)

    logger.info("\nGenerating %s", config.framework.name.capitalize())
    logger.info("  %s", config.framework.tagline)
    logger.info("  Version: %s", config.framework.version)

    logger.info("\nOutput directory: %s", output_dir)

    logger.info("\nSystems:")
    # Loop variable renamed from `sys` — the old name shadowed the stdlib
    # module name and invited subtle bugs.
    for system in config.systems:
        logger.info("  %s (%s) - %s", system.title, system.name, system.tagline)

    # [1/4] Folder structure
    logger.info("\n[1/4] Generating folder structure...")
    struct_gen = StructureGenerator(config, output_dir)
    struct_gen.generate()

    # [2/4] Typed models
    logger.info("\n[2/4] Generating Pydantic models...")
    model_gen = ModelGenerator(config, output_dir)
    model_gen.generate()

    # [3/4] Generated Python code
    logger.info("\n[3/4] Generating Python code...")
    code_gen = CodeGenerator(config, output_dir)
    code_gen.generate()

    # [4/4] Static templates shipped next to the generator package.
    logger.info("\n[4/4] Copying templates...")
    templates_dir = Path(__file__).parent.parent / "templates"
    for template_name in ("index.html", "requirements.txt"):
        src = templates_dir / template_name
        if src.exists():
            shutil.copy(src, output_dir / template_name)
            logger.info("  Copied %s", template_name)

    logger.info("\n%s", "=" * 60)
    logger.info("Framework generated successfully!")
    logger.info("%s\n", "=" * 60)

    logger.info("Next steps:")
    logger.info("  1. Review generated files in %s", output_dir)
    logger.info("  2. Install dependencies: pip install -r requirements.txt")
    logger.info("  3. Run hub: python %s/main.py", output_dir)
    logger.info("  4. Visit http://localhost:%s", config.framework.hub_port)
|
||||
|
||||
|
||||
def main():
    """CLI entry point: parse arguments, locate the config, run generation.

    Returns:
        Process exit code: 0 on success, 1 if the config file is missing.
    """
    parser = argparse.ArgumentParser(
        description="Generate framework from configuration"
    )
    parser.add_argument(
        "--config",
        default="soleprint.config.json",
        help="Path to configuration file (default: soleprint.config.json)",
    )
    parser.add_argument(
        "--output",
        default=None,
        help="Output directory (default: same as config directory)",
    )
    args = parser.parse_args()

    # Resolve the config path: as given first, then relative to the
    # package root (one level above this module).
    config_path = Path(args.config)
    if not config_path.exists():
        config_path = Path(__file__).parent.parent / args.config
        if not config_path.exists():
            logger.error(f"Configuration file not found: {args.config}")
            return 1

    # Output directory defaults to wherever the config file lives.
    output_dir = Path(args.output) if args.output else config_path.parent

    generate_framework(config_path, output_dir)
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Raise SystemExit directly: the exit() builtin is injected by the
    # `site` module and is not guaranteed to exist in every environment
    # (e.g. `python -S`, embedded interpreters).
    raise SystemExit(main())
|
||||
127
station/tools/modelgen/structure_generator.py
Normal file
127
station/tools/modelgen/structure_generator.py
Normal file
@@ -0,0 +1,127 @@
|
||||
"""
|
||||
Structure Generator
|
||||
|
||||
Creates folder structure for framework instance.
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
from .config_loader import ConfigLoader
|
||||
|
||||
|
||||
class StructureGenerator:
    """Creates the on-disk folder layout described by a configuration."""

    def __init__(self, config: ConfigLoader, output_dir: Path):
        """Store the loaded config and normalize the output root to a Path."""
        self.config = config
        self.output_dir = Path(output_dir)

    def generate(self):
        """Create every framework directory and seed the empty data files.

        Idempotent: all directories are created with exist_ok, and existing
        data JSON files are never overwritten.
        """
        # Note: output_dir is the framework root (spr/), not soleprint-room/
        # soleprint-room/ is generated separately as Docker orchestration
        root = self.output_dir

        # Shared top-level folders (parents=True also creates models/).
        (root / "models" / "pydantic").mkdir(parents=True, exist_ok=True)
        data_dir = root / "data"
        data_dir.mkdir(exist_ok=True)
        (root / "ctrl").mkdir(exist_ok=True)  # local scripts

        # Component lookups used by more than one system branch below.
        connector = self.config.get_component('data_flow', 'connector')
        pattern = self.config.get_component('documentation', 'pattern')
        tool = self.config.get_component('execution', 'utility')
        monitor = self.config.get_component('execution', 'watcher')
        config_comp = self.config.get_shared_component('config')
        data_comp = self.config.get_shared_component('data')

        for system in self.config.systems:
            sys_dir = root / system.name
            sys_dir.mkdir(exist_ok=True)
            # Mark the system directory as a Python package.
            (sys_dir / "__init__.py").touch()

            # Pick the sub-folder names for this system; composed/library
            # components are looked up lazily so only systems that exist
            # in the config are queried.
            if system.key == 'data_flow':
                # e.g. artery/vein/, artery/pulse/, artery/room/, artery/depot/
                names = [
                    connector.plural,
                    self.config.get_component('data_flow', 'composed').plural,
                    config_comp.plural,
                    data_comp.plural,
                ]
            elif system.key == 'documentation':
                # e.g. atlas/template/, atlas/maps/, atlas/depot/
                names = [
                    pattern.plural,
                    self.config.get_component('documentation', 'library').name,
                    data_comp.plural,
                ]
            elif system.key == 'execution':
                # e.g. station/tools/, station/monitors/, station/desk/,
                # station/room/, station/depot/
                names = [
                    tool.plural,
                    monitor.plural,
                    self.config.get_component('execution', 'composed').plural,
                    config_comp.plural,
                    data_comp.plural,
                ]
            else:
                names = []

            for sub in names:
                (sys_dir / sub).mkdir(exist_ok=True)

        self._create_data_files(data_dir)

        print(f"Generated structure in {self.output_dir}")

    def _create_data_files(self, data_dir: Path):
        """Seed data_dir with one empty-items JSON file per component type."""
        cfg = self.config
        # One file per component, named after the component's plural form.
        components = [
            cfg.get_component('data_flow', 'connector'),
            cfg.get_component('documentation', 'pattern'),
            cfg.get_component('execution', 'utility'),
            cfg.get_component('execution', 'watcher'),
            cfg.get_component('execution', 'container'),
            cfg.get_shared_component('config'),
            cfg.get_shared_component('data'),
            cfg.get_component('data_flow', 'composed'),
            cfg.get_component('documentation', 'composed'),
            cfg.get_component('execution', 'composed'),
        ]

        for comp in components:
            target = data_dir / f"{comp.plural}.json"
            # Never clobber data a user has already written.
            if not target.exists():
                target.write_text('{\n  "items": []\n}\n')
|
||||
|
||||
|
||||
if __name__ == "__main__":
    from .config_loader import load_config

    # Smoke test with the soleprint config. Output goes to the framework
    # root (spr/), not soleprint-room/.
    # NOTE(review): the relative import above requires running this module
    # with `python -m`; confirm that is the intended invocation.
    package_root = Path(__file__).parent.parent
    cfg = load_config(package_root / "soleprint.config.json")
    StructureGenerator(cfg, package_root).generate()

    print("Structure generated successfully!")
|
||||
Reference in New Issue
Block a user