Compare commits
5 Commits
dcc5191ba3
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| 47b4b87851 | |||
| b4081cff3e | |||
|
|
35796c0c3b | ||
|
|
0351e5c7a6 | ||
|
|
3df1465bf5 |
30
build.py
30
build.py
@@ -26,7 +26,6 @@ import argparse
|
||||
import json
|
||||
import logging
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
@@ -219,6 +218,8 @@ def build_link(output_dir: Path, cfg_name: str):
|
||||
|
||||
def generate_models(output_dir: Path, room: str):
|
||||
"""Generate models using modelgen tool."""
|
||||
from soleprint.station.tools.modelgen import ModelGenerator, load_config
|
||||
|
||||
config_path = SPR_ROOT / "cfg" / room / "config.json"
|
||||
|
||||
if not config_path.exists():
|
||||
@@ -228,21 +229,18 @@ def generate_models(output_dir: Path, room: str):
|
||||
models_file = output_dir / "models" / "pydantic" / "__init__.py"
|
||||
models_file.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
cmd = [
|
||||
sys.executable,
|
||||
"-m",
|
||||
"soleprint.station.tools.modelgen",
|
||||
"from-config",
|
||||
"--config",
|
||||
str(config_path),
|
||||
"--output",
|
||||
str(models_file),
|
||||
"--format",
|
||||
"pydantic",
|
||||
]
|
||||
|
||||
result = subprocess.run(cmd, cwd=SPR_ROOT)
|
||||
return result.returncode == 0
|
||||
try:
|
||||
config = load_config(config_path)
|
||||
generator = ModelGenerator(
|
||||
config=config,
|
||||
output_path=models_file,
|
||||
output_format="pydantic",
|
||||
)
|
||||
generator.generate()
|
||||
return True
|
||||
except Exception as e:
|
||||
log.error(f"Model generation failed: {e}")
|
||||
return False
|
||||
|
||||
|
||||
def copy_cfg(output_dir: Path, room: str):
|
||||
|
||||
@@ -38,7 +38,7 @@
|
||||
<g id="clust6" class="cluster">
|
||||
<title>cluster_room</title>
|
||||
<polygon fill="none" stroke="#7b1fa2" stroke-dasharray="5,2" points="642,-92.75 642,-184.38 952,-184.38 952,-92.75 642,-92.75"/>
|
||||
<text xml:space="preserve" text-anchor="middle" x="797" y="-165.18" font-family="Helvetica,sans-Serif" font-size="16.00">Managed Room (e.g., AMAR)</text>
|
||||
<text xml:space="preserve" text-anchor="middle" x="797" y="-165.18" font-family="Helvetica,sans-Serif" font-size="16.00">Managed Room</text>
|
||||
</g>
|
||||
<!-- hub -->
|
||||
<g id="node1" class="node">
|
||||
|
||||
|
Before Width: | Height: | Size: 20 KiB After Width: | Height: | Size: 20 KiB |
@@ -56,7 +56,10 @@
|
||||
<header>
|
||||
<div id="lang-toggle"></div>
|
||||
<h1>Atlas</h1>
|
||||
<p class="subtitle">Todo lo escrito</p>
|
||||
<p class="subtitle">
|
||||
<span class="lang-en">Actionable Documentation</span
|
||||
><span class="lang-es">Documentacion Accionable</span>
|
||||
</p>
|
||||
</header>
|
||||
|
||||
<main>
|
||||
@@ -192,58 +195,6 @@
|
||||
</article>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<section class="findings-section">
|
||||
<h2>
|
||||
<span class="lang-en">Examples</span
|
||||
><span class="lang-es">Ejemplos</span>
|
||||
</h2>
|
||||
<div class="findings-grid">
|
||||
<article class="finding-card">
|
||||
<h3>Feature Flow</h3>
|
||||
<p class="lang-en">
|
||||
Plain book - HTML presentation explaining the
|
||||
standardization pipeline.
|
||||
</p>
|
||||
<p class="lang-es">
|
||||
Plain book - Presentacion HTML explicando el
|
||||
pipeline de estandarizacion.
|
||||
</p>
|
||||
</article>
|
||||
<article class="finding-card">
|
||||
<h3>Feature Form Samples</h3>
|
||||
<p class="lang-en">
|
||||
Templated book - Form template + depot of actual
|
||||
feature forms.
|
||||
</p>
|
||||
<p class="lang-es">
|
||||
Templated book - Template de formulario + depot de
|
||||
feature forms reales.
|
||||
</p>
|
||||
</article>
|
||||
<article class="finding-card">
|
||||
<h3>Gherkin Samples</h3>
|
||||
<p class="lang-en">
|
||||
Templated book - Gherkin viewer + depot of .feature
|
||||
files.
|
||||
</p>
|
||||
<p class="lang-es">
|
||||
Templated book - Visor Gherkin + depot de archivos
|
||||
.feature.
|
||||
</p>
|
||||
</article>
|
||||
<article class="finding-card">
|
||||
<h3>Arch Model</h3>
|
||||
<p class="lang-en">
|
||||
Plain book - Static site with architecture diagrams.
|
||||
</p>
|
||||
<p class="lang-es">
|
||||
Plain book - Sitio estatico con diagramas de
|
||||
arquitectura.
|
||||
</p>
|
||||
</article>
|
||||
</div>
|
||||
</section>
|
||||
</main>
|
||||
|
||||
<footer>
|
||||
|
||||
@@ -56,7 +56,10 @@
|
||||
<header>
|
||||
<div id="lang-toggle"></div>
|
||||
<h1>Station</h1>
|
||||
<p class="subtitle">Todo lo construido</p>
|
||||
<p class="subtitle">
|
||||
<span class="lang-en">Monitors, Environments & Tools</span
|
||||
><span class="lang-es">Monitores, Entornos y Herramientas</span>
|
||||
</p>
|
||||
</header>
|
||||
|
||||
<main>
|
||||
|
||||
@@ -443,14 +443,7 @@
|
||||
<div
|
||||
class="vein{% if vein.status == 'live' or vein.status == 'building' %} active{% else %} disabled{% endif %}{% if loop.first %} selected{% endif %}"
|
||||
data-tab="{{ vein.slug }}"
|
||||
{%
|
||||
if
|
||||
vein.status=""
|
||||
="planned"
|
||||
%}data-disabled="true"
|
||||
{%
|
||||
endif
|
||||
%}
|
||||
{% if vein.status == "planned" %}data-disabled="true"{% endif %}
|
||||
>
|
||||
<h3>{{ vein.title }}</h3>
|
||||
</div>
|
||||
|
||||
0
soleprint/artery/shunts/__init__.py
Normal file → Executable file
0
soleprint/artery/shunts/__init__.py
Normal file → Executable file
0
soleprint/artery/shunts/example/README.md
Normal file → Executable file
0
soleprint/artery/shunts/example/README.md
Normal file → Executable file
0
soleprint/artery/shunts/example/depot/responses.json
Normal file → Executable file
0
soleprint/artery/shunts/example/depot/responses.json
Normal file → Executable file
0
soleprint/artery/shunts/example/main.py
Normal file → Executable file
0
soleprint/artery/shunts/example/main.py
Normal file → Executable file
0
soleprint/artery/shunts/mercadopago/.env.example
Normal file → Executable file
0
soleprint/artery/shunts/mercadopago/.env.example
Normal file → Executable file
0
soleprint/artery/shunts/mercadopago/README.md
Normal file → Executable file
0
soleprint/artery/shunts/mercadopago/README.md
Normal file → Executable file
0
soleprint/artery/shunts/mercadopago/__init__.py
Normal file → Executable file
0
soleprint/artery/shunts/mercadopago/__init__.py
Normal file → Executable file
0
soleprint/artery/shunts/mercadopago/api/__init__.py
Normal file → Executable file
0
soleprint/artery/shunts/mercadopago/api/__init__.py
Normal file → Executable file
0
soleprint/artery/shunts/mercadopago/api/routes.py
Normal file → Executable file
0
soleprint/artery/shunts/mercadopago/api/routes.py
Normal file → Executable file
0
soleprint/artery/shunts/mercadopago/core/__init__.py
Normal file → Executable file
0
soleprint/artery/shunts/mercadopago/core/__init__.py
Normal file → Executable file
0
soleprint/artery/shunts/mercadopago/core/config.py
Normal file → Executable file
0
soleprint/artery/shunts/mercadopago/core/config.py
Normal file → Executable file
0
soleprint/artery/shunts/mercadopago/main.py
Normal file → Executable file
0
soleprint/artery/shunts/mercadopago/main.py
Normal file → Executable file
0
soleprint/artery/shunts/mercadopago/requirements.txt
Normal file → Executable file
0
soleprint/artery/shunts/mercadopago/requirements.txt
Normal file → Executable file
0
soleprint/artery/shunts/mercadopago/run.py
Normal file → Executable file
0
soleprint/artery/shunts/mercadopago/run.py
Normal file → Executable file
0
soleprint/artery/shunts/mercadopago/templates/index.html
Normal file → Executable file
0
soleprint/artery/shunts/mercadopago/templates/index.html
Normal file → Executable file
@@ -4,24 +4,38 @@ Modelgen - Generic Model Generation Tool
|
||||
Generates typed models from various sources to various output formats.
|
||||
|
||||
Input sources:
|
||||
- Configuration files (soleprint.config.json style)
|
||||
- JSON Schema (planned)
|
||||
- Existing codebases: Django, SQLAlchemy, Prisma (planned - for databrowse)
|
||||
- Configuration files (soleprint config.json style)
|
||||
- Python dataclasses in schema/ folder
|
||||
- Existing codebases: Django, SQLAlchemy, Prisma (for extraction)
|
||||
|
||||
Output formats:
|
||||
- pydantic: Pydantic BaseModel classes
|
||||
- django: Django ORM models (planned)
|
||||
- prisma: Prisma schema (planned)
|
||||
- sqlalchemy: SQLAlchemy models (planned)
|
||||
- django: Django ORM models
|
||||
- typescript: TypeScript interfaces
|
||||
- protobuf: Protocol Buffer definitions
|
||||
- prisma: Prisma schema
|
||||
|
||||
Usage:
|
||||
python -m station.tools.modelgen from-config -c config.json -o models.py -f pydantic
|
||||
python -m station.tools.modelgen list-formats
|
||||
python -m soleprint.station.tools.modelgen from-config -c config.json -o models.py
|
||||
python -m soleprint.station.tools.modelgen from-schema -o models/ --targets pydantic,typescript
|
||||
python -m soleprint.station.tools.modelgen extract --source /path/to/django --targets pydantic
|
||||
python -m soleprint.station.tools.modelgen list-formats
|
||||
"""
|
||||
|
||||
__version__ = "0.1.0"
|
||||
__version__ = "0.2.0"
|
||||
|
||||
from .config_loader import ConfigLoader, load_config
|
||||
from .model_generator import WRITERS, ModelGenerator
|
||||
from .generator import GENERATORS, BaseGenerator
|
||||
from .loader import ConfigLoader, load_config
|
||||
from .model_generator import ModelGenerator
|
||||
|
||||
__all__ = ["ModelGenerator", "ConfigLoader", "load_config", "WRITERS"]
|
||||
# Backwards compatibility
|
||||
WRITERS = GENERATORS
|
||||
|
||||
__all__ = [
|
||||
"ModelGenerator",
|
||||
"ConfigLoader",
|
||||
"load_config",
|
||||
"GENERATORS",
|
||||
"WRITERS",
|
||||
"BaseGenerator",
|
||||
]
|
||||
|
||||
@@ -4,34 +4,34 @@ Modelgen - Generic Model Generation Tool
|
||||
Generates typed models from various sources to various formats.
|
||||
|
||||
Input sources:
|
||||
- Configuration files (soleprint.config.json style)
|
||||
- JSON Schema (planned)
|
||||
- Existing codebases: Django, SQLAlchemy, Prisma (planned - for databrowse)
|
||||
- from-config: Configuration files (soleprint config.json style)
|
||||
- from-schema: Python dataclasses in schema/ folder
|
||||
- extract: Existing codebases (Django, SQLAlchemy, Prisma)
|
||||
|
||||
Output formats:
|
||||
- pydantic: Pydantic BaseModel classes
|
||||
- django: Django ORM models (planned)
|
||||
- prisma: Prisma schema (planned)
|
||||
- sqlalchemy: SQLAlchemy models (planned)
|
||||
- django: Django ORM models
|
||||
- typescript: TypeScript interfaces
|
||||
- protobuf: Protocol Buffer definitions
|
||||
- prisma: Prisma schema
|
||||
|
||||
Usage:
|
||||
python -m station.tools.modelgen --help
|
||||
python -m station.tools.modelgen from-config -c config.json -o models/ -f pydantic
|
||||
python -m station.tools.modelgen from-schema -s schema.json -o models/ -f pydantic
|
||||
python -m station.tools.modelgen extract -s /path/to/django/app -o models/ -f pydantic
|
||||
|
||||
This is a GENERIC tool. For soleprint-specific builds, use:
|
||||
python build.py dev|deploy
|
||||
python -m soleprint.station.tools.modelgen --help
|
||||
python -m soleprint.station.tools.modelgen from-config -c config.json -o models.py
|
||||
python -m soleprint.station.tools.modelgen from-schema -o models/ --targets pydantic,typescript
|
||||
python -m soleprint.station.tools.modelgen extract --source /path/to/django --targets pydantic
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from .generator import GENERATORS
|
||||
|
||||
|
||||
def cmd_from_config(args):
|
||||
"""Generate models from a configuration file (soleprint.config.json style)."""
|
||||
from .config_loader import load_config
|
||||
"""Generate models from a configuration file (soleprint config.json style)."""
|
||||
from .loader import load_config
|
||||
from .model_generator import ModelGenerator
|
||||
|
||||
config_path = Path(args.config)
|
||||
@@ -52,35 +52,121 @@ def cmd_from_config(args):
|
||||
)
|
||||
result_path = generator.generate()
|
||||
|
||||
print(f"✓ Models generated: {result_path}")
|
||||
print(f"Models generated: {result_path}")
|
||||
|
||||
|
||||
def cmd_from_schema(args):
|
||||
"""Generate models from JSON Schema."""
|
||||
print("Error: from-schema not yet implemented", file=sys.stderr)
|
||||
print("Use from-config with a soleprint.config.json file for now", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
"""Generate models from Python dataclasses in schema/ folder."""
|
||||
from .loader import load_schema
|
||||
from .writer import write_file
|
||||
|
||||
# Determine schema path
|
||||
schema_path = Path(args.schema) if args.schema else Path.cwd() / "schema"
|
||||
|
||||
if not schema_path.exists():
|
||||
print(f"Error: Schema folder not found: {schema_path}", file=sys.stderr)
|
||||
print(
|
||||
"Create a schema/ folder with Python dataclasses and an __init__.py",
|
||||
file=sys.stderr,
|
||||
)
|
||||
print("that exports DATACLASSES and ENUMS lists.", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
print(f"Loading schema: {schema_path}")
|
||||
schema = load_schema(schema_path)
|
||||
|
||||
print(f"Found {len(schema.models)} models, {len(schema.enums)} enums")
|
||||
|
||||
# Parse targets
|
||||
targets = [t.strip() for t in args.targets.split(",")]
|
||||
output_dir = Path(args.output)
|
||||
|
||||
for target in targets:
|
||||
if target not in GENERATORS:
|
||||
print(f"Warning: Unknown target '{target}', skipping", file=sys.stderr)
|
||||
continue
|
||||
|
||||
generator = GENERATORS[target]()
|
||||
ext = generator.file_extension()
|
||||
|
||||
# Determine output filename (use target name to avoid overwrites)
|
||||
if len(targets) == 1 and args.output.endswith(ext):
|
||||
output_file = output_dir
|
||||
else:
|
||||
output_file = output_dir / f"models_{target}{ext}"
|
||||
|
||||
print(f"Generating {target} to: {output_file}")
|
||||
generator.generate(schema, output_file)
|
||||
|
||||
print("Done!")
|
||||
|
||||
|
||||
def cmd_extract(args):
|
||||
"""Extract models from existing codebase (for databrowse graphs)."""
|
||||
print("Error: extract not yet implemented", file=sys.stderr)
|
||||
print(
|
||||
"This will extract models from Django/SQLAlchemy/Prisma codebases.",
|
||||
file=sys.stderr,
|
||||
)
|
||||
print("Use cases:", file=sys.stderr)
|
||||
print(" - Generate browsable graphs for databrowse tool", file=sys.stderr)
|
||||
print(" - Convert between ORM formats", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
"""Extract models from existing codebase."""
|
||||
from .loader.extract import EXTRACTORS
|
||||
|
||||
source_path = Path(args.source)
|
||||
if not source_path.exists():
|
||||
print(f"Error: Source path not found: {source_path}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
# Auto-detect or use specified framework
|
||||
framework = args.framework
|
||||
extractor = None
|
||||
|
||||
if framework == "auto":
|
||||
for name, extractor_cls in EXTRACTORS.items():
|
||||
ext = extractor_cls(source_path)
|
||||
if ext.detect():
|
||||
framework = name
|
||||
extractor = ext
|
||||
print(f"Detected framework: {framework}")
|
||||
break
|
||||
|
||||
if not extractor:
|
||||
print("Error: Could not auto-detect framework", file=sys.stderr)
|
||||
print(f"Available frameworks: {list(EXTRACTORS.keys())}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
else:
|
||||
if framework not in EXTRACTORS:
|
||||
print(f"Error: Unknown framework: {framework}", file=sys.stderr)
|
||||
print(f"Available: {list(EXTRACTORS.keys())}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
extractor = EXTRACTORS[framework](source_path)
|
||||
|
||||
print(f"Extracting from: {source_path}")
|
||||
models, enums = extractor.extract()
|
||||
|
||||
print(f"Extracted {len(models)} models, {len(enums)} enums")
|
||||
|
||||
# Parse targets
|
||||
targets = [t.strip() for t in args.targets.split(",")]
|
||||
output_dir = Path(args.output)
|
||||
|
||||
for target in targets:
|
||||
if target not in GENERATORS:
|
||||
print(f"Warning: Unknown target '{target}', skipping", file=sys.stderr)
|
||||
continue
|
||||
|
||||
generator = GENERATORS[target]()
|
||||
ext = generator.file_extension()
|
||||
|
||||
# Determine output filename (use target name to avoid overwrites)
|
||||
if len(targets) == 1 and args.output.endswith(ext):
|
||||
output_file = output_dir
|
||||
else:
|
||||
output_file = output_dir / f"models_{target}{ext}"
|
||||
|
||||
print(f"Generating {target} to: {output_file}")
|
||||
generator.generate((models, enums), output_file)
|
||||
|
||||
print("Done!")
|
||||
|
||||
|
||||
def cmd_list_formats(args):
|
||||
"""List available output formats."""
|
||||
from .model_generator import ModelGenerator
|
||||
|
||||
print("Available output formats:")
|
||||
for fmt in ModelGenerator.available_formats():
|
||||
for fmt in GENERATORS.keys():
|
||||
print(f" - {fmt}")
|
||||
|
||||
|
||||
@@ -88,22 +174,25 @@ def main():
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Modelgen - Generic Model Generation Tool",
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
epilog=__doc__,
|
||||
)
|
||||
|
||||
subparsers = parser.add_subparsers(dest="command", required=True)
|
||||
|
||||
# Available formats for help text
|
||||
formats = list(GENERATORS.keys())
|
||||
formats_str = ", ".join(formats)
|
||||
|
||||
# from-config command
|
||||
config_parser = subparsers.add_parser(
|
||||
"from-config",
|
||||
help="Generate models from configuration file",
|
||||
help="Generate models from soleprint configuration file",
|
||||
)
|
||||
config_parser.add_argument(
|
||||
"--config",
|
||||
"-c",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Path to configuration file (e.g., soleprint.config.json)",
|
||||
help="Path to configuration file (e.g., config.json)",
|
||||
)
|
||||
config_parser.add_argument(
|
||||
"--output",
|
||||
@@ -117,22 +206,22 @@ def main():
|
||||
"-f",
|
||||
type=str,
|
||||
default="pydantic",
|
||||
choices=["pydantic", "django", "prisma", "sqlalchemy"],
|
||||
choices=["pydantic"], # Only pydantic for config mode
|
||||
help="Output format (default: pydantic)",
|
||||
)
|
||||
config_parser.set_defaults(func=cmd_from_config)
|
||||
|
||||
# from-schema command (placeholder)
|
||||
# from-schema command
|
||||
schema_parser = subparsers.add_parser(
|
||||
"from-schema",
|
||||
help="Generate models from JSON Schema (not yet implemented)",
|
||||
help="Generate models from Python dataclasses in schema/ folder",
|
||||
)
|
||||
schema_parser.add_argument(
|
||||
"--schema",
|
||||
"-s",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Path to JSON Schema file",
|
||||
default=None,
|
||||
help="Path to schema folder (default: ./schema)",
|
||||
)
|
||||
schema_parser.add_argument(
|
||||
"--output",
|
||||
@@ -142,19 +231,18 @@ def main():
|
||||
help="Output path (file or directory)",
|
||||
)
|
||||
schema_parser.add_argument(
|
||||
"--format",
|
||||
"-f",
|
||||
"--targets",
|
||||
"-t",
|
||||
type=str,
|
||||
default="pydantic",
|
||||
choices=["pydantic", "django", "prisma", "sqlalchemy"],
|
||||
help="Output format (default: pydantic)",
|
||||
help=f"Comma-separated output targets ({formats_str})",
|
||||
)
|
||||
schema_parser.set_defaults(func=cmd_from_schema)
|
||||
|
||||
# extract command (placeholder for databrowse)
|
||||
# extract command
|
||||
extract_parser = subparsers.add_parser(
|
||||
"extract",
|
||||
help="Extract models from existing codebase (not yet implemented)",
|
||||
help="Extract models from existing codebase",
|
||||
)
|
||||
extract_parser.add_argument(
|
||||
"--source",
|
||||
@@ -165,10 +253,11 @@ def main():
|
||||
)
|
||||
extract_parser.add_argument(
|
||||
"--framework",
|
||||
"-f",
|
||||
type=str,
|
||||
choices=["django", "sqlalchemy", "prisma", "auto"],
|
||||
default="auto",
|
||||
help="Source framework to extract from (default: auto-detect)",
|
||||
help="Source framework (default: auto-detect)",
|
||||
)
|
||||
extract_parser.add_argument(
|
||||
"--output",
|
||||
@@ -178,12 +267,11 @@ def main():
|
||||
help="Output path (file or directory)",
|
||||
)
|
||||
extract_parser.add_argument(
|
||||
"--format",
|
||||
"-f",
|
||||
"--targets",
|
||||
"-t",
|
||||
type=str,
|
||||
default="pydantic",
|
||||
choices=["pydantic", "django", "prisma", "sqlalchemy"],
|
||||
help="Output format (default: pydantic)",
|
||||
help=f"Comma-separated output targets ({formats_str})",
|
||||
)
|
||||
extract_parser.set_defaults(func=cmd_extract)
|
||||
|
||||
|
||||
40
soleprint/station/tools/modelgen/generator/__init__.py
Normal file
40
soleprint/station/tools/modelgen/generator/__init__.py
Normal file
@@ -0,0 +1,40 @@
|
||||
"""
|
||||
Generator - Stack-specific code generators for modelgen.
|
||||
|
||||
Supported generators:
|
||||
- PydanticGenerator: Pydantic BaseModel classes
|
||||
- DjangoGenerator: Django ORM models
|
||||
- TypeScriptGenerator: TypeScript interfaces
|
||||
- ProtobufGenerator: Protocol Buffer definitions
|
||||
- PrismaGenerator: Prisma schema
|
||||
"""
|
||||
|
||||
from typing import Dict, Type
|
||||
|
||||
from .base import BaseGenerator
|
||||
from .django import DjangoGenerator
|
||||
from .prisma import PrismaGenerator
|
||||
from .protobuf import ProtobufGenerator
|
||||
from .pydantic import PydanticGenerator
|
||||
from .typescript import TypeScriptGenerator
|
||||
|
||||
# Registry of available generators
|
||||
GENERATORS: Dict[str, Type[BaseGenerator]] = {
|
||||
"pydantic": PydanticGenerator,
|
||||
"django": DjangoGenerator,
|
||||
"typescript": TypeScriptGenerator,
|
||||
"ts": TypeScriptGenerator, # Alias
|
||||
"protobuf": ProtobufGenerator,
|
||||
"proto": ProtobufGenerator, # Alias
|
||||
"prisma": PrismaGenerator,
|
||||
}
|
||||
|
||||
__all__ = [
|
||||
"BaseGenerator",
|
||||
"PydanticGenerator",
|
||||
"DjangoGenerator",
|
||||
"TypeScriptGenerator",
|
||||
"ProtobufGenerator",
|
||||
"PrismaGenerator",
|
||||
"GENERATORS",
|
||||
]
|
||||
23
soleprint/station/tools/modelgen/generator/base.py
Normal file
23
soleprint/station/tools/modelgen/generator/base.py
Normal file
@@ -0,0 +1,23 @@
|
||||
"""
|
||||
Base Generator
|
||||
|
||||
Abstract base class for all code generators.
|
||||
"""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
class BaseGenerator(ABC):
|
||||
"""Abstract base for code generators."""
|
||||
|
||||
@abstractmethod
|
||||
def generate(self, models: Any, output_path: Path) -> None:
|
||||
"""Generate code for the given models to the specified path."""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def file_extension(self) -> str:
|
||||
"""Return the file extension for this format."""
|
||||
pass
|
||||
268
soleprint/station/tools/modelgen/generator/django.py
Normal file
268
soleprint/station/tools/modelgen/generator/django.py
Normal file
@@ -0,0 +1,268 @@
|
||||
"""
|
||||
Django Generator
|
||||
|
||||
Generates Django ORM models from model definitions.
|
||||
"""
|
||||
|
||||
import dataclasses as dc
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Any, List, get_type_hints
|
||||
|
||||
from ..helpers import format_opts, get_origin_name, get_type_name, unwrap_optional
|
||||
from ..loader.schema import EnumDefinition, ModelDefinition
|
||||
from ..types import DJANGO_SPECIAL, DJANGO_TYPES
|
||||
from .base import BaseGenerator
|
||||
|
||||
|
||||
class DjangoGenerator(BaseGenerator):
|
||||
"""Generates Django ORM model files."""
|
||||
|
||||
def file_extension(self) -> str:
|
||||
return ".py"
|
||||
|
||||
def generate(self, models, output_path: Path) -> None:
|
||||
"""Generate Django models to output_path."""
|
||||
output_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Handle different input types
|
||||
if hasattr(models, "models"):
|
||||
# SchemaLoader or similar
|
||||
content = self._generate_from_definitions(
|
||||
models.models, getattr(models, "enums", [])
|
||||
)
|
||||
elif isinstance(models, tuple):
|
||||
# (models, enums) tuple
|
||||
content = self._generate_from_definitions(models[0], models[1])
|
||||
elif isinstance(models, list):
|
||||
# List of dataclasses (MPR style)
|
||||
content = self._generate_from_dataclasses(models)
|
||||
else:
|
||||
raise ValueError(f"Unsupported input type: {type(models)}")
|
||||
|
||||
output_path.write_text(content)
|
||||
|
||||
def _generate_from_definitions(
|
||||
self, models: List[ModelDefinition], enums: List[EnumDefinition]
|
||||
) -> str:
|
||||
"""Generate from ModelDefinition objects."""
|
||||
lines = self._generate_header()
|
||||
|
||||
# Generate enums as TextChoices
|
||||
for enum_def in enums:
|
||||
lines.extend(self._generate_text_choices(enum_def))
|
||||
lines.append("")
|
||||
|
||||
# Generate models
|
||||
for model_def in models:
|
||||
lines.extend(self._generate_model_from_definition(model_def))
|
||||
lines.extend(["", ""])
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
|
||||
"""Generate from Python dataclasses (MPR style)."""
|
||||
lines = self._generate_header()
|
||||
|
||||
for cls in dataclasses:
|
||||
lines.extend(self._generate_model_from_dataclass(cls))
|
||||
lines.extend(["", ""])
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
def _generate_header(self) -> List[str]:
|
||||
"""Generate file header."""
|
||||
return [
|
||||
'"""',
|
||||
"Django ORM Models - GENERATED FILE",
|
||||
"",
|
||||
"Do not edit directly. Regenerate using modelgen.",
|
||||
'"""',
|
||||
"",
|
||||
"import uuid",
|
||||
"from django.db import models",
|
||||
"",
|
||||
]
|
||||
|
||||
def _generate_text_choices(self, enum_def: EnumDefinition) -> List[str]:
|
||||
"""Generate Django TextChoices from EnumDefinition."""
|
||||
lines = [
|
||||
f"class {enum_def.name}(models.TextChoices):",
|
||||
]
|
||||
for name, value in enum_def.values:
|
||||
label = name.replace("_", " ").title()
|
||||
lines.append(f' {name} = "{value}", "{label}"')
|
||||
return lines
|
||||
|
||||
def _generate_model_from_definition(self, model_def: ModelDefinition) -> List[str]:
|
||||
"""Generate Django model from ModelDefinition."""
|
||||
docstring = model_def.docstring or model_def.name
|
||||
lines = [
|
||||
f"class {model_def.name}(models.Model):",
|
||||
f' """{docstring.strip().split(chr(10))[0]}"""',
|
||||
"",
|
||||
]
|
||||
|
||||
for field in model_def.fields:
|
||||
django_field = self._resolve_field_type(
|
||||
field.name, field.type_hint, field.default, field.optional
|
||||
)
|
||||
lines.append(f" {field.name} = {django_field}")
|
||||
|
||||
# Add Meta and __str__
|
||||
lines.extend(
|
||||
[
|
||||
"",
|
||||
" class Meta:",
|
||||
' ordering = ["-created_at"]'
|
||||
if any(f.name == "created_at" for f in model_def.fields)
|
||||
else " pass",
|
||||
"",
|
||||
" def __str__(self):",
|
||||
]
|
||||
)
|
||||
|
||||
# Determine __str__ return
|
||||
field_names = [f.name for f in model_def.fields]
|
||||
if "filename" in field_names:
|
||||
lines.append(" return self.filename")
|
||||
elif "name" in field_names:
|
||||
lines.append(" return self.name")
|
||||
else:
|
||||
lines.append(" return str(self.id)")
|
||||
|
||||
return lines
|
||||
|
||||
def _generate_model_from_dataclass(self, cls: type) -> List[str]:
|
||||
"""Generate Django model from a dataclass (MPR style)."""
|
||||
docstring = cls.__doc__ or cls.__name__
|
||||
lines = [
|
||||
f"class {cls.__name__}(models.Model):",
|
||||
f' """{docstring.strip().split(chr(10))[0]}"""',
|
||||
"",
|
||||
]
|
||||
|
||||
hints = get_type_hints(cls)
|
||||
fields = {f.name: f for f in dc.fields(cls)}
|
||||
|
||||
# Check for enums and add Status inner class if needed
|
||||
for type_hint in hints.values():
|
||||
base, _ = unwrap_optional(type_hint)
|
||||
if isinstance(base, type) and issubclass(base, Enum):
|
||||
lines.append(" class Status(models.TextChoices):")
|
||||
for member in base:
|
||||
label = member.name.replace("_", " ").title()
|
||||
lines.append(f' {member.name} = "{member.value}", "{label}"')
|
||||
lines.append("")
|
||||
break
|
||||
|
||||
# Generate fields
|
||||
for name, type_hint in hints.items():
|
||||
if name.startswith("_"):
|
||||
continue
|
||||
field = fields.get(name)
|
||||
default = dc.MISSING
|
||||
if field and field.default is not dc.MISSING:
|
||||
default = field.default
|
||||
django_field = self._resolve_field_type(name, type_hint, default, False)
|
||||
lines.append(f" {name} = {django_field}")
|
||||
|
||||
# Add Meta and __str__
|
||||
lines.extend(
|
||||
[
|
||||
"",
|
||||
" class Meta:",
|
||||
' ordering = ["-created_at"]'
|
||||
if "created_at" in hints
|
||||
else " pass",
|
||||
"",
|
||||
" def __str__(self):",
|
||||
]
|
||||
)
|
||||
|
||||
if "filename" in hints:
|
||||
lines.append(" return self.filename")
|
||||
elif "name" in hints:
|
||||
lines.append(" return self.name")
|
||||
else:
|
||||
lines.append(" return str(self.id)")
|
||||
|
||||
return lines
|
||||
|
||||
def _resolve_field_type(
|
||||
self, name: str, type_hint: Any, default: Any, optional: bool
|
||||
) -> str:
|
||||
"""Resolve Python type to Django field."""
|
||||
# Special fields
|
||||
if name in DJANGO_SPECIAL:
|
||||
return DJANGO_SPECIAL[name]
|
||||
|
||||
base, is_optional = unwrap_optional(type_hint)
|
||||
optional = optional or is_optional
|
||||
origin = get_origin_name(base)
|
||||
type_name = get_type_name(base)
|
||||
opts = format_opts(optional)
|
||||
|
||||
# Container types
|
||||
if origin == "dict":
|
||||
return DJANGO_TYPES["dict"]
|
||||
if origin == "list":
|
||||
return DJANGO_TYPES["list"]
|
||||
|
||||
# UUID / datetime
|
||||
if type_name == "UUID":
|
||||
return DJANGO_TYPES["UUID"].format(opts=opts)
|
||||
if type_name == "datetime":
|
||||
return DJANGO_TYPES["datetime"].format(opts=opts)
|
||||
|
||||
# Enum
|
||||
if isinstance(base, type) and issubclass(base, Enum):
|
||||
extra = []
|
||||
if optional:
|
||||
extra.append("null=True, blank=True")
|
||||
if default is not dc.MISSING and isinstance(default, Enum):
|
||||
extra.append(f"default=Status.{default.name}")
|
||||
return DJANGO_TYPES["enum"].format(
|
||||
opts=", " + ", ".join(extra) if extra else ""
|
||||
)
|
||||
|
||||
# Text fields (based on name heuristics)
|
||||
if base is str and any(
|
||||
x in name for x in ("message", "comments", "description")
|
||||
):
|
||||
return DJANGO_TYPES["text"]
|
||||
|
||||
# BigInt fields
|
||||
if base is int and name in ("file_size", "bitrate"):
|
||||
return DJANGO_TYPES["bigint"].format(opts=opts)
|
||||
|
||||
# String with max_length
|
||||
if base is str:
|
||||
max_length = 1000 if "path" in name else 500 if "filename" in name else 255
|
||||
return DJANGO_TYPES[str].format(
|
||||
max_length=max_length, opts=", " + opts if opts else ""
|
||||
)
|
||||
|
||||
# Integer
|
||||
if base is int:
|
||||
extra = [opts] if opts else []
|
||||
if default is not dc.MISSING and not callable(default):
|
||||
extra.append(f"default={default}")
|
||||
return DJANGO_TYPES[int].format(opts=", ".join(extra))
|
||||
|
||||
# Float
|
||||
if base is float:
|
||||
extra = [opts] if opts else []
|
||||
if default is not dc.MISSING and not callable(default):
|
||||
extra.append(f"default={default}")
|
||||
return DJANGO_TYPES[float].format(opts=", ".join(extra))
|
||||
|
||||
# Boolean
|
||||
if base is bool:
|
||||
default_val = default if default is not dc.MISSING else False
|
||||
return DJANGO_TYPES[bool].format(default=default_val)
|
||||
|
||||
# Fallback to CharField
|
||||
return DJANGO_TYPES[str].format(
|
||||
max_length=255, opts=", " + opts if opts else ""
|
||||
)
|
||||
173
soleprint/station/tools/modelgen/generator/prisma.py
Normal file
173
soleprint/station/tools/modelgen/generator/prisma.py
Normal file
@@ -0,0 +1,173 @@
|
||||
"""
|
||||
Prisma Generator
|
||||
|
||||
Generates Prisma schema from model definitions.
|
||||
"""
|
||||
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Any, List, get_type_hints
|
||||
|
||||
from ..helpers import get_origin_name, get_type_name, unwrap_optional
|
||||
from ..loader.schema import EnumDefinition, ModelDefinition
|
||||
from ..types import PRISMA_SPECIAL, PRISMA_TYPES
|
||||
from .base import BaseGenerator
|
||||
|
||||
|
||||
class PrismaGenerator(BaseGenerator):
    """Emit a Prisma schema file from loaded model definitions.

    ``generate`` accepts three input shapes: a SchemaLoader (anything with a
    ``models`` attribute), a ``(models, enums)`` tuple, or a plain list of
    Python dataclasses (MPR style).
    """

    def file_extension(self) -> str:
        """Generated files use the .prisma suffix."""
        return ".prisma"

    def generate(self, models, output_path: Path) -> None:
        """Render *models* as a Prisma schema and write it to *output_path*."""
        output_path.parent.mkdir(parents=True, exist_ok=True)

        # Dispatch on the shape of the input (loader / tuple / dataclass list).
        if hasattr(models, "models"):
            schema = self._generate_from_definitions(
                models.models, getattr(models, "enums", [])
            )
        elif isinstance(models, tuple):
            schema = self._generate_from_definitions(models[0], models[1])
        elif isinstance(models, list):
            schema = self._generate_from_dataclasses(models)
        else:
            raise ValueError(f"Unsupported input type: {type(models)}")

        output_path.write_text(schema)

    def _generate_from_definitions(
        self, models: List[ModelDefinition], enums: List[EnumDefinition]
    ) -> str:
        """Build the schema text from ModelDefinition/EnumDefinition objects."""
        out = self._generate_header()

        # Enums first so models can reference them.
        for enum_def in enums:
            out.extend(self._generate_enum(enum_def))
            out.append("")

        for model_def in models:
            out.extend(self._generate_model_from_definition(model_def))
            out.append("")

        return "\n".join(out)

    def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
        """Build the schema text from plain Python dataclasses (MPR style)."""
        out = self._generate_header()

        # Emit every Enum referenced by any field exactly once, before models.
        seen_enums: set = set()
        for cls in dataclasses:
            for hint in get_type_hints(cls).values():
                inner, _ = unwrap_optional(hint)
                if not (isinstance(inner, type) and issubclass(inner, Enum)):
                    continue
                if inner.__name__ in seen_enums:
                    continue
                out.extend(self._generate_enum_from_python(inner))
                out.append("")
                seen_enums.add(inner.__name__)

        for cls in dataclasses:
            out.extend(self._generate_model_from_dataclass(cls))
            out.append("")

        return "\n".join(out)

    def _generate_header(self) -> List[str]:
        """Fixed generator/datasource preamble written at the top of every schema."""
        return [
            "// Prisma Schema - GENERATED FILE",
            "//",
            "// Do not edit directly. Regenerate using modelgen.",
            "",
            "generator client {",
            '  provider = "prisma-client-py"',
            "}",
            "",
            "datasource db {",
            '  provider = "postgresql"',
            '  url      = env("DATABASE_URL")',
            "}",
            "",
        ]

    def _generate_enum(self, enum_def: EnumDefinition) -> List[str]:
        """Render one Prisma enum block from an EnumDefinition."""
        members = [f"  {name}" for name, _ in enum_def.values]
        return [f"enum {enum_def.name} {{", *members, "}"]

    def _generate_enum_from_python(self, enum_cls: type) -> List[str]:
        """Render one Prisma enum block from a Python Enum class."""
        members = [f"  {member.name}" for member in enum_cls]
        return [f"enum {enum_cls.__name__} {{", *members, "}"]

    def _generate_model_from_definition(self, model_def: ModelDefinition) -> List[str]:
        """Render one Prisma model block from a ModelDefinition."""
        rows = [
            f"  {field.name} {self._resolve_type(field.name, field.type_hint, field.optional)}"
            for field in model_def.fields
        ]
        return [f"model {model_def.name} {{", *rows, "}"]

    def _generate_model_from_dataclass(self, cls: type) -> List[str]:
        """Render one Prisma model block from a dataclass (private names skipped)."""
        rows = [
            f"  {name} {self._resolve_type(name, hint, False)}"
            for name, hint in get_type_hints(cls).items()
            if not name.startswith("_")
        ]
        return [f"model {cls.__name__} {{", *rows, "}"]

    def _resolve_type(self, name: str, type_hint: Any, optional: bool) -> str:
        """Map one field's Python annotation to a Prisma type string."""
        # Well-known field names override type inference entirely.
        if name in PRISMA_SPECIAL:
            return PRISMA_SPECIAL[name]

        inner, nullable = unwrap_optional(type_hint)
        optional = optional or nullable
        origin = get_origin_name(inner)
        type_name = get_type_name(inner)

        if origin in ("dict", "list"):
            # Container types map to Json (or whatever PRISMA_TYPES says).
            mapped = PRISMA_TYPES.get(origin, "Json")
        elif type_name in ("UUID", "datetime"):
            mapped = PRISMA_TYPES.get(type_name, "String")
        elif isinstance(inner, type) and issubclass(inner, Enum):
            # Enums reference the generated Prisma enum by name.
            mapped = inner.__name__
        else:
            mapped = PRISMA_TYPES.get(inner, "String")

        return f"{mapped}?" if optional else mapped
|
||||
168
soleprint/station/tools/modelgen/generator/protobuf.py
Normal file
168
soleprint/station/tools/modelgen/generator/protobuf.py
Normal file
@@ -0,0 +1,168 @@
|
||||
"""
|
||||
Protobuf Generator
|
||||
|
||||
Generates Protocol Buffer definitions from model definitions.
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any, List, get_type_hints
|
||||
|
||||
from ..helpers import get_origin_name, unwrap_optional
|
||||
from ..loader.schema import GrpcServiceDefinition, ModelDefinition
|
||||
from ..types import PROTO_RESOLVERS
|
||||
from .base import BaseGenerator
|
||||
|
||||
|
||||
class ProtobufGenerator(BaseGenerator):
    """Generates Protocol Buffer (proto3) definition files.

    ``generate`` accepts one of:
      * a SchemaLoader exposing ``grpc_messages`` / ``grpc_service``,
      * a ``(messages, service_def)`` tuple, or
      * a plain list of Python dataclasses (MPR style).
    """

    def file_extension(self) -> str:
        """Generated files use the .proto suffix."""
        return ".proto"

    def generate(self, models, output_path: Path) -> None:
        """Generate protobuf definitions to output_path.

        Raises:
            ValueError: if *models* is none of the supported input shapes.
        """
        output_path.parent.mkdir(parents=True, exist_ok=True)

        # Handle different input types
        if hasattr(models, "grpc_messages"):
            # SchemaLoader with gRPC definitions
            content = self._generate_from_loader(models)
        elif isinstance(models, tuple) and len(models) >= 2:
            # (messages, service_def) tuple.
            # FIX: was `len(models) >= 3`, which rejected the documented
            # two-element tuple and sent it to the ValueError branch below;
            # only indices 0 and 1 are ever used.
            content = self._generate_from_definitions(models[0], models[1])
        elif isinstance(models, list):
            # List of dataclasses (MPR style)
            content = self._generate_from_dataclasses(models)
        else:
            raise ValueError(f"Unsupported input type: {type(models)}")

        output_path.write_text(content)

    def _generate_from_loader(self, loader) -> str:
        """Generate from SchemaLoader; the service may be absent (None)."""
        messages = loader.grpc_messages
        service = loader.grpc_service

        # Fall back to placeholder package/service names when no service
        # definition was loaded.
        lines = self._generate_header(
            service.package if service else "service",
            service.name if service else "Service",
            service.methods if service else [],
        )

        for model_def in messages:
            lines.extend(self._generate_message_from_definition(model_def))
            lines.append("")

        return "\n".join(lines)

    def _generate_from_definitions(
        self, messages: List[ModelDefinition], service: GrpcServiceDefinition
    ) -> str:
        """Generate from ModelDefinition objects plus a service definition."""
        lines = self._generate_header(service.package, service.name, service.methods)

        for model_def in messages:
            lines.extend(self._generate_message_from_definition(model_def))
            lines.append("")

        return "\n".join(lines)

    def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
        """Generate from Python dataclasses (MPR style); emits no service block."""
        lines = self._generate_header("service", "Service", [])

        for cls in dataclasses:
            lines.extend(self._generate_message_from_dataclass(cls))
            lines.append("")

        return "\n".join(lines)

    def _generate_header(
        self, package: str, service_name: str, methods: List[dict]
    ) -> List[str]:
        """Generate file header (syntax, package) plus optional service block."""
        lines = [
            "// Protocol Buffer Definitions - GENERATED FILE",
            "//",
            "// Do not edit directly. Regenerate using modelgen.",
            "",
            'syntax = "proto3";',
            "",
            f"package {package};",
            "",
        ]

        if methods:
            lines.append(f"service {service_name} {{")
            for m in methods:
                # Request/response entries may hold classes or plain names.
                req = (
                    m["request"].__name__
                    if hasattr(m["request"], "__name__")
                    else str(m["request"])
                )
                resp = (
                    m["response"].__name__
                    if hasattr(m["response"], "__name__")
                    else str(m["response"])
                )
                returns = f"stream {resp}" if m.get("stream_response") else resp
                lines.append(f"  rpc {m['name']}({req}) returns ({returns});")
            lines.extend(["}", ""])

        return lines

    def _generate_message_from_definition(
        self, model_def: ModelDefinition
    ) -> List[str]:
        """Generate proto message from ModelDefinition."""
        lines = [f"message {model_def.name} {{"]

        if not model_def.fields:
            lines.append("  // Empty")
        else:
            # Field numbers are assigned sequentially starting at 1.
            for i, field in enumerate(model_def.fields, 1):
                proto_type, optional = self._resolve_type(field.type_hint)
                prefix = (
                    "optional "
                    if optional and not proto_type.startswith("repeated")
                    else ""
                )
                lines.append(f"  {prefix}{proto_type} {field.name} = {i};")

        lines.append("}")
        return lines

    def _generate_message_from_dataclass(self, cls: type) -> List[str]:
        """Generate proto message from a dataclass."""
        lines = [f"message {cls.__name__} {{"]

        hints = get_type_hints(cls)
        if not hints:
            lines.append("  // Empty")
        else:
            for i, (name, type_hint) in enumerate(hints.items(), 1):
                proto_type, optional = self._resolve_type(type_hint)
                prefix = (
                    "optional "
                    if optional and not proto_type.startswith("repeated")
                    else ""
                )
                lines.append(f"  {prefix}{proto_type} {name} = {i};")

        lines.append("}")
        return lines

    def _resolve_type(self, type_hint: Any) -> tuple[str, bool]:
        """Resolve Python type to proto type. Returns (type, is_optional)."""
        base, optional = unwrap_optional(type_hint)
        origin = get_origin_name(base)

        # Look up resolver by container origin first, then by the type itself.
        resolver = PROTO_RESOLVERS.get(origin) or PROTO_RESOLVERS.get(base)

        if resolver:
            result = resolver(base)
            # `repeated` fields cannot also carry the `optional` label in proto3.
            is_repeated = result.startswith("repeated")
            return result, optional and not is_repeated

        # Unknown types degrade to string.
        return "string", optional
|
||||
427
soleprint/station/tools/modelgen/generator/pydantic.py
Normal file
427
soleprint/station/tools/modelgen/generator/pydantic.py
Normal file
@@ -0,0 +1,427 @@
|
||||
"""
|
||||
Pydantic Generator
|
||||
|
||||
Generates Pydantic BaseModel classes from model definitions.
|
||||
"""
|
||||
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Any, List, get_type_hints
|
||||
|
||||
from ..helpers import get_origin_name, get_type_name, unwrap_optional
|
||||
from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
|
||||
from ..types import PYDANTIC_RESOLVERS
|
||||
from .base import BaseGenerator
|
||||
|
||||
|
||||
class PydanticGenerator(BaseGenerator):
    """Generates Pydantic model files.

    Supports four input shapes in ``generate``: a ConfigLoader (soleprint
    config.json mode), a SchemaLoader, a ``(models, enums)`` tuple from the
    extractor, or a plain list of Python dataclasses (MPR style).
    """

    def file_extension(self) -> str:
        # Output is a Python module of BaseModel subclasses.
        return ".py"

    def generate(self, models, output_path: Path) -> None:
        """Generate Pydantic models to output_path.

        Raises ValueError if *models* is none of the supported shapes.
        """
        output_path.parent.mkdir(parents=True, exist_ok=True)

        # Detect input type and generate accordingly (duck-typed dispatch).
        if hasattr(models, "get_shared_component"):
            # ConfigLoader (soleprint config)
            content = self._generate_from_config(models)
        elif hasattr(models, "models"):
            # SchemaLoader
            content = self._generate_from_definitions(
                models.models, getattr(models, "enums", [])
            )
        elif isinstance(models, tuple):
            # (models, enums) tuple from extractor
            content = self._generate_from_definitions(models[0], models[1])
        elif isinstance(models, list):
            # List of dataclasses (MPR style)
            content = self._generate_from_dataclasses(models)
        else:
            raise ValueError(f"Unsupported input type: {type(models)}")

        output_path.write_text(content)

    def _generate_from_definitions(
        self, models: List[ModelDefinition], enums: List[EnumDefinition]
    ) -> str:
        """Generate from ModelDefinition objects (schema/extract mode)."""
        lines = self._generate_header()

        # Generate enums first so models can reference them.
        for enum_def in enums:
            lines.extend(self._generate_enum(enum_def))
            lines.append("")

        # Generate models
        for model_def in models:
            lines.extend(self._generate_model_from_definition(model_def))
            lines.append("")

        return "\n".join(lines)

    def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
        """Generate from Python dataclasses (MPR style)."""
        lines = self._generate_header()

        # Collect and generate enums first, emitting each enum only once
        # even when several dataclasses reference it.
        enums_generated = set()
        for cls in dataclasses:
            hints = get_type_hints(cls)
            for type_hint in hints.values():
                base, _ = unwrap_optional(type_hint)
                if isinstance(base, type) and issubclass(base, Enum):
                    if base.__name__ not in enums_generated:
                        lines.extend(self._generate_enum_from_python(base))
                        lines.append("")
                        enums_generated.add(base.__name__)

        # Generate models
        for cls in dataclasses:
            lines.extend(self._generate_model_from_dataclass(cls))
            lines.append("")

        return "\n".join(lines)

    def _generate_header(self) -> List[str]:
        """Generate file header: module docstring plus fixed imports."""
        return [
            '"""',
            "Pydantic Models - GENERATED FILE",
            "",
            "Do not edit directly. Regenerate using modelgen.",
            '"""',
            "",
            "from datetime import datetime",
            "from enum import Enum",
            "from typing import Any, Dict, List, Optional",
            "from uuid import UUID",
            "",
            "from pydantic import BaseModel, Field",
            "",
        ]

    def _generate_enum(self, enum_def: EnumDefinition) -> List[str]:
        """Generate Pydantic enum (str-valued) from EnumDefinition."""
        lines = [f"class {enum_def.name}(str, Enum):"]
        for name, value in enum_def.values:
            lines.append(f'    {name} = "{value}"')
        return lines

    def _generate_enum_from_python(self, enum_cls: type) -> List[str]:
        """Generate Pydantic enum (str-valued) from a Python Enum class."""
        lines = [f"class {enum_cls.__name__}(str, Enum):"]
        for member in enum_cls:
            lines.append(f'    {member.name} = "{member.value}"')
        return lines

    def _generate_model_from_definition(self, model_def: ModelDefinition) -> List[str]:
        """Generate Pydantic model from ModelDefinition."""
        # First docstring line only, falling back to the model name.
        docstring = model_def.docstring or model_def.name
        lines = [
            f"class {model_def.name}(BaseModel):",
            f'    """{docstring.strip().split(chr(10))[0]}"""',
        ]

        if not model_def.fields:
            lines.append("    pass")
        else:
            for field in model_def.fields:
                py_type = self._resolve_type(field.type_hint, field.optional)
                default = self._format_default(field.default, field.optional)
                lines.append(f"    {field.name}: {py_type}{default}")

        return lines

    def _generate_model_from_dataclass(self, cls: type) -> List[str]:
        """Generate Pydantic model from a dataclass (private names skipped)."""
        import dataclasses as dc

        docstring = cls.__doc__ or cls.__name__
        lines = [
            f"class {cls.__name__}(BaseModel):",
            f'    """{docstring.strip().split(chr(10))[0]}"""',
        ]

        hints = get_type_hints(cls)
        fields = {f.name: f for f in dc.fields(cls)}

        for name, type_hint in hints.items():
            if name.startswith("_"):
                continue

            # NOTE(review): only field.default is consulted; a dataclass
            # field declared with default_factory is emitted with no
            # default — confirm that is intended.
            field = fields.get(name)
            default_val = dc.MISSING
            if field:
                if field.default is not dc.MISSING:
                    default_val = field.default

            py_type = self._resolve_type(type_hint, False)
            default = self._format_default(default_val, "Optional" in py_type)
            lines.append(f"    {name}: {py_type}{default}")

        return lines

    def _resolve_type(self, type_hint: Any, optional: bool) -> str:
        """Resolve Python type to Pydantic type string."""
        base, is_optional = unwrap_optional(type_hint)
        optional = optional or is_optional
        origin = get_origin_name(base)
        type_name = get_type_name(base)

        # Look up resolver: container origin, then special type name,
        # then the type itself, then the generic enum resolver.
        resolver = (
            PYDANTIC_RESOLVERS.get(origin)
            or PYDANTIC_RESOLVERS.get(type_name)
            or PYDANTIC_RESOLVERS.get(base)
            or (
                PYDANTIC_RESOLVERS["enum"]
                if isinstance(base, type) and issubclass(base, Enum)
                else None
            )
        )

        # Unknown types degrade to plain str.
        result = resolver(base) if resolver else "str"
        return f"Optional[{result}]" if optional else result

    def _format_default(self, default: Any, optional: bool) -> str:
        """Format the ' = <default>' suffix for a generated field ('' if none)."""
        import dataclasses as dc

        if optional:
            # NOTE(review): optional wins over any declared default, so an
            # Optional field with a concrete default is emitted as `= None`
            # — confirm this is intentional.
            return " = None"
        if default is dc.MISSING or default is None:
            return ""
        if isinstance(default, str):
            return f' = "{default}"'
        if isinstance(default, Enum):
            return f" = {default.__class__.__name__}.{default.name}"
        if callable(default):
            # Heuristic: a factory whose repr mentions 'list' becomes a
            # list default; any other factory is dropped.
            return " = Field(default_factory=list)" if "list" in str(default) else ""
        return f" = {default!r}"

    def _generate_from_config(self, config) -> str:
        """Generate from ConfigLoader (soleprint config.json mode).

        Pulls named components/systems out of the config and splices them
        into a fixed module template returned as one f-string.
        """
        # Get component names from config
        config_comp = config.get_shared_component("config")
        data_comp = config.get_shared_component("data")

        data_flow_sys = config.get_system("data_flow")
        doc_sys = config.get_system("documentation")
        exec_sys = config.get_system("execution")

        connector_comp = config.get_component("data_flow", "connector")
        pulse_comp = config.get_component("data_flow", "composed")

        pattern_comp = config.get_component("documentation", "pattern")
        doc_composed = config.get_component("documentation", "composed")

        tool_comp = config.get_component("execution", "utility")
        monitor_comp = config.get_component("execution", "watcher")
        cabinet_comp = config.get_component("execution", "container")
        exec_composed = config.get_component("execution", "composed")

        return f'''"""
Pydantic models - Generated from {config.framework.name}.config.json

DO NOT EDIT MANUALLY - Regenerate from config
"""

from enum import Enum
from typing import List, Literal, Optional

from pydantic import BaseModel, Field


class Status(str, Enum):
    PENDING = "pending"
    PLANNED = "planned"
    BUILDING = "building"
    DEV = "dev"
    LIVE = "live"
    READY = "ready"


class System(str, Enum):
    {data_flow_sys.name.upper()} = "{data_flow_sys.name}"
    {doc_sys.name.upper()} = "{doc_sys.name}"
    {exec_sys.name.upper()} = "{exec_sys.name}"


class ToolType(str, Enum):
    APP = "app"
    CLI = "cli"


# === Shared Components ===


class {config_comp.title}(BaseModel):
    """{config_comp.description}. Shared across {data_flow_sys.name}, {exec_sys.name}."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    config_path: Optional[str] = None


class {data_comp.title}(BaseModel):
    """{data_comp.description}. Shared across all systems."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    source_template: Optional[str] = None
    data_path: Optional[str] = None


# === System-Specific Components ===


class {connector_comp.title}(BaseModel):
    """{connector_comp.description} ({data_flow_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    system: Literal["{data_flow_sys.name}"] = "{data_flow_sys.name}"
    mock: Optional[bool] = None
    description: Optional[str] = None


class {pattern_comp.title}(BaseModel):
    """{pattern_comp.description} ({doc_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    template_path: Optional[str] = None
    system: Literal["{doc_sys.name}"] = "{doc_sys.name}"


class {tool_comp.title}(BaseModel):
    """{tool_comp.description} ({exec_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    system: Literal["{exec_sys.name}"] = "{exec_sys.name}"
    type: Optional[ToolType] = None
    description: Optional[str] = None
    path: Optional[str] = None
    url: Optional[str] = None
    cli: Optional[str] = None


class {monitor_comp.title}(BaseModel):
    """{monitor_comp.description} ({exec_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    system: Literal["{exec_sys.name}"] = "{exec_sys.name}"


class {cabinet_comp.title}(BaseModel):
    """{cabinet_comp.description} ({exec_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    tools: List[{tool_comp.title}] = Field(default_factory=list)
    system: Literal["{exec_sys.name}"] = "{exec_sys.name}"


# === Composed Types ===


class {pulse_comp.title}(BaseModel):
    """{pulse_comp.description} ({data_flow_sys.name}). Formula: {pulse_comp.formula}."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    {connector_comp.name}: Optional[{connector_comp.title}] = None
    {config_comp.name}: Optional[{config_comp.title}] = None
    {data_comp.name}: Optional[{data_comp.title}] = None
    system: Literal["{data_flow_sys.name}"] = "{data_flow_sys.name}"


class {doc_composed.title}(BaseModel):
    """{doc_composed.description} ({doc_sys.name}). Formula: {doc_composed.formula}."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    template: Optional[{pattern_comp.title}] = None
    {data_comp.name}: Optional[{data_comp.title}] = None
    output_{data_comp.name}: Optional[{data_comp.title}] = None
    system: Literal["{doc_sys.name}"] = "{doc_sys.name}"


class {exec_composed.title}(BaseModel):
    """{exec_composed.description} ({exec_sys.name}). Formula: {exec_composed.formula}."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    cabinet: Optional[{cabinet_comp.title}] = None
    {config_comp.name}: Optional[{config_comp.title}] = None
    {data_comp.plural}: List[{data_comp.title}] = Field(default_factory=list)
    system: Literal["{exec_sys.name}"] = "{exec_sys.name}"


# === Collection wrappers for JSON files ===


class {config_comp.title}Collection(BaseModel):
    items: List[{config_comp.title}] = Field(default_factory=list)


class {data_comp.title}Collection(BaseModel):
    items: List[{data_comp.title}] = Field(default_factory=list)


class {connector_comp.title}Collection(BaseModel):
    items: List[{connector_comp.title}] = Field(default_factory=list)


class {pattern_comp.title}Collection(BaseModel):
    items: List[{pattern_comp.title}] = Field(default_factory=list)


class {tool_comp.title}Collection(BaseModel):
    items: List[{tool_comp.title}] = Field(default_factory=list)


class {monitor_comp.title}Collection(BaseModel):
    items: List[{monitor_comp.title}] = Field(default_factory=list)


class {cabinet_comp.title}Collection(BaseModel):
    items: List[{cabinet_comp.title}] = Field(default_factory=list)


class {pulse_comp.title}Collection(BaseModel):
    items: List[{pulse_comp.title}] = Field(default_factory=list)


class {doc_composed.title}Collection(BaseModel):
    items: List[{doc_composed.title}] = Field(default_factory=list)


class {exec_composed.title}Collection(BaseModel):
    items: List[{exec_composed.title}] = Field(default_factory=list)
'''
|
||||
144
soleprint/station/tools/modelgen/generator/typescript.py
Normal file
144
soleprint/station/tools/modelgen/generator/typescript.py
Normal file
@@ -0,0 +1,144 @@
|
||||
"""
|
||||
TypeScript Generator
|
||||
|
||||
Generates TypeScript interfaces from model definitions.
|
||||
"""
|
||||
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Any, List, get_type_hints
|
||||
|
||||
from ..helpers import get_origin_name, get_type_name, unwrap_optional
|
||||
from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
|
||||
from ..types import TS_RESOLVERS
|
||||
from .base import BaseGenerator
|
||||
|
||||
|
||||
class TypeScriptGenerator(BaseGenerator):
    """Emit TypeScript type declarations from model definitions.

    ``generate`` accepts a SchemaLoader (anything with a ``models``
    attribute), a ``(models, enums)`` tuple, or a plain list of Python
    dataclasses (MPR style).
    """

    def file_extension(self) -> str:
        """Generated files use the .ts suffix."""
        return ".ts"

    def generate(self, models, output_path: Path) -> None:
        """Render *models* as TypeScript and write the result to *output_path*."""
        output_path.parent.mkdir(parents=True, exist_ok=True)

        # Dispatch on the shape of the input (loader / tuple / dataclass list).
        if hasattr(models, "models"):
            text = self._generate_from_definitions(
                models.models, getattr(models, "enums", [])
            )
        elif isinstance(models, tuple):
            text = self._generate_from_definitions(models[0], models[1])
        elif isinstance(models, list):
            text = self._generate_from_dataclasses(models)
        else:
            raise ValueError(f"Unsupported input type: {type(models)}")

        output_path.write_text(text)

    def _generate_from_definitions(
        self, models: List[ModelDefinition], enums: List[EnumDefinition]
    ) -> str:
        """Build file text from ModelDefinition/EnumDefinition objects."""
        out = self._generate_header()

        # Enums become string-literal union types.
        for enum_def in enums:
            union = " | ".join(f'"{v}"' for _, v in enum_def.values)
            out.append(f"export type {enum_def.name} = {union};")
            out.append("")

        # One exported interface per model.
        for model_def in models:
            out.extend(self._generate_interface_from_definition(model_def))
            out.append("")

        return "\n".join(out)

    def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
        """Build file text from plain Python dataclasses (MPR style)."""
        out = self._generate_header()

        # Emit every Enum referenced by any field exactly once, before
        # the interfaces that use it.
        emitted: set = set()
        for cls in dataclasses:
            for hint in get_type_hints(cls).values():
                inner, _ = unwrap_optional(hint)
                if not (isinstance(inner, type) and issubclass(inner, Enum)):
                    continue
                if inner.__name__ in emitted:
                    continue
                union = " | ".join(f'"{m.value}"' for m in inner)
                out.append(f"export type {inner.__name__} = {union};")
                emitted.add(inner.__name__)
                out.append("")

        for cls in dataclasses:
            out.extend(self._generate_interface_from_dataclass(cls))
            out.append("")

        return "\n".join(out)

    def _generate_header(self) -> List[str]:
        """Fixed comment banner written at the top of every generated file."""
        return [
            "/**",
            " * TypeScript Types - GENERATED FILE",
            " *",
            " * Do not edit directly. Regenerate using modelgen.",
            " */",
            "",
        ]

    def _generate_interface_from_definition(
        self, model_def: ModelDefinition
    ) -> List[str]:
        """Render one `export interface` block from a ModelDefinition."""
        body = [
            f"  {field.name}: {self._resolve_type(field.type_hint, field.optional)};"
            for field in model_def.fields
        ]
        return [f"export interface {model_def.name} {{", *body, "}"]

    def _generate_interface_from_dataclass(self, cls: type) -> List[str]:
        """Render one `export interface` block from a dataclass (private names skipped)."""
        body = [
            f"  {name}: {self._resolve_type(hint, False)};"
            for name, hint in get_type_hints(cls).items()
            if not name.startswith("_")
        ]
        return [f"export interface {cls.__name__} {{", *body, "}"]

    def _resolve_type(self, type_hint: Any, optional: bool) -> str:
        """Map a Python annotation to a TypeScript type string."""
        inner, nullable = unwrap_optional(type_hint)
        optional = optional or nullable

        # Resolver lookup order: container origin, special type name,
        # the type itself, then the generic enum resolver.
        resolver = (
            TS_RESOLVERS.get(get_origin_name(inner))
            or TS_RESOLVERS.get(get_type_name(inner))
            or TS_RESOLVERS.get(inner)
        )
        if resolver is None and isinstance(inner, type) and issubclass(inner, Enum):
            resolver = TS_RESOLVERS["enum"]

        rendered = resolver(inner) if resolver else "string"
        return f"{rendered} | null" if optional else rendered
|
||||
72
soleprint/station/tools/modelgen/helpers.py
Normal file
72
soleprint/station/tools/modelgen/helpers.py
Normal file
@@ -0,0 +1,72 @@
|
||||
"""
|
||||
Type Helpers
|
||||
|
||||
Utilities for type introspection and resolution.
|
||||
Used by generators and loaders.
|
||||
"""
|
||||
|
||||
import dataclasses as dc
|
||||
from enum import Enum
|
||||
from typing import Any, Union, get_args, get_origin
|
||||
|
||||
|
||||
def unwrap_optional(type_hint: Any) -> tuple[Any, bool]:
    """Unwrap Optional[T] -> (T, True); return (hint, False) otherwise.

    Bug fix over the previous version: a Union that does NOT contain None
    (e.g. Union[int, str]) was reported as optional and collapsed to its
    first member; such unions are now returned unchanged with False.

    NOTE(review): PEP 604 unions (``X | None``) have origin
    ``types.UnionType``, not ``typing.Union``, and still fall through to
    the non-optional branch — confirm whether callers ever pass them.
    """
    if get_origin(type_hint) is Union:
        members = get_args(type_hint)
        non_none = [m for m in members if m is not type(None)]
        if len(non_none) != len(members):
            # Optional detected: hand back the first real member
            # (str as a safe fallback for the degenerate Optional[None]).
            return (non_none[0] if non_none else str, True)
    return (type_hint, False)
def get_origin_name(type_hint: Any) -> str | None:
|
||||
"""Get origin type name: 'dict', 'list', or None."""
|
||||
origin = get_origin(type_hint)
|
||||
if origin is dict:
|
||||
return "dict"
|
||||
if origin is list:
|
||||
return "list"
|
||||
return None
|
||||
|
||||
|
||||
def get_type_name(type_hint: Any) -> str | None:
|
||||
"""Get type name for special types like UUID, datetime."""
|
||||
if hasattr(type_hint, "__name__"):
|
||||
return type_hint.__name__
|
||||
return None
|
||||
|
||||
|
||||
def get_list_inner(type_hint: Any) -> str:
    """Name the element type of List[T] for scalar T; everything else maps to 'str'."""
    inner = get_args(type_hint)
    if inner:
        for scalar, label in ((str, "str"), (int, "int"), (float, "float"), (bool, "bool")):
            if inner[0] is scalar:
                return label
    return "str"
def get_field_default(field: dc.Field) -> Any:
    """Return the field's declared default, or ``dc.MISSING`` when none exists.

    ``field.default`` already *is* ``dc.MISSING`` in the no-default case,
    so it can be returned unconditionally.
    """
    return field.default
def format_opts(optional: bool, extra: list[str] | None = None) -> str:
|
||||
"""Format field options string for Django."""
|
||||
parts = []
|
||||
if optional:
|
||||
parts.append("null=True, blank=True")
|
||||
if extra:
|
||||
parts.extend(extra)
|
||||
return ", ".join(parts)
|
||||
|
||||
|
||||
def is_enum(type_hint: Any) -> bool:
    """True when the hint (after stripping any Optional wrapper) is an Enum subclass."""
    inner = unwrap_optional(type_hint)[0]
    if not isinstance(inner, type):
        return False
    return issubclass(inner, Enum)
def get_enum_values(enum_class: type) -> list[tuple[str, str]]:
    """List (member name, member value) pairs in definition order."""
    pairs: list[tuple[str, str]] = []
    for member in enum_class:
        pairs.append((member.name, member.value))
    return pairs
37
soleprint/station/tools/modelgen/loader/__init__.py
Normal file
37
soleprint/station/tools/modelgen/loader/__init__.py
Normal file
@@ -0,0 +1,37 @@
|
||||
"""
|
||||
Loader - Input source handlers for modelgen.
|
||||
|
||||
Supported loaders:
|
||||
- ConfigLoader: Load from soleprint config.json
|
||||
- SchemaLoader: Load from Python dataclasses in schema/ folder
|
||||
- Extractors: Extract from existing codebases (Django, SQLAlchemy, Prisma)
|
||||
"""
|
||||
|
||||
from .config import ConfigLoader, load_config
|
||||
from .extract import EXTRACTORS, BaseExtractor, DjangoExtractor
|
||||
from .schema import (
|
||||
EnumDefinition,
|
||||
FieldDefinition,
|
||||
GrpcServiceDefinition,
|
||||
ModelDefinition,
|
||||
SchemaLoader,
|
||||
load_schema,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
# Config loader
|
||||
"ConfigLoader",
|
||||
"load_config",
|
||||
# Schema loader
|
||||
"SchemaLoader",
|
||||
"load_schema",
|
||||
# Model definitions
|
||||
"ModelDefinition",
|
||||
"FieldDefinition",
|
||||
"EnumDefinition",
|
||||
"GrpcServiceDefinition",
|
||||
# Extractors
|
||||
"BaseExtractor",
|
||||
"DjangoExtractor",
|
||||
"EXTRACTORS",
|
||||
]
|
||||
@@ -1,7 +1,7 @@
|
||||
"""
|
||||
Configuration Loader
|
||||
|
||||
Loads and validates framework configuration files.
|
||||
Loads and validates framework configuration files (soleprint config.json style).
|
||||
"""
|
||||
|
||||
import json
|
||||
@@ -114,22 +114,3 @@ def load_config(config_path: str | Path) -> ConfigLoader:
|
||||
"""Load and validate configuration file"""
|
||||
loader = ConfigLoader(config_path)
|
||||
return loader.load()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# Test with pawprint config
|
||||
import sys
|
||||
|
||||
config_path = Path(__file__).parent.parent / "pawprint.config.json"
|
||||
|
||||
loader = load_config(config_path)
|
||||
|
||||
print(f"Framework: {loader.framework.name} v{loader.framework.version}")
|
||||
print(f"Tagline: {loader.framework.tagline}")
|
||||
print(f"\nSystems:")
|
||||
for sys in loader.systems:
|
||||
print(f" {sys.icon} {sys.title} ({sys.name}) - {sys.tagline}")
|
||||
|
||||
print(f"\nShared Components:")
|
||||
for key, comp in loader.components["shared"].items():
|
||||
print(f" {comp.name} - {comp.description}")
|
||||
20
soleprint/station/tools/modelgen/loader/extract/__init__.py
Normal file
20
soleprint/station/tools/modelgen/loader/extract/__init__.py
Normal file
@@ -0,0 +1,20 @@
|
||||
"""
|
||||
Extractors - Extract model definitions from existing codebases.
|
||||
|
||||
Supported frameworks:
|
||||
- Django: Extract from Django ORM models
|
||||
- SQLAlchemy: Extract from SQLAlchemy models (planned)
|
||||
- Prisma: Extract from Prisma schema (planned)
|
||||
"""
|
||||
|
||||
from typing import Dict, Type
|
||||
|
||||
from .base import BaseExtractor
|
||||
from .django import DjangoExtractor
|
||||
|
||||
# Registry of available extractors.
# Keys are the framework identifiers accepted by callers (e.g. "django");
# values are the extractor classes, instantiated with a source path.
EXTRACTORS: Dict[str, Type[BaseExtractor]] = {
    "django": DjangoExtractor,
}

__all__ = ["BaseExtractor", "DjangoExtractor", "EXTRACTORS"]
|
||||
38
soleprint/station/tools/modelgen/loader/extract/base.py
Normal file
38
soleprint/station/tools/modelgen/loader/extract/base.py
Normal file
@@ -0,0 +1,38 @@
|
||||
"""
|
||||
Base Extractor
|
||||
|
||||
Abstract base class for model extractors.
|
||||
"""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
|
||||
from ..schema import EnumDefinition, ModelDefinition
|
||||
|
||||
|
||||
class BaseExtractor(ABC):
    """Abstract base for codebase model extractors.

    Subclasses implement ``detect()`` (a cheap capability check) and
    ``extract()`` (the actual model/enum harvesting) for one framework.
    """

    def __init__(self, source_path: Path):
        # Normalize to Path so subclasses can use exists()/rglob() directly.
        self.source_path = Path(source_path)

    @abstractmethod
    def extract(self) -> tuple[List[ModelDefinition], List[EnumDefinition]]:
        """
        Extract model definitions from source codebase.

        Returns:
            Tuple of (models, enums)
        """
        pass

    @abstractmethod
    def detect(self) -> bool:
        """
        Detect if this extractor can handle the source path.

        Returns:
            True if this extractor can handle the source
        """
        pass
||||
237
soleprint/station/tools/modelgen/loader/extract/django.py
Normal file
237
soleprint/station/tools/modelgen/loader/extract/django.py
Normal file
@@ -0,0 +1,237 @@
|
||||
"""
|
||||
Django Extractor
|
||||
|
||||
Extracts model definitions from Django ORM models.
|
||||
"""
|
||||
|
||||
import ast
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Any, List, Optional
|
||||
|
||||
from ..schema import EnumDefinition, FieldDefinition, ModelDefinition
|
||||
from .base import BaseExtractor
|
||||
|
||||
# Django field type mappings to Python types.
# Values are either real Python types (str, int, float, bool) or string
# sentinels ("UUID", "datetime", "bigint", "dict", "FK", "M2M") that stand
# in for types not expressible as plain builtins — presumably special-cased
# by the downstream format generators; confirm against the generator code.
DJANGO_FIELD_TYPES = {
    "CharField": str,
    "TextField": str,
    "EmailField": str,
    "URLField": str,
    "SlugField": str,
    "UUIDField": "UUID",
    "IntegerField": int,
    "BigIntegerField": "bigint",
    "SmallIntegerField": int,
    "PositiveIntegerField": int,
    "FloatField": float,
    "DecimalField": float,  # NOTE(review): Decimal precision is collapsed to float
    "BooleanField": bool,
    "NullBooleanField": bool,
    "DateField": "datetime",
    "DateTimeField": "datetime",
    "TimeField": "datetime",
    "JSONField": "dict",
    "ForeignKey": "FK",        # to-one relation sentinel
    "OneToOneField": "FK",     # to-one relation sentinel
    "ManyToManyField": "M2M",  # to-many relation sentinel
}
||||
|
||||
|
||||
class DjangoExtractor(BaseExtractor):
    """Extracts ModelDefinitions and EnumDefinitions from Django source via AST.

    Pure static analysis: models.py files are parsed with :mod:`ast`, so the
    target project is never imported and Django need not be installed.
    """

    def detect(self) -> bool:
        """Heuristically decide whether ``source_path`` is a Django project.

        Checks, in order: a top-level manage.py, the canonical Django import
        in any models.py, and finally a top-level settings.py.
        """
        if (self.source_path / "manage.py").exists():
            return True

        for models_file in self.source_path.rglob("models.py"):
            # errors="ignore": detection must never crash on odd encodings
            # (previously used the locale default and could raise).
            content = models_file.read_text(encoding="utf-8", errors="ignore")
            if "from django.db import models" in content:
                return True

        return (self.source_path / "settings.py").exists()

    def extract(self) -> tuple[List[ModelDefinition], List[EnumDefinition]]:
        """Parse every models.py under ``source_path`` and collect models/enums."""
        models: List[ModelDefinition] = []
        enums: List[EnumDefinition] = []

        for models_file in self.source_path.rglob("models.py"):
            file_models, file_enums = self._extract_from_file(models_file)
            models.extend(file_models)
            enums.extend(file_enums)

        return models, enums

    def _extract_from_file(
        self, file_path: Path
    ) -> tuple[List[ModelDefinition], List[EnumDefinition]]:
        """Extract models and choices-enums from one models.py file."""
        models: List[ModelDefinition] = []
        enums: List[EnumDefinition] = []

        content = file_path.read_text(encoding="utf-8", errors="ignore")
        tree = ast.parse(content)

        # ast.walk also visits nested classes, so inner Model definitions are found.
        for node in ast.walk(tree):
            if not isinstance(node, ast.ClassDef):
                continue
            if self._is_django_model(node):
                model_def = self._parse_model_class(node)
                if model_def:
                    models.append(model_def)
            elif self._is_django_choices(node):
                enum_def = self._parse_choices_class(node)
                if enum_def:
                    enums.append(enum_def)

        return models, enums

    def _is_django_model(self, node: ast.ClassDef) -> bool:
        """True when the class inherits models.Model (or a known auth base)."""
        for base in node.bases:
            if isinstance(base, ast.Attribute) and base.attr == "Model":
                return True
            if isinstance(base, ast.Name) and base.id in (
                "Model",
                "AbstractUser",
                "AbstractBaseUser",
            ):
                return True
        return False

    def _is_django_choices(self, node: ast.ClassDef) -> bool:
        """True when the class inherits TextChoices or IntegerChoices."""
        for base in node.bases:
            if isinstance(base, ast.Attribute) and base.attr in (
                "TextChoices",
                "IntegerChoices",
            ):
                return True
            if isinstance(base, ast.Name) and base.id in (
                "TextChoices",
                "IntegerChoices",
            ):
                return True
        return False

    def _parse_model_class(self, node: ast.ClassDef) -> Optional[ModelDefinition]:
        """Build a ModelDefinition from a Django model class body."""
        fields: List[FieldDefinition] = []

        for item in node.body:
            if isinstance(item, ast.Assign):
                field_def = self._parse_field_assignment(item)
            elif isinstance(item, ast.AnnAssign):
                # Annotated assignment (Django 4.0+ style).
                field_def = self._parse_annotated_field(item)
            else:
                field_def = None
            if field_def:
                fields.append(field_def)

        return ModelDefinition(
            name=node.name,
            fields=fields,
            docstring=ast.get_docstring(node),
        )

    def _parse_field_assignment(self, node: ast.Assign) -> Optional[FieldDefinition]:
        """Parse ``name = models.CharField(...)``; returns None for non-fields."""
        if not node.targets or not isinstance(node.targets[0], ast.Name):
            return None

        field_name = node.targets[0].id
        # Skip private attributes and the inner Meta options class name.
        if field_name.startswith("_") or field_name == "Meta":
            return None

        if isinstance(node.value, ast.Call):
            return self._parse_field_call(field_name, node.value)
        return None

    def _parse_annotated_field(self, node: ast.AnnAssign) -> Optional[FieldDefinition]:
        """Parse ``name: SomeType = models.Field(...)`` annotated assignments."""
        if not isinstance(node.target, ast.Name):
            return None

        field_name = node.target.id
        if field_name.startswith("_"):
            return None

        if node.value is not None and isinstance(node.value, ast.Call):
            return self._parse_field_call(field_name, node.value)
        return None

    def _parse_field_call(
        self, field_name: str, call: ast.Call
    ) -> Optional[FieldDefinition]:
        """Turn a Django field constructor call into a FieldDefinition.

        Only literal ``null=`` and ``default=`` keyword arguments are
        recognized; unknown field classes degrade gracefully to ``str``.
        """
        if isinstance(call.func, ast.Attribute):
            field_type_name = call.func.attr
        elif isinstance(call.func, ast.Name):
            field_type_name = call.func.id
        else:
            return None

        python_type = DJANGO_FIELD_TYPES.get(field_type_name, str)

        optional = False
        default = None
        for keyword in call.keywords:
            if keyword.arg == "null" and isinstance(keyword.value, ast.Constant):
                optional = keyword.value.value is True
            elif keyword.arg == "default" and isinstance(keyword.value, ast.Constant):
                default = keyword.value.value

        return FieldDefinition(
            name=field_name,
            type_hint=python_type,
            # Fixed: the previous `default if default is not None else None`
            # was a no-op conditional.
            default=default,
            optional=optional,
        )

    def _parse_choices_class(self, node: ast.ClassDef) -> Optional[EnumDefinition]:
        """Build an EnumDefinition from a TextChoices/IntegerChoices body.

        Returns None when no uppercase members are found.
        """
        values: List[tuple[str, str]] = []

        for item in node.body:
            if not (
                isinstance(item, ast.Assign)
                and item.targets
                and isinstance(item.targets[0], ast.Name)
            ):
                continue
            member = item.targets[0].id
            if not member.isupper():  # enum members are conventionally uppercase
                continue

            value = member.lower()  # fallback when the value isn't a literal
            if isinstance(item.value, ast.Constant):
                value = str(item.value.value)
            elif isinstance(item.value, ast.Tuple) and item.value.elts:
                # TextChoices form: NAME = "value", "Human label"
                if isinstance(item.value.elts[0], ast.Constant):
                    value = str(item.value.elts[0].value)

            values.append((member, value))

        if not values:
            return None

        return EnumDefinition(name=node.name, values=values)
|
||||
169
soleprint/station/tools/modelgen/loader/schema.py
Normal file
169
soleprint/station/tools/modelgen/loader/schema.py
Normal file
@@ -0,0 +1,169 @@
|
||||
"""
|
||||
Schema Loader
|
||||
|
||||
Loads Python dataclasses from a schema/ folder.
|
||||
Expects the folder to have an __init__.py that exports:
|
||||
- DATACLASSES: List of dataclass types to generate
|
||||
- ENUMS: List of Enum types to include
|
||||
- GRPC_MESSAGES: (optional) List of gRPC message types
|
||||
- GRPC_SERVICE: (optional) gRPC service definition dict
|
||||
"""
|
||||
|
||||
import dataclasses as dc
|
||||
import importlib.util
|
||||
import sys
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional, Type, get_type_hints
|
||||
|
||||
|
||||
@dataclass
class FieldDefinition:
    """Represents a model field."""

    name: str                  # field identifier as written in the schema
    type_hint: Any             # original Python type hint (class or typing construct)
    default: Any = dc.MISSING  # dc.MISSING means "no default declared"
    optional: bool = False     # True when the hint was Optional[...] / Union[..., None]
@dataclass
class ModelDefinition:
    """Represents a model/dataclass."""

    name: str                        # class name used for generated output
    fields: List[FieldDefinition]    # parsed fields, declaration order preserved
    docstring: Optional[str] = None  # original class docstring, if any
@dataclass
class EnumDefinition:
    """Represents an enum."""

    name: str
    values: List[tuple[str, str]]  # (member name, member value) pairs, definition order
@dataclass
class GrpcServiceDefinition:
    """Represents a gRPC service."""

    package: str  # proto package name
    name: str     # service name
    # Raw method-descriptor dicts taken verbatim from the schema module's
    # GRPC_SERVICE["methods"] — exact key schema defined by the schema author.
    methods: List[Dict[str, Any]]
class SchemaLoader:
    """Collects model/enum/gRPC definitions exported by a schema package.

    The schema folder's ``__init__.py`` is imported dynamically and is
    expected to expose ``DATACLASSES`` and ``ENUMS``, plus optionally
    ``GRPC_MESSAGES`` and ``GRPC_SERVICE``.
    """

    def __init__(self, schema_path: Path):
        self.schema_path = Path(schema_path)
        self.models: List[ModelDefinition] = []
        self.enums: List[EnumDefinition] = []
        self.grpc_messages: List[ModelDefinition] = []
        self.grpc_service: Optional[GrpcServiceDefinition] = None

    def load(self) -> "SchemaLoader":
        """Import the schema package and populate all definition lists.

        Raises:
            FileNotFoundError: if the folder lacks an __init__.py.
        """
        init_file = self.schema_path / "__init__.py"
        if not init_file.exists():
            raise FileNotFoundError(f"Schema folder must have __init__.py: {init_file}")

        schema_mod = self._import_module(init_file)

        self.models.extend(
            self._parse_dataclass(cls)
            for cls in getattr(schema_mod, "DATACLASSES", [])
        )
        self.enums.extend(
            self._parse_enum(enum_cls)
            for enum_cls in getattr(schema_mod, "ENUMS", [])
        )
        self.grpc_messages.extend(
            self._parse_dataclass(cls)
            for cls in getattr(schema_mod, "GRPC_MESSAGES", [])
        )

        service_spec = getattr(schema_mod, "GRPC_SERVICE", None)
        if service_spec:
            self.grpc_service = GrpcServiceDefinition(
                package=service_spec.get("package", "service"),
                name=service_spec.get("name", "Service"),
                methods=service_spec.get("methods", []),
            )

        return self

    def _import_module(self, path: Path):
        """Import *path* as a module named 'schema', registering it in sys.modules."""
        spec = importlib.util.spec_from_file_location("schema", path)
        if spec is None or spec.loader is None:
            raise ImportError(f"Could not load module from {path}")

        module = importlib.util.module_from_spec(spec)
        sys.modules["schema"] = module
        spec.loader.exec_module(module)
        return module

    def _parse_dataclass(self, cls: Type) -> ModelDefinition:
        """Convert a dataclass into a ModelDefinition (underscore fields skipped)."""
        declared = {f.name: f for f in dc.fields(cls)}
        parsed: List[FieldDefinition] = []

        for attr, hint in get_type_hints(cls).items():
            if attr.startswith("_"):
                continue

            meta = declared.get(attr)
            default = dc.MISSING
            if meta is not None:
                if meta.default is not dc.MISSING:
                    default = meta.default
                elif meta.default_factory is not dc.MISSING:
                    # NOTE: the factory callable itself is stored, not its result.
                    default = meta.default_factory

            parsed.append(
                FieldDefinition(
                    name=attr,
                    type_hint=hint,
                    default=default,
                    optional=self._is_optional(hint),
                )
            )

        return ModelDefinition(
            name=cls.__name__,
            fields=parsed,
            docstring=cls.__doc__,
        )

    def _parse_enum(self, enum_cls: Type[Enum]) -> EnumDefinition:
        """Convert an Enum class into an EnumDefinition."""
        return EnumDefinition(
            name=enum_cls.__name__,
            values=[(member.name, member.value) for member in enum_cls],
        )

    def _is_optional(self, type_hint: Any) -> bool:
        """True when the hint is a typing.Union that includes None."""
        from typing import Union, get_args, get_origin

        if get_origin(type_hint) is Union:
            return type(None) in get_args(type_hint)
        return False
def load_schema(schema_path: str | Path) -> SchemaLoader:
    """Convenience wrapper: construct a SchemaLoader for *schema_path* and load it."""
    return SchemaLoader(schema_path).load()
@@ -1,314 +1,15 @@
|
||||
"""
|
||||
Model Generator
|
||||
|
||||
Generic model generation from configuration files.
|
||||
Supports multiple output formats and is extensible for bidirectional conversion.
|
||||
|
||||
Output formats:
|
||||
- pydantic: Pydantic BaseModel classes
|
||||
- django: Django ORM models (planned)
|
||||
- prisma: Prisma schema (planned)
|
||||
- sqlalchemy: SQLAlchemy models (planned)
|
||||
|
||||
Future: Extract models FROM existing codebases (reverse direction)
|
||||
Orchestrates model generation from various sources to various formats.
|
||||
Delegates to loaders for input and generators for output.
|
||||
"""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from pathlib import Path
|
||||
from typing import Dict, Type
|
||||
|
||||
from .config_loader import ConfigLoader
|
||||
|
||||
|
||||
class BaseModelWriter(ABC):
|
||||
"""Abstract base for model output writers."""
|
||||
|
||||
@abstractmethod
|
||||
def write(self, config: ConfigLoader, output_path: Path) -> None:
|
||||
"""Write models to the specified path."""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def file_extension(self) -> str:
|
||||
"""Return the file extension for this format."""
|
||||
pass
|
||||
|
||||
|
||||
class PydanticWriter(BaseModelWriter):
|
||||
"""Generates Pydantic model files."""
|
||||
|
||||
def file_extension(self) -> str:
|
||||
return ".py"
|
||||
|
||||
def write(self, config: ConfigLoader, output_path: Path) -> None:
|
||||
"""Write Pydantic models to output_path."""
|
||||
output_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
content = self._generate_content(config)
|
||||
output_path.write_text(content)
|
||||
|
||||
def _generate_content(self, config: ConfigLoader) -> str:
|
||||
"""Generate the Pydantic models file content."""
|
||||
|
||||
# Get component names from config
|
||||
config_comp = config.get_shared_component("config")
|
||||
data_comp = config.get_shared_component("data")
|
||||
|
||||
data_flow_sys = config.get_system("data_flow")
|
||||
doc_sys = config.get_system("documentation")
|
||||
exec_sys = config.get_system("execution")
|
||||
|
||||
connector_comp = config.get_component("data_flow", "connector")
|
||||
pulse_comp = config.get_component("data_flow", "composed")
|
||||
|
||||
pattern_comp = config.get_component("documentation", "pattern")
|
||||
doc_composed = config.get_component("documentation", "composed")
|
||||
|
||||
tool_comp = config.get_component("execution", "utility")
|
||||
monitor_comp = config.get_component("execution", "watcher")
|
||||
cabinet_comp = config.get_component("execution", "container")
|
||||
exec_composed = config.get_component("execution", "composed")
|
||||
|
||||
return f'''"""
|
||||
Pydantic models - Generated from {config.framework.name}.config.json
|
||||
|
||||
DO NOT EDIT MANUALLY - Regenerate from config
|
||||
"""
|
||||
|
||||
from enum import Enum
|
||||
from typing import List, Literal, Optional
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class Status(str, Enum):
|
||||
PENDING = "pending"
|
||||
PLANNED = "planned"
|
||||
BUILDING = "building"
|
||||
DEV = "dev"
|
||||
LIVE = "live"
|
||||
READY = "ready"
|
||||
|
||||
|
||||
class System(str, Enum):
|
||||
{data_flow_sys.name.upper()} = "{data_flow_sys.name}"
|
||||
{doc_sys.name.upper()} = "{doc_sys.name}"
|
||||
{exec_sys.name.upper()} = "{exec_sys.name}"
|
||||
|
||||
|
||||
class ToolType(str, Enum):
|
||||
APP = "app"
|
||||
CLI = "cli"
|
||||
|
||||
|
||||
# === Shared Components ===
|
||||
|
||||
|
||||
class {config_comp.title}(BaseModel):
|
||||
"""{config_comp.description}. Shared across {data_flow_sys.name}, {exec_sys.name}."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
status: Optional[Status] = None
|
||||
config_path: Optional[str] = None
|
||||
|
||||
|
||||
class {data_comp.title}(BaseModel):
|
||||
"""{data_comp.description}. Shared across all systems."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
status: Optional[Status] = None
|
||||
source_template: Optional[str] = None
|
||||
data_path: Optional[str] = None
|
||||
|
||||
|
||||
# === System-Specific Components ===
|
||||
|
||||
|
||||
class {connector_comp.title}(BaseModel):
|
||||
"""{connector_comp.description} ({data_flow_sys.name})."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
status: Optional[Status] = None
|
||||
system: Literal["{data_flow_sys.name}"] = "{data_flow_sys.name}"
|
||||
mock: Optional[bool] = None
|
||||
description: Optional[str] = None
|
||||
|
||||
|
||||
class {pattern_comp.title}(BaseModel):
|
||||
"""{pattern_comp.description} ({doc_sys.name})."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
status: Optional[Status] = None
|
||||
template_path: Optional[str] = None
|
||||
system: Literal["{doc_sys.name}"] = "{doc_sys.name}"
|
||||
|
||||
|
||||
class {tool_comp.title}(BaseModel):
|
||||
"""{tool_comp.description} ({exec_sys.name})."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
status: Optional[Status] = None
|
||||
system: Literal["{exec_sys.name}"] = "{exec_sys.name}"
|
||||
type: Optional[ToolType] = None
|
||||
description: Optional[str] = None
|
||||
path: Optional[str] = None
|
||||
url: Optional[str] = None
|
||||
cli: Optional[str] = None
|
||||
|
||||
|
||||
class {monitor_comp.title}(BaseModel):
|
||||
"""{monitor_comp.description} ({exec_sys.name})."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
status: Optional[Status] = None
|
||||
system: Literal["{exec_sys.name}"] = "{exec_sys.name}"
|
||||
|
||||
|
||||
class {cabinet_comp.title}(BaseModel):
|
||||
"""{cabinet_comp.description} ({exec_sys.name})."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
status: Optional[Status] = None
|
||||
tools: List[{tool_comp.title}] = Field(default_factory=list)
|
||||
system: Literal["{exec_sys.name}"] = "{exec_sys.name}"
|
||||
|
||||
|
||||
# === Composed Types ===
|
||||
|
||||
|
||||
class {pulse_comp.title}(BaseModel):
|
||||
"""{pulse_comp.description} ({data_flow_sys.name}). Formula: {pulse_comp.formula}."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
status: Optional[Status] = None
|
||||
{connector_comp.name}: Optional[{connector_comp.title}] = None
|
||||
{config_comp.name}: Optional[{config_comp.title}] = None
|
||||
{data_comp.name}: Optional[{data_comp.title}] = None
|
||||
system: Literal["{data_flow_sys.name}"] = "{data_flow_sys.name}"
|
||||
|
||||
|
||||
class {doc_composed.title}(BaseModel):
|
||||
"""{doc_composed.description} ({doc_sys.name}). Formula: {doc_composed.formula}."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
status: Optional[Status] = None
|
||||
template: Optional[{pattern_comp.title}] = None
|
||||
{data_comp.name}: Optional[{data_comp.title}] = None
|
||||
output_{data_comp.name}: Optional[{data_comp.title}] = None
|
||||
system: Literal["{doc_sys.name}"] = "{doc_sys.name}"
|
||||
|
||||
|
||||
class {exec_composed.title}(BaseModel):
|
||||
"""{exec_composed.description} ({exec_sys.name}). Formula: {exec_composed.formula}."""
|
||||
|
||||
name: str # Unique identifier
|
||||
slug: str # URL-friendly identifier
|
||||
title: str # Display title for UI
|
||||
status: Optional[Status] = None
|
||||
cabinet: Optional[{cabinet_comp.title}] = None
|
||||
{config_comp.name}: Optional[{config_comp.title}] = None
|
||||
{data_comp.plural}: List[{data_comp.title}] = Field(default_factory=list)
|
||||
system: Literal["{exec_sys.name}"] = "{exec_sys.name}"
|
||||
|
||||
|
||||
# === Collection wrappers for JSON files ===
|
||||
|
||||
|
||||
class {config_comp.title}Collection(BaseModel):
|
||||
items: List[{config_comp.title}] = Field(default_factory=list)
|
||||
|
||||
|
||||
class {data_comp.title}Collection(BaseModel):
|
||||
items: List[{data_comp.title}] = Field(default_factory=list)
|
||||
|
||||
|
||||
class {connector_comp.title}Collection(BaseModel):
|
||||
items: List[{connector_comp.title}] = Field(default_factory=list)
|
||||
|
||||
|
||||
class {pattern_comp.title}Collection(BaseModel):
|
||||
items: List[{pattern_comp.title}] = Field(default_factory=list)
|
||||
|
||||
|
||||
class {tool_comp.title}Collection(BaseModel):
|
||||
items: List[{tool_comp.title}] = Field(default_factory=list)
|
||||
|
||||
|
||||
class {monitor_comp.title}Collection(BaseModel):
|
||||
items: List[{monitor_comp.title}] = Field(default_factory=list)
|
||||
|
||||
|
||||
class {cabinet_comp.title}Collection(BaseModel):
|
||||
items: List[{cabinet_comp.title}] = Field(default_factory=list)
|
||||
|
||||
|
||||
class {pulse_comp.title}Collection(BaseModel):
|
||||
items: List[{pulse_comp.title}] = Field(default_factory=list)
|
||||
|
||||
|
||||
class {doc_composed.title}Collection(BaseModel):
|
||||
items: List[{doc_composed.title}] = Field(default_factory=list)
|
||||
|
||||
|
||||
class {exec_composed.title}Collection(BaseModel):
|
||||
items: List[{exec_composed.title}] = Field(default_factory=list)
|
||||
'''
|
||||
|
||||
|
||||
class DjangoWriter(BaseModelWriter):
|
||||
"""Generates Django model files (placeholder)."""
|
||||
|
||||
def file_extension(self) -> str:
|
||||
return ".py"
|
||||
|
||||
def write(self, config: ConfigLoader, output_path: Path) -> None:
|
||||
raise NotImplementedError("Django model generation not yet implemented")
|
||||
|
||||
|
||||
class PrismaWriter(BaseModelWriter):
|
||||
"""Generates Prisma schema files (placeholder)."""
|
||||
|
||||
def file_extension(self) -> str:
|
||||
return ".prisma"
|
||||
|
||||
def write(self, config: ConfigLoader, output_path: Path) -> None:
|
||||
raise NotImplementedError("Prisma schema generation not yet implemented")
|
||||
|
||||
|
||||
class SQLAlchemyWriter(BaseModelWriter):
|
||||
"""Generates SQLAlchemy model files (placeholder)."""
|
||||
|
||||
def file_extension(self) -> str:
|
||||
return ".py"
|
||||
|
||||
def write(self, config: ConfigLoader, output_path: Path) -> None:
|
||||
raise NotImplementedError("SQLAlchemy model generation not yet implemented")
|
||||
|
||||
|
||||
# Registry of available writers
|
||||
WRITERS: Dict[str, Type[BaseModelWriter]] = {
|
||||
"pydantic": PydanticWriter,
|
||||
"django": DjangoWriter,
|
||||
"prisma": PrismaWriter,
|
||||
"sqlalchemy": SQLAlchemyWriter,
|
||||
}
|
||||
from .generator import GENERATORS, BaseGenerator
|
||||
from .loader import ConfigLoader
|
||||
|
||||
|
||||
class ModelGenerator:
|
||||
@@ -316,7 +17,7 @@ class ModelGenerator:
|
||||
Generates typed models from configuration.
|
||||
|
||||
This is the main entry point for model generation.
|
||||
Delegates to format-specific writers.
|
||||
Delegates to format-specific generators.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
@@ -331,19 +32,19 @@ class ModelGenerator:
|
||||
Args:
|
||||
config: Loaded configuration
|
||||
output_path: Exact path where to write (file or directory depending on format)
|
||||
output_format: Output format (pydantic, django, prisma, sqlalchemy)
|
||||
output_format: Output format (pydantic, django, prisma, typescript, protobuf)
|
||||
"""
|
||||
self.config = config
|
||||
self.output_path = Path(output_path)
|
||||
self.output_format = output_format
|
||||
|
||||
if output_format not in WRITERS:
|
||||
if output_format not in GENERATORS:
|
||||
raise ValueError(
|
||||
f"Unknown output format: {output_format}. "
|
||||
f"Available: {list(WRITERS.keys())}"
|
||||
f"Available: {list(GENERATORS.keys())}"
|
||||
)
|
||||
|
||||
self.writer = WRITERS[output_format]()
|
||||
self.generator = GENERATORS[output_format]()
|
||||
|
||||
def generate(self) -> Path:
|
||||
"""
|
||||
@@ -358,13 +59,19 @@ class ModelGenerator:
|
||||
output_file = self.output_path
|
||||
else:
|
||||
# User specified a directory, add default filename
|
||||
output_file = self.output_path / f"__init__{self.writer.file_extension()}"
|
||||
output_file = (
|
||||
self.output_path / f"__init__{self.generator.file_extension()}"
|
||||
)
|
||||
|
||||
self.writer.write(self.config, output_file)
|
||||
self.generator.generate(self.config, output_file)
|
||||
print(f"Generated {self.output_format} models: {output_file}")
|
||||
return output_file
|
||||
|
||||
@classmethod
|
||||
def available_formats(cls) -> list:
|
||||
"""Return list of available output formats."""
|
||||
return list(WRITERS.keys())
|
||||
return list(GENERATORS.keys())
|
||||
|
||||
|
||||
# Re-export for backwards compatibility
|
||||
WRITERS = GENERATORS
|
||||
|
||||
0
soleprint/station/tools/modelgen/schema/.gitkeep
Normal file
0
soleprint/station/tools/modelgen/schema/.gitkeep
Normal file
139
soleprint/station/tools/modelgen/types.py
Normal file
139
soleprint/station/tools/modelgen/types.py
Normal file
@@ -0,0 +1,139 @@
|
||||
"""
|
||||
Type Dispatch Tables
|
||||
|
||||
Type mappings for each output format.
|
||||
Used by generators to convert Python types to target framework types.
|
||||
"""
|
||||
|
||||
from typing import Any, Callable, get_args
|
||||
|
||||
# =============================================================================
|
||||
# Django Type Mappings
|
||||
# =============================================================================
|
||||
|
||||
# Keys are Python types or string tags used by the config schema; values
# are Django field-declaration templates.  The "{opts}", "{max_length}"
# and "{default}" placeholders are filled in by the Django generator.
DJANGO_TYPES: dict[Any, str] = {
    str: "models.CharField(max_length={max_length}{opts})",
    int: "models.IntegerField({opts})",
    float: "models.FloatField({opts})",
    bool: "models.BooleanField(default={default})",
    "UUID": "models.UUIDField({opts})",
    "datetime": "models.DateTimeField({opts})",
    # dict/list columns are stored as JSON
    "dict": "models.JSONField(default=dict, blank=True)",
    "list": "models.JSONField(default=list, blank=True)",
    "text": "models.TextField(blank=True, default='')",
    "bigint": "models.BigIntegerField({opts})",
    # NOTE(review): assumes a Status choices enum exists in the generated
    # module — confirm against the Django generator template.
    "enum": "models.CharField(max_length=20, choices=Status.choices{opts})",
}
|
||||
|
||||
# Field names with fixed, conventional definitions that override the
# generic DJANGO_TYPES mapping above.
DJANGO_SPECIAL: dict[str, str] = {
    "id": "models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)",
    "created_at": "models.DateTimeField(auto_now_add=True)",
    "updated_at": "models.DateTimeField(auto_now=True)",
}
|
||||
|
||||
# =============================================================================
|
||||
# Pydantic Type Resolvers
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def _get_list_inner(type_hint: Any) -> str:
|
||||
"""Get inner type of List[T] for Pydantic."""
|
||||
args = get_args(type_hint)
|
||||
if args and args[0] in (str, int, float, bool):
|
||||
return {str: "str", int: "int", float: "float", bool: "bool"}[args[0]]
|
||||
return "str"
|
||||
|
||||
|
||||
# Each resolver receives the original type hint and returns the Pydantic
# annotation as source text.  Most entries ignore their argument; the
# "list" entry inspects the hint's args and "enum" uses the enum class name.
PYDANTIC_RESOLVERS: dict[Any, Callable[[Any], str]] = {
    str: lambda _: "str",
    int: lambda _: "int",
    float: lambda _: "float",
    bool: lambda _: "bool",
    "UUID": lambda _: "UUID",
    "datetime": lambda _: "datetime",
    "dict": lambda _: "Dict[str, Any]",
    "list": lambda base: f"List[{_get_list_inner(base)}]",
    "enum": lambda base: base.__name__,
}
|
||||
|
||||
# =============================================================================
|
||||
# TypeScript Type Resolvers
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def _resolve_ts_list(base: Any) -> str:
|
||||
"""Resolve TypeScript list type."""
|
||||
args = get_args(base)
|
||||
if args:
|
||||
inner = args[0]
|
||||
if inner is str:
|
||||
return "string[]"
|
||||
elif inner is int or inner is float:
|
||||
return "number[]"
|
||||
elif inner is bool:
|
||||
return "boolean[]"
|
||||
return "string[]"
|
||||
|
||||
|
||||
# TypeScript annotation resolvers, keyed like PYDANTIC_RESOLVERS.
# UUID and datetime are serialised as strings on the wire.
TS_RESOLVERS: dict[Any, Callable[[Any], str]] = {
    str: lambda _: "string",
    int: lambda _: "number",
    float: lambda _: "number",
    bool: lambda _: "boolean",
    "UUID": lambda _: "string",
    "datetime": lambda _: "string",
    "dict": lambda _: "Record<string, unknown>",
    "list": _resolve_ts_list,
    "enum": lambda base: base.__name__,
}
|
||||
|
||||
# =============================================================================
|
||||
# Protobuf Type Resolvers
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def _resolve_proto_list(base: Any) -> str:
|
||||
"""Resolve Protobuf repeated type."""
|
||||
args = get_args(base)
|
||||
if args:
|
||||
inner = args[0]
|
||||
if inner is str:
|
||||
return "repeated string"
|
||||
elif inner is int:
|
||||
return "repeated int32"
|
||||
elif inner is float:
|
||||
return "repeated float"
|
||||
elif inner is bool:
|
||||
return "repeated bool"
|
||||
return "repeated string"
|
||||
|
||||
|
||||
# Protobuf field-type resolvers.  Deliberately smaller than the other
# resolver tables: only scalars and repeated fields are mapped here.
PROTO_RESOLVERS: dict[Any, Callable[[Any], str]] = {
    str: lambda _: "string",
    int: lambda _: "int32",
    float: lambda _: "float",
    bool: lambda _: "bool",
    "list": _resolve_proto_list,
}
|
||||
|
||||
# =============================================================================
|
||||
# Prisma Type Mappings
|
||||
# =============================================================================
|
||||
|
||||
# Prisma scalar-type mapping; values are Prisma schema type expressions
# (some carry attributes such as @default).
PRISMA_TYPES: dict[Any, str] = {
    str: "String",
    int: "Int",
    float: "Float",
    bool: "Boolean",
    "UUID": "String @default(uuid())",
    "datetime": "DateTime",
    # dict/list columns are stored as JSON
    "dict": "Json",
    "list": "Json",
    "bigint": "BigInt",
}
|
||||
|
||||
# Field names with fixed, conventional Prisma definitions that override
# the generic PRISMA_TYPES mapping above.
PRISMA_SPECIAL: dict[str, str] = {
    "id": "String @id @default(uuid())",
    "created_at": "DateTime @default(now())",
    "updated_at": "DateTime @updatedAt",
}
|
||||
7
soleprint/station/tools/modelgen/writer/__init__.py
Normal file
7
soleprint/station/tools/modelgen/writer/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
"""
|
||||
Writer - File writing utilities for modelgen.
|
||||
"""
|
||||
|
||||
from .file import write_file, write_multiple
|
||||
|
||||
__all__ = ["write_file", "write_multiple"]
|
||||
30
soleprint/station/tools/modelgen/writer/file.py
Normal file
30
soleprint/station/tools/modelgen/writer/file.py
Normal file
@@ -0,0 +1,30 @@
|
||||
"""
|
||||
File Writer
|
||||
|
||||
Utilities for writing generated files to disk.
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Dict
|
||||
|
||||
|
||||
def write_file(path: Path, content: str) -> None:
    """Write *content* to *path*, creating parent directories as needed."""
    target = Path(path)
    target.parent.mkdir(parents=True, exist_ok=True)
    target.write_text(content)
|
||||
|
||||
|
||||
def write_multiple(directory: Path, files: Dict[str, str]) -> None:
    """Write multiple generated files into a directory.

    Args:
        directory: Target directory (created if missing).
        files: Dict mapping filename to content.  Filenames may contain
            subdirectory components (e.g. ``"sub/mod.py"``); intermediate
            directories are created as needed.
    """
    directory = Path(directory)
    directory.mkdir(parents=True, exist_ok=True)

    for filename, content in files.items():
        file_path = directory / filename
        # Create per-file parent dirs so nested filenames work; the
        # original write_text raised FileNotFoundError for "sub/x.py"
        # when the subdirectory did not exist.
        file_path.parent.mkdir(parents=True, exist_ok=True)
        file_path.write_text(content)
|
||||
Reference in New Issue
Block a user