fixes and modelgen insert
This commit is contained in:
40
tools/modelgen/generator/__init__.py
Normal file
40
tools/modelgen/generator/__init__.py
Normal file
@@ -0,0 +1,40 @@
|
||||
"""
|
||||
Generator - Stack-specific code generators for modelgen.
|
||||
|
||||
Supported generators:
|
||||
- PydanticGenerator: Pydantic BaseModel classes
|
||||
- DjangoGenerator: Django ORM models
|
||||
- TypeScriptGenerator: TypeScript interfaces
|
||||
- ProtobufGenerator: Protocol Buffer definitions
|
||||
- PrismaGenerator: Prisma schema
|
||||
"""
|
||||
|
||||
from typing import Dict, Type
|
||||
|
||||
from .base import BaseGenerator
|
||||
from .django import DjangoGenerator
|
||||
from .prisma import PrismaGenerator
|
||||
from .protobuf import ProtobufGenerator
|
||||
from .pydantic import PydanticGenerator
|
||||
from .typescript import TypeScriptGenerator
|
||||
|
||||
# Registry of available generators
|
||||
GENERATORS: Dict[str, Type[BaseGenerator]] = {
|
||||
"pydantic": PydanticGenerator,
|
||||
"django": DjangoGenerator,
|
||||
"typescript": TypeScriptGenerator,
|
||||
"ts": TypeScriptGenerator, # Alias
|
||||
"protobuf": ProtobufGenerator,
|
||||
"proto": ProtobufGenerator, # Alias
|
||||
"prisma": PrismaGenerator,
|
||||
}
|
||||
|
||||
__all__ = [
|
||||
"BaseGenerator",
|
||||
"PydanticGenerator",
|
||||
"DjangoGenerator",
|
||||
"TypeScriptGenerator",
|
||||
"ProtobufGenerator",
|
||||
"PrismaGenerator",
|
||||
"GENERATORS",
|
||||
]
|
||||
23
tools/modelgen/generator/base.py
Normal file
23
tools/modelgen/generator/base.py
Normal file
@@ -0,0 +1,23 @@
|
||||
"""
|
||||
Base Generator
|
||||
|
||||
Abstract base class for all code generators.
|
||||
"""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
class BaseGenerator(ABC):
    """Abstract base for code generators.

    Concrete generators implement ``generate`` (write output for a set of
    models to a path) and ``file_extension`` (the suffix of the files they
    produce).
    """

    @abstractmethod
    def file_extension(self) -> str:
        """Return the file extension for this format (e.g. ``".py"``)."""
        ...

    @abstractmethod
    def generate(self, models: Any, output_path: Path) -> None:
        """Generate code for the given models to the specified path."""
        ...
|
||||
268
tools/modelgen/generator/django.py
Normal file
268
tools/modelgen/generator/django.py
Normal file
@@ -0,0 +1,268 @@
|
||||
"""
|
||||
Django Generator
|
||||
|
||||
Generates Django ORM models from model definitions.
|
||||
"""
|
||||
|
||||
import dataclasses as dc
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Any, List, get_type_hints
|
||||
|
||||
from ..helpers import format_opts, get_origin_name, get_type_name, unwrap_optional
|
||||
from ..loader.schema import EnumDefinition, ModelDefinition
|
||||
from ..types import DJANGO_SPECIAL, DJANGO_TYPES
|
||||
from .base import BaseGenerator
|
||||
|
||||
|
||||
class DjangoGenerator(BaseGenerator):
    """Generates Django ORM model files.

    Accepts either schema-loader objects, ``(models, enums)`` tuples, or a
    list of plain dataclasses, and emits a single Python module containing
    ``models.Model`` subclasses (plus ``TextChoices`` for enums).
    """

    def file_extension(self) -> str:
        """Generated output is a Python module."""
        return ".py"

    def generate(self, models, output_path: Path) -> None:
        """Generate Django models to output_path.

        Dispatch on the shape of ``models`` (duck-typed, in order):
        an object with a ``models`` attribute, a ``(models, enums)`` tuple,
        or a list of dataclasses.

        Raises:
            ValueError: if ``models`` matches none of the supported shapes.
        """
        output_path.parent.mkdir(parents=True, exist_ok=True)

        # Handle different input types
        if hasattr(models, "models"):
            # SchemaLoader or similar
            content = self._generate_from_definitions(
                models.models, getattr(models, "enums", [])
            )
        elif isinstance(models, tuple):
            # (models, enums) tuple
            content = self._generate_from_definitions(models[0], models[1])
        elif isinstance(models, list):
            # List of dataclasses (MPR style)
            content = self._generate_from_dataclasses(models)
        else:
            raise ValueError(f"Unsupported input type: {type(models)}")

        output_path.write_text(content)

    def _generate_from_definitions(
        self, models: List[ModelDefinition], enums: List[EnumDefinition]
    ) -> str:
        """Generate from ModelDefinition objects.

        Enums are emitted before models so the generated module's
        ``TextChoices`` classes exist before any field references them.
        """
        lines = self._generate_header()

        # Generate enums as TextChoices
        for enum_def in enums:
            lines.extend(self._generate_text_choices(enum_def))
            lines.append("")

        # Generate models
        for model_def in models:
            lines.extend(self._generate_model_from_definition(model_def))
            lines.extend(["", ""])

        return "\n".join(lines)

    def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
        """Generate from Python dataclasses (MPR style)."""
        lines = self._generate_header()

        for cls in dataclasses:
            lines.extend(self._generate_model_from_dataclass(cls))
            lines.extend(["", ""])

        return "\n".join(lines)

    def _generate_header(self) -> List[str]:
        """Generate file header: module docstring plus the imports every
        generated model module needs."""
        return [
            '"""',
            "Django ORM Models - GENERATED FILE",
            "",
            "Do not edit directly. Regenerate using modelgen.",
            '"""',
            "",
            "import uuid",
            "from django.db import models",
            "",
        ]

    def _generate_text_choices(self, enum_def: EnumDefinition) -> List[str]:
        """Generate Django TextChoices from EnumDefinition.

        Labels are derived from the member name (underscores -> spaces,
        title case).
        """
        lines = [
            f"class {enum_def.name}(models.TextChoices):",
        ]
        for name, value in enum_def.values:
            label = name.replace("_", " ").title()
            lines.append(f'    {name} = "{value}", "{label}"')
        return lines

    def _generate_model_from_definition(self, model_def: ModelDefinition) -> List[str]:
        """Generate Django model from ModelDefinition."""
        docstring = model_def.docstring or model_def.name
        lines = [
            f"class {model_def.name}(models.Model):",
            # chr(10) == "\n": keep only the first line of the source docstring
            f'    """{docstring.strip().split(chr(10))[0]}"""',
            "",
        ]

        for field in model_def.fields:
            django_field = self._resolve_field_type(
                field.name, field.type_hint, field.default, field.optional
            )
            lines.append(f"    {field.name} = {django_field}")

        # Add Meta and __str__; default ordering only when the model
        # actually has a created_at field.
        lines.extend(
            [
                "",
                "    class Meta:",
                '        ordering = ["-created_at"]'
                if any(f.name == "created_at" for f in model_def.fields)
                else "        pass",
                "",
                "    def __str__(self):",
            ]
        )

        # Determine __str__ return: prefer filename, then name, else pk.
        field_names = [f.name for f in model_def.fields]
        if "filename" in field_names:
            lines.append("        return self.filename")
        elif "name" in field_names:
            lines.append("        return self.name")
        else:
            lines.append("        return str(self.id)")

        return lines

    def _generate_model_from_dataclass(self, cls: type) -> List[str]:
        """Generate Django model from a dataclass (MPR style)."""
        docstring = cls.__doc__ or cls.__name__
        lines = [
            f"class {cls.__name__}(models.Model):",
            f'    """{docstring.strip().split(chr(10))[0]}"""',
            "",
        ]

        hints = get_type_hints(cls)
        fields = {f.name: f for f in dc.fields(cls)}

        # Check for enums and add Status inner class if needed.
        # NOTE(review): only the FIRST enum-typed hint is emitted (break),
        # and it is always named "Status" regardless of the enum's own
        # name — assumes one status-like enum per dataclass; confirm.
        for type_hint in hints.values():
            base, _ = unwrap_optional(type_hint)
            if isinstance(base, type) and issubclass(base, Enum):
                lines.append("    class Status(models.TextChoices):")
                for member in base:
                    label = member.name.replace("_", " ").title()
                    lines.append(f'        {member.name} = "{member.value}", "{label}"')
                lines.append("")
                break

        # Generate fields (skip private/underscore-prefixed hints)
        for name, type_hint in hints.items():
            if name.startswith("_"):
                continue
            field = fields.get(name)
            default = dc.MISSING
            if field and field.default is not dc.MISSING:
                default = field.default
            django_field = self._resolve_field_type(name, type_hint, default, False)
            lines.append(f"    {name} = {django_field}")

        # Add Meta and __str__
        lines.extend(
            [
                "",
                "    class Meta:",
                '        ordering = ["-created_at"]'
                if "created_at" in hints
                else "        pass",
                "",
                "    def __str__(self):",
            ]
        )

        if "filename" in hints:
            lines.append("        return self.filename")
        elif "name" in hints:
            lines.append("        return self.name")
        else:
            lines.append("        return str(self.id)")

        return lines

    def _resolve_field_type(
        self, name: str, type_hint: Any, default: Any, optional: bool
    ) -> str:
        """Resolve Python type to Django field.

        NOTE(review): DJANGO_TYPES is indexed both by string keys
        ("dict", "UUID", "enum", ...) and by type objects (str, int,
        float, bool) — presumably a project convention; verify against
        ..types before changing any branch.
        """
        # Special fields (e.g. primary keys / timestamps) take precedence
        # over type-based resolution.
        if name in DJANGO_SPECIAL:
            return DJANGO_SPECIAL[name]

        base, is_optional = unwrap_optional(type_hint)
        optional = optional or is_optional
        origin = get_origin_name(base)
        type_name = get_type_name(base)
        opts = format_opts(optional)

        # Container types
        if origin == "dict":
            return DJANGO_TYPES["dict"]
        if origin == "list":
            return DJANGO_TYPES["list"]

        # UUID / datetime
        if type_name == "UUID":
            return DJANGO_TYPES["UUID"].format(opts=opts)
        if type_name == "datetime":
            return DJANGO_TYPES["datetime"].format(opts=opts)

        # Enum — references the generated inner "Status" class for the
        # default, matching _generate_model_from_dataclass above.
        if isinstance(base, type) and issubclass(base, Enum):
            extra = []
            if optional:
                extra.append("null=True, blank=True")
            if default is not dc.MISSING and isinstance(default, Enum):
                extra.append(f"default=Status.{default.name}")
            return DJANGO_TYPES["enum"].format(
                opts=", " + ", ".join(extra) if extra else ""
            )

        # Text fields (based on name heuristics)
        if base is str and any(
            x in name for x in ("message", "comments", "description")
        ):
            return DJANGO_TYPES["text"]

        # BigInt fields (byte/rate counters can exceed 32-bit range)
        if base is int and name in ("file_size", "bitrate"):
            return DJANGO_TYPES["bigint"].format(opts=opts)

        # String with max_length chosen by name heuristic
        if base is str:
            max_length = 1000 if "path" in name else 500 if "filename" in name else 255
            return DJANGO_TYPES[str].format(
                max_length=max_length, opts=", " + opts if opts else ""
            )

        # Integer (callable defaults, i.e. factories, are not inlined)
        if base is int:
            extra = [opts] if opts else []
            if default is not dc.MISSING and not callable(default):
                extra.append(f"default={default}")
            return DJANGO_TYPES[int].format(opts=", ".join(extra))

        # Float
        if base is float:
            extra = [opts] if opts else []
            if default is not dc.MISSING and not callable(default):
                extra.append(f"default={default}")
            return DJANGO_TYPES[float].format(opts=", ".join(extra))

        # Boolean — always gets an explicit default (False when unspecified)
        if base is bool:
            default_val = default if default is not dc.MISSING else False
            return DJANGO_TYPES[bool].format(default=default_val)

        # Fallback to CharField for anything unrecognized
        return DJANGO_TYPES[str].format(
            max_length=255, opts=", " + opts if opts else ""
        )
|
||||
173
tools/modelgen/generator/prisma.py
Normal file
173
tools/modelgen/generator/prisma.py
Normal file
@@ -0,0 +1,173 @@
|
||||
"""
|
||||
Prisma Generator
|
||||
|
||||
Generates Prisma schema from model definitions.
|
||||
"""
|
||||
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Any, List, get_type_hints
|
||||
|
||||
from ..helpers import get_origin_name, get_type_name, unwrap_optional
|
||||
from ..loader.schema import EnumDefinition, ModelDefinition
|
||||
from ..types import PRISMA_SPECIAL, PRISMA_TYPES
|
||||
from .base import BaseGenerator
|
||||
|
||||
|
||||
class PrismaGenerator(BaseGenerator):
    """Generates Prisma schema files.

    Accepts schema-loader objects, ``(models, enums)`` tuples, or a list of
    dataclasses, and emits one ``.prisma`` file with datasource/generator
    blocks, enums, and models.
    """

    def file_extension(self) -> str:
        """Generated output is a Prisma schema file."""
        return ".prisma"

    def generate(self, models, output_path: Path) -> None:
        """Generate Prisma schema to output_path.

        Raises:
            ValueError: if ``models`` matches none of the supported shapes.
        """
        output_path.parent.mkdir(parents=True, exist_ok=True)

        # Handle different input types
        if hasattr(models, "models"):
            # SchemaLoader
            content = self._generate_from_definitions(
                models.models, getattr(models, "enums", [])
            )
        elif isinstance(models, tuple):
            # (models, enums) tuple
            content = self._generate_from_definitions(models[0], models[1])
        elif isinstance(models, list):
            # List of dataclasses (MPR style)
            content = self._generate_from_dataclasses(models)
        else:
            raise ValueError(f"Unsupported input type: {type(models)}")

        output_path.write_text(content)

    def _generate_from_definitions(
        self, models: List[ModelDefinition], enums: List[EnumDefinition]
    ) -> str:
        """Generate from ModelDefinition objects.

        Enums are emitted before models so model fields can reference them.
        """
        lines = self._generate_header()

        # Generate enums
        for enum_def in enums:
            lines.extend(self._generate_enum(enum_def))
            lines.append("")

        # Generate models
        for model_def in models:
            lines.extend(self._generate_model_from_definition(model_def))
            lines.append("")

        return "\n".join(lines)

    def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
        """Generate from Python dataclasses (MPR style)."""
        lines = self._generate_header()

        # Collect and generate enums first, de-duplicated by class name so
        # an enum shared by several dataclasses is emitted once.
        enums_generated = set()
        for cls in dataclasses:
            hints = get_type_hints(cls)
            for type_hint in hints.values():
                base, _ = unwrap_optional(type_hint)
                if isinstance(base, type) and issubclass(base, Enum):
                    if base.__name__ not in enums_generated:
                        lines.extend(self._generate_enum_from_python(base))
                        lines.append("")
                        enums_generated.add(base.__name__)

        # Generate models
        for cls in dataclasses:
            lines.extend(self._generate_model_from_dataclass(cls))
            lines.append("")

        return "\n".join(lines)

    def _generate_header(self) -> List[str]:
        """Generate file header with datasource and generator.

        Hard-codes the prisma-client-py generator and a PostgreSQL
        datasource whose URL comes from the DATABASE_URL env var.
        """
        return [
            "// Prisma Schema - GENERATED FILE",
            "//",
            "// Do not edit directly. Regenerate using modelgen.",
            "",
            "generator client {",
            '  provider = "prisma-client-py"',
            "}",
            "",
            "datasource db {",
            '  provider = "postgresql"',
            '  url      = env("DATABASE_URL")',
            "}",
            "",
        ]

    def _generate_enum(self, enum_def: EnumDefinition) -> List[str]:
        """Generate Prisma enum from EnumDefinition.

        Only member names are used; Prisma enums carry no values.
        """
        lines = [f"enum {enum_def.name} {{"]
        for name, _ in enum_def.values:
            lines.append(f"  {name}")
        lines.append("}")
        return lines

    def _generate_enum_from_python(self, enum_cls: type) -> List[str]:
        """Generate Prisma enum from Python Enum."""
        lines = [f"enum {enum_cls.__name__} {{"]
        for member in enum_cls:
            lines.append(f"  {member.name}")
        lines.append("}")
        return lines

    def _generate_model_from_definition(self, model_def: ModelDefinition) -> List[str]:
        """Generate Prisma model from ModelDefinition."""
        lines = [f"model {model_def.name} {{"]

        for field in model_def.fields:
            prisma_type = self._resolve_type(
                field.name, field.type_hint, field.optional
            )
            lines.append(f"  {field.name} {prisma_type}")

        lines.append("}")
        return lines

    def _generate_model_from_dataclass(self, cls: type) -> List[str]:
        """Generate Prisma model from a dataclass."""
        lines = [f"model {cls.__name__} {{"]

        for name, type_hint in get_type_hints(cls).items():
            # Skip private/underscore-prefixed attributes
            if name.startswith("_"):
                continue
            prisma_type = self._resolve_type(name, type_hint, False)
            lines.append(f"  {name} {prisma_type}")

        lines.append("}")
        return lines

    def _resolve_type(self, name: str, type_hint: Any, optional: bool) -> str:
        """Resolve Python type to Prisma type string.

        Optionality (either declared or derived from Optional[...]) is
        expressed with a trailing "?". NOTE(review): PRISMA_TYPES appears
        to be keyed by both strings ("dict", "UUID", ...) and type
        objects — confirm against ..types.
        """
        # Special fields take precedence over type-based resolution
        if name in PRISMA_SPECIAL:
            return PRISMA_SPECIAL[name]

        base, is_optional = unwrap_optional(type_hint)
        optional = optional or is_optional
        origin = get_origin_name(base)
        type_name = get_type_name(base)

        # Container types map to Json when not explicitly registered
        if origin == "dict" or origin == "list":
            result = PRISMA_TYPES.get(origin, "Json")
            return f"{result}?" if optional else result

        # UUID / datetime
        if type_name in ("UUID", "datetime"):
            result = PRISMA_TYPES.get(type_name, "String")
            return f"{result}?" if optional else result

        # Enum — referenced by its (already emitted) Prisma enum name
        if isinstance(base, type) and issubclass(base, Enum):
            result = base.__name__
            return f"{result}?" if optional else result

        # Basic types, defaulting to String for anything unrecognized
        result = PRISMA_TYPES.get(base, "String")
        return f"{result}?" if optional else result
|
||||
168
tools/modelgen/generator/protobuf.py
Normal file
168
tools/modelgen/generator/protobuf.py
Normal file
@@ -0,0 +1,168 @@
|
||||
"""
|
||||
Protobuf Generator
|
||||
|
||||
Generates Protocol Buffer definitions from model definitions.
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any, List, get_type_hints
|
||||
|
||||
from ..helpers import get_origin_name, unwrap_optional
|
||||
from ..loader.schema import GrpcServiceDefinition, ModelDefinition
|
||||
from ..types import PROTO_RESOLVERS
|
||||
from .base import BaseGenerator
|
||||
|
||||
|
||||
class ProtobufGenerator(BaseGenerator):
    """Generates Protocol Buffer definition files.

    Accepts a schema loader exposing ``grpc_messages``/``grpc_service``, a
    ``(messages, service_def)`` tuple, or a list of dataclasses, and emits a
    single proto3 file with an optional service block followed by messages.
    """

    def file_extension(self) -> str:
        """Generated output is a .proto file."""
        return ".proto"

    def generate(self, models, output_path: Path) -> None:
        """Generate protobuf definitions to output_path.

        Dispatch on the shape of ``models`` (duck-typed, in order).

        Raises:
            ValueError: if ``models`` matches none of the supported shapes.
        """
        output_path.parent.mkdir(parents=True, exist_ok=True)

        # Handle different input types
        if hasattr(models, "grpc_messages"):
            # SchemaLoader with gRPC definitions
            content = self._generate_from_loader(models)
        elif isinstance(models, tuple) and len(models) >= 2:
            # (messages, service_def) tuple.
            # Fix: was `len(models) >= 3`, which rejected the documented
            # two-element shape even though only models[0] and models[1]
            # are ever read, sending valid 2-tuples to the ValueError below.
            content = self._generate_from_definitions(models[0], models[1])
        elif isinstance(models, list):
            # List of dataclasses (MPR style)
            content = self._generate_from_dataclasses(models)
        else:
            raise ValueError(f"Unsupported input type: {type(models)}")

        output_path.write_text(content)

    def _generate_from_loader(self, loader) -> str:
        """Generate from SchemaLoader.

        ``loader.grpc_service`` may be None, in which case placeholder
        package/service names are used and no service block is emitted.
        """
        messages = loader.grpc_messages
        service = loader.grpc_service

        lines = self._generate_header(
            service.package if service else "service",
            service.name if service else "Service",
            service.methods if service else [],
        )

        for model_def in messages:
            lines.extend(self._generate_message_from_definition(model_def))
            lines.append("")

        return "\n".join(lines)

    def _generate_from_definitions(
        self, messages: List[ModelDefinition], service: GrpcServiceDefinition
    ) -> str:
        """Generate from ModelDefinition objects."""
        lines = self._generate_header(service.package, service.name, service.methods)

        for model_def in messages:
            lines.extend(self._generate_message_from_definition(model_def))
            lines.append("")

        return "\n".join(lines)

    def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
        """Generate from Python dataclasses (MPR style).

        No service information is available in this mode, so placeholder
        package/service names are used and no service block is emitted.
        """
        lines = self._generate_header("service", "Service", [])

        for cls in dataclasses:
            lines.extend(self._generate_message_from_dataclass(cls))
            lines.append("")

        return "\n".join(lines)

    def _generate_header(
        self, package: str, service_name: str, methods: List[dict]
    ) -> List[str]:
        """Generate file header with service definition.

        Each entry of ``methods`` is a dict with "name", "request",
        "response" and optionally "stream_response" keys; request/response
        may be classes (``__name__`` is used) or plain strings.
        """
        lines = [
            "// Protocol Buffer Definitions - GENERATED FILE",
            "//",
            "// Do not edit directly. Regenerate using modelgen.",
            "",
            'syntax = "proto3";',
            "",
            f"package {package};",
            "",
        ]

        # Only emit a service block when there are RPC methods to declare.
        if methods:
            lines.append(f"service {service_name} {{")
            for m in methods:
                req = (
                    m["request"].__name__
                    if hasattr(m["request"], "__name__")
                    else str(m["request"])
                )
                resp = (
                    m["response"].__name__
                    if hasattr(m["response"], "__name__")
                    else str(m["response"])
                )
                # Server-streaming RPCs get the "stream" keyword on the
                # response side only.
                returns = f"stream {resp}" if m.get("stream_response") else resp
                lines.append(f"  rpc {m['name']}({req}) returns ({returns});")
            lines.extend(["}", ""])

        return lines

    def _generate_message_from_definition(
        self, model_def: ModelDefinition
    ) -> List[str]:
        """Generate proto message from ModelDefinition.

        Field numbers are assigned sequentially from 1 in declaration
        order — regenerating after reordering fields changes wire tags.
        """
        lines = [f"message {model_def.name} {{"]

        if not model_def.fields:
            lines.append("  // Empty")
        else:
            for i, field in enumerate(model_def.fields, 1):
                proto_type, optional = self._resolve_type(field.type_hint)
                # proto3 "optional" is invalid on repeated fields
                prefix = (
                    "optional "
                    if optional and not proto_type.startswith("repeated")
                    else ""
                )
                lines.append(f"  {prefix}{proto_type} {field.name} = {i};")

        lines.append("}")
        return lines

    def _generate_message_from_dataclass(self, cls: type) -> List[str]:
        """Generate proto message from a dataclass.

        Same sequential field-numbering caveat as the definition-based
        variant above.
        """
        lines = [f"message {cls.__name__} {{"]

        hints = get_type_hints(cls)
        if not hints:
            lines.append("  // Empty")
        else:
            for i, (name, type_hint) in enumerate(hints.items(), 1):
                proto_type, optional = self._resolve_type(type_hint)
                prefix = (
                    "optional "
                    if optional and not proto_type.startswith("repeated")
                    else ""
                )
                lines.append(f"  {prefix}{proto_type} {name} = {i};")

        lines.append("}")
        return lines

    def _resolve_type(self, type_hint: Any) -> tuple[str, bool]:
        """Resolve Python type to proto type. Returns (type, is_optional).

        Repeated fields are never reported optional (proto3 forbids the
        combination); unknown types fall back to "string".
        """
        base, optional = unwrap_optional(type_hint)
        origin = get_origin_name(base)

        # Look up resolver by container origin first, then by the type itself
        resolver = PROTO_RESOLVERS.get(origin) or PROTO_RESOLVERS.get(base)

        if resolver:
            result = resolver(base)
            is_repeated = result.startswith("repeated")
            return result, optional and not is_repeated

        return "string", optional
|
||||
427
tools/modelgen/generator/pydantic.py
Normal file
427
tools/modelgen/generator/pydantic.py
Normal file
@@ -0,0 +1,427 @@
|
||||
"""
|
||||
Pydantic Generator
|
||||
|
||||
Generates Pydantic BaseModel classes from model definitions.
|
||||
"""
|
||||
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Any, List, get_type_hints
|
||||
|
||||
from ..helpers import get_origin_name, get_type_name, unwrap_optional
|
||||
from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
|
||||
from ..types import PYDANTIC_RESOLVERS
|
||||
from .base import BaseGenerator
|
||||
|
||||
|
||||
class PydanticGenerator(BaseGenerator):
|
||||
"""Generates Pydantic model files."""
|
||||
|
||||
def file_extension(self) -> str:
    """File extension used for generated Pydantic output modules."""
    extension = ".py"
    return extension
|
||||
|
||||
def generate(self, models, output_path: Path) -> None:
    """Generate Pydantic models to output_path.

    ``models`` may be a ConfigLoader (has ``get_shared_component``), a
    SchemaLoader-like object (has ``models``), a ``(models, enums)``
    tuple, or a list of dataclasses; dispatch is duck-typed in that
    order, so the checks below must not be reordered.

    Raises:
        ValueError: if ``models`` matches none of the supported shapes.
    """
    output_path.parent.mkdir(parents=True, exist_ok=True)

    # Detect input type and generate accordingly
    if hasattr(models, "get_shared_component"):
        # ConfigLoader (soleprint config)
        content = self._generate_from_config(models)
    elif hasattr(models, "models"):
        # SchemaLoader
        content = self._generate_from_definitions(
            models.models, getattr(models, "enums", [])
        )
    elif isinstance(models, tuple):
        # (models, enums) tuple from extractor
        content = self._generate_from_definitions(models[0], models[1])
    elif isinstance(models, list):
        # List of dataclasses (MPR style)
        content = self._generate_from_dataclasses(models)
    else:
        raise ValueError(f"Unsupported input type: {type(models)}")

    output_path.write_text(content)
|
||||
|
||||
def _generate_from_definitions(
    self, models: List[ModelDefinition], enums: List[EnumDefinition]
) -> str:
    """Render header, then enums, then models, each followed by a blank
    line, and join into one module source string (schema/extract mode)."""
    out = self._generate_header()

    # Enums first so generated models can reference them.
    for enum_def in enums:
        out += self._generate_enum(enum_def)
        out += [""]

    for model_def in models:
        out += self._generate_model_from_definition(model_def)
        out += [""]

    return "\n".join(out)
|
||||
|
||||
def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
    """Generate from Python dataclasses (MPR style).

    Enum classes found in the dataclasses' type hints are emitted first,
    de-duplicated by class name, then one model per dataclass.
    """
    lines = self._generate_header()

    # Collect and generate enums first
    enums_generated = set()
    for cls in dataclasses:
        hints = get_type_hints(cls)
        for type_hint in hints.values():
            base, _ = unwrap_optional(type_hint)
            if isinstance(base, type) and issubclass(base, Enum):
                # De-dupe by __name__ so a shared enum is emitted once.
                if base.__name__ not in enums_generated:
                    lines.extend(self._generate_enum_from_python(base))
                    lines.append("")
                    enums_generated.add(base.__name__)

    # Generate models
    for cls in dataclasses:
        lines.extend(self._generate_model_from_dataclass(cls))
        lines.append("")

    return "\n".join(lines)
|
||||
|
||||
def _generate_header(self) -> List[str]:
|
||||
"""Generate file header."""
|
||||
return [
|
||||
'"""',
|
||||
"Pydantic Models - GENERATED FILE",
|
||||
"",
|
||||
"Do not edit directly. Regenerate using modelgen.",
|
||||
'"""',
|
||||
"",
|
||||
"from datetime import datetime",
|
||||
"from enum import Enum",
|
||||
"from typing import Any, Dict, List, Optional",
|
||||
"from uuid import UUID",
|
||||
"",
|
||||
"from pydantic import BaseModel, Field",
|
||||
"",
|
||||
]
|
||||
|
||||
def _generate_enum(self, enum_def: EnumDefinition) -> List[str]:
|
||||
"""Generate Pydantic enum from EnumDefinition."""
|
||||
lines = [f"class {enum_def.name}(str, Enum):"]
|
||||
for name, value in enum_def.values:
|
||||
lines.append(f' {name} = "{value}"')
|
||||
return lines
|
||||
|
||||
def _generate_enum_from_python(self, enum_cls: type) -> List[str]:
|
||||
"""Generate Pydantic enum from Python Enum."""
|
||||
lines = [f"class {enum_cls.__name__}(str, Enum):"]
|
||||
for member in enum_cls:
|
||||
lines.append(f' {member.name} = "{member.value}"')
|
||||
return lines
|
||||
|
||||
def _generate_model_from_definition(self, model_def: ModelDefinition) -> List[str]:
    """Generate Pydantic model from ModelDefinition.

    A model with no fields is emitted with a ``pass`` body so the
    generated class is still syntactically valid.
    """
    docstring = model_def.docstring or model_def.name
    lines = [
        f"class {model_def.name}(BaseModel):",
        # chr(10) == "\n": keep only the first line of the source docstring
        f'    """{docstring.strip().split(chr(10))[0]}"""',
    ]

    if not model_def.fields:
        lines.append("    pass")
    else:
        for field in model_def.fields:
            py_type = self._resolve_type(field.type_hint, field.optional)
            default = self._format_default(field.default, field.optional)
            lines.append(f"    {field.name}: {py_type}{default}")

    return lines
|
||||
|
||||
def _generate_model_from_dataclass(self, cls: type) -> List[str]:
    """Generate Pydantic model from a dataclass.

    Underscore-prefixed attributes are skipped. Optionality passed to
    _format_default is derived from the resolved type string rather
    than the dataclass field itself.
    """
    import dataclasses as dc

    docstring = cls.__doc__ or cls.__name__
    lines = [
        f"class {cls.__name__}(BaseModel):",
        # chr(10) == "\n": keep only the first line of the docstring
        f'    """{docstring.strip().split(chr(10))[0]}"""',
    ]

    hints = get_type_hints(cls)
    fields = {f.name: f for f in dc.fields(cls)}

    for name, type_hint in hints.items():
        if name.startswith("_"):
            continue

        field = fields.get(name)
        default_val = dc.MISSING
        if field:
            if field.default is not dc.MISSING:
                default_val = field.default

        py_type = self._resolve_type(type_hint, False)
        # NOTE(review): substring check means any type whose rendered name
        # contains "Optional" is treated as optional — confirm that is the
        # intended contract with _resolve_type.
        default = self._format_default(default_val, "Optional" in py_type)
        lines.append(f"    {name}: {py_type}{default}")

    return lines
|
||||
|
||||
def _resolve_type(self, type_hint: Any, optional: bool) -> str:
    """Resolve Python type to Pydantic type string.

    Resolution order: container origin, then type name, then the type
    object itself, then the generic "enum" resolver for Enum subclasses.
    Unrecognized types fall back to "str"; Optional-ness (declared or
    derived from the hint) wraps the result in Optional[...].
    """
    base, is_optional = unwrap_optional(type_hint)
    optional = optional or is_optional
    origin = get_origin_name(base)
    type_name = get_type_name(base)

    # Look up resolver — first match in the chain wins.
    resolver = (
        PYDANTIC_RESOLVERS.get(origin)
        or PYDANTIC_RESOLVERS.get(type_name)
        or PYDANTIC_RESOLVERS.get(base)
        or (
            PYDANTIC_RESOLVERS["enum"]
            if isinstance(base, type) and issubclass(base, Enum)
            else None
        )
    )

    result = resolver(base) if resolver else "str"
    return f"Optional[{result}]" if optional else result
|
||||
|
||||
def _format_default(self, default: Any, optional: bool) -> str:
|
||||
"""Format default value for field."""
|
||||
import dataclasses as dc
|
||||
|
||||
if optional:
|
||||
return " = None"
|
||||
if default is dc.MISSING or default is None:
|
||||
return ""
|
||||
if isinstance(default, str):
|
||||
return f' = "{default}"'
|
||||
if isinstance(default, Enum):
|
||||
return f" = {default.__class__.__name__}.{default.name}"
|
||||
if callable(default):
|
||||
return " = Field(default_factory=list)" if "list" in str(default) else ""
|
||||
return f" = {default!r}"
|
||||
|
||||
    def _generate_from_config(self, config) -> str:
        """Generate the full Pydantic models module from a config object.

        Used in "soleprint config.json" mode: component/system names, titles,
        descriptions and formulas are read from *config* and interpolated into
        a single f-string template that is the complete generated module.

        Args:
            config: Loaded configuration (presumably a ConfigLoader -- TODO
                confirm). Must expose ``get_shared_component(name)``,
                ``get_system(name)``, ``get_component(system, name)`` and a
                ``framework.name`` attribute; each returned component exposes
                ``name``, ``title``, ``description`` (composed components also
                ``formula``, and the data component also ``plural``).

        Returns:
            The generated Python source text (module docstring, enums, shared
            and system-specific BaseModel classes, composed types, and
            per-type collection wrappers).
        """
        # Shared components reused across systems.
        config_comp = config.get_shared_component("config")
        data_comp = config.get_shared_component("data")

        # The three systems whose names feed the System enum and Literal tags.
        data_flow_sys = config.get_system("data_flow")
        doc_sys = config.get_system("documentation")
        exec_sys = config.get_system("execution")

        # data_flow system components.
        connector_comp = config.get_component("data_flow", "connector")
        pulse_comp = config.get_component("data_flow", "composed")

        # documentation system components.
        pattern_comp = config.get_component("documentation", "pattern")
        doc_composed = config.get_component("documentation", "composed")

        # execution system components.
        tool_comp = config.get_component("execution", "utility")
        monitor_comp = config.get_component("execution", "watcher")
        cabinet_comp = config.get_component("execution", "container")
        exec_composed = config.get_component("execution", "composed")

        # NOTE: everything below is ONE f-string template. Its text (including
        # the '#' comments) is the *generated* file's content -- do not edit it
        # as if it were this module's own code.
        return f'''"""
Pydantic models - Generated from {config.framework.name}.config.json

DO NOT EDIT MANUALLY - Regenerate from config
"""

from enum import Enum
from typing import List, Literal, Optional

from pydantic import BaseModel, Field


class Status(str, Enum):
    PENDING = "pending"
    PLANNED = "planned"
    BUILDING = "building"
    DEV = "dev"
    LIVE = "live"
    READY = "ready"


class System(str, Enum):
    {data_flow_sys.name.upper()} = "{data_flow_sys.name}"
    {doc_sys.name.upper()} = "{doc_sys.name}"
    {exec_sys.name.upper()} = "{exec_sys.name}"


class ToolType(str, Enum):
    APP = "app"
    CLI = "cli"


# === Shared Components ===


class {config_comp.title}(BaseModel):
    """{config_comp.description}. Shared across {data_flow_sys.name}, {exec_sys.name}."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    config_path: Optional[str] = None


class {data_comp.title}(BaseModel):
    """{data_comp.description}. Shared across all systems."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    source_template: Optional[str] = None
    data_path: Optional[str] = None


# === System-Specific Components ===


class {connector_comp.title}(BaseModel):
    """{connector_comp.description} ({data_flow_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    system: Literal["{data_flow_sys.name}"] = "{data_flow_sys.name}"
    mock: Optional[bool] = None
    description: Optional[str] = None


class {pattern_comp.title}(BaseModel):
    """{pattern_comp.description} ({doc_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    template_path: Optional[str] = None
    system: Literal["{doc_sys.name}"] = "{doc_sys.name}"


class {tool_comp.title}(BaseModel):
    """{tool_comp.description} ({exec_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    system: Literal["{exec_sys.name}"] = "{exec_sys.name}"
    type: Optional[ToolType] = None
    description: Optional[str] = None
    path: Optional[str] = None
    url: Optional[str] = None
    cli: Optional[str] = None


class {monitor_comp.title}(BaseModel):
    """{monitor_comp.description} ({exec_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    system: Literal["{exec_sys.name}"] = "{exec_sys.name}"


class {cabinet_comp.title}(BaseModel):
    """{cabinet_comp.description} ({exec_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    tools: List[{tool_comp.title}] = Field(default_factory=list)
    system: Literal["{exec_sys.name}"] = "{exec_sys.name}"


# === Composed Types ===


class {pulse_comp.title}(BaseModel):
    """{pulse_comp.description} ({data_flow_sys.name}). Formula: {pulse_comp.formula}."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    {connector_comp.name}: Optional[{connector_comp.title}] = None
    {config_comp.name}: Optional[{config_comp.title}] = None
    {data_comp.name}: Optional[{data_comp.title}] = None
    system: Literal["{data_flow_sys.name}"] = "{data_flow_sys.name}"


class {doc_composed.title}(BaseModel):
    """{doc_composed.description} ({doc_sys.name}). Formula: {doc_composed.formula}."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    template: Optional[{pattern_comp.title}] = None
    {data_comp.name}: Optional[{data_comp.title}] = None
    output_{data_comp.name}: Optional[{data_comp.title}] = None
    system: Literal["{doc_sys.name}"] = "{doc_sys.name}"


class {exec_composed.title}(BaseModel):
    """{exec_composed.description} ({exec_sys.name}). Formula: {exec_composed.formula}."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    cabinet: Optional[{cabinet_comp.title}] = None
    {config_comp.name}: Optional[{config_comp.title}] = None
    {data_comp.plural}: List[{data_comp.title}] = Field(default_factory=list)
    system: Literal["{exec_sys.name}"] = "{exec_sys.name}"


# === Collection wrappers for JSON files ===


class {config_comp.title}Collection(BaseModel):
    items: List[{config_comp.title}] = Field(default_factory=list)


class {data_comp.title}Collection(BaseModel):
    items: List[{data_comp.title}] = Field(default_factory=list)


class {connector_comp.title}Collection(BaseModel):
    items: List[{connector_comp.title}] = Field(default_factory=list)


class {pattern_comp.title}Collection(BaseModel):
    items: List[{pattern_comp.title}] = Field(default_factory=list)


class {tool_comp.title}Collection(BaseModel):
    items: List[{tool_comp.title}] = Field(default_factory=list)


class {monitor_comp.title}Collection(BaseModel):
    items: List[{monitor_comp.title}] = Field(default_factory=list)


class {cabinet_comp.title}Collection(BaseModel):
    items: List[{cabinet_comp.title}] = Field(default_factory=list)


class {pulse_comp.title}Collection(BaseModel):
    items: List[{pulse_comp.title}] = Field(default_factory=list)


class {doc_composed.title}Collection(BaseModel):
    items: List[{doc_composed.title}] = Field(default_factory=list)


class {exec_composed.title}Collection(BaseModel):
    items: List[{exec_composed.title}] = Field(default_factory=list)
'''
|
||||
--- new file: tools/modelgen/generator/typescript.py (144 lines) ---
|
||||
"""
|
||||
TypeScript Generator
|
||||
|
||||
Generates TypeScript interfaces from model definitions.
|
||||
"""
|
||||
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Any, List, get_type_hints
|
||||
|
||||
from ..helpers import get_origin_name, get_type_name, unwrap_optional
|
||||
from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
|
||||
from ..types import TS_RESOLVERS
|
||||
from .base import BaseGenerator
|
||||
|
||||
|
||||
class TypeScriptGenerator(BaseGenerator):
    """Generates TypeScript interface files.

    Accepts three input shapes in :meth:`generate`:

    * an object exposing a ``models`` attribute (SchemaLoader style, with an
      optional ``enums`` attribute),
    * a ``(models, enums)`` tuple of definition lists,
    * a plain list of Python dataclasses (MPR style).
    """

    def file_extension(self) -> str:
        """Return the extension used for generated files."""
        return ".ts"

    def generate(self, models: Any, output_path: Path) -> None:
        """Generate TypeScript types and write them to *output_path*.

        Args:
            models: One of the supported input shapes (see class docstring).
            output_path: Destination file; parent directories are created.

        Raises:
            ValueError: If *models* is not a supported input shape.
        """
        output_path.parent.mkdir(parents=True, exist_ok=True)

        # Dispatch on input shape.
        if hasattr(models, "models"):
            # SchemaLoader-style object; enums are optional on it.
            content = self._generate_from_definitions(
                models.models, getattr(models, "enums", [])
            )
        elif isinstance(models, tuple):
            # (models, enums) tuple
            content = self._generate_from_definitions(models[0], models[1])
        elif isinstance(models, list):
            # List of dataclasses (MPR style)
            content = self._generate_from_dataclasses(models)
        else:
            raise ValueError(f"Unsupported input type: {type(models)}")

        # BUGFIX: pin UTF-8. Path.write_text defaults to the locale encoding
        # (e.g. cp1252 on Windows), which raises UnicodeEncodeError for
        # non-ASCII text in model names/descriptions.
        output_path.write_text(content, encoding="utf-8")

    def _generate_from_definitions(
        self, models: List[ModelDefinition], enums: List[EnumDefinition]
    ) -> str:
        """Render enums (as union types) then interfaces from definitions."""
        lines = self._generate_header()

        # Enums become TS string-literal union types, e.g.
        # export type Status = "a" | "b";
        for enum_def in enums:
            values = " | ".join(f'"{v}"' for _, v in enum_def.values)
            lines.append(f"export type {enum_def.name} = {values};")
            lines.append("")

        # One interface per model definition, blank line between them.
        for model_def in models:
            lines.extend(self._generate_interface_from_definition(model_def))
            lines.append("")

        return "\n".join(lines)

    def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
        """Render from Python dataclasses (MPR style).

        Enum types referenced by any field are emitted first, once each,
        as string-literal union types.
        """
        lines = self._generate_header()

        # Collect and emit each referenced Enum exactly once.
        enums_generated = set()
        for cls in dataclasses:
            hints = get_type_hints(cls)
            for type_hint in hints.values():
                base, _ = unwrap_optional(type_hint)
                if isinstance(base, type) and issubclass(base, Enum):
                    if base.__name__ not in enums_generated:
                        values = " | ".join(f'"{m.value}"' for m in base)
                        lines.append(f"export type {base.__name__} = {values};")
                        enums_generated.add(base.__name__)
        lines.append("")

        # One interface per dataclass, blank line between them.
        for cls in dataclasses:
            lines.extend(self._generate_interface_from_dataclass(cls))
            lines.append("")

        return "\n".join(lines)

    def _generate_header(self) -> List[str]:
        """Return the generated-file banner comment lines."""
        return [
            "/**",
            " * TypeScript Types - GENERATED FILE",
            " *",
            " * Do not edit directly. Regenerate using modelgen.",
            " */",
            "",
        ]

    def _generate_interface_from_definition(
        self, model_def: ModelDefinition
    ) -> List[str]:
        """Render one ``export interface`` block from a ModelDefinition."""
        lines = [f"export interface {model_def.name} {{"]

        for field in model_def.fields:
            ts_type = self._resolve_type(field.type_hint, field.optional)
            lines.append(f"  {field.name}: {ts_type};")

        lines.append("}")
        return lines

    def _generate_interface_from_dataclass(self, cls: type) -> List[str]:
        """Render one ``export interface`` block from a dataclass.

        Private attributes (leading underscore) are skipped.
        """
        lines = [f"export interface {cls.__name__} {{"]

        for name, type_hint in get_type_hints(cls).items():
            if name.startswith("_"):
                continue
            ts_type = self._resolve_type(type_hint, False)
            lines.append(f"  {name}: {ts_type};")

        lines.append("}")
        return lines

    def _resolve_type(self, type_hint: Any, optional: bool) -> str:
        """Resolve a Python type hint to a TypeScript type string.

        Optional[X] hints (or optional=True) render as ``X | null``.
        Unknown types fall back to ``string``.
        """
        base, is_optional = unwrap_optional(type_hint)
        optional = optional or is_optional
        origin = get_origin_name(base)
        type_name = get_type_name(base)

        # Resolver lookup order: generic origin (list/dict/...), then plain
        # type name, then the type object itself, then the Enum fallback.
        resolver = (
            TS_RESOLVERS.get(origin)
            or TS_RESOLVERS.get(type_name)
            or TS_RESOLVERS.get(base)
            or (
                TS_RESOLVERS["enum"]
                if isinstance(base, type) and issubclass(base, Enum)
                else None
            )
        )

        result = resolver(base) if resolver else "string"
        return f"{result} | null" if optional else result
|
||||
Reference in New Issue
Block a user